language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
quarkusio__quarkus
|
extensions/websockets-next/runtime/src/main/java/io/quarkus/websockets/next/HandshakeRequest.java
|
{
"start": 171,
"end": 3026
}
|
interface ____ {
/**
* The name is case insensitive.
*
* @param name
* @return the first header value for the given header name, or {@code null}
* @see HandshakeRequest#SEC_WEBSOCKET_KEY
* @see HandshakeRequest#SEC_WEBSOCKET_ACCEPT
* @see HandshakeRequest#SEC_WEBSOCKET_EXTENSIONS
* @see HandshakeRequest#SEC_WEBSOCKET_PROTOCOL
* @see HandshakeRequest#SEC_WEBSOCKET_VERSION
*/
String header(String name);
/**
* The name is case insensitive.
*
* @param name
* @return an immutable list of header values for the given header name, never {@code null}
* @see HandshakeRequest#SEC_WEBSOCKET_KEY
* @see HandshakeRequest#SEC_WEBSOCKET_ACCEPT
* @see HandshakeRequest#SEC_WEBSOCKET_EXTENSIONS
* @see HandshakeRequest#SEC_WEBSOCKET_PROTOCOL
* @see HandshakeRequest#SEC_WEBSOCKET_VERSION
*/
List<String> headers(String name);
/**
* Returned header names are lower case.
*
* @return an immutable map of header names to header values
*/
Map<String, List<String>> headers();
/**
*
* @return the scheme component of the server endpoint URL
*/
String scheme();
/**
*
* @return the host component of the server endpoint URL
*/
String host();
/**
*
* @return the port number of the server endpoint URL
*/
int port();
/**
*
* @return the path component of the server endpoint URL
*/
String path();
/**
* @return the query string of the server endpoint URL
*/
String query();
/**
*
* @return the local IP address and port for this connection, or {@code null} if not available
*/
String localAddress();
/**
* @return the remote IP address and port for this connection, or {@code null} if not available
*/
String remoteAddress();
/**
* See <a href="https://datatracker.ietf.org/doc/html/rfc6455#section-11.3.1">Sec-WebSocket-Key</a>.
*/
String SEC_WEBSOCKET_KEY = "Sec-WebSocket-Key";
/**
* See <a href="https://datatracker.ietf.org/doc/html/rfc6455#section-11.3.2">Sec-WebSocket-Extensions</a>.
*/
String SEC_WEBSOCKET_EXTENSIONS = "Sec-WebSocket-Extensions";
/**
* See <a href="https://datatracker.ietf.org/doc/html/rfc6455#section-11.3.3">Sec-WebSocket-Accept</a>.
*/
String SEC_WEBSOCKET_ACCEPT = "Sec-WebSocket-Accept";
/**
* See <a href="https://datatracker.ietf.org/doc/html/rfc6455#section-11.3.4">Sec-WebSocket-Protocol</a>.
*/
String SEC_WEBSOCKET_PROTOCOL = "Sec-WebSocket-Protocol";
/**
* See <a href="https://datatracker.ietf.org/doc/html/rfc6455#section-11.3.5">Sec-WebSocket-Version</a>.
*/
String SEC_WEBSOCKET_VERSION = "Sec-WebSocket-Version";
}
|
HandshakeRequest
|
java
|
spring-projects__spring-framework
|
spring-webflux/src/test/java/org/springframework/web/reactive/config/WebFluxConfigurationSupportTests.java
|
{
"start": 14105,
"end": 14500
}
|
class ____ extends WebFluxConfigurationSupport {
@Override
protected void configureViewResolvers(ViewResolverRegistry registry) {
registry.freeMarker();
registry.defaultViews(new HttpMessageWriterView(new JacksonJsonEncoder()));
}
@Bean
public FreeMarkerConfigurer freeMarkerConfig() {
return new FreeMarkerConfigurer();
}
}
@Configuration
static
|
CustomViewResolverConfig
|
java
|
google__error-prone
|
check_api/src/main/java/com/google/errorprone/scanner/Scanner.java
|
{
"start": 1844,
"end": 5068
}
|
class ____ extends TreePathScanner<Void, VisitorState> {
private SuppressionInfo currentSuppressions = SuppressionInfo.EMPTY;
/** Scan a tree from a position identified by a TreePath. */
@Override
public Void scan(TreePath path, VisitorState state) {
SuppressionInfo prevSuppressionInfo = updateSuppressions(path.getLeaf(), state);
try {
return super.scan(path, state);
} finally {
// Restore old suppression state.
currentSuppressions = prevSuppressionInfo;
}
}
/** Scan a single node. The current path is updated for the duration of the scan. */
@Override
public Void scan(Tree tree, VisitorState state) {
if (tree == null) {
return null;
}
SuppressionInfo prevSuppressionInfo = updateSuppressions(tree, state);
try {
return super.scan(tree, state);
} finally {
// Restore old suppression state.
currentSuppressions = prevSuppressionInfo;
}
}
/**
* Updates current suppression state with information for the given {@code tree}. Returns the
* previous suppression state so that it can be restored when going up the tree.
*/
private SuppressionInfo updateSuppressions(Tree tree, VisitorState state) {
SuppressionInfo prevSuppressionInfo = currentSuppressions;
if (tree instanceof CompilationUnitTree compilationUnitTree) {
currentSuppressions = currentSuppressions.forCompilationUnit(compilationUnitTree, state);
} else {
Symbol sym = ASTHelpers.getDeclaredSymbol(tree);
if (sym != null) {
currentSuppressions =
currentSuppressions.withExtendedSuppressions(
sym, state, getCustomSuppressionAnnotations(state));
}
}
return prevSuppressionInfo;
}
/**
* Returns if this checker should be suppressed on the current tree path.
*
* @param suppressible holds information about the suppressibility of a checker
* @param errorProneOptions Options object configuring whether or not to suppress non-errors in
*/
protected SuppressedState isSuppressed(
Suppressible suppressible, ErrorProneOptions errorProneOptions, VisitorState state) {
boolean suppressedInGeneratedCode =
errorProneOptions.disableWarningsInGeneratedCode()
&& severityMap().get(suppressible.canonicalName()) != SeverityLevel.ERROR;
return currentSuppressions.suppressedState(suppressible, suppressedInGeneratedCode, state);
}
/**
* Returns a set of all the custom suppression annotation types used by the {@code BugChecker}s in
* this{@code Scanner}.
*/
protected Set<? extends Name> getCustomSuppressionAnnotations(VisitorState state) {
return ImmutableSet.of();
}
protected void reportMatch(Description description, VisitorState state) {
checkNotNull(description, "Use Description.NO_MATCH to denote an absent finding.");
state.reportMatch(description);
}
/** Handles an exception thrown by an individual check. */
protected void handleError(Suppressible s, Throwable t) {}
/** Returns a mapping between the canonical names of checks and their {@link SeverityLevel}. */
public Map<String, SeverityLevel> severityMap() {
return Collections.emptyMap();
}
}
|
Scanner
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/compliance/ProdTest.java
|
{
"start": 904,
"end": 2354
}
|
class ____ {
@BeforeEach
public void setUp(EntityManagerFactoryScope scope) {
scope.inTransaction(
entityManager -> {
entityManager.persist( new Person( 1, "Luigi ", 42 ) );
}
);
}
@AfterEach
public void tearDown(EntityManagerFactoryScope scope) {
scope.getEntityManagerFactory().getSchemaManager().truncate();
}
@Test
@SkipForDialect(dialectClass = CockroachDialect.class, reason = "https://github.com/cockroachdb/cockroach/issues/82478")
public void testCriteriaMod(EntityManagerFactoryScope scope) {
scope.inEntityManager(
entityManager -> {
final CriteriaBuilder criteriaBuilder = entityManager.getCriteriaBuilder();
final CriteriaQuery<Number> query = criteriaBuilder.createQuery( Number.class );
final Root<Person> person = query.from( Person.class );
query.select( criteriaBuilder.prod( person.get( "age" ), 1F ) );
final Number id = entityManager.createQuery( query ).getSingleResult();
assertInstanceOf( Float.class, id );
assertEquals( 42F, id.floatValue() );
}
);
}
@Test
public void testQueryMod(EntityManagerFactoryScope scope) {
scope.inEntityManager(
entityManager -> {
final Object id = entityManager.createQuery( "select p.age * 1F from Person p" )
.getSingleResult();
assertInstanceOf( Float.class, id );
assertEquals( 42F, id );
}
);
}
@Entity(name = "Person")
@Table(name = "PERSON_TABLE")
public static
|
ProdTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FirstLongByTimestampGroupingAggregatorFunctionTests.java
|
{
"start": 892,
"end": 2889
}
|
class ____ extends GroupingAggregatorFunctionTestCase {
@Override
protected SourceOperator simpleInput(BlockFactory blockFactory, int size) {
TimestampGen tsgen = randomFrom(TimestampGen.values());
return new ListRowsBlockSourceOperator(
blockFactory,
List.of(ElementType.LONG, ElementType.LONG, ElementType.LONG),
IntStream.range(0, size).mapToObj(l -> List.of(randomLongBetween(0, 4), randomLong(), tsgen.gen())).toList()
);
}
@Override
protected int inputCount() {
return 2;
}
@Override
protected AggregatorFunctionSupplier aggregatorFunction() {
return new FirstLongByTimestampAggregatorFunctionSupplier();
}
@Override
protected String expectedDescriptionOfAggregator() {
return "first_long_by_timestamp";
}
@Override
protected void assertSimpleGroup(List<Page> input, Block result, int position, Long group) {
ExpectedWork work = new ExpectedWork(true);
for (Page page : input) {
matchingGroups(page, group).forEach(p -> {
LongBlock values = page.getBlock(1);
LongBlock timestamps = page.getBlock(2);
int tsStart = timestamps.getFirstValueIndex(p);
int tsEnd = tsStart + timestamps.getValueCount(p);
for (int tsOffset = tsStart; tsOffset < tsEnd; tsOffset++) {
long timestamp = timestamps.getLong(tsOffset);
int vStart = values.getFirstValueIndex(p);
int vEnd = vStart + values.getValueCount(p);
for (int vOffset = vStart; vOffset < vEnd; vOffset++) {
long value = values.getLong(vOffset);
work.add(timestamp, value);
}
}
});
}
work.check(BlockUtils.toJavaObject(result, position));
}
static
|
FirstLongByTimestampGroupingAggregatorFunctionTests
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/TemporalAssert_usingComparator_Test.java
|
{
"start": 937,
"end": 1419
}
|
class ____ extends AbstractTemporalAssertBaseTest {
@Mock
private Comparator<Temporal> comparator;
@Override
protected ConcreteTemporalAssert invoke_api_method() {
return assertions.usingComparator(comparator);
}
@Override
protected void verify_internal_effects() {
assertThat(getComparables(assertions).getComparator()).isSameAs(comparator);
assertThat(getObjects(assertions).getComparator()).isSameAs(comparator);
}
}
|
TemporalAssert_usingComparator_Test
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/schemaupdate/foreignkeys/JoinedInheritanceForeignKeyTest.java
|
{
"start": 4664,
"end": 4698
}
|
class ____ extends Role {
}
}
|
Person
|
java
|
spring-projects__spring-security
|
core/src/main/java/org/springframework/security/core/session/SessionRegistryImpl.java
|
{
"start": 4342,
"end": 4622
}
|
interface ____");
SessionInformation info = getSessionInformation(sessionId);
if (info != null) {
info.refreshLastRequest();
}
}
@Override
public void registerNewSession(String sessionId, Object principal) {
Assert.hasText(sessionId, "SessionId required as per
|
contract
|
java
|
apache__maven
|
impl/maven-impl/src/main/java/org/apache/maven/impl/model/profile/JdkVersionProfileActivator.java
|
{
"start": 1589,
"end": 6443
}
|
class ____ implements ProfileActivator {
private static final Pattern FILTER_1 = Pattern.compile("[^\\d._-]");
private static final Pattern FILTER_2 = Pattern.compile("[._-]");
private static final Pattern FILTER_3 = Pattern.compile("\\."); // used for split now
@Override
public boolean isActive(Profile profile, ProfileActivationContext context, ModelProblemCollector problems) {
Activation activation = profile.getActivation();
if (activation == null) {
return false;
}
String jdk = activation.getJdk();
if (jdk == null) {
return false;
}
String version = context.getSystemProperty("java.version");
if (version == null || version.isEmpty()) {
problems.add(
BuilderProblem.Severity.ERROR,
ModelProblem.Version.BASE,
"Failed to determine Java version for profile " + profile.getId(),
activation.getLocation("jdk"));
return false;
}
try {
return isJavaVersionCompatible(jdk, version);
} catch (NumberFormatException e) {
problems.add(
BuilderProblem.Severity.WARNING,
ModelProblem.Version.BASE,
"Failed to determine JDK activation for profile " + profile.getId() + " due invalid JDK version: '"
+ version + "'",
profile.getLocation("jdk"));
return false;
}
}
public static boolean isJavaVersionCompatible(String requiredJdkRange, String currentJavaVersion) {
if (requiredJdkRange.startsWith("!")) {
return !currentJavaVersion.startsWith(requiredJdkRange.substring(1));
} else if (isRange(requiredJdkRange)) {
return isInRange(currentJavaVersion, getRange(requiredJdkRange));
} else {
return currentJavaVersion.startsWith(requiredJdkRange);
}
}
@Override
public boolean presentInConfig(Profile profile, ProfileActivationContext context, ModelProblemCollector problems) {
Activation activation = profile.getActivation();
if (activation == null) {
return false;
}
String jdk = activation.getJdk();
return jdk != null;
}
private static boolean isInRange(String value, List<RangeValue> range) {
int leftRelation = getRelationOrder(value, range.get(0), true);
if (leftRelation == 0) {
return true;
}
if (leftRelation < 0) {
return false;
}
return getRelationOrder(value, range.get(1), false) <= 0;
}
private static int getRelationOrder(String value, RangeValue rangeValue, boolean isLeft) {
if (rangeValue.value.isEmpty()) {
return isLeft ? 1 : -1;
}
value = FILTER_1.matcher(value).replaceAll("");
List<String> valueTokens = new ArrayList<>(Arrays.asList(FILTER_2.split(value)));
List<String> rangeValueTokens = new ArrayList<>(Arrays.asList(FILTER_3.split(rangeValue.value)));
addZeroTokens(valueTokens, 3);
addZeroTokens(rangeValueTokens, 3);
for (int i = 0; i < 3; i++) {
int x = Integer.parseInt(valueTokens.get(i));
int y = Integer.parseInt(rangeValueTokens.get(i));
if (x < y) {
return -1;
} else if (x > y) {
return 1;
}
}
if (!rangeValue.closed) {
return isLeft ? -1 : 1;
}
return 0;
}
private static void addZeroTokens(List<String> tokens, int max) {
while (tokens.size() < max) {
tokens.add("0");
}
}
private static boolean isRange(String value) {
return value.startsWith("[") || value.startsWith("(");
}
private static List<RangeValue> getRange(String range) {
List<RangeValue> ranges = new ArrayList<>();
for (String token : range.split(",")) {
if (token.startsWith("[")) {
ranges.add(new RangeValue(token.replace("[", ""), true));
} else if (token.startsWith("(")) {
ranges.add(new RangeValue(token.replace("(", ""), false));
} else if (token.endsWith("]")) {
ranges.add(new RangeValue(token.replace("]", ""), true));
} else if (token.endsWith(")")) {
ranges.add(new RangeValue(token.replace(")", ""), false));
} else if (token.isEmpty()) {
ranges.add(new RangeValue("", false));
}
}
if (ranges.size() < 2) {
ranges.add(new RangeValue("99999999", false));
}
return ranges;
}
private static
|
JdkVersionProfileActivator
|
java
|
elastic__elasticsearch
|
test/test-clusters/src/main/java/org/elasticsearch/test/cluster/util/resource/Resource.java
|
{
"start": 757,
"end": 1549
}
|
interface ____ {
InputStream asStream();
static Resource fromString(String text) {
return new StringResource(text);
}
static Resource fromString(Supplier<String> supplier) {
return new StringResource(supplier);
}
static Resource fromClasspath(String path) {
return new ClasspathResource(path);
}
static Resource fromFile(Path file) {
return fromFile(() -> file);
}
static Resource fromFile(Supplier<Path> file) {
return new FileResource(file);
}
default void writeTo(Path path) {
try (InputStream is = asStream()) {
Files.copy(is, path, StandardCopyOption.REPLACE_EXISTING);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
}
|
Resource
|
java
|
google__dagger
|
javatests/dagger/functional/generictypes/GenericTest.java
|
{
"start": 1150,
"end": 6285
}
|
class ____ {
@Test public void testGenericComponentCreate() {
GenericComponent component = DaggerGenericComponent.create();
assertThat(component).isNotNull();
}
@Test public void testGenericSimpleReferences() {
GenericComponent component = DaggerGenericComponent.create();
assertThat(component.referencesGeneric().genericA.t).isNotNull();
}
@Test public void testGenericDoubleReferences() {
GenericComponent component = DaggerGenericComponent.create();
GenericDoubleReferences<A> doubleA = component.doubleGenericA();
assertThat(doubleA.a).isNotNull();
assertThat(doubleA.a2).isNotNull();
assertThat(doubleA.t).isNotNull();
assertThat(doubleA.t2).isNotNull();
GenericDoubleReferences<B> doubleB = component.doubleGenericB();
assertThat(doubleB.a).isNotNull();
assertThat(doubleB.a2).isNotNull();
assertThat(doubleB.t).isNotNull();
assertThat(doubleB.t2).isNotNull();
}
@Test public void complexGenerics() {
GenericComponent component = DaggerGenericComponent.create();
// validate these can be called w/o exceptions.
component.complexGenerics();
}
@Test public void noDepsGenerics() {
GenericComponent component = DaggerGenericComponent.create();
// validate these can be called w/o exceptions.
component.noDepsA();
component.noDepsB();
}
@Test public void boundedGenerics() {
BoundedGenericModule expected = new BoundedGenericModule();
BoundedGenericComponent component = DaggerBoundedGenericComponent.create();
BoundedGenerics<Integer, ArrayList<String>, LinkedList<CharSequence>, Integer, List<Integer>>
b1 = component.bounds1();
assertEquals(expected.provideInteger(), b1.t1);
assertEquals(expected.provideArrayListString(), b1.t2);
assertEquals(expected.provideLinkedListCharSeq(), b1.t3);
assertEquals(expected.provideInteger(), b1.t4);
assertEquals(expected.provideListOfInteger(), b1.t5);
BoundedGenerics<Double, LinkedList<String>, LinkedList<Comparable<String>>, Double, Set<Double>>
b2 = component.bounds2();
assertEquals(expected.provideDouble(), b2.t1);
assertEquals(expected.provideLinkedListString(), b2.t2);
assertEquals(expected.provideArrayListOfComparableString(), b2.t3);
assertEquals(expected.provideDouble(), b2.t4);
assertEquals(expected.provideSetOfDouble(), b2.t5);
}
@Test public void membersInjections() {
GenericComponent component = DaggerGenericComponent.create();
GenericChild<A> childA = new GenericChild<A>();
component.injectA(childA);
assertThat(childA.a).isNotNull();
assertThat(childA.b).isNotNull();
assertThat(childA.registeredA).isNotNull();
assertThat(childA.registeredB).isNotNull();
assertThat(childA.registeredT).isNotNull();
assertThat(childA.registeredX).isNotNull();
assertThat(childA.registeredY).isNotNull();
GenericChild<B> childB = new GenericChild<B>();
component.injectB(childB);
assertThat(childB.a).isNotNull();
assertThat(childB.b).isNotNull();
assertThat(childB.registeredA).isNotNull();
assertThat(childB.registeredB).isNotNull();
assertThat(childB.registeredT).isNotNull();
assertThat(childB.registeredX).isNotNull();
assertThat(childB.registeredY).isNotNull();
}
@Test public void packagePrivateTypeParameterDependencies() {
GenericComponent component = DaggerGenericComponent.create();
Exposed exposed = component.exposed();
assertThat(exposed.gpp.t).isNotNull();
assertThat(exposed.gpp2).isNotNull();
}
@SuppressWarnings("rawtypes")
@Test public void publicSubclassWithPackagePrivateTypeParameterOfSuperclass() {
GenericComponent component = DaggerGenericComponent.create();
PublicSubclass publicSubclass = component.publicSubclass();
assertThat(((Generic)publicSubclass).t).isNotNull();
}
@Test public void singletonScopesAppliesToEachResolvedType() {
SingletonGenericComponent component = DaggerSingletonGenericComponent.create();
ScopedGeneric<A> a = component.scopedGenericA();
assertThat(a).isSameInstanceAs(component.scopedGenericA());
assertThat(a.t).isNotNull();
ScopedGeneric<B> b = component.scopedGenericB();
assertThat(b).isSameInstanceAs(component.scopedGenericB());
assertThat(b.t).isNotNull();
assertThat(a).isNotSameInstanceAs(b);
}
@Test // See https://github.com/google/dagger/issues/671
public void scopedSimpleGenerics() {
SingletonGenericComponent component = DaggerSingletonGenericComponent.create();
ScopedSimpleGeneric<A> a = component.scopedSimpleGenericA();
assertThat(a).isSameInstanceAs(component.scopedSimpleGenericA());
ScopedSimpleGeneric<B> b = component.scopedSimpleGenericB();
assertThat(b).isSameInstanceAs(component.scopedSimpleGenericB());
assertThat(a).isNotSameInstanceAs(b);
}
@Test public void genericModules() {
GenericComponent component = DaggerGenericComponent.create();
assertThat(component.iterableInt()).containsExactly(1, 2).inOrder();
assertThat(component.iterableDouble()).containsExactly(3d, 4d).inOrder();
}
}
|
GenericTest
|
java
|
quarkusio__quarkus
|
extensions/grpc/deployment/src/test/java/io/quarkus/grpc/server/blocking/TransactionalAsBlockingTest.java
|
{
"start": 3917,
"end": 4654
}
|
class ____ extends MutinyBlocking2Grpc.Blocking2ImplBase {
@Override
public Uni<ThreadName> returnThread1(com.dam.Blocking.Empty request) {
String message = Thread.currentThread().getName();
return Uni.createFrom().item(
ThreadName.newBuilder().setName(message).build());
}
@Override
@NonBlocking
public Uni<ThreadName> returnThread2(com.dam.Blocking.Empty request) {
String message = Thread.currentThread().getName();
return Uni.createFrom().item(
ThreadName.newBuilder().setName(message).build());
}
}
@GrpcService
@NonBlocking
@Transactional
public static
|
Blocking2Service
|
java
|
apache__avro
|
lang/java/avro/src/test/java/org/apache/avro/specific/TestUnionRecord.java
|
{
"start": 6429,
"end": 9609
}
|
class ____ extends SpecificRecordBuilderBase<TestUnionRecord>
implements org.apache.avro.data.RecordBuilder<TestUnionRecord> {
private java.math.BigDecimal amount;
/** Creates a new Builder */
private Builder() {
super(SCHEMA$, MODEL$);
}
/**
* Creates a Builder by copying an existing Builder.
*
* @param other The existing Builder to copy.
*/
private Builder(Builder other) {
super(other);
if (isValidValue(fields()[0], other.amount)) {
this.amount = data().deepCopy(fields()[0].schema(), other.amount);
fieldSetFlags()[0] = other.fieldSetFlags()[0];
}
}
/**
* Creates a Builder by copying an existing TestUnionRecord instance
*
* @param other The existing instance to copy.
*/
private Builder(TestUnionRecord other) {
super(SCHEMA$, MODEL$);
if (isValidValue(fields()[0], other.amount)) {
this.amount = data().deepCopy(fields()[0].schema(), other.amount);
fieldSetFlags()[0] = true;
}
}
/**
* Gets the value of the 'amount' field.
*
* @return The value.
*/
public java.math.BigDecimal getAmount() {
return amount;
}
/**
* Sets the value of the 'amount' field.
*
* @param value The value of 'amount'.
* @return This builder.
*/
public Builder setAmount(java.math.BigDecimal value) {
validate(fields()[0], value);
this.amount = value;
fieldSetFlags()[0] = true;
return this;
}
/**
* Checks whether the 'amount' field has been set.
*
* @return True if the 'amount' field has been set, false otherwise.
*/
public boolean hasAmount() {
return fieldSetFlags()[0];
}
/**
* Clears the value of the 'amount' field.
*
* @return This builder.
*/
public Builder clearAmount() {
amount = null;
fieldSetFlags()[0] = false;
return this;
}
@Override
@SuppressWarnings("unchecked")
public TestUnionRecord build() {
try {
TestUnionRecord record = new TestUnionRecord();
record.amount = fieldSetFlags()[0] ? this.amount : (java.math.BigDecimal) defaultValue(fields()[0]);
return record;
} catch (org.apache.avro.AvroMissingFieldException e) {
throw e;
} catch (Exception e) {
throw new org.apache.avro.AvroRuntimeException(e);
}
}
}
@SuppressWarnings("unchecked")
private static final org.apache.avro.io.DatumWriter<TestUnionRecord> WRITER$ = (org.apache.avro.io.DatumWriter<TestUnionRecord>) MODEL$
.createDatumWriter(SCHEMA$);
@Override
public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException {
WRITER$.write(this, SpecificData.getEncoder(out));
}
@SuppressWarnings("unchecked")
private static final org.apache.avro.io.DatumReader<TestUnionRecord> READER$ = (org.apache.avro.io.DatumReader<TestUnionRecord>) MODEL$
.createDatumReader(SCHEMA$);
@Override
public void readExternal(java.io.ObjectInput in) throws java.io.IOException {
READER$.read(this, SpecificData.getDecoder(in));
}
}
|
Builder
|
java
|
eclipse-vertx__vert.x
|
vertx-core/src/test/java/io/vertx/tests/parsetools/FakeStream.java
|
{
"start": 700,
"end": 2360
}
|
class ____ implements ReadStream<Buffer> {
private long demand = Long.MAX_VALUE;
private Handler<Buffer> eventHandler;
private Handler<Void> endHandler;
private Handler<Throwable> exceptionHandler;
private volatile int pauseCount;
private volatile int resumeCount;
@Override
public ReadStream<Buffer> exceptionHandler(Handler<Throwable> handler) {
exceptionHandler = handler;
return this;
}
@Override
public ReadStream<Buffer> handler(Handler<Buffer> handler) {
eventHandler = handler;
return this;
}
@Override
public ReadStream<Buffer> fetch(long amount) {
Arguments.require(amount > 0, "Fetch amount must be > 0L");
demand += amount;
if (demand < 0L) {
demand = Long.MAX_VALUE;
}
return this;
}
@Override
public ReadStream<Buffer> pause() {
demand = 0L;
pauseCount++;
return this;
}
@Override
public ReadStream<Buffer> resume() {
resumeCount++;
return fetch(Long.MAX_VALUE);
}
@Override
public ReadStream<Buffer> endHandler(Handler<Void> handler) {
endHandler = handler;
return this;
}
boolean isPaused() {
return demand == 0L;
}
void handle(String s) {
handle(Buffer.buffer(s));
}
void handle(Buffer buff) {
if (demand == 0L) {
throw new IllegalStateException();
}
if (demand != Long.MAX_VALUE) {
demand--;
}
eventHandler.handle(buff);
}
void fail(Throwable err) {
exceptionHandler.handle(err);
}
void end() {
endHandler.handle(null);
}
public int pauseCount() {
return pauseCount;
}
public int resumeCount() {
return resumeCount;
}
}
|
FakeStream
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/introspect/TestNamingStrategyStd.java
|
{
"start": 2913,
"end": 3022
}
|
class ____ {
public String id;
public ObjectNode json;
}
static
|
ClassWithObjectNodeField
|
java
|
micronaut-projects__micronaut-core
|
inject/src/main/java/io/micronaut/inject/BeanDefinition.java
|
{
"start": 14866,
"end": 15647
}
|
class ____ for the given type.
* @param type The type
* @return The type parameters
*/
default @NonNull Class<?>[] getTypeParameters(@Nullable Class<?> type) {
if (type == null) {
return ReflectionUtils.EMPTY_CLASS_ARRAY;
} else {
final List<Argument<?>> typeArguments = getTypeArguments(type);
if (typeArguments.isEmpty()) {
return ReflectionUtils.EMPTY_CLASS_ARRAY;
}
Class<?>[] params = new Class<?>[typeArguments.size()];
int i = 0;
for (Argument<?> argument : typeArguments) {
params[i++] = argument.getType();
}
return params;
}
}
/**
*
* Returns the type parameters as a
|
array
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/SingLeTableWithEmbeddedIdTest.java
|
{
"start": 2765,
"end": 3052
}
|
class ____ {
@Id
Long id;
@Column
String field;
public Entity2() {
}
public Entity2(Long id, String field) {
this.id = id;
this.field = field;
}
public Long getId() {
return id;
}
public String getField() {
return field;
}
}
public static
|
Entity2
|
java
|
google__auto
|
value/src/test/java/com/google/auto/value/processor/AutoBuilderCompilationTest.java
|
{
"start": 13745,
"end": 14067
}
|
class ____");
}
@Test
public void autoBuilderMissingBuildMethod() {
JavaFileObject javaFileObject =
JavaFileObjects.forSourceLines(
"foo.bar.Baz",
"package foo.bar;",
"",
"import com.google.auto.value.AutoBuilder;",
"",
"public
|
Builder
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/GetSnapshotLifecycleAction.java
|
{
"start": 940,
"end": 1282
}
|
class ____ extends ActionType<GetSnapshotLifecycleAction.Response> {
public static final GetSnapshotLifecycleAction INSTANCE = new GetSnapshotLifecycleAction();
public static final String NAME = "cluster:admin/slm/get";
protected GetSnapshotLifecycleAction() {
super(NAME);
}
public static
|
GetSnapshotLifecycleAction
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/sql/mysql/issues/Issue5951.java
|
{
"start": 491,
"end": 1263
}
|
class ____ {
@Test
public void test_parse_show() {
for (DbType dbType : new DbType[]{DbType.mysql}) {
for (String sql : new String[]{
"show variables;",
"show global variables;",
"show global variables where name ='sync_binlog';",
"show global variables like '%sync_binlog%';",
}) {
SQLStatementParser parser = SQLParserUtils.createSQLStatementParser(sql, dbType);
List<SQLStatement> statementList = parser.parseStatementList();
System.out.println(statementList);
assertEquals(1, statementList.size());
SQLParseAssertUtil.assertParseSql(sql, dbType);
}
}
}
}
|
Issue5951
|
java
|
junit-team__junit5
|
junit-platform-commons/src/main/java/org/junit/platform/commons/support/AnnotationSupport.java
|
{
"start": 26515,
"end": 26643
}
|
interface ____ are annotated or <em>meta-annotated</em> with the specified
* {@code annotationType}.
*
* @param clazz the
|
that
|
java
|
apache__spark
|
resource-managers/yarn/src/main/java/org/apache/spark/deploy/yarn/ProxyUtils.java
|
{
"start": 1365,
"end": 1528
}
|
class ____ copied from Hadoop 3.4.0
// org.apache.hadoop.yarn.server.webproxy.ProxyUtils
//
// Modification:
// Migrate from javax.servlet to jakarta.servlet
public
|
is
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/MiloClientEndpointBuilderFactory.java
|
{
"start": 1476,
"end": 1620
}
|
interface ____ {
/**
* Builder for endpoint consumers for the OPC UA Client component.
*/
public
|
MiloClientEndpointBuilderFactory
|
java
|
square__retrofit
|
retrofit/java-test/src/test/java/retrofit2/RetrofitTest.java
|
{
"start": 18773,
"end": 19551
}
|
class ____ extends Converter.Factory {
@Override
public @Nullable Converter<?, String> stringConverter(
Type type, Annotation[] annotations, Retrofit retrofit) {
factoryCalled.set(true);
return null;
}
}
Retrofit retrofit =
new Retrofit.Builder()
.baseUrl(server.url("/"))
.addConverterFactory(new MyConverterFactory())
.build();
CallMethod service = retrofit.create(CallMethod.class);
Call<ResponseBody> call = service.queryString(null);
assertThat(call).isNotNull();
assertThat(factoryCalled.get()).isTrue();
}
@Test
public void stringConverterReturningNullResultsInDefault() {
final AtomicBoolean factoryCalled = new AtomicBoolean();
|
MyConverterFactory
|
java
|
apache__dubbo
|
dubbo-metadata/dubbo-metadata-api/src/main/java/org/apache/dubbo/metadata/ServiceNameMapping.java
|
{
"start": 1841,
"end": 4531
}
|
interface ____ extends Destroyable {

    /** Group name under which interface-to-application mappings are stored in the registry. */
    String DEFAULT_MAPPING_GROUP = "mapping";

    /**
     * Map the specified Dubbo service interface, group, version and protocol to current Dubbo service name
     *
     * @param url the service URL whose mapping should be published
     * @return {@code true} if the mapping was published successfully
     */
    boolean map(URL url);

    /**
     * @return {@code true} if a usable metadata center is configured
     */
    boolean hasValidMetadataCenter();

    /**
     * Get the default extension of {@link ServiceNameMapping}
     *
     * @return non-null {@link ServiceNameMapping}
     */
    static ServiceNameMapping getDefaultExtension(ScopeModel scopeModel) {
        return ScopeModelUtil.getApplicationModel(scopeModel).getDefaultExtension(ServiceNameMapping.class);
    }

    /**
     * Build the lookup key for the mapping of the given service URL.
     */
    static String buildMappingKey(URL url) {
        return buildGroup(url.getServiceInterface());
    }

    static String buildGroup(String serviceInterface) {
        // the issue : https://github.com/apache/dubbo/issues/4671
        // return DEFAULT_MAPPING_GROUP + SLASH + serviceInterface;
        return serviceInterface;
    }

    /**
     * Join the given service names into a single comma-separated string.
     *
     * @param serviceNames names to join; may be {@code null} or empty
     * @return the joined string, or {@code ""} when there is nothing to join
     */
    static String toStringKeys(Set<String> serviceNames) {
        if (CollectionUtils.isEmpty(serviceNames)) {
            return "";
        }
        // Idiomatic replacement for the previous manual StringBuilder append/deleteCharAt loop.
        // Also remains correct should COMMA_SEPARATOR ever be longer than one character
        // (the old deleteCharAt(length - 1) only removed a single trailing character).
        return String.join(COMMA_SEPARATOR, serviceNames);
    }

    /**
     * Parse a comma-separated list of application names.
     *
     * @param content raw string as stored in the mapping entry; may be blank
     * @return sorted, de-duplicated, trimmed names; empty set for blank input
     */
    static Set<String> getAppNames(String content) {
        if (StringUtils.isBlank(content)) {
            return emptySet();
        }
        // TreeSet gives a stable, sorted view of the de-duplicated names.
        return new TreeSet<>(of(content.split(COMMA_SEPARATOR))
                .map(String::trim)
                .filter(StringUtils::isNotEmpty)
                .collect(toSet()));
    }

    /**
     * Resolve the mapping directly declared on the consumer URL via the
     * {@code provided-by} parameter, bypassing the remote mapping center.
     *
     * @return the declared application names, or {@code null} when the parameter is absent
     */
    static Set<String> getMappingByUrl(URL consumerURL) {
        String providedBy = consumerURL.getParameter(RegistryConstants.PROVIDED_BY);
        if (StringUtils.isBlank(providedBy)) {
            return null;
        }
        return AbstractServiceNameMapping.parseServices(providedBy);
    }

    /**
     * Get the latest mapping result from remote center and register listener at the same time to get notified once mapping changes.
     *
     * @param listener listener that will be notified on mapping change
     * @return the latest mapping result from remote center
     */
    Set<String> getAndListen(URL registryURL, URL subscribedURL, MappingListener listener);

    MappingListener stopListen(URL subscribeURL, MappingListener listener);

    void putCachedMapping(String serviceKey, Set<String> apps);

    Set<String> getMapping(URL consumerURL);

    Set<String> getRemoteMapping(URL consumerURL);

    Set<String> removeCachedMapping(String serviceKey);
}
|
ServiceNameMapping
|
java
|
apache__kafka
|
clients/src/test/java/org/apache/kafka/clients/consumer/internals/OffsetFetcherTest.java
|
{
"start": 5000,
"end": 80337
}
|
class ____ {
private final String topicName = "test";
private final Uuid topicId = Uuid.randomUuid();
private final Map<String, Uuid> topicIds = new HashMap<>() {
{
put(topicName, topicId);
}
};
private final TopicPartition tp0 = new TopicPartition(topicName, 0);
private final TopicPartition tp1 = new TopicPartition(topicName, 1);
private final TopicPartition tp2 = new TopicPartition(topicName, 2);
private final TopicPartition tp3 = new TopicPartition(topicName, 3);
private final int validLeaderEpoch = 0;
private final MetadataResponse initialUpdateResponse =
RequestTestUtils.metadataUpdateWithIds(1, singletonMap(topicName, 4), topicIds);
private final int requestTimeoutMs = 30000;
private final long retryBackoffMs = 100;
private MockTime time = new MockTime(1);
private SubscriptionState subscriptions;
private ConsumerMetadata metadata;
private MockClient client;
private Metrics metrics;
private final ApiVersions apiVersions = new ApiVersions();
private ConsumerNetworkClient consumerClient;
private OffsetFetcher offsetFetcher;
@BeforeEach
// Intentionally empty: per-test state is constructed by the buildFetcher(...) overloads
// invoked at the start of each test — presumably so each test can pick its own
// isolation level / reset strategy. TODO confirm against buildFetcher (not visible here).
public void setup() {
}
// Assigns the given partitions to the subscription state and primes the client/metadata
// so that each partition has a known leader with a valid leader epoch.
private void assignFromUser(Set<TopicPartition> partitions) {
    subscriptions.assignFromUser(partitions);
    client.updateMetadata(initialUpdateResponse);
    // A dummy metadata update to ensure valid leader epoch.
    metadata.updateWithCurrentRequestVersion(RequestTestUtils.metadataUpdateWithIds("dummy", 1,
        Collections.emptyMap(), singletonMap(topicName, 4),
        tp -> validLeaderEpoch, topicIds), false, 0L);
}
@AfterEach
public void teardown() throws Exception {
    // Release the metrics registry if a test created one via buildFetcher().
    if (this.metrics != null) {
        this.metrics.close();
    }
}
// A partition that already has a valid position must not trigger any reset request.
@Test
public void testUpdateFetchPositionNoOpWithPositionSet() {
    buildFetcher();
    assignFromUser(singleton(tp0));
    subscriptions.seek(tp0, 5L);
    offsetFetcher.resetPositionsIfNeeded();
    // No ListOffsets request should have been sent.
    assertFalse(client.hasInFlightRequests());
    assertTrue(subscriptions.isFetchable(tp0));
    assertEquals(5, subscriptions.position(tp0).offset);
}
// Resetting with the default strategy issues an EARLIEST ListOffsets request and
// positions the partition at the returned offset.
@Test
public void testUpdateFetchPositionResetToDefaultOffset() {
    buildFetcher();
    assignFromUser(singleton(tp0));
    subscriptions.requestOffsetReset(tp0);
    client.prepareResponse(listOffsetRequestMatcher(ListOffsetsRequest.EARLIEST_TIMESTAMP,
        validLeaderEpoch), listOffsetResponse(Errors.NONE, 1L, 5L));
    offsetFetcher.resetPositionsIfNeeded();
    consumerClient.pollNoWakeup();
    assertFalse(subscriptions.isOffsetResetNeeded(tp0));
    assertTrue(subscriptions.isFetchable(tp0));
    assertEquals(5, subscriptions.position(tp0).offset);
}
// LATEST reset strategy issues a LATEST_TIMESTAMP ListOffsets request and applies the result.
@Test
public void testUpdateFetchPositionResetToLatestOffset() {
    buildFetcher();
    assignFromUser(singleton(tp0));
    subscriptions.requestOffsetReset(tp0, AutoOffsetResetStrategy.LATEST);
    client.updateMetadata(initialUpdateResponse);
    client.prepareResponse(listOffsetRequestMatcher(ListOffsetsRequest.LATEST_TIMESTAMP),
        listOffsetResponse(Errors.NONE, 1L, 5L));
    offsetFetcher.resetPositionsIfNeeded();
    consumerClient.pollNoWakeup();
    assertFalse(subscriptions.isOffsetResetNeeded(tp0));
    assertTrue(subscriptions.isFetchable(tp0));
    assertEquals(5, subscriptions.position(tp0).offset);
}
// A duration-based reset strategy resolves to a concrete timestamp; the fetcher must
// send a ListOffsets request for exactly that timestamp.
@Test
public void testUpdateFetchPositionResetToDurationOffset() {
    long timestamp = Instant.now().toEpochMilli();
    AutoOffsetResetStrategy durationStrategy = mock(AutoOffsetResetStrategy.class);
    when(durationStrategy.timestamp()).thenReturn(Optional.of(timestamp));
    buildFetcher(durationStrategy);
    assignFromUser(singleton(tp0));
    subscriptions.requestOffsetReset(tp0, durationStrategy);
    client.updateMetadata(initialUpdateResponse);
    client.prepareResponse(listOffsetRequestMatcher(timestamp),
        listOffsetResponse(Errors.NONE, 1L, 5L));
    offsetFetcher.resetPositionsIfNeeded();
    consumerClient.pollNoWakeup();
    assertFalse(subscriptions.isOffsetResetNeeded(tp0));
    assertTrue(subscriptions.isFetchable(tp0));
    assertEquals(5, subscriptions.position(tp0).offset);
}
/**
 * Make sure the client behaves appropriately when receiving an exception for unavailable offsets
 */
@Test
public void testFetchOffsetErrors() {
    buildFetcher();
    assignFromUser(singleton(tp0));
    subscriptions.requestOffsetReset(tp0, AutoOffsetResetStrategy.LATEST);
    // Fail with OFFSET_NOT_AVAILABLE
    client.prepareResponse(listOffsetRequestMatcher(ListOffsetsRequest.LATEST_TIMESTAMP,
        validLeaderEpoch), listOffsetResponse(Errors.OFFSET_NOT_AVAILABLE, 1L, 5L), false);
    offsetFetcher.resetPositionsIfNeeded();
    consumerClient.pollNoWakeup();
    // Retriable error: the reset stays pending and the partition is not fetchable yet.
    assertFalse(subscriptions.hasValidPosition(tp0));
    assertTrue(subscriptions.isOffsetResetNeeded(tp0));
    assertFalse(subscriptions.isFetchable(tp0));
    // Fail with LEADER_NOT_AVAILABLE
    time.sleep(retryBackoffMs);
    client.prepareResponse(listOffsetRequestMatcher(ListOffsetsRequest.LATEST_TIMESTAMP,
        validLeaderEpoch), listOffsetResponse(Errors.LEADER_NOT_AVAILABLE, 1L, 5L), false);
    offsetFetcher.resetPositionsIfNeeded();
    consumerClient.pollNoWakeup();
    assertFalse(subscriptions.hasValidPosition(tp0));
    assertTrue(subscriptions.isOffsetResetNeeded(tp0));
    assertFalse(subscriptions.isFetchable(tp0));
    // Back to normal
    time.sleep(retryBackoffMs);
    client.prepareResponse(listOffsetRequestMatcher(ListOffsetsRequest.LATEST_TIMESTAMP),
        listOffsetResponse(Errors.NONE, 1L, 5L), false);
    offsetFetcher.resetPositionsIfNeeded();
    consumerClient.pollNoWakeup();
    assertTrue(subscriptions.hasValidPosition(tp0));
    assertFalse(subscriptions.isOffsetResetNeeded(tp0));
    assertTrue(subscriptions.isFetchable(tp0));
    assertEquals(5L, subscriptions.position(tp0).offset);
}
// ListOffsets requests must carry the consumer's READ_UNCOMMITTED isolation level.
@Test
public void testListOffsetSendsReadUncommitted() {
    testListOffsetsSendsIsolationLevel(IsolationLevel.READ_UNCOMMITTED);
}
// ListOffsets requests must carry the consumer's READ_COMMITTED isolation level.
@Test
public void testListOffsetSendsReadCommitted() {
    testListOffsetsSendsIsolationLevel(IsolationLevel.READ_COMMITTED);
}
// Shared driver: builds a fetcher with the given isolation level and asserts the
// outgoing ListOffsets request propagates both the isolation level and the request timeout.
private void testListOffsetsSendsIsolationLevel(IsolationLevel isolationLevel) {
    buildFetcher(isolationLevel);
    assignFromUser(singleton(tp0));
    subscriptions.requestOffsetReset(tp0, AutoOffsetResetStrategy.LATEST);
    client.prepareResponse(body -> {
        ListOffsetsRequest request = (ListOffsetsRequest) body;
        assertEquals(requestTimeoutMs, request.timeoutMs());
        return request.isolationLevel() == isolationLevel;
    }, listOffsetResponse(Errors.NONE, 1L, 5L));
    offsetFetcher.resetPositionsIfNeeded();
    consumerClient.pollNoWakeup();
    assertFalse(subscriptions.isOffsetResetNeeded(tp0));
    assertTrue(subscriptions.isFetchable(tp0));
    assertEquals(5, subscriptions.position(tp0).offset);
}
// While a broker connection is backed off ("blacked out"), no ListOffsets request may be
// sent; once the backoff elapses the reset proceeds normally.
// NOTE(review): method name has a lowercase 'r' ("testreset...") — left unchanged because
// test names are part of the externally visible surface (reports, CI filters).
@Test
public void testresetPositionsSkipsBlackedOutConnections() {
    buildFetcher();
    assignFromUser(singleton(tp0));
    subscriptions.requestOffsetReset(tp0, AutoOffsetResetStrategy.EARLIEST);
    // Check that we skip sending the ListOffset request when the node is blacked out
    client.updateMetadata(initialUpdateResponse);
    Node node = initialUpdateResponse.brokers().iterator().next();
    client.backoff(node, 500);
    offsetFetcher.resetPositionsIfNeeded();
    assertEquals(0, consumerClient.pendingRequestCount());
    consumerClient.pollNoWakeup();
    assertTrue(subscriptions.isOffsetResetNeeded(tp0));
    assertEquals(AutoOffsetResetStrategy.EARLIEST, subscriptions.resetStrategy(tp0));
    time.sleep(500);
    client.prepareResponse(listOffsetRequestMatcher(ListOffsetsRequest.EARLIEST_TIMESTAMP),
        listOffsetResponse(Errors.NONE, 1L, 5L));
    offsetFetcher.resetPositionsIfNeeded();
    consumerClient.pollNoWakeup();
    assertFalse(subscriptions.isOffsetResetNeeded(tp0));
    assertTrue(subscriptions.isFetchable(tp0));
    assertEquals(5, subscriptions.position(tp0).offset);
}
// EARLIEST reset strategy issues an EARLIEST_TIMESTAMP ListOffsets request and applies the result.
@Test
public void testUpdateFetchPositionResetToEarliestOffset() {
    buildFetcher();
    assignFromUser(singleton(tp0));
    subscriptions.requestOffsetReset(tp0, AutoOffsetResetStrategy.EARLIEST);
    client.prepareResponse(listOffsetRequestMatcher(ListOffsetsRequest.EARLIEST_TIMESTAMP,
        validLeaderEpoch), listOffsetResponse(Errors.NONE, 1L, 5L));
    offsetFetcher.resetPositionsIfNeeded();
    consumerClient.pollNoWakeup();
    assertFalse(subscriptions.isOffsetResetNeeded(tp0));
    assertTrue(subscriptions.isFetchable(tp0));
    assertEquals(5, subscriptions.position(tp0).offset);
}
// A NOT_LEADER_OR_FOLLOWER error on reset must trigger a metadata refresh; the retry
// after the backoff then succeeds.
@Test
public void testresetPositionsMetadataRefresh() {
    buildFetcher();
    assignFromUser(singleton(tp0));
    subscriptions.requestOffsetReset(tp0, AutoOffsetResetStrategy.LATEST);
    // First fetch fails with stale metadata
    client.prepareResponse(listOffsetRequestMatcher(ListOffsetsRequest.LATEST_TIMESTAMP,
        validLeaderEpoch), listOffsetResponse(Errors.NOT_LEADER_OR_FOLLOWER, 1L, 5L), false);
    offsetFetcher.resetPositionsIfNeeded();
    consumerClient.pollNoWakeup();
    assertFalse(subscriptions.hasValidPosition(tp0));
    // Expect a metadata refresh
    client.prepareMetadataUpdate(initialUpdateResponse);
    consumerClient.pollNoWakeup();
    assertFalse(client.hasPendingMetadataUpdates());
    // Next fetch succeeds
    time.sleep(retryBackoffMs);
    client.prepareResponse(listOffsetRequestMatcher(ListOffsetsRequest.LATEST_TIMESTAMP),
        listOffsetResponse(Errors.NONE, 1L, 5L));
    offsetFetcher.resetPositionsIfNeeded();
    consumerClient.pollNoWakeup();
    assertFalse(subscriptions.isOffsetResetNeeded(tp0));
    assertTrue(subscriptions.isFetchable(tp0));
    assertEquals(5, subscriptions.position(tp0).offset);
}
// If local metadata has no leader epoch, a leader epoch in the ListOffsets response must be
// ignored and must not request a metadata update.
@Test
public void testListOffsetNoUpdateMissingEpoch() {
    buildFetcher();
    // Set up metadata with no leader epoch
    subscriptions.assignFromUser(singleton(tp0));
    MetadataResponse metadataWithNoLeaderEpochs = RequestTestUtils.metadataUpdateWithIds(
        "kafka-cluster", 1, Collections.emptyMap(), singletonMap(topicName, 4), tp -> null, topicIds);
    client.updateMetadata(metadataWithNoLeaderEpochs);
    // Return a ListOffsets response with leaderEpoch=1, we should ignore it
    subscriptions.requestOffsetReset(tp0, AutoOffsetResetStrategy.LATEST);
    client.prepareResponse(listOffsetRequestMatcher(ListOffsetsRequest.LATEST_TIMESTAMP),
        listOffsetResponse(tp0, Errors.NONE, 1L, 5L, 1));
    offsetFetcher.resetPositionsIfNeeded();
    consumerClient.pollNoWakeup();
    // Reset should be satisfied and no metadata update requested
    assertFalse(subscriptions.isOffsetResetNeeded(tp0));
    assertFalse(metadata.updateRequested());
    assertFalse(metadata.lastSeenLeaderEpoch(tp0).isPresent());
}
// A ListOffsets response carrying a newer leader epoch than local metadata must record the
// epoch and trigger a metadata update request.
@Test
public void testListOffsetUpdateEpoch() {
    buildFetcher();
    // Set up metadata with leaderEpoch=1
    subscriptions.assignFromUser(singleton(tp0));
    MetadataResponse metadataWithLeaderEpochs = RequestTestUtils.metadataUpdateWithIds(
        "kafka-cluster", 1, Collections.emptyMap(), singletonMap(topicName, 4), tp -> 1, topicIds);
    client.updateMetadata(metadataWithLeaderEpochs);
    // Reset offsets to trigger ListOffsets call
    subscriptions.requestOffsetReset(tp0, AutoOffsetResetStrategy.LATEST);
    // Now we see a ListOffsets with leaderEpoch=2 epoch, we trigger a metadata update
    client.prepareResponse(listOffsetRequestMatcher(ListOffsetsRequest.LATEST_TIMESTAMP, 1),
        listOffsetResponse(tp0, Errors.NONE, 1L, 5L, 2));
    offsetFetcher.resetPositionsIfNeeded();
    consumerClient.pollNoWakeup();
    assertFalse(subscriptions.isOffsetResetNeeded(tp0));
    assertTrue(metadata.updateRequested());
    assertOptional(metadata.lastSeenLeaderEpoch(tp0), epoch -> assertEquals(2, (long) epoch));
}
// A disconnect during reset triggers a metadata refresh; no retry happens until the
// retry backoff elapses, after which the reset completes.
@Test
public void testUpdateFetchPositionDisconnect() {
    buildFetcher();
    assignFromUser(singleton(tp0));
    subscriptions.requestOffsetReset(tp0, AutoOffsetResetStrategy.LATEST);
    // First request gets a disconnect
    client.prepareResponse(listOffsetRequestMatcher(ListOffsetsRequest.LATEST_TIMESTAMP,
        validLeaderEpoch), listOffsetResponse(Errors.NONE, 1L, 5L), true);
    offsetFetcher.resetPositionsIfNeeded();
    consumerClient.pollNoWakeup();
    assertFalse(subscriptions.hasValidPosition(tp0));
    // Expect a metadata refresh
    client.prepareMetadataUpdate(initialUpdateResponse);
    consumerClient.pollNoWakeup();
    assertFalse(client.hasPendingMetadataUpdates());
    // No retry until the backoff passes
    offsetFetcher.resetPositionsIfNeeded();
    consumerClient.pollNoWakeup();
    assertFalse(client.hasInFlightRequests());
    assertFalse(subscriptions.hasValidPosition(tp0));
    // Next one succeeds
    time.sleep(retryBackoffMs);
    client.prepareResponse(listOffsetRequestMatcher(ListOffsetsRequest.LATEST_TIMESTAMP),
        listOffsetResponse(Errors.NONE, 1L, 5L));
    offsetFetcher.resetPositionsIfNeeded();
    consumerClient.pollNoWakeup();
    assertFalse(subscriptions.isOffsetResetNeeded(tp0));
    assertTrue(subscriptions.isFetchable(tp0));
    assertEquals(5, subscriptions.position(tp0).offset);
}
// If the assignment changes while a reset request is in flight, the stale response
// must be discarded rather than applied to the no-longer-assigned partition.
@Test
public void testAssignmentChangeWithInFlightReset() {
    buildFetcher();
    assignFromUser(singleton(tp0));
    subscriptions.requestOffsetReset(tp0, AutoOffsetResetStrategy.LATEST);
    // Send the ListOffsets request to reset the position
    offsetFetcher.resetPositionsIfNeeded();
    consumerClient.pollNoWakeup();
    assertFalse(subscriptions.hasValidPosition(tp0));
    assertTrue(client.hasInFlightRequests());
    // Now we have an assignment change
    assignFromUser(singleton(tp1));
    // The response returns and is discarded
    client.respond(listOffsetResponse(Errors.NONE, 1L, 5L));
    consumerClient.pollNoWakeup();
    assertFalse(client.hasPendingResponses());
    assertFalse(client.hasInFlightRequests());
    assertFalse(subscriptions.isAssigned(tp0));
}
// A user seek() issued while a reset is in flight wins: the late reset response is discarded
// and the seek position is kept.
@Test
public void testSeekWithInFlightReset() {
    buildFetcher();
    assignFromUser(singleton(tp0));
    subscriptions.requestOffsetReset(tp0, AutoOffsetResetStrategy.LATEST);
    // Send the ListOffsets request to reset the position
    offsetFetcher.resetPositionsIfNeeded();
    consumerClient.pollNoWakeup();
    assertFalse(subscriptions.hasValidPosition(tp0));
    assertTrue(client.hasInFlightRequests());
    // Now we get a seek from the user
    subscriptions.seek(tp0, 237);
    // The response returns and is discarded
    client.respond(listOffsetResponse(Errors.NONE, 1L, 5L));
    consumerClient.pollNoWakeup();
    assertFalse(client.hasPendingResponses());
    assertFalse(client.hasInFlightRequests());
    assertEquals(237L, subscriptions.position(tp0).offset);
}
// Request matcher helper: asserts that the given request is a ListOffsetsRequest for
// exactly {tp} and that its timestamp matches the expected reset strategy.
// Always returns true (failures surface via the assertions).
private boolean listOffsetMatchesExpectedReset(
    TopicPartition tp,
    AutoOffsetResetStrategy strategy,
    AbstractRequest request
) {
    assertInstanceOf(ListOffsetsRequest.class, request);
    ListOffsetsRequest req = (ListOffsetsRequest) request;
    assertEquals(singleton(tp.topic()), req.data().topics().stream()
        .map(ListOffsetsTopic::name).collect(Collectors.toSet()));
    ListOffsetsTopic listTopic = req.data().topics().get(0);
    assertEquals(singleton(tp.partition()), listTopic.partitions().stream()
        .map(ListOffsetsPartition::partitionIndex).collect(Collectors.toSet()));
    ListOffsetsPartition listPartition = listTopic.partitions().get(0);
    if (strategy == AutoOffsetResetStrategy.EARLIEST) {
        assertEquals(ListOffsetsRequest.EARLIEST_TIMESTAMP, listPartition.timestamp());
    } else if (strategy == AutoOffsetResetStrategy.LATEST) {
        assertEquals(ListOffsetsRequest.LATEST_TIMESTAMP, listPartition.timestamp());
    }
    return true;
}
// If the reset strategy changes (EARLIEST -> LATEST) before the first reset response is
// handled, the stale EARLIEST result must be ignored and the LATEST reset retried.
@Test
public void testEarlierOffsetResetArrivesLate() {
    buildFetcher();
    assignFromUser(singleton(tp0));
    subscriptions.requestOffsetReset(tp0, AutoOffsetResetStrategy.EARLIEST);
    offsetFetcher.resetPositionsIfNeeded();
    client.prepareResponse(req -> {
        if (listOffsetMatchesExpectedReset(tp0, AutoOffsetResetStrategy.EARLIEST, req)) {
            // Before the response is handled, we get a request to reset to the latest offset
            subscriptions.requestOffsetReset(tp0, AutoOffsetResetStrategy.LATEST);
            return true;
        } else {
            return false;
        }
    }, listOffsetResponse(Errors.NONE, 1L, 0L));
    consumerClient.pollNoWakeup();
    // The list offset result should be ignored
    assertTrue(subscriptions.isOffsetResetNeeded(tp0));
    assertEquals(AutoOffsetResetStrategy.LATEST, subscriptions.resetStrategy(tp0));
    offsetFetcher.resetPositionsIfNeeded();
    client.prepareResponse(
        req -> listOffsetMatchesExpectedReset(tp0, AutoOffsetResetStrategy.LATEST, req),
        listOffsetResponse(Errors.NONE, 1L, 10L)
    );
    consumerClient.pollNoWakeup();
    assertFalse(subscriptions.isOffsetResetNeeded(tp0));
    assertEquals(10, subscriptions.position(tp0).offset);
}
// Changing the reset strategy while a reset request is in flight discards the in-flight
// response and leaves the new (EARLIEST) reset pending.
@Test
public void testChangeResetWithInFlightReset() {
    buildFetcher();
    assignFromUser(singleton(tp0));
    subscriptions.requestOffsetReset(tp0, AutoOffsetResetStrategy.LATEST);
    // Send the ListOffsets request to reset the position
    offsetFetcher.resetPositionsIfNeeded();
    consumerClient.pollNoWakeup();
    assertFalse(subscriptions.hasValidPosition(tp0));
    assertTrue(client.hasInFlightRequests());
    // Now we get a seek from the user
    subscriptions.requestOffsetReset(tp0, AutoOffsetResetStrategy.EARLIEST);
    // The response returns and is discarded
    client.respond(listOffsetResponse(Errors.NONE, 1L, 5L));
    consumerClient.pollNoWakeup();
    assertFalse(client.hasPendingResponses());
    assertFalse(client.hasInFlightRequests());
    assertTrue(subscriptions.isOffsetResetNeeded(tp0));
    assertEquals(AutoOffsetResetStrategy.EARLIEST, subscriptions.resetStrategy(tp0));
}
// Re-requesting the same (LATEST) reset while the request is in flight is idempotent:
// the in-flight response is still applied.
@Test
public void testIdempotentResetWithInFlightReset() {
    buildFetcher();
    assignFromUser(singleton(tp0));
    subscriptions.requestOffsetReset(tp0, AutoOffsetResetStrategy.LATEST);
    // Send the ListOffsets request to reset the position
    offsetFetcher.resetPositionsIfNeeded();
    consumerClient.pollNoWakeup();
    assertFalse(subscriptions.hasValidPosition(tp0));
    assertTrue(client.hasInFlightRequests());
    // Now we get a seek from the user
    subscriptions.requestOffsetReset(tp0, AutoOffsetResetStrategy.LATEST);
    client.respond(listOffsetResponse(Errors.NONE, 1L, 5L));
    consumerClient.pollNoWakeup();
    assertFalse(client.hasInFlightRequests());
    assertFalse(subscriptions.isOffsetResetNeeded(tp0));
    assertEquals(5L, subscriptions.position(tp0).offset);
}
// Verifies that a TOPIC_AUTHORIZATION_FAILED ListOffsets response is surfaced as a
// TopicAuthorizationException on the next reset attempt, that the error clears once
// raised (no retry until the backoff), and that the reset succeeds afterwards.
@Test
public void testResetOffsetsAuthorizationFailure() {
    buildFetcher();
    assignFromUser(singleton(tp0));
    subscriptions.requestOffsetReset(tp0, AutoOffsetResetStrategy.LATEST);
    // First request fails with an authorization error (the old comment claiming a
    // "disconnect" was a copy-paste mistake — the disconnect flag is false here).
    client.prepareResponse(listOffsetRequestMatcher(ListOffsetsRequest.LATEST_TIMESTAMP,
        validLeaderEpoch), listOffsetResponse(Errors.TOPIC_AUTHORIZATION_FAILED, -1, -1), false);
    offsetFetcher.resetPositionsIfNeeded();
    consumerClient.pollNoWakeup();
    assertFalse(subscriptions.hasValidPosition(tp0));
    // Use assertThrows instead of try/fail/catch, consistent with the rest of this class.
    TopicAuthorizationException thrown = assertThrows(TopicAuthorizationException.class,
        () -> offsetFetcher.resetPositionsIfNeeded(),
        "Expected authorization error to be raised");
    assertEquals(singleton(tp0.topic()), thrown.unauthorizedTopics());
    // The exception should clear after being raised, but no retry until the backoff
    offsetFetcher.resetPositionsIfNeeded();
    consumerClient.pollNoWakeup();
    assertFalse(client.hasInFlightRequests());
    assertFalse(subscriptions.hasValidPosition(tp0));
    // Next one succeeds
    time.sleep(retryBackoffMs);
    client.prepareResponse(listOffsetRequestMatcher(ListOffsetsRequest.LATEST_TIMESTAMP),
        listOffsetResponse(Errors.NONE, 1L, 5L));
    offsetFetcher.resetPositionsIfNeeded();
    consumerClient.pollNoWakeup();
    assertFalse(subscriptions.isOffsetResetNeeded(tp0));
    assertTrue(subscriptions.isFetchable(tp0));
    assertEquals(5, subscriptions.position(tp0).offset);
}
// A partition marked pending-revocation keeps its valid position but stops being fetchable;
// unsubscribing and re-assigning makes it fetchable again.
@Test
public void testFetchingPendingPartitionsBeforeAndAfterSubscriptionReset() {
    buildFetcher();
    assignFromUser(singleton(tp0));
    subscriptions.seek(tp0, 100);
    assertEquals(100, subscriptions.position(tp0).offset);
    assertTrue(subscriptions.isFetchable(tp0));
    subscriptions.markPendingRevocation(singleton(tp0));
    offsetFetcher.resetPositionsIfNeeded();
    // once a partition is marked pending, it should not be fetchable
    assertFalse(subscriptions.isOffsetResetNeeded(tp0));
    assertFalse(subscriptions.isFetchable(tp0));
    assertTrue(subscriptions.hasValidPosition(tp0));
    assertEquals(100, subscriptions.position(tp0).offset);
    subscriptions.seek(tp0, 100);
    assertEquals(100, subscriptions.position(tp0).offset);
    // reassignment should enable fetching of the same partition
    subscriptions.unsubscribe();
    assignFromUser(singleton(tp0));
    subscriptions.seek(tp0, 100);
    assertEquals(100, subscriptions.position(tp0).offset);
    assertTrue(subscriptions.isFetchable(tp0));
}
// A paused partition still has its reset performed; it gains a valid position but
// remains non-fetchable while paused.
@Test
public void testUpdateFetchPositionOfPausedPartitionsRequiringOffsetReset() {
    buildFetcher();
    assignFromUser(singleton(tp0));
    subscriptions.pause(tp0); // paused partition does not have a valid position
    subscriptions.requestOffsetReset(tp0, AutoOffsetResetStrategy.LATEST);
    client.prepareResponse(listOffsetRequestMatcher(ListOffsetsRequest.LATEST_TIMESTAMP,
        validLeaderEpoch), listOffsetResponse(Errors.NONE, 1L, 10L));
    offsetFetcher.resetPositionsIfNeeded();
    consumerClient.pollNoWakeup();
    assertFalse(subscriptions.isOffsetResetNeeded(tp0));
    assertFalse(subscriptions.isFetchable(tp0)); // because tp is paused
    assertTrue(subscriptions.hasValidPosition(tp0));
    assertEquals(10, subscriptions.position(tp0).offset);
}
// A paused partition with a pending reset stays pending and non-fetchable after the
// reset attempt (no response was prepared, so no position can be established).
@Test
public void testUpdateFetchPositionOfPausedPartitionsWithoutAValidPosition() {
    buildFetcher();
    assignFromUser(singleton(tp0));
    subscriptions.requestOffsetReset(tp0);
    subscriptions.pause(tp0); // paused partition does not have a valid position
    offsetFetcher.resetPositionsIfNeeded();
    consumerClient.pollNoWakeup();
    assertTrue(subscriptions.isOffsetResetNeeded(tp0));
    assertFalse(subscriptions.isFetchable(tp0)); // because tp is paused
    assertFalse(subscriptions.hasValidPosition(tp0));
}
// A paused partition that already has a valid position needs no reset and keeps its position.
@Test
public void testUpdateFetchPositionOfPausedPartitionsWithAValidPosition() {
    buildFetcher();
    assignFromUser(singleton(tp0));
    subscriptions.seek(tp0, 10);
    subscriptions.pause(tp0); // paused partition already has a valid position
    offsetFetcher.resetPositionsIfNeeded();
    assertFalse(subscriptions.isOffsetResetNeeded(tp0));
    assertFalse(subscriptions.isFetchable(tp0)); // because tp is paused
    assertTrue(subscriptions.hasValidPosition(tp0));
    assertEquals(10, subscriptions.position(tp0).offset);
}
// offsetsForTimes must raise TimeoutException when the timer expires with no response.
@Test
public void testGetOffsetsForTimesTimeout() {
    buildFetcher();
    assertThrows(TimeoutException.class, () -> offsetFetcher.offsetsForTimes(
        Collections.singletonMap(new TopicPartition(topicName, 2), 1000L), time.timer(100L)));
}
// Exercises offsetsForTimes across the error matrix: empty input, unknown offsets, and
// per-partition error combinations (see testGetOffsetsForTimesWithError for semantics).
@Test
public void testGetOffsetsForTimes() {
    buildFetcher();
    // Empty map
    assertTrue(offsetFetcher.offsetsForTimes(new HashMap<>(), time.timer(100L)).isEmpty());
    // Unknown Offset
    testGetOffsetsForTimesWithUnknownOffset();
    // Error code none with unknown offset
    testGetOffsetsForTimesWithError(Errors.NONE, Errors.NONE, -1L, null);
    // Error code none with known offset
    testGetOffsetsForTimesWithError(Errors.NONE, Errors.NONE, 10L, 10L);
    // Test both of partition has error.
    testGetOffsetsForTimesWithError(Errors.NOT_LEADER_OR_FOLLOWER, Errors.INVALID_REQUEST, 10L, 10L);
    // Test the second partition has error.
    testGetOffsetsForTimesWithError(Errors.NONE, Errors.NOT_LEADER_OR_FOLLOWER, 10L, 10L);
    // Test different errors.
    testGetOffsetsForTimesWithError(Errors.NOT_LEADER_OR_FOLLOWER, Errors.NONE, 10L, 10L);
    testGetOffsetsForTimesWithError(Errors.UNKNOWN_TOPIC_OR_PARTITION, Errors.NONE, 10L, 10L);
    testGetOffsetsForTimesWithError(Errors.UNSUPPORTED_FOR_MESSAGE_FORMAT, Errors.NONE, 10L, null);
    testGetOffsetsForTimesWithError(Errors.BROKER_NOT_AVAILABLE, Errors.NONE, 10L, 10L);
}
// A FENCED_LEADER_EPOCH error leaves the reset pending and forces an immediate
// metadata update (timeToNextUpdate == 0).
@Test
public void testGetOffsetsFencedLeaderEpoch() {
    buildFetcher();
    subscriptions.assignFromUser(singleton(tp0));
    client.updateMetadata(initialUpdateResponse);
    subscriptions.requestOffsetReset(tp0, AutoOffsetResetStrategy.LATEST);
    client.prepareResponse(listOffsetResponse(Errors.FENCED_LEADER_EPOCH, 1L, 5L));
    offsetFetcher.resetPositionsIfNeeded();
    consumerClient.pollNoWakeup();
    assertTrue(subscriptions.isOffsetResetNeeded(tp0));
    assertFalse(subscriptions.isFetchable(tp0));
    assertFalse(subscriptions.hasValidPosition(tp0));
    assertEquals(0L, metadata.timeToNextUpdate(time.milliseconds()));
}
// For each retriable per-partition error, verifies that offsetsForTimes refreshes metadata
// before retrying, so the retry goes to the NEW leader and never re-hits the stale one.
// The unconsumed "fatal error" response left on the original leader at the end proves the
// stale path was not taken.
@Test
public void testGetOffsetByTimeWithPartitionsRetryCouldTriggerMetadataUpdate() {
    List<Errors> retriableErrors = Arrays.asList(Errors.NOT_LEADER_OR_FOLLOWER,
        Errors.REPLICA_NOT_AVAILABLE, Errors.KAFKA_STORAGE_ERROR, Errors.OFFSET_NOT_AVAILABLE,
        Errors.LEADER_NOT_AVAILABLE, Errors.FENCED_LEADER_EPOCH, Errors.UNKNOWN_LEADER_EPOCH);
    final int newLeaderEpoch = 3;
    MetadataResponse updatedMetadata = RequestTestUtils.metadataUpdateWithIds("dummy", 3,
        singletonMap(topicName, Errors.NONE), singletonMap(topicName, 4), tp -> newLeaderEpoch, topicIds);
    // The updated metadata moves tp1 to a different leader.
    Node originalLeader = initialUpdateResponse.buildCluster().leaderFor(tp1);
    Node newLeader = updatedMetadata.buildCluster().leaderFor(tp1);
    assertNotEquals(originalLeader, newLeader);
    for (Errors retriableError : retriableErrors) {
        buildFetcher();
        subscriptions.assignFromUser(Set.of(tp0, tp1));
        client.updateMetadata(initialUpdateResponse);
        final long fetchTimestamp = 10L;
        // tp0 succeeds on the first attempt; tp1 fails with the retriable error.
        ListOffsetsPartitionResponse tp0NoError = new ListOffsetsPartitionResponse()
            .setPartitionIndex(tp0.partition())
            .setErrorCode(Errors.NONE.code())
            .setTimestamp(fetchTimestamp)
            .setOffset(4L);
        List<ListOffsetsTopicResponse> topics = Collections.singletonList(
            new ListOffsetsTopicResponse()
                .setName(tp0.topic())
                .setPartitions(Arrays.asList(
                    tp0NoError,
                    new ListOffsetsPartitionResponse()
                        .setPartitionIndex(tp1.partition())
                        .setErrorCode(retriableError.code())
                        .setTimestamp(ListOffsetsRequest.LATEST_TIMESTAMP)
                        .setOffset(-1L))));
    ListOffsetsResponseData data = new ListOffsetsResponseData()
        .setThrottleTimeMs(0)
        .setTopics(topics);
        // First request (to the original leader) must cover both partitions.
        client.prepareResponseFrom(body -> {
            boolean isListOffsetRequest = body instanceof ListOffsetsRequest;
            if (isListOffsetRequest) {
                ListOffsetsRequest request = (ListOffsetsRequest) body;
                List<ListOffsetsTopic> expectedTopics = Collections.singletonList(
                    new ListOffsetsTopic()
                        .setName(tp0.topic())
                        .setPartitions(Arrays.asList(
                            new ListOffsetsPartition()
                                .setPartitionIndex(tp1.partition())
                                .setTimestamp(fetchTimestamp)
                                .setCurrentLeaderEpoch(ListOffsetsResponse.UNKNOWN_EPOCH),
                            new ListOffsetsPartition()
                                .setPartitionIndex(tp0.partition())
                                .setTimestamp(fetchTimestamp)
                                .setCurrentLeaderEpoch(ListOffsetsResponse.UNKNOWN_EPOCH))));
                return request.topics().equals(expectedTopics);
            } else {
                return false;
            }
        }, new ListOffsetsResponse(data), originalLeader);
        client.prepareMetadataUpdate(updatedMetadata);
        // If the metadata wasn't updated before retrying, the fetcher would consult the original leader and hit a NOT_LEADER exception.
        // We will count the answered future response in the end to verify if this is the case.
        List<ListOffsetsTopicResponse> topicsWithFatalError = Collections.singletonList(
            new ListOffsetsTopicResponse()
                .setName(tp0.topic())
                .setPartitions(Arrays.asList(
                    tp0NoError,
                    new ListOffsetsPartitionResponse()
                        .setPartitionIndex(tp1.partition())
                        .setErrorCode(Errors.NOT_LEADER_OR_FOLLOWER.code())
                        .setTimestamp(ListOffsetsRequest.LATEST_TIMESTAMP)
                        .setOffset(-1L))));
        ListOffsetsResponseData dataWithFatalError = new ListOffsetsResponseData()
            .setThrottleTimeMs(0)
            .setTopics(topicsWithFatalError);
        client.prepareResponseFrom(new ListOffsetsResponse(dataWithFatalError), originalLeader);
        // The request to new leader must only contain one partition tp1 with error.
        client.prepareResponseFrom(body -> {
            boolean isListOffsetRequest = body instanceof ListOffsetsRequest;
            if (isListOffsetRequest) {
                ListOffsetsRequest request = (ListOffsetsRequest) body;
                ListOffsetsTopic requestTopic = request.topics().get(0);
                ListOffsetsPartition expectedPartition = new ListOffsetsPartition()
                    .setPartitionIndex(tp1.partition())
                    .setTimestamp(fetchTimestamp)
                    .setCurrentLeaderEpoch(newLeaderEpoch);
                return expectedPartition.equals(requestTopic.partitions().get(0));
            } else {
                return false;
            }
        }, listOffsetResponse(tp1, Errors.NONE, fetchTimestamp, 5L), newLeader);
        Map<TopicPartition, OffsetAndTimestamp> offsetAndTimestampMap =
            offsetFetcher.offsetsForTimes(
                Utils.mkMap(Utils.mkEntry(tp0, fetchTimestamp),
                    Utils.mkEntry(tp1, fetchTimestamp)), time.timer(Integer.MAX_VALUE));
        assertEquals(Utils.mkMap(
            Utils.mkEntry(tp0, new OffsetAndTimestamp(4L, fetchTimestamp)),
            Utils.mkEntry(tp1, new OffsetAndTimestamp(5L, fetchTimestamp))), offsetAndTimestampMap);
        // The NOT_LEADER exception future should not be cleared as we already refreshed the metadata before
        // first retry, thus never hitting.
        assertEquals(1, client.numAwaitingResponses());
    }
}
// An UNKNOWN_LEADER_EPOCH error leaves the reset pending and forces an immediate
// metadata update (timeToNextUpdate == 0).
@Test
public void testGetOffsetsUnknownLeaderEpoch() {
    buildFetcher();
    subscriptions.assignFromUser(singleton(tp0));
    subscriptions.requestOffsetReset(tp0, AutoOffsetResetStrategy.LATEST);
    client.prepareResponse(listOffsetResponse(Errors.UNKNOWN_LEADER_EPOCH, 1L, 5L));
    offsetFetcher.resetPositionsIfNeeded();
    consumerClient.pollNoWakeup();
    assertTrue(subscriptions.isOffsetResetNeeded(tp0));
    assertFalse(subscriptions.isFetchable(tp0));
    assertFalse(subscriptions.hasValidPosition(tp0));
    assertEquals(0L, metadata.timeToNextUpdate(time.milliseconds()));
}
// The outgoing ListOffsets request must carry the leader epoch (99) learned from the
// most recent metadata update.
@Test
public void testGetOffsetsIncludesLeaderEpoch() {
    buildFetcher();
    subscriptions.assignFromUser(singleton(tp0));
    client.updateMetadata(initialUpdateResponse);
    // Metadata update with leader epochs
    MetadataResponse metadataResponse = RequestTestUtils.metadataUpdateWithIds("dummy", 1,
        Collections.emptyMap(), Collections.singletonMap(topicName, 4), tp -> 99, topicIds);
    client.updateMetadata(metadataResponse);
    // Request latest offset
    subscriptions.requestOffsetReset(tp0);
    offsetFetcher.resetPositionsIfNeeded();
    // Check for epoch in outgoing request
    MockClient.RequestMatcher matcher = body -> {
        if (body instanceof ListOffsetsRequest) {
            ListOffsetsRequest offsetRequest = (ListOffsetsRequest) body;
            int epoch = offsetRequest.topics().get(0).partitions().get(0).currentLeaderEpoch();
            assertTrue(epoch != ListOffsetsResponse.UNKNOWN_EPOCH, "Expected Fetcher to set leader epoch in request");
            assertEquals(99, epoch, "Expected leader epoch to match epoch from metadata update");
            return true;
        } else {
            fail("Should have seen ListOffsetRequest");
            return false;
        }
    };
    client.prepareResponse(matcher, listOffsetResponse(Errors.NONE, 1L, 5L));
    consumerClient.pollNoWakeup();
}
// offsetsForTimes must keep retrying with refreshed metadata until leaders for ALL
// requested partitions (including one from a topic unknown at first) are resolved.
@Test
public void testGetOffsetsForTimesWhenSomeTopicPartitionLeadersNotKnownInitially() {
    buildFetcher();
    subscriptions.assignFromUser(Set.of(tp0, tp1));
    final String anotherTopic = "another-topic";
    final TopicPartition t2p0 = new TopicPartition(anotherTopic, 0);
    client.reset();
    // Metadata initially has one topic
    MetadataResponse initialMetadata = RequestTestUtils.metadataUpdateWithIds(3, singletonMap(topicName, 2), topicIds);
    client.updateMetadata(initialMetadata);
    // The first metadata refresh should contain one topic
    client.prepareMetadataUpdate(initialMetadata);
    client.prepareResponseFrom(listOffsetResponse(tp0, Errors.NONE, 1000L, 11L),
        metadata.fetch().leaderFor(tp0));
    client.prepareResponseFrom(listOffsetResponse(tp1, Errors.NONE, 1000L, 32L),
        metadata.fetch().leaderFor(tp1));
    // Second metadata refresh should contain two topics
    Map<String, Integer> partitionNumByTopic = new HashMap<>();
    partitionNumByTopic.put(topicName, 2);
    partitionNumByTopic.put(anotherTopic, 1);
    topicIds.put("another-topic", Uuid.randomUuid());
    MetadataResponse updatedMetadata = RequestTestUtils.metadataUpdateWithIds(3, partitionNumByTopic, topicIds);
    client.prepareMetadataUpdate(updatedMetadata);
    client.prepareResponseFrom(listOffsetResponse(t2p0, Errors.NONE, 1000L, 54L),
        metadata.fetch().leaderFor(t2p0));
    Map<TopicPartition, Long> timestampToSearch = new HashMap<>();
    timestampToSearch.put(tp0, ListOffsetsRequest.LATEST_TIMESTAMP);
    timestampToSearch.put(tp1, ListOffsetsRequest.LATEST_TIMESTAMP);
    timestampToSearch.put(t2p0, ListOffsetsRequest.LATEST_TIMESTAMP);
    Map<TopicPartition, OffsetAndTimestamp> offsetAndTimestampMap =
        offsetFetcher.offsetsForTimes(timestampToSearch, time.timer(Long.MAX_VALUE));
    assertNotNull(offsetAndTimestampMap.get(tp0), "Expect MetadataFetcher.offsetsForTimes() to return non-null result for " + tp0);
    assertNotNull(offsetAndTimestampMap.get(tp1), "Expect MetadataFetcher.offsetsForTimes() to return non-null result for " + tp1);
    assertNotNull(offsetAndTimestampMap.get(t2p0), "Expect MetadataFetcher.offsetsForTimes() to return non-null result for " + t2p0);
    assertEquals(11L, offsetAndTimestampMap.get(tp0).offset());
    assertEquals(32L, offsetAndTimestampMap.get(tp1).offset());
    assertEquals(54L, offsetAndTimestampMap.get(t2p0).offset());
}
@Test
public void testGetOffsetsForTimesWhenSomeTopicPartitionLeadersDisconnectException() {
    buildFetcher();
    // A second topic whose metadata only appears after the forced refresh.
    final String anotherTopic = "another-topic";
    final TopicPartition t2p0 = new TopicPartition(anotherTopic, 0);
    subscriptions.assignFromUser(Set.of(tp0, t2p0));
    client.reset();
    // Metadata initially knows only the first topic.
    MetadataResponse initialMetadata = RequestTestUtils.metadataUpdateWithIds(1, singletonMap(topicName, 1), topicIds);
    client.updateMetadata(initialMetadata);
    Map<String, Integer> partitionNumByTopic = new HashMap<>();
    partitionNumByTopic.put(topicName, 1);
    partitionNumByTopic.put(anotherTopic, 1);
    // Use the existing variable instead of repeating the "another-topic" literal.
    topicIds.put(anotherTopic, Uuid.randomUuid());
    MetadataResponse updatedMetadata = RequestTestUtils.metadataUpdateWithIds(1, partitionNumByTopic, topicIds);
    client.prepareMetadataUpdate(updatedMetadata);
    // First list-offsets attempt is answered with a disconnect (trailing 'true'),
    // which forces a metadata refresh and a retry.
    client.prepareResponse(listOffsetRequestMatcher(ListOffsetsRequest.LATEST_TIMESTAMP),
        listOffsetResponse(tp0, Errors.NONE, 1000L, 11L), true);
    // The retry, sent to tp0's current leader, succeeds.
    client.prepareResponseFrom(listOffsetResponse(tp0, Errors.NONE, 1000L, 11L), metadata.fetch().leaderFor(tp0));
    Map<TopicPartition, Long> timestampToSearch = new HashMap<>();
    timestampToSearch.put(tp0, ListOffsetsRequest.LATEST_TIMESTAMP);
    Map<TopicPartition, OffsetAndTimestamp> offsetAndTimestampMap = offsetFetcher.offsetsForTimes(timestampToSearch, time.timer(Long.MAX_VALUE));
    assertNotNull(offsetAndTimestampMap.get(tp0), "Expect MetadataFetcher.offsetsForTimes() to return non-null result for " + tp0);
    assertEquals(11L, offsetAndTimestampMap.get(tp0).offset());
    // After the refresh the metadata must also include the second topic.
    assertNotNull(metadata.fetch().partitionCountForTopic(anotherTopic));
}
@Test
public void testListOffsetsWithZeroTimeout() {
    // With a zero timeout no broker response can arrive, so every requested
    // partition is expected to map to a null OffsetAndTimestamp.
    buildFetcher();
    Map<TopicPartition, Long> offsetsToSearch = new HashMap<>();
    Map<TopicPartition, OffsetAndTimestamp> offsetsToExpect = new HashMap<>();
    for (TopicPartition partition : Arrays.asList(tp0, tp1)) {
        offsetsToSearch.put(partition, ListOffsetsRequest.EARLIEST_TIMESTAMP);
        offsetsToExpect.put(partition, null);
    }
    assertEquals(offsetsToExpect, offsetFetcher.offsetsForTimes(offsetsToSearch, time.timer(0)));
}
@Test
public void testBatchedListOffsetsMetadataErrors() {
// A single batched ListOffsets response in which both partitions fail:
// tp0 with NOT_LEADER_OR_FOLLOWER and tp1 with UNKNOWN_TOPIC_OR_PARTITION.
buildFetcher();
ListOffsetsResponseData data = new ListOffsetsResponseData()
.setThrottleTimeMs(0)
.setTopics(Collections.singletonList(new ListOffsetsTopicResponse()
.setName(tp0.topic())
.setPartitions(Arrays.asList(
new ListOffsetsPartitionResponse()
.setPartitionIndex(tp0.partition())
.setErrorCode(Errors.NOT_LEADER_OR_FOLLOWER.code())
.setTimestamp(ListOffsetsResponse.UNKNOWN_TIMESTAMP)
.setOffset(ListOffsetsResponse.UNKNOWN_OFFSET),
new ListOffsetsPartitionResponse()
.setPartitionIndex(tp1.partition())
.setErrorCode(Errors.UNKNOWN_TOPIC_OR_PARTITION.code())
.setTimestamp(ListOffsetsResponse.UNKNOWN_TIMESTAMP)
.setOffset(ListOffsetsResponse.UNKNOWN_OFFSET)))));
client.prepareResponse(new ListOffsetsResponse(data));
Map<TopicPartition, Long> offsetsToSearch = new HashMap<>();
offsetsToSearch.put(tp0, ListOffsetsRequest.EARLIEST_TIMESTAMP);
offsetsToSearch.put(tp1, ListOffsetsRequest.EARLIEST_TIMESTAMP);
// The errors prevent the lookup from completing before the 1 ms timer
// expires, so offsetsForTimes must throw a TimeoutException.
assertThrows(TimeoutException.class, () -> offsetFetcher.offsetsForTimes(offsetsToSearch, time.timer(1)));
}
// Helper: runs offsetsForTimes for t2p0 and tp1 where the first attempt for
// each partition is answered with the given error and the second attempt
// succeeds. expectedOffsetForP0 == null means no result is expected for t2p0.
// NOTE(review): responses are queued in order, so the first/second attempt
// pairing depends on the fetcher retrying after the initial errors.
private void testGetOffsetsForTimesWithError(Errors errorForP0,
Errors errorForP1,
long offsetForP0,
Long expectedOffsetForP0) {
long offsetForP1 = 100L;
long expectedOffsetForP1 = 100L;
client.reset();
String topicName2 = "topic2";
TopicPartition t2p0 = new TopicPartition(topicName2, 0);
// Expect a metadata refresh.
metadata.bootstrap(ClientUtils.parseAndValidateAddresses(Collections.singletonList("1.1.1.1:1111"),
ClientDnsLookup.USE_ALL_DNS_IPS));
Map<String, Integer> partitionNumByTopic = new HashMap<>();
partitionNumByTopic.put(topicName, 2);
partitionNumByTopic.put(topicName2, 1);
MetadataResponse updateMetadataResponse = RequestTestUtils.metadataUpdateWithIds(2, partitionNumByTopic, topicIds);
Cluster updatedCluster = updateMetadataResponse.buildCluster();
// The metadata refresh should contain all the topics.
client.prepareMetadataUpdate(updateMetadataResponse, true);
// First try should fail due to metadata error.
client.prepareResponseFrom(listOffsetResponse(t2p0, errorForP0, offsetForP0, offsetForP0),
updatedCluster.leaderFor(t2p0));
client.prepareResponseFrom(listOffsetResponse(tp1, errorForP1, offsetForP1, offsetForP1),
updatedCluster.leaderFor(tp1));
// Second try should succeed.
client.prepareResponseFrom(listOffsetResponse(t2p0, Errors.NONE, offsetForP0, offsetForP0),
updatedCluster.leaderFor(t2p0));
client.prepareResponseFrom(listOffsetResponse(tp1, Errors.NONE, offsetForP1, offsetForP1),
updatedCluster.leaderFor(tp1));
Map<TopicPartition, Long> timestampToSearch = new HashMap<>();
timestampToSearch.put(t2p0, 0L);
timestampToSearch.put(tp1, 0L);
Map<TopicPartition, OffsetAndTimestamp> offsetAndTimestampMap =
offsetFetcher.offsetsForTimes(timestampToSearch, time.timer(Long.MAX_VALUE));
// t2p0: either absent (null) or carrying the expected offset/timestamp.
if (expectedOffsetForP0 == null)
assertNull(offsetAndTimestampMap.get(t2p0));
else {
assertEquals(expectedOffsetForP0.longValue(), offsetAndTimestampMap.get(t2p0).timestamp());
assertEquals(expectedOffsetForP0.longValue(), offsetAndTimestampMap.get(t2p0).offset());
}
// tp1 always ends up with the successful second-attempt values.
assertEquals(expectedOffsetForP1, offsetAndTimestampMap.get(tp1).timestamp());
assertEquals(expectedOffsetForP1, offsetAndTimestampMap.get(tp1).offset());
}
// Helper: the broker answers with UNKNOWN_TIMESTAMP/UNKNOWN_OFFSET for tp0;
// the result map must still contain tp0, but mapped to null.
private void testGetOffsetsForTimesWithUnknownOffset() {
client.reset();
// Ensure metadata has both partitions.
MetadataResponse initialMetadataUpdate = RequestTestUtils.metadataUpdateWithIds(1, singletonMap(topicName, 1), topicIds);
client.updateMetadata(initialMetadataUpdate);
// Successful (NONE) response that nevertheless carries no usable offset.
ListOffsetsResponseData data = new ListOffsetsResponseData()
.setThrottleTimeMs(0)
.setTopics(Collections.singletonList(new ListOffsetsTopicResponse()
.setName(tp0.topic())
.setPartitions(Collections.singletonList(new ListOffsetsPartitionResponse()
.setPartitionIndex(tp0.partition())
.setErrorCode(Errors.NONE.code())
.setTimestamp(ListOffsetsResponse.UNKNOWN_TIMESTAMP)
.setOffset(ListOffsetsResponse.UNKNOWN_OFFSET)))));
client.prepareResponseFrom(new ListOffsetsResponse(data),
metadata.fetch().leaderFor(tp0));
Map<TopicPartition, Long> timestampToSearch = new HashMap<>();
timestampToSearch.put(tp0, 0L);
Map<TopicPartition, OffsetAndTimestamp> offsetAndTimestampMap =
offsetFetcher.offsetsForTimes(timestampToSearch, time.timer(Long.MAX_VALUE));
// The key is present but the value is null: "looked up, nothing found".
assertTrue(offsetAndTimestampMap.containsKey(tp0));
assertNull(offsetAndTimestampMap.get(tp0));
}
@Test
public void testOffsetValidationRequestGrouping() {
// Verifies that OffsetsForLeaderEpoch requests are grouped by leader node:
// each node must receive exactly the partitions it leads, and every assigned
// partition must be requested exactly once across all nodes.
buildFetcher();
assignFromUser(Set.of(tp0, tp1, tp2, tp3));
metadata.updateWithCurrentRequestVersion(RequestTestUtils.metadataUpdateWithIds("dummy", 3,
Collections.emptyMap(), singletonMap(topicName, 4),
tp -> 5, topicIds), false, 0L);
// Seek every partition with epoch 4 so all of them require validation.
for (TopicPartition tp : subscriptions.assignedPartitions()) {
Metadata.LeaderAndEpoch leaderAndEpoch = new Metadata.LeaderAndEpoch(
metadata.currentLeader(tp).leader, Optional.of(4));
subscriptions.seekUnvalidated(tp,
new SubscriptionState.FetchPosition(0, Optional.of(4), leaderAndEpoch));
}
Set<TopicPartition> allRequestedPartitions = new HashSet<>();
for (Node node : metadata.fetch().nodes()) {
apiVersions.update(node.idString(), NodeApiVersions.create());
// Partitions this node leads — exactly the set the request to it must carry.
Set<TopicPartition> expectedPartitions = subscriptions.assignedPartitions().stream()
.filter(tp ->
metadata.currentLeader(tp).leader.equals(Optional.of(node)))
.collect(Collectors.toSet());
// No partition may be requested from two different nodes.
assertTrue(expectedPartitions.stream().noneMatch(allRequestedPartitions::contains));
assertFalse(expectedPartitions.isEmpty());
allRequestedPartitions.addAll(expectedPartitions);
// Build the per-node response covering exactly its expected partitions.
OffsetForLeaderEpochResponseData data = new OffsetForLeaderEpochResponseData();
expectedPartitions.forEach(tp -> {
OffsetForLeaderTopicResult topic = data.topics().find(tp.topic());
if (topic == null) {
topic = new OffsetForLeaderTopicResult().setTopic(tp.topic());
data.topics().add(topic);
}
topic.partitions().add(new EpochEndOffset()
.setPartition(tp.partition())
.setErrorCode(Errors.NONE.code())
.setLeaderEpoch(4)
.setEndOffset(0));
});
OffsetsForLeaderEpochResponse response = new OffsetsForLeaderEpochResponse(data);
// Matcher rejects any request whose partition set differs from expectations.
client.prepareResponseFrom(body -> {
OffsetsForLeaderEpochRequest request = (OffsetsForLeaderEpochRequest) body;
return expectedPartitions.equals(offsetForLeaderPartitionMap(request.data()).keySet());
}, response, node);
}
assertEquals(subscriptions.assignedPartitions(), allRequestedPartitions);
offsetFetcher.validatePositionsIfNeeded();
consumerClient.pollNoWakeup();
// All partitions should have completed validation.
assertTrue(subscriptions.assignedPartitions()
.stream().noneMatch(subscriptions::awaitingValidation));
}
@Test
public void testOffsetValidationAwaitsNodeApiVersion() {
// Validation must not send OffsetsForLeaderEpoch until the node's API
// versions are known; until then the partition stays in AWAITING_VALIDATION.
buildFetcher();
assignFromUser(singleton(tp0));
Map<String, Integer> partitionCounts = new HashMap<>();
partitionCounts.put(tp0.topic(), 4);
final int epochOne = 1;
metadata.updateWithCurrentRequestVersion(RequestTestUtils.metadataUpdateWithIds("dummy", 1,
Collections.emptyMap(), partitionCounts, tp -> epochOne, topicIds), false, 0L);
Node node = metadata.fetch().nodes().get(0);
assertFalse(client.isConnected(node.idString()));
// Seek with a position and leader+epoch
Metadata.LeaderAndEpoch leaderAndEpoch = new Metadata.LeaderAndEpoch(
metadata.currentLeader(tp0).leader, Optional.of(epochOne));
subscriptions.seekUnvalidated(tp0, new SubscriptionState.FetchPosition(20L, Optional.of(epochOne), leaderAndEpoch));
assertFalse(client.isConnected(node.idString()));
assertTrue(subscriptions.awaitingValidation(tp0));
// No version information is initially available, but the node is now connected
offsetFetcher.validatePositionsIfNeeded();
assertTrue(subscriptions.awaitingValidation(tp0));
assertTrue(client.isConnected(node.idString()));
apiVersions.update(node.idString(), NodeApiVersions.create());
// On the next call, the OffsetForLeaderEpoch request is sent and validation completes
client.prepareResponseFrom(
prepareOffsetsForLeaderEpochResponse(tp0, epochOne, 30L),
node);
offsetFetcher.validatePositionsIfNeeded();
consumerClient.pollNoWakeup();
assertFalse(subscriptions.awaitingValidation(tp0));
// Position is unchanged because the end offset (30) is beyond it (20).
assertEquals(20L, subscriptions.position(tp0).offset);
}
@Test
public void testOffsetValidationSkippedForOldBroker() {
// Old brokers may require CLUSTER permission to use the OffsetForLeaderEpoch API,
// so we should skip offset validation and not send the request.
IsolationLevel isolationLevel = IsolationLevel.READ_UNCOMMITTED;
int maxPollRecords = Integer.MAX_VALUE;
long metadataExpireMs = Long.MAX_VALUE;
AutoOffsetResetStrategy offsetResetStrategy = AutoOffsetResetStrategy.EARLIEST;
int minBytes = 1;
int maxBytes = Integer.MAX_VALUE;
int maxWaitMs = 0;
int fetchSize = 1000;
MetricConfig metricConfig = new MetricConfig();
LogContext logContext = new LogContext();
SubscriptionState subscriptionState = new SubscriptionState(logContext, offsetResetStrategy);
buildFetcher(metricConfig, isolationLevel, metadataExpireMs, subscriptionState, logContext);
// A full Fetcher is built here (beyond the usual OffsetFetcher) because the
// second scenario below exercises sendFetches() as well.
FetchMetricsRegistry metricsRegistry = new FetchMetricsRegistry(metricConfig.tags().keySet(), "consumertest-group");
FetchConfig fetchConfig = new FetchConfig(
minBytes,
maxBytes,
maxWaitMs,
fetchSize,
maxPollRecords,
true, // check crc
CommonClientConfigs.DEFAULT_CLIENT_RACK,
isolationLevel);
Fetcher<byte[], byte[]> fetcher = new Fetcher<>(
logContext,
consumerClient,
metadata,
subscriptions,
fetchConfig,
new Deserializers<>(new ByteArrayDeserializer(), new ByteArrayDeserializer(), metrics),
new FetchMetricsManager(metrics, metricsRegistry),
time,
apiVersions);
assignFromUser(singleton(tp0));
Map<String, Integer> partitionCounts = new HashMap<>();
partitionCounts.put(tp0.topic(), 4);
final int epochOne = 1;
final int epochTwo = 2;
// Start with metadata, epoch=1
metadata.updateWithCurrentRequestVersion(RequestTestUtils.metadataUpdateWithIds("dummy", 1,
Collections.emptyMap(), partitionCounts, tp -> epochOne, topicIds), false, 0L);
// Offset validation requires OffsetForLeaderEpoch request v3 or higher
// — this node only supports up to v2, so validation must be skipped.
Node node = metadata.fetch().nodes().get(0);
apiVersions.update(node.idString(), NodeApiVersions.create(
ApiKeys.OFFSET_FOR_LEADER_EPOCH.id, (short) 0, (short) 2));
{
// Scenario 1: validatePositionsIfNeeded() itself skips validation.
// Seek with a position and leader+epoch
Metadata.LeaderAndEpoch leaderAndEpoch = new Metadata.LeaderAndEpoch(
metadata.currentLeader(tp0).leader, Optional.of(epochOne));
subscriptions.seekUnvalidated(tp0, new SubscriptionState.FetchPosition(0, Optional.of(epochOne), leaderAndEpoch));
// Update metadata to epoch=2, enter validation
metadata.updateWithCurrentRequestVersion(RequestTestUtils.metadataUpdateWithIds("dummy", 1,
Collections.emptyMap(), partitionCounts, tp -> epochTwo, topicIds), false, 0L);
offsetFetcher.validatePositionsIfNeeded();
// Offset validation is skipped
assertFalse(subscriptions.awaitingValidation(tp0));
}
{
// Scenario 2: the fetch path must also clear the validation state.
// Seek with a position and leader+epoch
Metadata.LeaderAndEpoch leaderAndEpoch = new Metadata.LeaderAndEpoch(
metadata.currentLeader(tp0).leader, Optional.of(epochOne));
subscriptions.seekUnvalidated(tp0, new SubscriptionState.FetchPosition(0, Optional.of(epochOne), leaderAndEpoch));
// Update metadata to epoch=2, enter validation
metadata.updateWithCurrentRequestVersion(RequestTestUtils.metadataUpdateWithIds("dummy", 1,
Collections.emptyMap(), partitionCounts, tp -> epochTwo, topicIds), false, 0L);
// Subscription should not stay in AWAITING_VALIDATION in prepareFetchRequest
offsetFetcher.validatePositionsOnMetadataChange();
assertEquals(1, fetcher.sendFetches());
assertFalse(subscriptions.awaitingValidation(tp0));
}
}
@Test
public void testOffsetValidationSkippedForOldResponse() {
// Old responses may provide unreliable leader epoch,
// so we should skip offset validation and not send the request.
buildFetcher();
assignFromUser(singleton(tp0));
Map<String, Integer> partitionCounts = new HashMap<>();
partitionCounts.put(tp0.topic(), 4);
final int epochOne = 1;
metadata.updateWithCurrentRequestVersion(RequestTestUtils.metadataUpdateWithIds("dummy", 1,
Collections.emptyMap(), partitionCounts, tp -> epochOne, topicIds), false, 0L);
Node node = metadata.fetch().nodes().get(0);
assertFalse(client.isConnected(node.idString()));
// Seek with a position and leader+epoch
Metadata.LeaderAndEpoch leaderAndEpoch = new Metadata.LeaderAndEpoch(
metadata.currentLeader(tp0).leader, Optional.of(epochOne));
subscriptions.seekUnvalidated(tp0, new SubscriptionState.FetchPosition(20L, Optional.of(epochOne), leaderAndEpoch));
assertFalse(client.isConnected(node.idString()));
assertTrue(subscriptions.awaitingValidation(tp0));
// Inject an older version of the metadata response (v8, no epoch info).
final short responseVersion = 8;
metadata.updateWithCurrentRequestVersion(RequestTestUtils.metadataUpdateWith("dummy", 1,
Collections.emptyMap(), partitionCounts, tp -> null, MetadataResponse.PartitionMetadata::new, responseVersion, topicIds), false, 0L);
offsetFetcher.validatePositionsIfNeeded();
// Offset validation is skipped
assertFalse(subscriptions.awaitingValidation(tp0));
}
// The five tests below are thin parameterizations of
// testOffsetValidationWithGivenEpochOffset(leaderEpoch, endOffset, resetStrategy).
@Test
public void testOffsetValidationresetPositionForUndefinedEpochWithDefinedResetPolicy() {
// Undefined epoch in the response + EARLIEST policy: position is reset.
testOffsetValidationWithGivenEpochOffset(
UNDEFINED_EPOCH, 0L, AutoOffsetResetStrategy.EARLIEST);
}
@Test
public void testOffsetValidationresetPositionForUndefinedOffsetWithDefinedResetPolicy() {
// Undefined end offset + EARLIEST policy: position is reset.
testOffsetValidationWithGivenEpochOffset(
2, UNDEFINED_EPOCH_OFFSET, AutoOffsetResetStrategy.EARLIEST);
}
@Test
public void testOffsetValidationresetPositionForUndefinedEpochWithUndefinedResetPolicy() {
// Undefined epoch + NONE policy: LogTruncationException is expected.
testOffsetValidationWithGivenEpochOffset(
UNDEFINED_EPOCH, 0L, AutoOffsetResetStrategy.NONE);
}
@Test
public void testOffsetValidationresetPositionForUndefinedOffsetWithUndefinedResetPolicy() {
// Undefined end offset + NONE policy: LogTruncationException is expected.
testOffsetValidationWithGivenEpochOffset(
2, UNDEFINED_EPOCH_OFFSET, AutoOffsetResetStrategy.NONE);
}
@Test
public void testOffsetValidationTriggerLogTruncationForBadOffsetWithUndefinedResetPolicy() {
// Divergent (epoch=1, offset=1) + NONE policy: truncation with divergent offsets.
testOffsetValidationWithGivenEpochOffset(
1, 1L, AutoOffsetResetStrategy.NONE);
}
// Helper: validates a position at offset 5/epoch 1 against a broker response
// carrying (leaderEpoch, endOffset). With reset policy NONE a truncation must
// surface as LogTruncationException; otherwise the position is reset silently.
private void testOffsetValidationWithGivenEpochOffset(int leaderEpoch,
long endOffset,
AutoOffsetResetStrategy offsetResetStrategy) {
buildFetcher(offsetResetStrategy);
assignFromUser(singleton(tp0));
Map<String, Integer> partitionCounts = new HashMap<>();
partitionCounts.put(tp0.topic(), 4);
final int epochOne = 1;
final long initialOffset = 5;
metadata.updateWithCurrentRequestVersion(RequestTestUtils.metadataUpdateWithIds("dummy", 1,
Collections.emptyMap(), partitionCounts, tp -> epochOne, topicIds), false, 0L);
// Offset validation requires OffsetForLeaderEpoch request v3 or higher
Node node = metadata.fetch().nodes().get(0);
apiVersions.update(node.idString(), NodeApiVersions.create());
Metadata.LeaderAndEpoch leaderAndEpoch = new Metadata.LeaderAndEpoch(metadata.currentLeader(tp0).leader, Optional.of(epochOne));
subscriptions.seekUnvalidated(tp0, new SubscriptionState.FetchPosition(initialOffset, Optional.of(epochOne), leaderAndEpoch));
offsetFetcher.validatePositionsIfNeeded();
consumerClient.poll(time.timer(Duration.ZERO));
assertTrue(subscriptions.awaitingValidation(tp0));
assertTrue(client.hasInFlightRequests());
// Answer the in-flight request with the parameterized epoch/offset.
client.respond(
offsetsForLeaderEpochRequestMatcher(tp0),
prepareOffsetsForLeaderEpochResponse(tp0, leaderEpoch, endOffset));
consumerClient.poll(time.timer(Duration.ZERO));
if (offsetResetStrategy == AutoOffsetResetStrategy.NONE) {
// No reset policy: truncation must be reported to the caller.
LogTruncationException thrown =
assertThrows(LogTruncationException.class, () -> offsetFetcher.validatePositionsIfNeeded());
assertEquals(singletonMap(tp0, initialOffset), thrown.offsetOutOfRangePartitions());
// Divergent offsets are only known when both epoch and offset are defined.
if (endOffset == UNDEFINED_EPOCH_OFFSET || leaderEpoch == UNDEFINED_EPOCH) {
assertEquals(Collections.emptyMap(), thrown.divergentOffsets());
} else {
OffsetAndMetadata expectedDivergentOffset = new OffsetAndMetadata(
endOffset, Optional.of(leaderEpoch), "");
assertEquals(singletonMap(tp0, expectedDivergentOffset), thrown.divergentOffsets());
}
assertTrue(subscriptions.awaitingValidation(tp0));
} else {
// A reset policy resolves the truncation without throwing.
offsetFetcher.validatePositionsIfNeeded();
assertFalse(subscriptions.awaitingValidation(tp0));
}
}
@Test
public void testOffsetValidationHandlesSeekWithInflightOffsetForLeaderRequest() {
// A seek performed while an OffsetForLeaderEpoch request is in flight must
// invalidate that request's eventual response.
buildFetcher();
assignFromUser(singleton(tp0));
Map<String, Integer> partitionCounts = new HashMap<>();
partitionCounts.put(tp0.topic(), 4);
final int epochOne = 1;
final Optional<Integer> epochOneOpt = Optional.of(epochOne);
metadata.updateWithCurrentRequestVersion(RequestTestUtils.metadataUpdateWithIds("dummy", 1,
Collections.emptyMap(), partitionCounts, tp -> epochOne, topicIds), false, 0L);
// Offset validation requires OffsetForLeaderEpoch request v3 or higher
Node node = metadata.fetch().nodes().get(0);
apiVersions.update(node.idString(), NodeApiVersions.create());
Metadata.LeaderAndEpoch leaderAndEpoch = new Metadata.LeaderAndEpoch(metadata.currentLeader(tp0).leader, epochOneOpt);
subscriptions.seekUnvalidated(tp0, new SubscriptionState.FetchPosition(0, epochOneOpt, leaderAndEpoch));
offsetFetcher.validatePositionsIfNeeded();
consumerClient.poll(time.timer(Duration.ZERO));
assertTrue(subscriptions.awaitingValidation(tp0));
assertTrue(client.hasInFlightRequests());
// While the OffsetForLeaderEpoch request is in-flight, we seek to a different offset.
subscriptions.seekUnvalidated(tp0, new SubscriptionState.FetchPosition(5, epochOneOpt, leaderAndEpoch));
assertTrue(subscriptions.awaitingValidation(tp0));
client.respond(
offsetsForLeaderEpochRequestMatcher(tp0),
prepareOffsetsForLeaderEpochResponse(tp0, 0, 0L));
consumerClient.poll(time.timer(Duration.ZERO));
// The response should be ignored since we were validating a different position.
assertTrue(subscriptions.awaitingValidation(tp0));
}
@Test
public void testOffsetValidationFencing() {
// A validation response carrying a stale epoch must be fenced (ignored);
// only a response matching the current leader epoch completes validation.
buildFetcher();
assignFromUser(singleton(tp0));
Map<String, Integer> partitionCounts = new HashMap<>();
partitionCounts.put(tp0.topic(), 4);
final int epochOne = 1;
final int epochTwo = 2;
final int epochThree = 3;
// Start with metadata, epoch=1
metadata.updateWithCurrentRequestVersion(RequestTestUtils.metadataUpdateWithIds("dummy", 1,
Collections.emptyMap(), partitionCounts, tp -> epochOne, topicIds), false, 0L);
// Offset validation requires OffsetForLeaderEpoch request v3 or higher
Node node = metadata.fetch().nodes().get(0);
apiVersions.update(node.idString(), NodeApiVersions.create());
// Seek with a position and leader+epoch
Metadata.LeaderAndEpoch leaderAndEpoch = new Metadata.LeaderAndEpoch(metadata.currentLeader(tp0).leader, Optional.of(epochOne));
subscriptions.seekValidated(tp0, new SubscriptionState.FetchPosition(0, Optional.of(epochOne), leaderAndEpoch));
// Update metadata to epoch=2, enter validation
metadata.updateWithCurrentRequestVersion(RequestTestUtils.metadataUpdateWithIds("dummy", 1,
Collections.emptyMap(), partitionCounts, tp -> epochTwo, topicIds), false, 0L);
offsetFetcher.validatePositionsIfNeeded();
assertTrue(subscriptions.awaitingValidation(tp0));
// Update the position to epoch=3, as we would from a fetch
subscriptions.completeValidation(tp0);
SubscriptionState.FetchPosition nextPosition = new SubscriptionState.FetchPosition(
10,
Optional.of(epochTwo),
new Metadata.LeaderAndEpoch(leaderAndEpoch.leader, Optional.of(epochTwo)));
subscriptions.position(tp0, nextPosition);
subscriptions.maybeValidatePositionForCurrentLeader(apiVersions, tp0, new Metadata.LeaderAndEpoch(leaderAndEpoch.leader, Optional.of(epochThree)));
// Prepare offset list response from async validation with epoch=2
client.prepareResponse(prepareOffsetsForLeaderEpochResponse(tp0, epochTwo, 10L));
consumerClient.pollNoWakeup();
assertTrue(subscriptions.awaitingValidation(tp0), "Expected validation to fail since leader epoch changed");
// Next round of validation, should succeed in validating the position
offsetFetcher.validatePositionsIfNeeded();
client.prepareResponse(prepareOffsetsForLeaderEpochResponse(tp0, epochThree, 10L));
consumerClient.pollNoWakeup();
assertFalse(subscriptions.awaitingValidation(tp0), "Expected validation to succeed with latest epoch");
}
@Test
public void testBeginningOffsets() {
    // The earliest offset reported by the broker is surfaced unchanged.
    buildFetcher();
    assignFromUser(singleton(tp0));
    final long expectedOffset = 2L;
    client.prepareResponse(listOffsetResponse(tp0, Errors.NONE, ListOffsetsRequest.EARLIEST_TIMESTAMP, expectedOffset));
    Map<TopicPartition, Long> result = offsetFetcher.beginningOffsets(singleton(tp0), time.timer(5000L));
    assertEquals(singletonMap(tp0, expectedOffset), result);
}
@Test
public void testBeginningOffsetsDuplicateTopicPartition() {
// Passing the same partition twice must be de-duplicated into one result.
buildFetcher();
assignFromUser(singleton(tp0));
client.prepareResponse(listOffsetResponse(tp0, Errors.NONE, ListOffsetsRequest.EARLIEST_TIMESTAMP, 2L));
assertEquals(singletonMap(tp0, 2L), offsetFetcher.beginningOffsets(asList(tp0, tp0), time.timer(5000L)));
}
@Test
public void testBeginningOffsetsMultipleTopicPartitions() {
    // Earliest offsets for several partitions are returned in a single call.
    buildFetcher();
    Map<TopicPartition, Long> expectedOffsets = Map.of(tp0, 2L, tp1, 4L, tp2, 6L);
    assignFromUser(expectedOffsets.keySet());
    client.prepareResponse(listOffsetResponse(expectedOffsets, Errors.NONE, ListOffsetsRequest.EARLIEST_TIMESTAMP, ListOffsetsResponse.UNKNOWN_EPOCH));
    assertEquals(expectedOffsets, offsetFetcher.beginningOffsets(asList(tp0, tp1, tp2), time.timer(5000L)));
}
@Test
public void testBeginningOffsetsEmpty() {
// No partitions requested -> empty result, no request sent to prepare for.
buildFetcher();
assertEquals(emptyMap(), offsetFetcher.beginningOffsets(emptyList(), time.timer(5000L)));
}
@Test
public void testEndOffsets() {
    // The latest offset reported by the broker is surfaced unchanged.
    buildFetcher();
    assignFromUser(singleton(tp0));
    final long expectedOffset = 5L;
    client.prepareResponse(listOffsetResponse(tp0, Errors.NONE, ListOffsetsRequest.LATEST_TIMESTAMP, expectedOffset));
    Map<TopicPartition, Long> result = offsetFetcher.endOffsets(singleton(tp0), time.timer(5000L));
    assertEquals(singletonMap(tp0, expectedOffset), result);
}
@Test
public void testEndOffsetsDuplicateTopicPartition() {
// Passing the same partition twice must be de-duplicated into one result.
buildFetcher();
assignFromUser(singleton(tp0));
client.prepareResponse(listOffsetResponse(tp0, Errors.NONE, ListOffsetsRequest.LATEST_TIMESTAMP, 5L));
assertEquals(singletonMap(tp0, 5L), offsetFetcher.endOffsets(asList(tp0, tp0), time.timer(5000L)));
}
@Test
public void testEndOffsetsMultipleTopicPartitions() {
    // Latest offsets for several partitions are returned in a single call.
    buildFetcher();
    Map<TopicPartition, Long> expectedOffsets = Map.of(tp0, 5L, tp1, 7L, tp2, 9L);
    assignFromUser(expectedOffsets.keySet());
    client.prepareResponse(listOffsetResponse(expectedOffsets, Errors.NONE, ListOffsetsRequest.LATEST_TIMESTAMP, ListOffsetsResponse.UNKNOWN_EPOCH));
    assertEquals(expectedOffsets, offsetFetcher.endOffsets(asList(tp0, tp1, tp2), time.timer(5000L)));
}
@Test
public void testEndOffsetsEmpty() {
// No partitions requested -> empty result, no request sent to prepare for.
buildFetcher();
assertEquals(emptyMap(), offsetFetcher.endOffsets(emptyList(), time.timer(5000L)));
}
/**
 * Matcher for an OffsetsForLeaderEpoch request that targets the given
 * partition with current leader epoch 1 and leader epoch 1.
 */
private MockClient.RequestMatcher offsetsForLeaderEpochRequestMatcher(TopicPartition topicPartition) {
    final int expectedCurrentLeaderEpoch = 1;
    final int expectedLeaderEpoch = 1;
    return request -> {
        OffsetsForLeaderEpochRequest epochRequest = (OffsetsForLeaderEpochRequest) request;
        OffsetForLeaderPartition partition =
            offsetForLeaderPartitionMap(epochRequest.data()).get(topicPartition);
        if (partition == null)
            return false;
        return partition.currentLeaderEpoch() == expectedCurrentLeaderEpoch
            && partition.leaderEpoch() == expectedLeaderEpoch;
    };
}
/**
 * Builds an OffsetsForLeaderEpoch response containing a single successful
 * end-offset entry for the given partition.
 */
private OffsetsForLeaderEpochResponse prepareOffsetsForLeaderEpochResponse(
    TopicPartition topicPartition,
    int leaderEpoch,
    long endOffset
) {
    EpochEndOffset endOffsetEntry = new EpochEndOffset()
        .setPartition(topicPartition.partition())
        .setErrorCode(Errors.NONE.code())
        .setLeaderEpoch(leaderEpoch)
        .setEndOffset(endOffset);
    OffsetForLeaderTopicResult topicResult = new OffsetForLeaderTopicResult()
        .setTopic(topicPartition.topic())
        .setPartitions(Collections.singletonList(endOffsetEntry));
    OffsetForLeaderEpochResponseData data = new OffsetForLeaderEpochResponseData();
    data.topics().add(topicResult);
    return new OffsetsForLeaderEpochResponse(data);
}
/**
 * Flattens the nested topic/partition structure of an OffsetForLeaderEpoch
 * request into a single map keyed by TopicPartition.
 */
private Map<TopicPartition, OffsetForLeaderPartition> offsetForLeaderPartitionMap(
    OffsetForLeaderEpochRequestData data
) {
    Map<TopicPartition, OffsetForLeaderPartition> result = new HashMap<>();
    for (var topic : data.topics()) {
        for (var partition : topic.partitions()) {
            result.put(new TopicPartition(topic.topic(), partition.partition()), partition);
        }
    }
    return result;
}
// Convenience overload: matches on timestamp only, with no epoch constraint.
private MockClient.RequestMatcher listOffsetRequestMatcher(final long timestamp) {
return listOffsetRequestMatcher(timestamp, ListOffsetsResponse.UNKNOWN_EPOCH);
}
/**
 * Matcher for a ListOffsets request whose first topic/partition entry targets
 * tp0 with the given timestamp and leader epoch. Also asserts that the
 * request carries the configured request timeout.
 */
private MockClient.RequestMatcher listOffsetRequestMatcher(final long timestamp, final int leaderEpoch) {
    return body -> {
        ListOffsetsRequest req = (ListOffsetsRequest) body;
        ListOffsetsTopic topic = req.topics().get(0);
        ListOffsetsPartition partition = topic.partitions().get(0);
        assertEquals(requestTimeoutMs, req.timeoutMs());
        boolean targetsTp0 = tp0.topic().equals(topic.name())
            && tp0.partition() == partition.partitionIndex();
        boolean offsetSpecMatches = timestamp == partition.timestamp()
            && leaderEpoch == partition.currentLeaderEpoch();
        return targetsTp0 && offsetSpecMatches;
    };
}
// Convenience overload: response for the default partition tp0.
private ListOffsetsResponse listOffsetResponse(Errors error, long timestamp, long offset) {
return listOffsetResponse(tp0, error, timestamp, offset);
}
// Convenience overload: response with no leader epoch information.
private ListOffsetsResponse listOffsetResponse(TopicPartition tp, Errors error, long timestamp, long offset) {
return listOffsetResponse(tp, error, timestamp, offset, ListOffsetsResponse.UNKNOWN_EPOCH);
}
/** Convenience overload: single-partition response, delegating to the map-based builder. */
private ListOffsetsResponse listOffsetResponse(TopicPartition tp, Errors error, long timestamp, long offset, int leaderEpoch) {
    // The map-based overload only reads the map, so an immutable one is fine.
    return listOffsetResponse(singletonMap(tp, offset), error, timestamp, leaderEpoch);
}
/**
 * Builds a {@link ListOffsetsResponse} covering the given partition offsets,
 * all sharing the same error code, timestamp and leader epoch.
 */
private ListOffsetsResponse listOffsetResponse(Map<TopicPartition, Long> offsets, Errors error, long timestamp, int leaderEpoch) {
    // Group the per-partition responses by topic name.
    Map<String, List<ListOffsetsPartitionResponse>> responses = new HashMap<>();
    for (Map.Entry<TopicPartition, Long> entry : offsets.entrySet()) {
        TopicPartition tp = entry.getKey();
        // computeIfAbsent avoids the putIfAbsent-then-get double lookup.
        responses.computeIfAbsent(tp.topic(), topic -> new ArrayList<>())
            .add(new ListOffsetsPartitionResponse()
                .setPartitionIndex(tp.partition())
                .setErrorCode(error.code())
                .setOffset(entry.getValue())
                .setTimestamp(timestamp)
                .setLeaderEpoch(leaderEpoch));
    }
    List<ListOffsetsTopicResponse> topics = new ArrayList<>();
    for (Map.Entry<String, List<ListOffsetsPartitionResponse>> response : responses.entrySet()) {
        topics.add(new ListOffsetsTopicResponse()
            .setName(response.getKey())
            .setPartitions(response.getValue()));
    }
    ListOffsetsResponseData data = new ListOffsetsResponseData().setTopics(topics);
    return new ListOffsetsResponse(data);
}
// Default fetcher: READ_UNCOMMITTED isolation, EARLIEST reset strategy.
private void buildFetcher() {
buildFetcher(IsolationLevel.READ_UNCOMMITTED);
}
// Fetcher with a custom reset strategy and READ_UNCOMMITTED isolation.
private void buildFetcher(AutoOffsetResetStrategy offsetResetStrategy) {
buildFetcher(new MetricConfig(), offsetResetStrategy, IsolationLevel.READ_UNCOMMITTED);
}
// Fetcher with a custom isolation level and EARLIEST reset strategy.
private void buildFetcher(IsolationLevel isolationLevel) {
buildFetcher(new MetricConfig(), AutoOffsetResetStrategy.EARLIEST, isolationLevel);
}
/**
 * Builds the fetcher with default metadata expiry and a fresh
 * SubscriptionState using the supplied reset strategy.
 */
private void buildFetcher(MetricConfig metricConfig,
                          AutoOffsetResetStrategy offsetResetStrategy,
                          IsolationLevel isolationLevel) {
    LogContext logContext = new LogContext();
    SubscriptionState subscriptionState = new SubscriptionState(logContext, offsetResetStrategy);
    // Tests never want the metadata to expire on its own.
    buildFetcher(metricConfig, isolationLevel, Long.MAX_VALUE, subscriptionState, logContext);
}
// Fully-parameterized variant: wires the shared test dependencies and
// constructs the OffsetFetcher under test.
private void buildFetcher(MetricConfig metricConfig,
IsolationLevel isolationLevel,
long metadataExpireMs,
SubscriptionState subscriptionState,
LogContext logContext) {
buildDependencies(metricConfig, metadataExpireMs, subscriptionState, logContext);
offsetFetcher = new OffsetFetcher(logContext,
consumerClient,
metadata,
subscriptions,
time,
retryBackoffMs,
requestTimeoutMs,
isolationLevel,
apiVersions);
}
// Initializes the shared test fixtures: mock time, consumer metadata,
// MockClient, metrics, and the consumer network client.
private void buildDependencies(MetricConfig metricConfig,
long metadataExpireMs,
SubscriptionState subscriptionState,
LogContext logContext) {
// MockTime(1): auto-advance by 1 ms per time lookup.
time = new MockTime(1);
subscriptions = subscriptionState;
metadata = new ConsumerMetadata(0, 0, metadataExpireMs, false, false,
subscriptions, logContext, new ClusterResourceListeners());
client = new MockClient(time, metadata);
metrics = new Metrics(metricConfig, time);
consumerClient = new ConsumerNetworkClient(logContext, client, metadata, time,
100, 1000, Integer.MAX_VALUE);
}
}
|
OffsetFetcherTest
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/cluster/routing/allocation/ResizeAllocationDeciderTests.java
|
{
"start": 2752,
"end": 19845
}
|
class ____ extends ESAllocationTestCase {
private AllocationService strategy;
private ProjectId projectId;
@Override
public void setUp() throws Exception {
super.setUp();
// Allocation service whose decider set contains ONLY the
// ResizeAllocationDecider, so its decisions are observed in isolation.
strategy = new AllocationService(
new AllocationDeciders(Collections.singleton(new ResizeAllocationDecider())),
new TestGatewayAllocator(),
new BalancedShardsAllocator(Settings.EMPTY),
EmptyClusterInfoService.INSTANCE,
EmptySnapshotsInfoService.INSTANCE,
TestShardRoutingRoleStrategies.DEFAULT_ROLE_ONLY
);
}
// Builds a two-node cluster containing a 2-shard, 0-replica "source" index
// (routing factor 16 so it can later be resized), reroutes it so the shards
// become INITIALIZING, and optionally starts them.
private ClusterState createInitialClusterState(boolean startShards) {
Metadata.Builder metaBuilder = Metadata.builder();
projectId = randomUniqueProjectId();
metaBuilder.put(
ProjectMetadata.builder(projectId)
.put(
IndexMetadata.builder("source")
.settings(settings(IndexVersion.current()))
.numberOfShards(2)
.numberOfReplicas(0)
.setRoutingNumShards(16)
)
);
Metadata metadata = metaBuilder.build();
ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT)
.metadata(metadata)
.routingTable(GlobalRoutingTableTestHelper.buildRoutingTable(metadata, RoutingTable.Builder::addAsNew))
.nodes(DiscoveryNodes.builder().add(newNode("node1")).add(newNode("node2")))
.build();
var prevRoutingTable = clusterState.globalRoutingTable().routingTable(projectId);
var reroute = strategy.reroute(clusterState, "reroute", ActionListener.noop()).globalRoutingTable();
clusterState = ClusterState.builder(clusterState).routingTable(reroute).build();
var routingTable = reroute.routingTable(projectId);
// Before the reroute both shards were UNASSIGNED; afterwards INITIALIZING.
assertEquals(prevRoutingTable.index("source").size(), 2);
assertEquals(prevRoutingTable.index("source").shard(0).shard(0).state(), UNASSIGNED);
assertEquals(prevRoutingTable.index("source").shard(1).shard(0).state(), UNASSIGNED);
assertEquals(routingTable.index("source").size(), 2);
assertEquals(routingTable.index("source").shard(0).shard(0).state(), INITIALIZING);
assertEquals(routingTable.index("source").shard(1).shard(0).state(), INITIALIZING);
if (startShards) {
// Promote both primaries to STARTED so resize targets can allocate.
clusterState = startShardsAndReroute(
strategy,
clusterState,
routingTable.index("source").shard(0).shard(0),
routingTable.index("source").shard(1).shard(0)
);
routingTable = clusterState.globalRoutingTable().routingTable(projectId);
assertEquals(routingTable.index("source").size(), 2);
assertEquals(routingTable.index("source").shard(0).shard(0).state(), STARTED);
assertEquals(routingTable.index("source").shard(1).shard(0).state(), STARTED);
}
return clusterState;
}
public void testNonResizeRouting() {
ClusterState clusterState = createInitialClusterState(true);
ResizeAllocationDecider resizeAllocationDecider = new ResizeAllocationDecider();
RoutingAllocation routingAllocation = new RoutingAllocation(null, clusterState, null, null, 0);
ShardRouting shardRouting = TestShardRouting.newShardRouting("non-resize", 0, null, true, ShardRoutingState.UNASSIGNED);
assertEquals(Decision.ALWAYS, resizeAllocationDecider.canAllocate(shardRouting, routingAllocation));
assertEquals(
Decision.ALWAYS,
resizeAllocationDecider.canAllocate(shardRouting, clusterState.getRoutingNodes().node("node1"), routingAllocation)
);
}
public void testShrink() { // we don't handle shrink yet
ClusterState clusterState = createInitialClusterState(true);
Metadata.Builder metaBuilder = Metadata.builder(clusterState.metadata());
metaBuilder.put(
ProjectMetadata.builder(clusterState.metadata().getProject(projectId))
.put(
IndexMetadata.builder("target")
.settings(
settings(IndexVersion.current()).put(IndexMetadata.INDEX_RESIZE_SOURCE_NAME.getKey(), "source")
.put(IndexMetadata.SETTING_INDEX_UUID, "target_uuid")
.put(IndexMetadata.INDEX_RESIZE_SOURCE_UUID_KEY, IndexMetadata.INDEX_UUID_NA_VALUE)
)
.numberOfShards(1)
.numberOfReplicas(0)
)
);
includeAdditionalProjects(randomIntBetween(1, 3), metaBuilder);
Metadata metadata = metaBuilder.build();
GlobalRoutingTable routingTable = GlobalRoutingTableTestHelper.buildRoutingTable(metadata, RoutingTable.Builder::addAsNew);
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).metadata(metadata).build();
Index idx = clusterState.metadata().getProject(projectId).index("target").getIndex();
ResizeAllocationDecider resizeAllocationDecider = new ResizeAllocationDecider();
RoutingAllocation routingAllocation = new RoutingAllocation(null, clusterState, null, null, 0);
ShardRouting shardRouting = shardRoutingBuilder(new ShardId(idx, 0), null, true, ShardRoutingState.UNASSIGNED).withRecoverySource(
RecoverySource.LocalShardsRecoverySource.INSTANCE
).build();
assertEquals(Decision.ALWAYS, resizeAllocationDecider.canAllocate(shardRouting, routingAllocation));
assertEquals(
Decision.ALWAYS,
resizeAllocationDecider.canAllocate(shardRouting, clusterState.getRoutingNodes().node("node1"), routingAllocation)
);
assertEquals(
Decision.ALWAYS,
resizeAllocationDecider.canAllocate(shardRouting, clusterState.getRoutingNodes().node("node2"), routingAllocation)
);
}
public void testSourceNotActive() {
ClusterState clusterState = createInitialClusterState(false);
Metadata.Builder metaBuilder = Metadata.builder(clusterState.metadata());
metaBuilder.put(
IndexMetadata.builder("target")
.settings(
settings(IndexVersion.current()).put(IndexMetadata.INDEX_RESIZE_SOURCE_NAME.getKey(), "source")
.put(IndexMetadata.INDEX_RESIZE_SOURCE_UUID_KEY, IndexMetadata.INDEX_UUID_NA_VALUE)
)
.numberOfShards(4)
.numberOfReplicas(0)
);
includeAdditionalProjects(randomIntBetween(1, 3), metaBuilder);
Metadata metadata = metaBuilder.build();
GlobalRoutingTable routingTable = GlobalRoutingTableTestHelper.buildRoutingTable(metadata, RoutingTable.Builder::addAsNew);
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).metadata(metadata).build();
Index idx = clusterState.metadata().getProject(projectId).index("target").getIndex();
ResizeAllocationDecider resizeAllocationDecider = new ResizeAllocationDecider();
RoutingAllocation routingAllocation = new RoutingAllocation(null, clusterState, null, null, 0);
int shardId = randomIntBetween(0, 3);
int sourceShardId = IndexMetadata.selectSplitShard(shardId, clusterState.metadata().getProject(projectId).index("source"), 4).id();
ShardRouting shardRouting = shardRoutingBuilder(new ShardId(idx, shardId), null, true, ShardRoutingState.UNASSIGNED)
.withRecoverySource(RecoverySource.LocalShardsRecoverySource.INSTANCE)
.build();
assertEquals(Decision.NO, resizeAllocationDecider.canAllocate(shardRouting, routingAllocation));
assertEquals(
Decision.NO,
resizeAllocationDecider.canAllocate(shardRouting, clusterState.getRoutingNodes().node("node1"), routingAllocation)
);
assertEquals(
Decision.NO,
resizeAllocationDecider.canAllocate(shardRouting, clusterState.getRoutingNodes().node("node2"), routingAllocation)
);
routingAllocation.debugDecision(true);
assertEquals(
"source primary shard [[source][" + sourceShardId + "]] is not active",
resizeAllocationDecider.canAllocate(shardRouting, routingAllocation).getExplanation()
);
assertEquals(
"source primary shard [[source][" + sourceShardId + "]] is not active",
resizeAllocationDecider.canAllocate(shardRouting, clusterState.getRoutingNodes().node("node0"), routingAllocation)
.getExplanation()
);
assertEquals(
"source primary shard [[source][" + sourceShardId + "]] is not active",
resizeAllocationDecider.canAllocate(shardRouting, clusterState.getRoutingNodes().node("node1"), routingAllocation)
.getExplanation()
);
}
public void testSourcePrimaryActive() {
ClusterState clusterState = createInitialClusterState(true);
Metadata.Builder metaBuilder = Metadata.builder(clusterState.metadata());
metaBuilder.put(
IndexMetadata.builder("target")
.settings(
settings(IndexVersion.current()).put(IndexMetadata.INDEX_RESIZE_SOURCE_NAME.getKey(), "source")
.put(IndexMetadata.INDEX_RESIZE_SOURCE_UUID_KEY, IndexMetadata.INDEX_UUID_NA_VALUE)
)
.numberOfShards(4)
.numberOfReplicas(0)
);
includeAdditionalProjects(randomIntBetween(1, 3), metaBuilder);
Metadata metadata = metaBuilder.build();
clusterState = ClusterState.builder(clusterState).metadata(metadata).build();
final GlobalRoutingTable routingTable = GlobalRoutingTableTestHelper.updateRoutingTable(
clusterState,
RoutingTable.Builder::addAsNew
);
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
Index idx = clusterState.metadata().getProject(projectId).index("target").getIndex();
ResizeAllocationDecider resizeAllocationDecider = new ResizeAllocationDecider();
RoutingAllocation routingAllocation = new RoutingAllocation(null, clusterState, null, null, 0);
int shardId = randomIntBetween(0, 3);
int sourceShardId = IndexMetadata.selectSplitShard(shardId, clusterState.metadata().getProject(projectId).index("source"), 4).id();
ShardRouting shardRouting = shardRoutingBuilder(new ShardId(idx, shardId), null, true, ShardRoutingState.UNASSIGNED)
.withRecoverySource(RecoverySource.LocalShardsRecoverySource.INSTANCE)
.build();
assertEquals(Decision.YES, resizeAllocationDecider.canAllocate(shardRouting, routingAllocation));
String allowedNode = clusterState.routingTable(projectId).index("source").shard(sourceShardId).primaryShard().currentNodeId();
if ("node1".equals(allowedNode)) {
assertEquals(
Decision.YES,
resizeAllocationDecider.canAllocate(shardRouting, clusterState.getRoutingNodes().node("node1"), routingAllocation)
);
assertEquals(
Decision.NO,
resizeAllocationDecider.canAllocate(shardRouting, clusterState.getRoutingNodes().node("node2"), routingAllocation)
);
} else {
assertEquals(
Decision.NO,
resizeAllocationDecider.canAllocate(shardRouting, clusterState.getRoutingNodes().node("node1"), routingAllocation)
);
assertEquals(
Decision.YES,
resizeAllocationDecider.canAllocate(shardRouting, clusterState.getRoutingNodes().node("node2"), routingAllocation)
);
}
routingAllocation.debugDecision(true);
assertEquals("source primary is active", resizeAllocationDecider.canAllocate(shardRouting, routingAllocation).getExplanation());
if ("node1".equals(allowedNode)) {
assertEquals(
"source primary is allocated on this node",
resizeAllocationDecider.canAllocate(shardRouting, clusterState.getRoutingNodes().node("node1"), routingAllocation)
.getExplanation()
);
assertEquals(
"source primary is allocated on another node",
resizeAllocationDecider.canAllocate(shardRouting, clusterState.getRoutingNodes().node("node2"), routingAllocation)
.getExplanation()
);
} else {
assertEquals(
"source primary is allocated on another node",
resizeAllocationDecider.canAllocate(shardRouting, clusterState.getRoutingNodes().node("node1"), routingAllocation)
.getExplanation()
);
assertEquals(
"source primary is allocated on this node",
resizeAllocationDecider.canAllocate(shardRouting, clusterState.getRoutingNodes().node("node2"), routingAllocation)
.getExplanation()
);
}
}
public void testGetForcedInitialShardAllocationToNodes() {
final int additionalProjects = randomIntBetween(0, 5);
projectId = additionalProjects == 0 ? Metadata.DEFAULT_PROJECT_ID : randomUniqueProjectId();
var source = IndexMetadata.builder("source")
.settings(indexSettings(IndexVersion.current(), 1, 0).put(IndexMetadata.SETTING_INDEX_UUID, "uuid-1"))
.build();
var target = IndexMetadata.builder("target")
.settings(
indexSettings(IndexVersion.current(), 1, 0).put(IndexMetadata.INDEX_RESIZE_SOURCE_NAME.getKey(), "source")
.put(IndexMetadata.INDEX_RESIZE_SOURCE_UUID.getKey(), "uuid-1")
.put(IndexMetadata.SETTING_INDEX_UUID, "uuid-2")
)
.build();
final Metadata.Builder metadataBuilder = Metadata.builder();
metadataBuilder.put(ProjectMetadata.builder(projectId).put(source, false).put(target, false));
includeAdditionalProjects(additionalProjects, metadataBuilder);
final Metadata metadata = metadataBuilder.build();
var clusterState = ClusterState.builder(new ClusterName("test-cluster"))
.nodes(DiscoveryNodes.builder().add(newNode("node-1")).add(newNode("node-2")))
.metadata(metadata)
.routingTable(GlobalRoutingTableTestHelper.buildRoutingTable(metadata, (rtb, index) -> {
if (index == source) {
rtb.add(
IndexRoutingTable.builder(source.getIndex())
.addShard(
shardRoutingBuilder(new ShardId(source.getIndex(), 0), "node-1", true, STARTED).withRecoverySource(null)
.build()
)
);
} else {
rtb.addAsNew(index);
}
}))
.build();
var decider = new ResizeAllocationDecider();
var allocation = new RoutingAllocation(new AllocationDeciders(List.of(decider)), clusterState, null, null, 0);
var localRecoveryShard = ShardRouting.newUnassigned(
new ShardId(target.getIndex(), 0),
true,
RecoverySource.LocalShardsRecoverySource.INSTANCE,
new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "index created"),
ShardRouting.Role.DEFAULT
);
assertThat(decider.getForcedInitialShardAllocationToNodes(localRecoveryShard, allocation), equalTo(Optional.of(Set.of("node-1"))));
var newShard = ShardRouting.newUnassigned(
new ShardId(target.getIndex(), 0),
true,
RecoverySource.EmptyStoreRecoverySource.INSTANCE,
new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "index created"),
ShardRouting.Role.DEFAULT
);
assertThat(decider.getForcedInitialShardAllocationToNodes(newShard, allocation), equalTo(Optional.empty()));
}
private static void includeAdditionalProjects(int projectCount, Metadata.Builder metadataBuilder) {
for (int i = 0; i < projectCount; i++) {
final ProjectMetadata.Builder project = ProjectMetadata.builder(randomUniqueProjectId());
for (String index : randomSubsetOf(List.of("source", "target", "index-" + i))) {
final Settings.Builder indexSettings = indexSettings(IndexVersion.current(), randomIntBetween(1, 5), randomIntBetween(0, 2))
.put(IndexMetadata.SETTING_INDEX_UUID, randomUUID());
project.put(IndexMetadata.builder(index).settings(indexSettings).build(), false);
}
metadataBuilder.put(project);
}
}
}
|
ResizeAllocationDeciderTests
|
java
|
spring-projects__spring-framework
|
spring-beans/src/test/java/org/springframework/beans/PropertyMatchesTests.java
|
{
"start": 838,
"end": 4144
}
|
class ____ {
@Test
void simpleBeanPropertyTypo() {
PropertyMatches matches = PropertyMatches.forProperty("naem", SampleBeanProperties.class);
assertThat(matches.getPossibleMatches()).contains("name");
}
@Test
void complexBeanPropertyTypo() {
PropertyMatches matches = PropertyMatches.forProperty("desriptn", SampleBeanProperties.class);
assertThat(matches.getPossibleMatches()).isEmpty();
}
@Test
void unknownBeanProperty() {
PropertyMatches matches = PropertyMatches.forProperty("unknown", SampleBeanProperties.class);
assertThat(matches.getPossibleMatches()).isEmpty();
}
@Test
void severalMatchesBeanProperty() {
PropertyMatches matches = PropertyMatches.forProperty("counter", SampleBeanProperties.class);
assertThat(matches.getPossibleMatches()).contains("counter1");
assertThat(matches.getPossibleMatches()).contains("counter2");
assertThat(matches.getPossibleMatches()).contains("counter3");
}
@Test
void simpleBeanPropertyErrorMessage() {
PropertyMatches matches = PropertyMatches.forProperty("naem", SampleBeanProperties.class);
String msg = matches.buildErrorMessage();
assertThat(msg).contains("naem");
assertThat(msg).contains("name");
assertThat(msg).contains("setter");
assertThat(msg).doesNotContain("field");
}
@Test
void complexBeanPropertyErrorMessage() {
PropertyMatches matches = PropertyMatches.forProperty("counter", SampleBeanProperties.class);
String msg = matches.buildErrorMessage();
assertThat(msg).contains("counter");
assertThat(msg).contains("counter1");
assertThat(msg).contains("counter2");
assertThat(msg).contains("counter3");
}
@Test
void simpleFieldPropertyTypo() {
PropertyMatches matches = PropertyMatches.forField("naem", SampleFieldProperties.class);
assertThat(matches.getPossibleMatches()).contains("name");
}
@Test
void complexFieldPropertyTypo() {
PropertyMatches matches = PropertyMatches.forField("desriptn", SampleFieldProperties.class);
assertThat(matches.getPossibleMatches()).isEmpty();
}
@Test
void unknownFieldProperty() {
PropertyMatches matches = PropertyMatches.forField("unknown", SampleFieldProperties.class);
assertThat(matches.getPossibleMatches()).isEmpty();
}
@Test
void severalMatchesFieldProperty() {
PropertyMatches matches = PropertyMatches.forField("counter", SampleFieldProperties.class);
assertThat(matches.getPossibleMatches()).contains("counter1");
assertThat(matches.getPossibleMatches()).contains("counter2");
assertThat(matches.getPossibleMatches()).contains("counter3");
}
@Test
void simpleFieldPropertyErrorMessage() {
PropertyMatches matches = PropertyMatches.forField("naem", SampleFieldProperties.class);
String msg = matches.buildErrorMessage();
assertThat(msg).contains("naem");
assertThat(msg).contains("name");
assertThat(msg).contains("field");
assertThat(msg).doesNotContain("setter");
}
@Test
void complexFieldPropertyErrorMessage() {
PropertyMatches matches = PropertyMatches.forField("counter", SampleFieldProperties.class);
String msg = matches.buildErrorMessage();
assertThat(msg).contains("counter");
assertThat(msg).contains("counter1");
assertThat(msg).contains("counter2");
assertThat(msg).contains("counter3");
}
@SuppressWarnings("unused")
private static
|
PropertyMatchesTests
|
java
|
micronaut-projects__micronaut-core
|
inject-groovy/src/main/groovy/io/micronaut/ast/groovy/visitor/GroovyBeanDefinitionBuilder.java
|
{
"start": 2098,
"end": 9818
}
|
/**
 * Groovy implementation of {@code AbstractBeanDefinitionBuilder}: builds bean
 * definitions from Groovy AST elements and registers itself with the
 * {@code GroovyVisitorContext} so the compiler can finish the definitions.
 */
class ____ extends AbstractBeanDefinitionBuilder {
    private final GroovyVisitorContext visitorContext;
    private final GroovyAnnotationMetadataBuilder annotationBuilder;

    /**
     * Default constructor.
     *
     * @param originatingElement The originating element
     * @param beanType The bean type
     * @param elementAnnotationMetadataFactory The element annotation metadata factory
     * @param visitorContext the visitor context
     */
    GroovyBeanDefinitionBuilder(
            Element originatingElement,
            ClassElement beanType,
            ElementAnnotationMetadataFactory elementAnnotationMetadataFactory,
            GroovyVisitorContext visitorContext) {
        super(originatingElement, beanType, visitorContext, elementAnnotationMetadataFactory);
        // Only register top-level builders with the visitor context; child
        // builders (the anonymous subclasses created below) are owned by their
        // parent and must not register themselves again.
        if (getClass() == GroovyBeanDefinitionBuilder.class) {
            visitorContext.addBeanDefinitionBuilder(this);
        }
        this.visitorContext = visitorContext;
        this.annotationBuilder = visitorContext.getAnnotationMetadataBuilder();
    }

    /**
     * Creates a child builder for a bean produced by a field (factory-field
     * producer). The child reports the field as its producing element and wires
     * the parent type into the generated writer.
     */
    @Override
    protected @NonNull AbstractBeanDefinitionBuilder createChildBean(FieldElement producerField) {
        final ClassElement parentType = getBeanType();
        return new GroovyBeanDefinitionBuilder(
            GroovyBeanDefinitionBuilder.this.getOriginatingElement(),
            producerField.getGenericField().getType(),
            elementAnnotationMetadataFactory,
            GroovyBeanDefinitionBuilder.this.visitorContext
        ) {
            @Override
            public @NonNull Element getProducingElement() {
                return producerField;
            }

            @Override
            public @NonNull ClassElement getDeclaringElement() {
                return producerField.getDeclaringType();
            }

            @Override
            protected BeanDefinitionVisitor createBeanDefinitionWriter() {
                final BeanDefinitionVisitor writer = super.createBeanDefinitionWriter();
                ClassElement newParent = parentType.withAnnotationMetadata(parentType.copyAnnotationMetadata()); // Just a copy
                writer.visitBeanFactoryField(
                    newParent,
                    // Merge the parent's declared metadata with the field's own.
                    producerField.withAnnotationMetadata(
                        new AnnotationMetadataHierarchy(newParent.getDeclaredMetadata(), producerField.getDeclaredMetadata())
                    )
                );
                return writer;
            }
        };
    }

    /**
     * Creates a child builder for a bean produced by a method (factory-method
     * producer). Bean parameters are resolved lazily from the producer method.
     */
    @Override
    protected @NonNull AbstractBeanDefinitionBuilder createChildBean(MethodElement producerMethod) {
        final ClassElement parentType = getBeanType();
        return new GroovyBeanDefinitionBuilder(
            GroovyBeanDefinitionBuilder.this.getOriginatingElement(),
            producerMethod.getGenericReturnType().getType(),
            elementAnnotationMetadataFactory,
            GroovyBeanDefinitionBuilder.this.visitorContext
        ) {
            // Lazily-initialized cache of the producer method's bean parameters.
            BeanParameterElement[] parameters;

            @Override
            public @NonNull Element getProducingElement() {
                return producerMethod;
            }

            @Override
            public @NonNull ClassElement getDeclaringElement() {
                return producerMethod.getDeclaringType();
            }

            @Override
            protected BeanParameterElement[] getParameters() {
                if (parameters == null) {
                    parameters = initBeanParameters(producerMethod.getParameters());
                }
                return parameters;
            }

            @Override
            protected BeanDefinitionVisitor createBeanDefinitionWriter() {
                final BeanDefinitionVisitor writer = super.createBeanDefinitionWriter();
                final GroovyElementFactory elementFactory = ((GroovyVisitorContext) visitorContext).getElementFactory();
                ClassElement resolvedParent = resolveParent(parentType, elementFactory);
                writer.visitBeanFactoryMethod(
                    resolvedParent,
                    // Merge the parent's declared metadata with the method's own.
                    producerMethod.withAnnotationMetadata(
                        new AnnotationMetadataHierarchy(resolvedParent.getDeclaredMetadata(), producerMethod.getDeclaredMetadata())
                    ),
                    getParameters()
                );
                return writer;
            }
        };
    }

    /**
     * Applies an annotation built by the given consumer to the supplied
     * metadata. No-op when any argument is {@code null}.
     */
    @Override
    protected <T extends Annotation> void annotate(AnnotationMetadata annotationMetadata, String annotationType, Consumer<AnnotationValueBuilder<T>> consumer) {
        if (consumer != null && annotationMetadata != null && annotationType != null) {
            AnnotationValueBuilder<T> builder = AnnotationValue.builder(annotationType);
            consumer.accept(builder);
            AnnotationValue<T> av = builder.build();
            annotationBuilder.annotate(annotationMetadata, av);
        }
    }

    /**
     * Applies a pre-built annotation value to the supplied metadata.
     * Both arguments are required.
     */
    @Override
    protected <T extends Annotation> void annotate(@NonNull AnnotationMetadata annotationMetadata, @NonNull AnnotationValue<T> annotationValue) {
        ArgumentUtils.requireNonNull("annotationMetadata", annotationMetadata);
        ArgumentUtils.requireNonNull("annotationValue", annotationValue);
        annotationBuilder.annotate(annotationMetadata, annotationValue);
    }

    /** Removes a stereotype annotation; no-op when either argument is {@code null}. */
    @Override
    protected void removeStereotype(AnnotationMetadata annotationMetadata, String annotationType) {
        if (annotationMetadata != null && annotationType != null) {
            annotationBuilder.removeStereotype(annotationMetadata, annotationType);
        }
    }

    /** Removes annotations matching the predicate; no-op on {@code null} arguments. */
    @Override
    protected <T extends Annotation> void removeAnnotationIf(AnnotationMetadata annotationMetadata, Predicate<AnnotationValue<T>> predicate) {
        if (annotationMetadata != null && predicate != null) {
            annotationBuilder.removeAnnotationIf(annotationMetadata, predicate);
        }
    }

    /** Removes an annotation by type; no-op when either argument is {@code null}. */
    @Override
    protected void removeAnnotation(AnnotationMetadata annotationMetadata, String annotationType) {
        if (annotationMetadata != null && annotationType != null) {
            annotationBuilder.removeAnnotation(annotationMetadata, annotationType);
        }
    }

    /**
     * Re-creates a class element from its underlying Groovy {@code classNode}
     * so it carries this builder's annotation-metadata factory.
     */
    private ClassElement resolveParent(ClassElement parentType, GroovyElementFactory elementFactory) {
        ClassElement resolvedParent = parentType;
        if (parentType instanceof GroovyClassElement groovyClassElement) {
            resolvedParent = elementFactory.newClassElement(groovyClassElement.classNode, elementAnnotationMetadataFactory);
        }
        return resolvedParent;
    }

    /**
     * Creates the AOP proxy writer used when the bean declares around advice,
     * resolving the AROUND interceptor bindings from the given metadata.
     */
    @Override
    protected @NonNull BeanDefinitionVisitor createAopWriter(BeanDefinitionWriter beanDefinitionWriter, AnnotationMetadata annotationMetadata) {
        AnnotationValue<?>[] interceptorTypes =
            InterceptedMethodUtil.resolveInterceptorBinding(annotationMetadata, InterceptorKind.AROUND);
        return new AopProxyWriter(
            getBeanType(),
            beanDefinitionWriter,
            annotationMetadata.getValues(Around.class, Boolean.class),
            visitorContext,
            interceptorTypes
        );
    }

    /**
     * Returns a visitor invoked for every around-advised method: it registers
     * the method's interceptor bindings on the proxy writer and records the
     * method itself for proxy generation.
     */
    @Override
    protected @NonNull BiConsumer<TypedElement, MethodElement> createAroundMethodVisitor(BeanDefinitionVisitor aopWriter) {
        var aopProxyWriter = (AopProxyWriter) aopWriter;
        return (bean, method) -> {
            AnnotationValue<?>[] newTypes =
                InterceptedMethodUtil.resolveInterceptorBinding(method.getAnnotationMetadata(), InterceptorKind.AROUND);
            aopProxyWriter.visitInterceptorBinding(newTypes);
            aopProxyWriter.visitAroundMethod(
                bean, method
            );
        };
    }
}
|
GroovyBeanDefinitionBuilder
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/dynamic/parameter/ExecutionSpecificParameters.java
|
{
"start": 541,
"end": 2860
}
|
class ____ extends Parameters<ExecutionSpecificParameters.ExecutionAwareParameter> {
private static final List<Class<?>> TYPES = Arrays.asList(Timeout.class, CommandBatching.class);
private final int timeoutIndex;
private final int commandBatchingIndex;
/**
 * Create new {@link ExecutionSpecificParameters} given a {@link Method}.
 * <p>
 * Scans the method's parameters once and records the positions of the special
 * {@link Timeout} and {@link CommandBatching} parameters ({@literal -1} when
 * absent).
 *
 * @param method must not be {@code null}.
 */
public ExecutionSpecificParameters(Method method) {
    super(method);
    int timeoutIndex = -1;
    int commandBatchingIndex = -1;
    List<ExecutionAwareParameter> parameters = getParameters();
    for (int i = 0; i < method.getParameterCount(); i++) {
        Parameter methodParameter = parameters.get(i);
        // Only special (non-bindable) parameters can carry execution hints.
        if (methodParameter.isSpecialParameter()) {
            if (methodParameter.isAssignableTo(Timeout.class)) {
                timeoutIndex = i;
            }
            if (methodParameter.isAssignableTo(CommandBatching.class)) {
                commandBatchingIndex = i;
            }
        }
    }
    this.timeoutIndex = timeoutIndex;
    this.commandBatchingIndex = commandBatchingIndex;
}
/**
 * @return the timeout argument index if present, or {@literal -1} if the command method does not declare a
 *         {@link Timeout} parameter.
 */
public int getTimeoutIndex() {
    return timeoutIndex;
}
/**
 * @return the command batching argument index if present, or {@literal -1} if the command method does not declare a
 *         {@link CommandBatching} parameter.
 */
public int getCommandBatchingIndex() {
    return commandBatchingIndex;
}
/**
 * Creates the {@link ExecutionAwareParameter} wrapper for the parameter at the
 * given index.
 */
@Override
protected ExecutionAwareParameter createParameter(Method method, int parameterIndex) {
    return new ExecutionAwareParameter(method, parameterIndex);
}
/**
 * @return {@code true} if the method defines a {@link CommandBatching} parameter.
 */
public boolean hasCommandBatchingIndex() {
    return commandBatchingIndex != -1;
}
/**
 * @return {@code true} if the method defines a {@link Timeout} parameter.
 */
public boolean hasTimeoutIndex() {
    return getTimeoutIndex() != -1;
}
public static
|
ExecutionSpecificParameters
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/main/java/org/springframework/web/servlet/function/PredicateResourceLookupFunction.java
|
{
"start": 1071,
"end": 1691
}
|
class ____ implements Function<ServerRequest, Optional<Resource>> {
private final RequestPredicate predicate;
private final Resource resource;
public PredicateResourceLookupFunction(RequestPredicate predicate, Resource resource) {
Assert.notNull(predicate, "'predicate' must not be null");
Assert.notNull(resource, "'resource' must not be null");
this.predicate = predicate;
this.resource = resource;
}
@Override
public Optional<Resource> apply(ServerRequest serverRequest) {
return this.predicate.test(serverRequest) ? Optional.of(this.resource) : Optional.empty();
}
}
|
PredicateResourceLookupFunction
|
java
|
spring-projects__spring-boot
|
buildpack/spring-boot-buildpack-platform/src/main/java/org/springframework/boot/buildpack/platform/docker/UpdateListener.java
|
{
"start": 870,
"end": 1585
}
|
/**
 * Listener invoked with progress events while a long-running operation runs.
 *
 * @param <E> the update event type
 */
interface ____<E extends UpdateEvent> {

    /**
     * A shared no-op update listener instance that ignores all events.
     * @see #none()
     */
    UpdateListener<UpdateEvent> NONE = (event) -> {
    };

    /**
     * Called when the operation starts.
     */
    default void onStart() {
    }

    /**
     * Called when an update event is available.
     * @param event the update event
     */
    void onUpdate(E event);

    /**
     * Called when the operation finishes (with or without error).
     */
    default void onFinish() {
    }

    /**
     * A no-op update listener that does nothing.
     * @param <E> the event type
     * @return a no-op update listener
     */
    @SuppressWarnings("unchecked")
    static <E extends UpdateEvent> UpdateListener<E> none() {
        // Safe: NONE ignores its argument, so the unchecked narrowing cannot fail.
        return (UpdateListener<E>) NONE;
    }
}
|
UpdateListener
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/proxy/BlobTest.java
|
{
"start": 884,
"end": 6245
}
|
/**
 * Integration test for BLOB handling through the Druid JDBC proxy driver,
 * backed by an in-memory Derby database. Exercises inserting, reading and
 * updating BLOB columns via {@link PreparedStatement}, {@link ResultSet} and
 * {@link Connection#createBlob()}.
 */
class ____ extends TestCase {

    // wrap-jdbc URL: routes through the Druid proxy driver with the
    // default/commonLogging/log4j filters and creates an in-memory Derby
    // database on first connect.
    private static String create_url = "jdbc:wrap-jdbc:filters=default,commonLogging,log4j:name=demo:jdbc:derby:memory:blobTestDB;create=true";

    protected void setUp() throws Exception {
        Class.forName("com.alibaba.druid.proxy.DruidDriver");
        // The first connection triggers creation of the in-memory database.
        Connection conn = DriverManager.getConnection(create_url);
        createTable();
        conn.close();
    }

    /** Creates the T_BLOB table used by the test. */
    private void createTable() throws SQLException {
        Connection conn = DriverManager.getConnection(create_url);
        Statement stmt = conn.createStatement();
        stmt.execute("CREATE TABLE T_BLOB (ID INTEGER, DATA BLOB)");
        stmt.close();
        conn.close();
    }

    /** Drops the T_BLOB table after the test. */
    private void dropTable() throws SQLException {
        Connection conn = DriverManager.getConnection(create_url);
        Statement stmt = conn.createStatement();
        stmt.execute("DROP TABLE T_BLOB");
        stmt.close();
        conn.close();
    }

    protected void tearDown() throws Exception {
        dropTable();
        // Reset proxy-driver state so subsequent tests start from a clean slate.
        DruidDriver.getProxyDataSources().clear();
        assertEquals(0, JdbcStatManager.getInstance().getSqlList().size());
    }

    public void test_blob() throws Exception {
        Connection conn = null;
        PreparedStatement pstmt = null;
        Statement stmt = null;
        ResultSet rs = null;
        try {
            conn = DriverManager.getConnection(create_url);

            // Round-trip a handful of connection attributes purely to exercise
            // the proxy's delegation of these setters/getters.
            conn.setCatalog(conn.getCatalog());
            conn.setClientInfo(conn.getClientInfo());
            conn.setHoldability(conn.getHoldability());
            conn.setReadOnly(conn.isReadOnly());
            conn.setTransactionIsolation(conn.getTransactionIsolation());
            conn.setTypeMap(conn.getTypeMap());

            pstmt = conn.prepareStatement("INSERT INTO T_BLOB (ID, DATA) VALUES (?, ?)");

            // Insert via a Blob object, via an unsized stream, and via a sized stream.
            Blob blob = conn.createBlob();
            blob.setBytes(1, new byte[100]);
            pstmt.setInt(1, 1);
            pstmt.setBlob(2, blob);
            int updateCount = pstmt.executeUpdate();
            assertEquals(1, updateCount);

            pstmt.setInt(1, 2);
            pstmt.setBlob(2, new ByteArrayInputStream("XBCSDasdfasdfasfasfF".getBytes()));
            updateCount = pstmt.executeUpdate();
            assertEquals(1, updateCount);

            pstmt.setInt(1, 2);
            pstmt.setBlob(2, new ByteArrayInputStream("XBCSDasdfasdfasfasfF".getBytes()), 20);
            updateCount = pstmt.executeUpdate();
            assertEquals(1, updateCount);

            stmt = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_UPDATABLE,
                    ResultSet.CLOSE_CURSORS_AT_COMMIT);
            conn.nativeSQL("SELECT ID, DATA FROM T_BLOB");

            // //////
            // Read the BLOB column through every accessor variant.
            rs = stmt.executeQuery("SELECT ID, DATA FROM T_BLOB");
            rs.getStatement(); // just call
            while (rs.next()) {
                Blob readBlob = rs.getBlob(2);
                readBlob.length();
                readBlob.getBinaryStream(1, 10).close();
                readBlob.getBinaryStream().close();
                readBlob.free();
            }
            JdbcUtils.close(rs);

            rs = stmt.executeQuery("SELECT ID, DATA FROM T_BLOB");
            while (rs.next()) {
                rs.getBinaryStream(2).close();
            }
            JdbcUtils.close(rs);

            rs = stmt.executeQuery("SELECT ID, DATA FROM T_BLOB");
            while (rs.next()) {
                rs.getBinaryStream("DATA").close();
            }
            JdbcUtils.close(rs);

            rs = stmt.executeQuery("SELECT ID, DATA FROM T_BLOB");
            while (rs.next()) {
                rs.getBytes(2);
            }
            JdbcUtils.close(rs);

            // Update the BLOB column through every updater variant.
            rs = stmt.executeQuery("SELECT ID, DATA FROM T_BLOB");
            while (rs.next()) {
                Blob x = conn.createBlob();
                x.setBytes(1, new byte[100]);
                rs.updateBlob(2, x);
            }
            JdbcUtils.close(rs);

            rs = stmt.executeQuery("SELECT ID, DATA FROM T_BLOB");
            while (rs.next()) {
                Blob x = conn.createBlob();
                x.setBytes(1, new byte[100]);
                rs.updateBlob("DATA", x);
            }
            JdbcUtils.close(rs);

            rs = stmt.executeQuery("SELECT ID, DATA FROM T_BLOB");
            while (rs.next()) {
                rs.updateBlob("DATA", new ByteArrayInputStream(new byte[100]));
            }
            JdbcUtils.close(rs);

            rs = stmt.executeQuery("SELECT ID, DATA FROM T_BLOB");
            while (rs.next()) {
                rs.updateBlob(2, new ByteArrayInputStream(new byte[100]));
            }
            // BUGFIX: this result set was previously left open before the next
            // executeQuery (every sibling loop closes its cursor); close it so
            // no cursor leaks until the finally block.
            JdbcUtils.close(rs);

            rs = stmt.executeQuery("SELECT ID, DATA FROM T_BLOB");
            while (rs.next()) {
                rs.updateBlob("DATA", new ByteArrayInputStream(new byte[100]), 100);
            }
            JdbcUtils.close(rs);

            rs = stmt.executeQuery("SELECT ID, DATA FROM T_BLOB");
            while (rs.next()) {
                rs.updateBlob(2, new ByteArrayInputStream(new byte[100]), 100);
            }
            JdbcUtils.close(rs);
        } finally {
            JdbcUtils.close(rs);
            JdbcUtils.close(stmt);
            JdbcUtils.close(pstmt);
            JdbcUtils.close(conn);
        }
    }
}
|
BlobTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/actions/ActionWrapperField.java
|
{
"start": 368,
"end": 632
}
|
/**
 * Field-name constants used when parsing/serializing a watcher action wrapper.
 */
class ____ {
    public static final ParseField ID = new ParseField("id");
    public static final ParseField TYPE = new ParseField("type");
    public static final ParseField STATUS = new ParseField("status");

    // Constants holder; not meant to be instantiated.
    private ActionWrapperField() {}
}
|
ActionWrapperField
|
java
|
apache__dubbo
|
dubbo-common/src/main/java/org/apache/dubbo/common/compact/Dubbo2ActivateUtils.java
|
{
"start": 925,
"end": 5127
}
|
/**
 * Compatibility utilities for the legacy Dubbo 2 activate annotation
 * ({@code com.alibaba.dubbo.common.extension.Activate}).
 * <p>
 * The annotation class and its attribute methods are resolved reflectively at
 * class-initialization time. When the legacy annotation is not on the
 * classpath, every lookup resolves to {@code null} and all accessors degrade
 * gracefully: {@code null} for array-valued attributes and {@code 0} for the
 * order attribute.
 */
public class Dubbo2ActivateUtils {

    private static final Class<? extends Annotation> ACTIVATE_CLASS;

    private static final Method GROUP_METHOD;
    private static final Method VALUE_METHOD;
    private static final Method BEFORE_METHOD;
    private static final Method AFTER_METHOD;
    private static final Method ORDER_METHOD;
    private static final Method ON_CLASS_METHOD;

    static {
        ACTIVATE_CLASS = loadClass();
        GROUP_METHOD = loadMethod("group");
        VALUE_METHOD = loadMethod("value");
        BEFORE_METHOD = loadMethod("before");
        AFTER_METHOD = loadMethod("after");
        ORDER_METHOD = loadMethod("order");
        ON_CLASS_METHOD = loadMethod("onClass");
    }

    /**
     * Loads the legacy Activate annotation class if present on the classpath.
     *
     * @return the annotation class, or {@code null} when it is missing or is
     *         not actually an annotation type
     */
    @SuppressWarnings("unchecked")
    private static Class<? extends Annotation> loadClass() {
        try {
            Class<?> clazz = Class.forName("com.alibaba.dubbo.common.extension.Activate");
            if (clazz.isAnnotation()) {
                return (Class<? extends Annotation>) clazz;
            }
            return null;
        } catch (Throwable e) {
            // Legacy dubbo is not on the classpath; compatibility disabled.
            return null;
        }
    }

    /** @return {@code true} when the legacy annotation class was found */
    public static boolean isActivateLoaded() {
        return ACTIVATE_CLASS != null;
    }

    /** @return the legacy annotation class, or {@code null} when absent */
    public static Class<? extends Annotation> getActivateClass() {
        return ACTIVATE_CLASS;
    }

    /**
     * Resolves an attribute method of the legacy annotation by name.
     *
     * @return the method, or {@code null} when the annotation class is absent
     *         or the method cannot be resolved
     */
    private static Method loadMethod(String name) {
        if (ACTIVATE_CLASS == null) {
            return null;
        }
        try {
            return ACTIVATE_CLASS.getMethod(name);
        } catch (Throwable e) {
            return null;
        }
    }

    /**
     * Invokes a {@code String[]}-valued annotation attribute, returning
     * {@code null} on any failure: missing method, reflective error, or an
     * unexpected return type. Shared by all array-attribute accessors below
     * (previously this logic was copy-pasted five times).
     */
    private static String[] invokeStringArrayAttribute(Method method, Annotation annotation) {
        if (method == null) {
            return null;
        }
        try {
            Object result = method.invoke(annotation);
            return result instanceof String[] ? (String[]) result : null;
        } catch (Throwable e) {
            return null;
        }
    }

    /** @return the {@code group} attribute, or {@code null} when unavailable */
    public static String[] getGroup(Annotation annotation) {
        return invokeStringArrayAttribute(GROUP_METHOD, annotation);
    }

    /** @return the {@code value} attribute, or {@code null} when unavailable */
    public static String[] getValue(Annotation annotation) {
        return invokeStringArrayAttribute(VALUE_METHOD, annotation);
    }

    /** @return the {@code before} attribute, or {@code null} when unavailable */
    public static String[] getBefore(Annotation annotation) {
        return invokeStringArrayAttribute(BEFORE_METHOD, annotation);
    }

    /** @return the {@code after} attribute, or {@code null} when unavailable */
    public static String[] getAfter(Annotation annotation) {
        return invokeStringArrayAttribute(AFTER_METHOD, annotation);
    }

    /** @return the {@code order} attribute, or {@code 0} when unavailable */
    public static int getOrder(Annotation annotation) {
        if (ORDER_METHOD == null) {
            return 0;
        }
        try {
            Object result = ORDER_METHOD.invoke(annotation);
            return result instanceof Integer ? (Integer) result : 0;
        } catch (Throwable e) {
            return 0;
        }
    }

    /** @return the {@code onClass} attribute, or {@code null} when unavailable */
    public static String[] getOnClass(Annotation annotation) {
        return invokeStringArrayAttribute(ON_CLASS_METHOD, annotation);
    }
}
|
Dubbo2ActivateUtils
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/BaseResource.java
|
{
"start": 1067,
"end": 1618
}
|
class ____ implements Serializable {
private static final long serialVersionUID = 1492603053176889431L;
private String uri;
/**
* Resource location for a service, e.g.
* /app/v1/services/helloworld
*
**/
public String getUri() {
return uri;
}
public void setUri(String uri) {
this.uri = uri;
}
@Override
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append("BaseResource [uri=")
.append(uri)
.append("]");
return builder.toString();
}
}
|
BaseResource
|
java
|
elastic__elasticsearch
|
x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtRealmSettingsTests.java
|
{
"start": 2065,
"end": 38395
}
|
class ____ extends JwtTestCase {
public void testAllSettings() throws Exception {
final String realmName = "jwt" + randomIntBetween(1, 9);
final Settings settings = generateRandomRealmSettings(realmName).build();
final RealmConfig realmConfig = buildRealmConfig(JwtRealmSettings.TYPE, realmName, settings, 0);
for (final Setting.AffixSetting<?> setting : JwtRealmSettings.getSettings()) {
realmConfig.getConcreteSetting(setting);
}
}
public void testAllowedIssuer() {
final String realmName = "jwt" + randomIntBetween(1, 9);
final Setting.AffixSetting<String> setting = JwtRealmSettings.ALLOWED_ISSUER;
final String settingKey = RealmSettings.getFullSettingKey(realmName, setting);
for (final String rejectedValue : new String[] { null, "" }) {
final Exception exception = expectThrows(IllegalArgumentException.class, () -> {
final Settings settings = Settings.builder().put(settingKey, rejectedValue).build();
final RealmConfig realmConfig = buildRealmConfig(JwtRealmSettings.TYPE, realmName, settings, 0);
final String actualValue = realmConfig.getSetting(setting);
fail("No exception. Expected one for " + settingKey + "=" + rejectedValue + ". Got " + actualValue + ".");
});
assertThat(exception.getMessage(), equalTo("Invalid empty value for [" + settingKey + "]."));
}
for (final String acceptedValue : new String[] { "http://localhost/iss1", "issuer1", "i" }) {
final Settings settings = Settings.builder().put(settingKey, acceptedValue).build();
final RealmConfig realmConfig = buildRealmConfig(JwtRealmSettings.TYPE, realmName, settings, 0);
final String actualValue = realmConfig.getSetting(setting);
assertThat(actualValue, equalTo(acceptedValue));
}
}
public void testAllowedSignatureAlgorithms() {
final String realmName = "jwt" + randomIntBetween(1, 9);
final Setting.AffixSetting<List<String>> setting = JwtRealmSettings.ALLOWED_SIGNATURE_ALGORITHMS;
final String settingKey = RealmSettings.getFullSettingKey(realmName, setting);
for (final String rejectedValue : new String[] { "unknown", "HS256,unknown" }) {
final Exception exception = expectThrows(IllegalArgumentException.class, () -> {
final Settings settings = Settings.builder().put(settingKey, rejectedValue).build();
final RealmConfig realmConfig = buildRealmConfig(JwtRealmSettings.TYPE, realmName, settings, 0);
final List<String> actualValue = realmConfig.getSetting(setting);
fail("No exception. Expected one for " + settingKey + "=" + rejectedValue + ". Got " + actualValue + ".");
});
assertThat(
exception.getMessage(),
equalTo(
"Invalid value [unknown] for ["
+ settingKey
+ "]."
+ " Allowed values are "
+ JwtRealmSettings.SUPPORTED_SIGNATURE_ALGORITHMS
+ "."
)
);
}
for (final String ignoredValue : new String[] { null, "" }) {
final Settings settings = Settings.builder().put(settingKey, ignoredValue).build();
final RealmConfig realmConfig = buildRealmConfig(JwtRealmSettings.TYPE, realmName, settings, 0);
final List<String> actualValue = realmConfig.getSetting(setting);
assertThat(actualValue, equalTo(setting.getDefault(settings)));
}
final String allAcceptedValues = JwtRealmSettings.SUPPORTED_SIGNATURE_ALGORITHMS.toString().replaceAll("[\\[\\] ]", "");
for (final String acceptedValue : List.of("HS256", "HS512,RS512", allAcceptedValues)) {
final Settings settings = Settings.builder().put(settingKey, acceptedValue).build();
final RealmConfig realmConfig = buildRealmConfig(JwtRealmSettings.TYPE, realmName, settings, 0);
final List<String> actualValue = realmConfig.getSetting(setting);
assertThat(actualValue, equalTo(Arrays.asList(acceptedValue.split(",", -1))));
}
}
public void testJwtPath() {
final String realmName = "jwt" + randomIntBetween(1, 9);
final Setting.AffixSetting<String> setting = JwtRealmSettings.PKC_JWKSET_PATH;
final String settingKey = RealmSettings.getFullSettingKey(realmName, setting);
for (final String ignoredValue : new String[] { null, "" }) {
final Settings settings = Settings.builder().put(settingKey, ignoredValue).build();
final RealmConfig realmConfig = buildRealmConfig(JwtRealmSettings.TYPE, realmName, settings, 0);
final String actualValue = realmConfig.getSetting(setting);
assertThat(actualValue, equalTo(setting.getDefault(settings)));
}
for (final String acceptedValue : new String[] { "./config/jwkset.json", "http://localhost/jwkset.json" }) {
final Settings settings = Settings.builder().put(settingKey, acceptedValue).build();
final RealmConfig realmConfig = buildRealmConfig(JwtRealmSettings.TYPE, realmName, settings, 0);
final String actualValue = realmConfig.getSetting(setting);
assertThat(actualValue, equalTo(acceptedValue));
}
}
public void testAllowedAudiences() {
final String realmName = "jwt" + randomIntBetween(1, 9);
final Setting.AffixSetting<List<String>> setting = JwtRealmSettings.ALLOWED_AUDIENCES;
final String settingKey = RealmSettings.getFullSettingKey(realmName, setting);
for (final String rejectedValue : new String[] { null, "" }) {
final Exception exception = expectThrows(IllegalArgumentException.class, () -> {
final Settings settings = Settings.builder().put(settingKey, rejectedValue).build();
final RealmConfig realmConfig = buildRealmConfig(JwtRealmSettings.TYPE, realmName, settings, 0);
final List<String> actualValue = realmConfig.getSetting(setting);
fail("No exception. Expected one for " + settingKey + "=" + rejectedValue + ". Got " + actualValue + ".");
});
assertThat(exception.getMessage(), equalTo("Invalid empty list for [" + settingKey + "]."));
}
for (final String acceptedValue : new String[] { "elasticsearch", "elasticsearch,other" }) {
final Settings settings = Settings.builder().put(settingKey, acceptedValue).build();
final RealmConfig realmConfig = buildRealmConfig(JwtRealmSettings.TYPE, realmName, settings, 0);
final List<String> actualValue = realmConfig.getSetting(setting);
assertThat(actualValue, equalTo(Arrays.asList(acceptedValue.split(",", -1))));
}
}
public void testClaimNames() {
for (final Setting.AffixSetting<String> setting : List.of(
JwtRealmSettings.CLAIMS_PRINCIPAL.getClaim(),
JwtRealmSettings.CLAIMS_GROUPS.getClaim(),
JwtRealmSettings.CLAIMS_DN.getClaim(),
JwtRealmSettings.CLAIMS_MAIL.getClaim(),
JwtRealmSettings.CLAIMS_NAME.getClaim()
)) {
final String realmName = "jwt" + randomIntBetween(1, 9);
final String settingKey = RealmSettings.getFullSettingKey(realmName, setting);
for (final String rejectedValue : new String[] { null, "" }) {
final Exception exception = expectThrows(IllegalArgumentException.class, () -> {
final Settings settings = Settings.builder().put(settingKey, rejectedValue).build();
final RealmConfig realmConfig = buildRealmConfig(JwtRealmSettings.TYPE, realmName, settings, 0);
final String actualValue = realmConfig.getSetting(setting);
fail("No exception. Expected one for " + settingKey + "=" + rejectedValue + ". Got " + actualValue + ".");
});
assertThat(exception.getMessage(), equalTo("Invalid null or empty claim name for [" + settingKey + "]."));
}
for (final String acceptedValue : new String[] { "sub", "name", "email", "dn" }) {
final Settings settings = Settings.builder().put(settingKey, acceptedValue).build();
final RealmConfig realmConfig = buildRealmConfig(JwtRealmSettings.TYPE, realmName, settings, 0);
final String actualValue = realmConfig.getSetting(setting);
assertThat(actualValue, equalTo(acceptedValue));
}
}
}
public void testClaimPatterns() {
for (final Setting.AffixSetting<String> setting : List.of(
JwtRealmSettings.CLAIMS_PRINCIPAL.getPattern(),
JwtRealmSettings.CLAIMS_GROUPS.getPattern(),
JwtRealmSettings.CLAIMS_DN.getPattern(),
JwtRealmSettings.CLAIMS_MAIL.getPattern(),
JwtRealmSettings.CLAIMS_NAME.getPattern()
)) {
final String realmName = "jwt" + randomIntBetween(1, 9);
final String settingKey = RealmSettings.getFullSettingKey(realmName, setting);
for (final String rejectedValue : new String[] { "[" }) {
final Exception exception = expectThrows(IllegalArgumentException.class, () -> {
final Settings settings = Settings.builder().put(settingKey, rejectedValue).build();
final RealmConfig realmConfig = buildRealmConfig(JwtRealmSettings.TYPE, realmName, settings, 0);
final String actualValue = realmConfig.getSetting(setting);
fail("No exception. Expected one for " + settingKey + "=" + rejectedValue + ". Got " + actualValue + ".");
});
assertThat(exception.getMessage(), equalTo("Invalid claim value regex pattern for [" + settingKey + "]."));
}
for (final String acceptedValue : new String[] { "^([^@]+)@example\\.com$", "^Group-(.+)$" }) {
final Settings settings = Settings.builder().put(settingKey, acceptedValue).build();
final RealmConfig realmConfig = buildRealmConfig(JwtRealmSettings.TYPE, realmName, settings, 0);
final String actualValue = realmConfig.getSetting(setting);
assertThat(actualValue, equalTo(acceptedValue));
}
}
}
public void testPopulateUserMetadata() {
final String realmName = "jwt" + randomIntBetween(1, 9);
final Setting.AffixSetting<Boolean> setting = JwtRealmSettings.POPULATE_USER_METADATA;
final String settingKey = RealmSettings.getFullSettingKey(realmName, setting);
for (final String rejectedValue : new String[] { "", "unknown", "t", "f", "TRUE", "FALSE", "True", "False" }) {
final Exception exception = expectThrows(IllegalArgumentException.class, () -> {
final Settings settings = Settings.builder().put(settingKey, rejectedValue).build();
final RealmConfig realmConfig = buildRealmConfig(JwtRealmSettings.TYPE, realmName, settings, 0);
final Boolean actualValue = realmConfig.getSetting(setting);
fail("No exception. Expected one for " + settingKey + "=" + rejectedValue + ". Got " + actualValue + ".");
});
assertThat(
exception.getMessage(),
equalTo("Failed to parse value [" + rejectedValue + "] as only [true] or [false] are allowed.")
);
}
for (final String ignoredValue : new String[] { null }) {
final Settings settings = Settings.builder().put(settingKey, ignoredValue).build();
final RealmConfig realmConfig = buildRealmConfig(JwtRealmSettings.TYPE, realmName, settings, 0);
final Boolean actualValue = realmConfig.getSetting(setting);
assertThat(actualValue, equalTo(setting.getDefault(settings)));
}
for (final String acceptedValue : new String[] { "true", "false" }) {
final Settings settings = Settings.builder().put(settingKey, acceptedValue).build();
final RealmConfig realmConfig = buildRealmConfig(JwtRealmSettings.TYPE, realmName, settings, 0);
final Boolean actualValue = realmConfig.getSetting(setting);
assertThat(actualValue, equalTo(Booleans.parseBoolean(acceptedValue)));
}
}
public void testClientAuthenticationType() {
final String realmName = "jwt" + randomIntBetween(1, 9);
final Setting.AffixSetting<ClientAuthenticationType> setting = JwtRealmSettings.CLIENT_AUTHENTICATION_TYPE;
final String settingKey = RealmSettings.getFullSettingKey(realmName, setting);
for (final String rejectedValue : new String[] { "unknown", "", randomAlphaOfLengthBetween(1, 3) }) {
final Exception exception = expectThrows(IllegalArgumentException.class, () -> {
final Settings settings = Settings.builder().put(settingKey, rejectedValue).build();
final RealmConfig realmConfig = buildRealmConfig(JwtRealmSettings.TYPE, realmName, settings, 0);
final ClientAuthenticationType actualValue = realmConfig.getSetting(setting);
fail("No exception. Expected one for " + settingKey + "=" + rejectedValue + ". Got " + actualValue + ".");
});
assertThat(
exception.getMessage(),
equalTo("Invalid value [" + rejectedValue + "] for [" + settingKey + "]," + " allowed values are " + "[none,shared_secret]")
);
}
for (final String ignoredValue : new String[] { null }) {
final Settings settings = Settings.builder().put(settingKey, ignoredValue).build();
final RealmConfig realmConfig = buildRealmConfig(JwtRealmSettings.TYPE, realmName, settings, 0);
final ClientAuthenticationType actualValue = realmConfig.getSetting(setting);
assertThat(actualValue, equalTo(setting.getDefault(settings)));
}
for (final String acceptedValue : new String[] { "shared_secret", "none" }) {
for (String inputValue : new String[] { acceptedValue, acceptedValue.toUpperCase(Locale.ROOT), capitalize(acceptedValue) }) {
final Settings settings = Settings.builder().put(settingKey, inputValue).build();
final RealmConfig realmConfig = buildRealmConfig(JwtRealmSettings.TYPE, realmName, settings, 0);
final ClientAuthenticationType actualValue = realmConfig.getSetting(setting);
assertThat(actualValue.value(), equalTo(acceptedValue));
}
}
}
public void testAuthenticationRealms() {
final String realmName = "jwt" + randomIntBetween(1, 9);
final Setting.AffixSetting<List<String>> setting = DelegatedAuthorizationSettings.AUTHZ_REALMS.apply(JwtRealmSettings.TYPE);
final String settingKey = RealmSettings.getFullSettingKey(realmName, setting);
for (final String ignoredValue : new String[] { null, "" }) {
final Settings settings = Settings.builder().put(settingKey, ignoredValue).build();
final RealmConfig realmConfig = buildRealmConfig(JwtRealmSettings.TYPE, realmName, settings, 0);
final List<String> actualValue = realmConfig.getSetting(setting);
assertThat(actualValue, equalTo(List.of()));
}
for (final String acceptedValue : new String[] { "a", "1", "native1,file1,ldap1,ad1" }) {
final Settings settings = Settings.builder().put(settingKey, acceptedValue).build();
final RealmConfig realmConfig = buildRealmConfig(JwtRealmSettings.TYPE, realmName, settings, 0);
final List<String> actualValue = realmConfig.getSetting(setting);
assertThat(actualValue, equalTo(Arrays.asList(acceptedValue.split(",", -1))));
}
}
public void testSecureStrings() {
for (final Setting.AffixSetting<SecureString> setting : List.of(
JwtRealmSettings.HMAC_JWKSET,
JwtRealmSettings.HMAC_KEY,
JwtRealmSettings.CLIENT_AUTHENTICATION_SHARED_SECRET
)) {
final String realmName = "jwt" + randomIntBetween(1, 9);
final String settingKey = RealmSettings.getFullSettingKey(realmName, setting);
for (final String rejectedValue : new String[] { null }) {
final Exception exception = expectThrows(NullPointerException.class, () -> {
final MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString(settingKey, rejectedValue);
final SecureString actualValue = secureSettings.getString(settingKey);
fail("No exception. Expected one for " + settingKey + "=" + rejectedValue + ". Got " + actualValue + ".");
});
assertThat(
exception.getMessage(),
equalTo("Cannot invoke \"String.getBytes(java.nio.charset.Charset)\" because \"value\" is null")
);
}
for (final String acceptedValue : new String[] { "", "abc123", "a", "1" }) {
final MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString(settingKey, acceptedValue);
final SecureString actualValue = secureSettings.getString(settingKey);
assertThat(actualValue, equalTo(acceptedValue));
}
}
}
public void testTimeSettingsWithDefault() {
for (final Setting.AffixSetting<TimeValue> setting : List.of(
JwtRealmSettings.ALLOWED_CLOCK_SKEW,
JwtRealmSettings.HTTP_CONNECTION_READ_TIMEOUT,
JwtRealmSettings.HTTP_SOCKET_TIMEOUT
)) {
final String realmName = "jwt" + randomIntBetween(1, 9);
final String settingKey = RealmSettings.getFullSettingKey(realmName, setting);
for (final String rejectedValue : new String[] { "", "-2", "10", "1w", "1M", "1y" }) {
final Exception exception = expectThrows(IllegalArgumentException.class, () -> {
final Settings settings = Settings.builder().put(settingKey, rejectedValue).build();
final RealmConfig realmConfig = buildRealmConfig(JwtRealmSettings.TYPE, realmName, settings, 0);
final TimeValue actualValue = realmConfig.getSetting(setting);
fail("No exception. Expected one for " + settingKey + "=" + rejectedValue + ". Got " + actualValue + ".");
});
assertThat(
exception.getMessage(),
equalTo(
"failed to parse setting ["
+ settingKey
+ "] with value ["
+ rejectedValue
+ "] as a time value: unit is missing or unrecognized"
)
);
}
for (final String ignoredValue : new String[] { null }) {
final Settings settings = Settings.builder().put(settingKey, ignoredValue).build();
final RealmConfig realmConfig = buildRealmConfig(JwtRealmSettings.TYPE, realmName, settings, 0);
final TimeValue actualValue = realmConfig.getSetting(setting);
assertThat(actualValue, equalTo(setting.getDefault(settings)));
}
for (final String acceptedValue : new String[] { "-1", "0", "0s", "1s", "1m", "1h", "1d" }) {
final Settings settings = Settings.builder().put(settingKey, acceptedValue).build();
final RealmConfig realmConfig = buildRealmConfig(JwtRealmSettings.TYPE, realmName, settings, 0);
final TimeValue actualValue = realmConfig.getSetting(setting);
assertThat(actualValue, equalTo(TimeValue.parseTimeValue(acceptedValue, settingKey)));
}
}
}
public void testIntegerSettingsWithDefault() {
for (final Setting.AffixSetting<Integer> setting : List.of(
JwtRealmSettings.HTTP_MAX_CONNECTIONS,
JwtRealmSettings.HTTP_MAX_ENDPOINT_CONNECTIONS
)) {
final String realmName = "jwt" + randomIntBetween(1, 9);
final String settingKey = RealmSettings.getFullSettingKey(realmName, setting);
// If Integer parsing fails, " must be >= 0" is not appended to exception message.
for (final String rejectedValue : new String[] { "", "100_000", "NaN" }) {
final Exception exception = expectThrows(IllegalArgumentException.class, () -> {
final Settings settings = Settings.builder().put(settingKey, rejectedValue).build();
final RealmConfig realmConfig = buildRealmConfig(JwtRealmSettings.TYPE, realmName, settings, 0);
final Integer actualValue = realmConfig.getSetting(setting);
fail("No exception. Expected one for " + settingKey + "=" + rejectedValue + ". Got " + actualValue + ".");
});
assertThat(
exception.getMessage(),
equalTo("Failed to parse value [" + rejectedValue + "] for setting [" + settingKey + "]")
);
}
// If Integer parsing succeeds, " must be >= 0" is appended to exception message.
for (final String rejectedValue : new String[] { "-1", Integer.toString(Integer.MIN_VALUE) }) {
final Exception exception = expectThrows(IllegalArgumentException.class, () -> {
final Settings settings = Settings.builder().put(settingKey, rejectedValue).build();
final RealmConfig realmConfig = buildRealmConfig(JwtRealmSettings.TYPE, realmName, settings, 0);
final Integer actualValue = realmConfig.getSetting(setting);
fail("No exception. Expected one for " + settingKey + "=" + rejectedValue + ". Got " + actualValue + ".");
});
assertThat(
exception.getMessage(),
equalTo("Failed to parse value [" + rejectedValue + "] for setting [" + settingKey + "] must be >= 0")
);
}
for (final String ignoredValue : new String[] { null }) {
final Settings settings = Settings.builder().put(settingKey, ignoredValue).build();
final RealmConfig realmConfig = buildRealmConfig(JwtRealmSettings.TYPE, realmName, settings, 0);
final Integer actualValue = realmConfig.getSetting(setting);
assertThat(actualValue, equalTo(setting.getDefault(settings)));
}
for (final String acceptedValue : new String[] { "0", "1", "100000", Integer.toString(Integer.MAX_VALUE) }) {
final Settings settings = Settings.builder().put(settingKey, acceptedValue).build();
final RealmConfig realmConfig = buildRealmConfig(JwtRealmSettings.TYPE, realmName, settings, 0);
final Integer actualValue = realmConfig.getSetting(setting);
assertThat(actualValue, equalTo(Integer.valueOf(acceptedValue)));
}
}
}
public void testTokenTypeSetting() {
final String realmName = randomAlphaOfLengthBetween(3, 8);
final String fullSettingKey = RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.TOKEN_TYPE);
// Default is id_token
assertThat(
buildRealmConfig(JwtRealmSettings.TYPE, realmName, Settings.EMPTY, randomInt()).getSetting(JwtRealmSettings.TOKEN_TYPE),
is(JwtRealmSettings.TokenType.ID_TOKEN)
);
// Valid values
final JwtRealmSettings.TokenType expectedTokenType = randomFrom(JwtRealmSettings.TokenType.values());
final Settings settings = Settings.builder()
.put(fullSettingKey, randomBoolean() ? expectedTokenType.value() : expectedTokenType.value().toUpperCase(Locale.ROOT))
.build();
assertThat(
buildRealmConfig(JwtRealmSettings.TYPE, realmName, settings, randomInt()).getSetting(JwtRealmSettings.TOKEN_TYPE),
is(expectedTokenType)
);
// Anything else is invalid
final Settings invalidSettings = Settings.builder().put(fullSettingKey, randomAlphaOfLengthBetween(3, 20)).build();
final IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> buildRealmConfig(JwtRealmSettings.TYPE, realmName, invalidSettings, randomInt()).getSetting(JwtRealmSettings.TOKEN_TYPE)
);
assertThat(e.getMessage(), containsString("Invalid value"));
}
public void testFallbackClaimSettingsNotAllowedForIdTokenType() {
final String realmName = randomAlphaOfLengthBetween(3, 8);
final Settings.Builder settingsBuilder = Settings.builder();
if (randomBoolean()) {
settingsBuilder.put(
RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.TOKEN_TYPE),
JwtRealmSettings.TokenType.ID_TOKEN.value()
);
}
settingsBuilder.put(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.FALLBACK_SUB_CLAIM), randomAlphaOfLength(8))
.put(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.FALLBACK_AUD_CLAIM), randomAlphaOfLength(8));
final RealmConfig realmConfig = buildRealmConfig(JwtRealmSettings.TYPE, realmName, settingsBuilder.build(), randomInt());
final IllegalArgumentException e1 = expectThrows(
IllegalArgumentException.class,
() -> realmConfig.getSetting(JwtRealmSettings.FALLBACK_SUB_CLAIM)
);
assertThat(
e1.getMessage(),
containsString(
"fallback claim setting ["
+ RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.FALLBACK_SUB_CLAIM)
+ "] is not allowed when JWT realm ["
+ realmName
+ "] is [id_token] type"
)
);
final IllegalArgumentException e2 = expectThrows(
IllegalArgumentException.class,
() -> realmConfig.getSetting(JwtRealmSettings.FALLBACK_AUD_CLAIM)
);
assertThat(
e2.getMessage(),
containsString(
"fallback claim setting ["
+ RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.FALLBACK_AUD_CLAIM)
+ "] is not allowed when JWT realm ["
+ realmName
+ "] is [id_token] type"
)
);
}
public void testFallbackSettingsForAccessTokenType() {
final String realmName = randomAlphaOfLengthBetween(3, 8);
final String fallbackSub = randomAlphaOfLength(8);
final String fallbackAud = randomAlphaOfLength(8);
final Settings settings = Settings.builder()
.put(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.TOKEN_TYPE), JwtRealmSettings.TokenType.ACCESS_TOKEN.value())
.put(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.FALLBACK_SUB_CLAIM), fallbackSub)
.put(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.FALLBACK_AUD_CLAIM), fallbackAud)
.build();
final RealmConfig realmConfig = buildRealmConfig(JwtRealmSettings.TYPE, realmName, settings, randomInt());
assertThat(realmConfig.getSetting(JwtRealmSettings.FALLBACK_SUB_CLAIM), equalTo(fallbackSub));
assertThat(realmConfig.getSetting(JwtRealmSettings.FALLBACK_AUD_CLAIM), equalTo(fallbackAud));
}
public void testRegisteredClaimsCannotBeUsedForFallbackSettings() {
final String realmName = randomAlphaOfLengthBetween(3, 8);
final String fallbackSub = randomValueOtherThan("sub", () -> randomFrom(JwtRealmSettings.REGISTERED_CLAIM_NAMES));
final String fallbackAud = randomValueOtherThan("aud", () -> randomFrom(JwtRealmSettings.REGISTERED_CLAIM_NAMES));
final Settings settings = Settings.builder()
.put(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.TOKEN_TYPE), JwtRealmSettings.TokenType.ACCESS_TOKEN.value())
.put(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.FALLBACK_SUB_CLAIM), fallbackSub)
.put(RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.FALLBACK_AUD_CLAIM), fallbackAud)
.build();
final RealmConfig realmConfig = buildRealmConfig(JwtRealmSettings.TYPE, realmName, settings, randomInt());
final IllegalArgumentException e1 = expectThrows(
IllegalArgumentException.class,
() -> realmConfig.getSetting(JwtRealmSettings.FALLBACK_SUB_CLAIM)
);
assertThat(
e1.getMessage(),
containsString(
Strings.format(
"Invalid fallback claims setting [%s]. Claim [%s] cannot fallback to a registered claim [%s]",
RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.FALLBACK_SUB_CLAIM),
"sub",
fallbackSub
)
)
);
final IllegalArgumentException e2 = expectThrows(
IllegalArgumentException.class,
() -> realmConfig.getSetting(JwtRealmSettings.FALLBACK_AUD_CLAIM)
);
assertThat(
e2.getMessage(),
containsString(
Strings.format(
"Invalid fallback claims setting [%s]. Claim [%s] cannot fallback to a registered claim [%s]",
RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.FALLBACK_AUD_CLAIM),
"aud",
fallbackAud
)
)
);
}
public void testRequiredClaims() {
final String realmName = randomAlphaOfLengthBetween(3, 8);
// Required claims are optional
final RealmConfig realmConfig1 = buildRealmConfig(JwtRealmSettings.TYPE, realmName, Settings.EMPTY, randomInt());
assertThat(realmConfig1.getSetting(JwtRealmSettings.REQUIRED_CLAIMS).names(), emptyIterable());
// Multiple required claims with different value types
final String prefix = RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.REQUIRED_CLAIMS);
final Settings settings = Settings.builder()
.put(prefix + "extra_1", "foo")
.put(prefix + "extra_2", "hello,world")
.put(prefix + "extra_3", 42)
.build();
final RealmConfig realmConfig2 = buildRealmConfig(JwtRealmSettings.TYPE, realmName, settings, randomInt());
final Settings requireClaimsSettings = realmConfig2.getSetting(JwtRealmSettings.REQUIRED_CLAIMS);
assertThat(requireClaimsSettings.names(), containsInAnyOrder("extra_1", "extra_2", "extra_3"));
assertThat(requireClaimsSettings.getAsList("extra_1"), equalTo(List.of("foo")));
assertThat(requireClaimsSettings.getAsList("extra_2"), equalTo(List.of("hello", "world")));
assertThat(requireClaimsSettings.getAsList("extra_3"), equalTo(List.of("42")));
}
public void testInvalidRequiredClaims() {
final String realmName = randomAlphaOfLengthBetween(3, 8);
final String invalidRequiredClaim = randomFrom("iss", "sub", "aud", "exp", "nbf", "iat");
final String fullSettingKey = RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.REQUIRED_CLAIMS) + invalidRequiredClaim;
final Settings settings = Settings.builder().put(fullSettingKey, randomAlphaOfLength(8)).build();
final RealmConfig realmConfig = buildRealmConfig(JwtRealmSettings.TYPE, realmName, settings, randomInt());
final IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> realmConfig.getSetting(JwtRealmSettings.REQUIRED_CLAIMS)
);
assertThat(e.getMessage(), containsString("required claim [" + fullSettingKey + "] cannot be one of [iss,sub,aud,exp,nbf,iat]"));
}
public void testRequiredClaimsCannotBeEmpty() {
final String realmName = randomAlphaOfLengthBetween(3, 8);
final String invalidRequiredClaim = randomAlphaOfLengthBetween(4, 8);
final String fullSettingKey = RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.REQUIRED_CLAIMS) + invalidRequiredClaim;
final Settings settings = Settings.builder().put(fullSettingKey, "").build();
final RealmConfig realmConfig = buildRealmConfig(JwtRealmSettings.TYPE, realmName, settings, randomInt());
final IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> realmConfig.getSetting(JwtRealmSettings.REQUIRED_CLAIMS)
);
assertThat(e.getMessage(), containsString("required claim [" + fullSettingKey + "] cannot be empty"));
}
public void testInvalidProxySchemeThrowsError() {
final String scheme = randomBoolean() ? "https" : randomAlphaOfLengthBetween(3, 8);
final String realmName = randomAlphaOfLengthBetween(3, 8);
final String proxySchemeSettingKey = RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.HTTP_PROXY_SCHEME);
final Settings settings = Settings.builder().put(proxySchemeSettingKey, scheme).build();
final RealmConfig realmConfig = buildRealmConfig(JwtRealmSettings.TYPE, realmName, settings, randomInt());
final IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> realmConfig.getSetting(JwtRealmSettings.HTTP_PROXY_SCHEME)
);
assertThat(
e.getMessage(),
equalTo(Strings.format("Invalid value [%s] for [%s]. Allowed values are [http].", scheme, proxySchemeSettingKey))
);
}
public void testInvalidProxyHostThrowsError() {
final int proxyPort = randomIntBetween(1, 65535);
final String realmName = randomAlphaOfLengthBetween(3, 8);
final String proxyPortSettingKey = RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.HTTP_PROXY_PORT);
final String proxyHostSettingKey = RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.HTTP_PROXY_HOST);
final Settings settings = Settings.builder().put(proxyHostSettingKey, "not a url").put(proxyPortSettingKey, proxyPort).build();
final RealmConfig realmConfig = buildRealmConfig(JwtRealmSettings.TYPE, realmName, settings, randomInt());
final IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> realmConfig.getSetting(JwtRealmSettings.HTTP_PROXY_HOST)
);
assertThat(
e.getMessage(),
allOf(startsWith(Strings.format("HTTP host for hostname [not a url] (from [%s])", proxyHostSettingKey)), endsWith("is invalid"))
);
}
public void testInvalidProxyPortThrowsError() {
final int proxyPort = randomFrom(randomIntBetween(Integer.MIN_VALUE, -1), randomIntBetween(65536, Integer.MAX_VALUE));
final String realmName = randomAlphaOfLengthBetween(3, 8);
final String proxyPortSettingKey = RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.HTTP_PROXY_PORT);
final Settings settings = Settings.builder().put(proxyPortSettingKey, proxyPort).build();
final RealmConfig realmConfig = buildRealmConfig(JwtRealmSettings.TYPE, realmName, settings, randomInt());
final IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> realmConfig.getSetting(JwtRealmSettings.HTTP_PROXY_PORT)
);
assertThat(
e.getMessage(),
startsWith(Strings.format("Failed to parse value [%d] for setting [%s]", proxyPort, proxyPortSettingKey))
);
}
}
|
JwtRealmSettingsTests
|
java
|
spring-cloud__spring-cloud-gateway
|
spring-cloud-gateway-server-webflux/src/main/java/org/springframework/cloud/gateway/handler/predicate/ReadBodyRoutePredicateFactory.java
|
{
"start": 4551,
"end": 5356
}
|
// Fluent configuration for ReadBodyRoutePredicateFactory.
// Carries the class the request body is deserialized into, the predicate applied
// to the deserialized body, and optional codec hints.
// NOTE(review): field types are raw Class/Predicate; the generic
// setPredicate(Class, Predicate) overload is the type-safe entry point.
class ____ {
// Target type the request body is converted to before the predicate runs.
private @Nullable Class inClass;
// Test applied to the deserialized body; presumably the route matches when it
// returns true — confirm against the factory's apply() logic.
private @Nullable Predicate predicate;
// Optional hints handed to the body decoder; semantics depend on the codec in use.
private @Nullable Map<String, Object> hints;
public @Nullable Class getInClass() {
return inClass;
}
// Setters return this to support fluent configuration chains.
public Config setInClass(Class inClass) {
this.inClass = inClass;
return this;
}
public @Nullable Predicate getPredicate() {
return predicate;
}
public Config setPredicate(Predicate predicate) {
this.predicate = predicate;
return this;
}
// Type-safe variant: records the body class and the matching predicate together.
public <T> Config setPredicate(Class<T> inClass, Predicate<T> predicate) {
setInClass(inClass);
this.predicate = predicate;
return this;
}
public @Nullable Map<String, Object> getHints() {
return hints;
}
public Config setHints(Map<String, Object> hints) {
this.hints = hints;
return this;
}
}
}
|
Config
|
java
|
spring-projects__spring-data-jpa
|
spring-data-envers/src/test/java/org/springframework/data/envers/sample/QCountry.java
|
{
"start": 1078,
"end": 2053
}
|
// Querydsl query type for the Country entity — presumably generated by the
// Querydsl annotation processor (do not edit by hand; TODO confirm generation source).
class ____ extends EntityPathBase<Country> {
@Serial private static final long serialVersionUID = -936338527;
private static final PathInits INITS = PathInits.DIRECT2;
// Default root path, usable directly in queries as the "country" alias.
public static final QCountry country = new QCountry("country");
// String-property paths mirroring the entity's fields.
public final StringPath code = createString("code");
public final StringPath name = createString("name");
public QCountry(String variable) {
this(Country.class, forVariable(variable), INITS);
}
@SuppressWarnings("all")
public QCountry(Path<? extends Country> path) {
this((Class) path.getType(), path.getMetadata(), path.getMetadata().isRoot() ? INITS : PathInits.DEFAULT);
}
public QCountry(PathMetadata metadata) {
this(metadata, metadata.isRoot() ? INITS : PathInits.DEFAULT);
}
public QCountry(PathMetadata metadata, PathInits inits) {
this(Country.class, metadata, inits);
}
public QCountry(Class<? extends Country> type, PathMetadata metadata, PathInits inits) {
super(type, metadata, inits);
}
}
|
QCountry
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Utils.java
|
{
"start": 1920,
"end": 2240
}
|
class ____ log files from directory given
* It doesnt accept paths having _logs.
* This can be used to list paths of output directory as follows:
* Path[] fileList = FileUtil.stat2Paths(fs.listStatus(outDir,
* new OutputLogFilter()));
*/
public static
|
filters
|
java
|
bumptech__glide
|
integration/okhttp3/src/main/java/com/bumptech/glide/integration/okhttp3/OkHttpStreamFetcher.java
|
{
"start": 683,
"end": 3204
}
|
// Glide DataFetcher that streams data over OkHttp. The fetcher registers itself
// as the okhttp3.Callback, so the load completes asynchronously on an OkHttp
// dispatcher thread via onResponse/onFailure.
class ____ implements DataFetcher<InputStream>, okhttp3.Callback {
private static final String TAG = "OkHttpFetcher";
private final Call.Factory client;
private final GlideUrl url;
// Stream handed to Glide; closed in cleanup().
private InputStream stream;
// Body of the last response; closed in cleanup().
private ResponseBody responseBody;
private DataCallback<? super InputStream> callback;
// call may be accessed on the main thread while the object is in use on other
// threads. All other accesses to variables may occur on different threads, but
// only one at a time.
private volatile Call call;
// Public API.
@SuppressWarnings("WeakerAccess")
public OkHttpStreamFetcher(Call.Factory client, GlideUrl url) {
this.client = client;
this.url = url;
}
// Builds the request (copying the GlideUrl's headers) and enqueues it; the
// result is delivered asynchronously to the supplied callback.
@Override
public void loadData(
@NonNull Priority priority, @NonNull final DataCallback<? super InputStream> callback) {
Request.Builder requestBuilder = new Request.Builder().url(url.toStringUrl());
for (Map.Entry<String, String> headerEntry : url.getHeaders().entrySet()) {
String key = headerEntry.getKey();
requestBuilder.addHeader(key, headerEntry.getValue());
}
Request request = requestBuilder.build();
this.callback = callback;
call = client.newCall(request);
call.enqueue(this);
}
// Transport-level failure (DNS, connect, cancel, ...): report it to Glide.
@Override
public void onFailure(@NonNull Call call, @NonNull IOException e) {
if (Log.isLoggable(TAG, Log.DEBUG)) {
Log.d(TAG, "OkHttp failed to obtain result", e);
}
callback.onLoadFailed(e);
}
// HTTP-level result: wrap the body stream so Glide can track progress against
// the declared content length; non-successful status codes surface as HttpException.
@Override
public void onResponse(@NonNull Call call, @NonNull Response response) {
responseBody = response.body();
if (response.isSuccessful()) {
long contentLength = Preconditions.checkNotNull(responseBody).contentLength();
stream = ContentLengthInputStream.obtain(responseBody.byteStream(), contentLength);
callback.onDataReady(stream);
} else {
callback.onLoadFailed(new HttpException(response.message(), response.code()));
}
}
// Releases the stream and response body; safe to call even if the load never
// produced a response.
@Override
public void cleanup() {
try {
if (stream != null) {
stream.close();
}
} catch (IOException e) {
// Ignored
}
if (responseBody != null) {
responseBody.close();
}
callback = null;
}
// Snapshot the volatile call into a local: cancel() may run on the main thread
// while loadData assigns call on another thread.
@Override
public void cancel() {
Call local = call;
if (local != null) {
local.cancel();
}
}
@NonNull
@Override
public Class<InputStream> getDataClass() {
return InputStream.class;
}
@NonNull
@Override
public DataSource getDataSource() {
return DataSource.REMOTE;
}
}
|
OkHttpStreamFetcher
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/jobmaster/slotpool/DeclarativeSlotPoolService.java
|
{
"start": 2355,
"end": 12409
}
|
class ____ implements SlotPoolService {
private final JobID jobId;
private final Duration rpcTimeout;
private final DeclarativeSlotPool declarativeSlotPool;
private final Clock clock;
private final Set<ResourceID> registeredTaskManagers;
protected final Logger log = LoggerFactory.getLogger(getClass());
private DeclareResourceRequirementServiceConnectionManager
resourceRequirementServiceConnectionManager =
NoOpDeclareResourceRequirementServiceConnectionManager.INSTANCE;
@Nullable private JobMasterId jobMasterId;
@Nullable private String jobManagerAddress;
private State state = State.CREATED;
protected final ComponentMainThreadExecutor componentMainThreadExecutor;
/**
 * Creates the slot pool service and its underlying {@link DeclarativeSlotPool}.
 *
 * @param jobId job this slot pool serves
 * @param declarativeSlotPoolFactory factory used to build the underlying slot pool
 * @param clock clock used for relative-time bookkeeping (slot idleness)
 * @param idleSlotTimeout timeout handed to the slot pool for releasing idle slots
 * @param rpcTimeout timeout used for resource-manager RPCs
 * @param slotRequestMaxInterval interval handed to the slot pool for batching requirement changes
 * @param componentMainThreadExecutor main-thread executor the pool's callbacks run on
 */
public DeclarativeSlotPoolService(
JobID jobId,
DeclarativeSlotPoolFactory declarativeSlotPoolFactory,
Clock clock,
Duration idleSlotTimeout,
Duration rpcTimeout,
Duration slotRequestMaxInterval,
@Nonnull ComponentMainThreadExecutor componentMainThreadExecutor) {
this.jobId = jobId;
this.clock = clock;
this.rpcTimeout = rpcTimeout;
this.registeredTaskManagers = new HashSet<>();
this.componentMainThreadExecutor = componentMainThreadExecutor;
// The slot pool reports requirement changes back through declareResourceRequirements.
this.declarativeSlotPool =
declarativeSlotPoolFactory.create(
jobId,
this::declareResourceRequirements,
idleSlotTimeout,
rpcTimeout,
slotRequestMaxInterval,
componentMainThreadExecutor);
}
protected DeclarativeSlotPool getDeclarativeSlotPool() {
return declarativeSlotPool;
}
protected long getRelativeTimeMillis() {
return clock.relativeTimeMillis();
}
/** Returns the underlying slot pool when it is compatible with the requested type. */
@Override
public <T> Optional<T> castInto(Class<T> clazz) {
    return clazz.isAssignableFrom(declarativeSlotPool.getClass())
            ? Optional.of(clazz.cast(declarativeSlotPool))
            : Optional.empty();
}
@Override
public final void start(JobMasterId jobMasterId, String address) throws Exception {
Preconditions.checkState(
state == State.CREATED, "The DeclarativeSlotPoolService can only be started once.");
this.jobMasterId = Preconditions.checkNotNull(jobMasterId);
this.jobManagerAddress = Preconditions.checkNotNull(address);
this.resourceRequirementServiceConnectionManager =
DefaultDeclareResourceRequirementServiceConnectionManager.create(
componentMainThreadExecutor);
onStart();
state = State.STARTED;
}
/**
* This method is called when the slot pool service is started. It can be overridden by
* subclasses.
*/
protected void onStart() {}
protected void assertHasBeenStarted() {
Preconditions.checkState(
state == State.STARTED, "The DeclarativeSlotPoolService has to be started.");
}
@Override
public final void close() {
if (state != State.CLOSED) {
onClose();
resourceRequirementServiceConnectionManager.close();
resourceRequirementServiceConnectionManager =
NoOpDeclareResourceRequirementServiceConnectionManager.INSTANCE;
releaseAllTaskManagers(
new FlinkException("The DeclarativeSlotPoolService is being closed."));
state = State.CLOSED;
}
}
/**
* This method is called when the slot pool service is closed. It can be overridden by
* subclasses.
*/
protected void onClose() {}
@Override
public Collection<SlotOffer> offerSlots(
TaskManagerLocation taskManagerLocation,
TaskManagerGateway taskManagerGateway,
Collection<SlotOffer> offers) {
assertHasBeenStarted();
if (!isTaskManagerRegistered(taskManagerLocation.getResourceID())) {
log.debug(
"Ignoring offered slots from unknown task manager {}.",
taskManagerLocation.getResourceID());
return Collections.emptyList();
}
return declarativeSlotPool.offerSlots(
offers, taskManagerLocation, taskManagerGateway, clock.relativeTimeMillis());
}
boolean isTaskManagerRegistered(ResourceID taskManagerId) {
return registeredTaskManagers.contains(taskManagerId);
}
/**
 * Releases the failed allocation from the slot pool and reports whether the
 * owning task manager no longer holds any slots.
 *
 * @return the task manager id when it has no slots left, otherwise empty
 */
@Override
public Optional<ResourceID> failAllocation(
        @Nullable ResourceID taskManagerId, AllocationID allocationId, Exception cause) {
    assertHasBeenStarted();
    Preconditions.checkNotNull(allocationId);
    Preconditions.checkNotNull(
            taskManagerId,
            "This slot pool only supports failAllocation calls coming from the TaskExecutor.");
    final ResourceCounter releasedRequirements =
            declarativeSlotPool.releaseSlot(allocationId, cause);
    // Let subclasses react to the requirements that were fulfilled by this allocation.
    onFailAllocation(releasedRequirements);
    return declarativeSlotPool.containsSlots(taskManagerId)
            ? Optional.empty()
            : Optional.of(taskManagerId);
}
/**
* This method is called when an allocation fails. It can be overridden by subclasses.
*
* @param previouslyFulfilledRequirements previouslyFulfilledRequirements by the failed
* allocation
*/
protected void onFailAllocation(ResourceCounter previouslyFulfilledRequirements) {}
@Override
public boolean registerTaskManager(ResourceID taskManagerId) {
assertHasBeenStarted();
log.debug("Register new TaskExecutor {}.", taskManagerId);
return registeredTaskManagers.add(taskManagerId);
}
@Override
public boolean releaseTaskManager(ResourceID taskManagerId, Exception cause) {
assertHasBeenStarted();
if (registeredTaskManagers.remove(taskManagerId)) {
internalReleaseTaskManager(taskManagerId, cause);
return true;
}
return false;
}
@Override
public void releaseFreeSlotsOnTaskManager(ResourceID taskManagerId, Exception cause) {
assertHasBeenStarted();
if (isTaskManagerRegistered(taskManagerId)) {
Collection<AllocationID> freeSlots =
declarativeSlotPool.getFreeSlotTracker().getFreeSlotsInformation().stream()
.filter(
slotInfo ->
slotInfo.getTaskManagerLocation()
.getResourceID()
.equals(taskManagerId))
.map(SlotInfo::getAllocationId)
.collect(Collectors.toSet());
for (AllocationID allocationId : freeSlots) {
final ResourceCounter previouslyFulfilledRequirement =
declarativeSlotPool.releaseSlot(allocationId, cause);
// release free slots, previously fulfilled requirement should be empty.
Preconditions.checkState(
previouslyFulfilledRequirement.equals(ResourceCounter.empty()));
}
}
}
private void releaseAllTaskManagers(Exception cause) {
for (ResourceID registeredTaskManager : registeredTaskManagers) {
internalReleaseTaskManager(registeredTaskManager, cause);
}
registeredTaskManagers.clear();
}
private void internalReleaseTaskManager(ResourceID taskManagerId, Exception cause) {
assertHasBeenStarted();
final ResourceCounter previouslyFulfilledRequirement =
declarativeSlotPool.releaseSlots(taskManagerId, cause);
onReleaseTaskManager(previouslyFulfilledRequirement);
}
/**
* This method is called when a TaskManager is released. It can be overridden by subclasses.
*
* @param previouslyFulfilledRequirement previouslyFulfilledRequirement by the released
* TaskManager
*/
protected void onReleaseTaskManager(ResourceCounter previouslyFulfilledRequirement) {}
@Override
public void connectToResourceManager(ResourceManagerGateway resourceManagerGateway) {
assertHasBeenStarted();
resourceRequirementServiceConnectionManager.connect(
resourceRequirements ->
resourceManagerGateway.declareRequiredResources(
jobMasterId, resourceRequirements, rpcTimeout));
declareResourceRequirements(declarativeSlotPool.getResourceRequirements());
}
private void declareResourceRequirements(Collection<ResourceRequirement> resourceRequirements) {
assertHasBeenStarted();
resourceRequirementServiceConnectionManager.declareResourceRequirements(
ResourceRequirements.create(jobId, jobManagerAddress, resourceRequirements));
}
@Override
public void disconnectResourceManager() {
assertHasBeenStarted();
resourceRequirementServiceConnectionManager.disconnect();
}
/** Builds a report of all slots currently allocated on the given task manager. */
@Override
public AllocatedSlotReport createAllocatedSlotReport(ResourceID taskManagerId) {
    assertHasBeenStarted();
    final Collection<AllocatedSlotInfo> allocatedSlotInfos =
            declarativeSlotPool.getAllSlotsInformation().stream()
                    .filter(info -> info.getTaskManagerLocation().getResourceID().equals(taskManagerId))
                    .map(info -> new AllocatedSlotInfo(info.getPhysicalSlotNumber(), info.getAllocationId()))
                    .collect(Collectors.toList());
    return new AllocatedSlotReport(jobId, allocatedSlotInfos);
}
private
|
DeclarativeSlotPoolService
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/ExecutionHistory.java
|
{
"start": 1355,
"end": 2868
}
|
class ____ implements Serializable {
private static final long serialVersionUID = 1L;
private final BoundedLinkedHashMap<Integer, ArchivedExecution> historicalExecutions;
private int maxAttemptNumber;
public ExecutionHistory(int sizeLimit) {
super();
this.historicalExecutions = new BoundedLinkedHashMap<>(sizeLimit);
this.maxAttemptNumber = -1;
}
ExecutionHistory(ExecutionHistory other) {
this.historicalExecutions = new BoundedLinkedHashMap<>(other.historicalExecutions);
this.maxAttemptNumber = other.maxAttemptNumber;
}
/** Records an execution attempt, tracking the highest attempt number seen so far. */
void add(ArchivedExecution execution) {
    final int attemptNumber = execution.getAttemptNumber();
    maxAttemptNumber = Math.max(maxAttemptNumber, attemptNumber);
    historicalExecutions.put(attemptNumber, execution);
}
/**
 * Looks up the archived execution for the given attempt number.
 *
 * @return the execution if still retained by the bounded history, otherwise empty
 * @throws IllegalArgumentException if the attempt number was never assigned
 */
public Optional<ArchivedExecution> getHistoricalExecution(int attemptNumber) {
    if (!isValidAttemptNumber(attemptNumber)) {
        throw new IllegalArgumentException("Invalid attempt number.");
    }
    return Optional.ofNullable(historicalExecutions.get(attemptNumber));
}
public Collection<ArchivedExecution> getHistoricalExecutions() {
return Collections.unmodifiableCollection(historicalExecutions.values());
}
public boolean isValidAttemptNumber(int attemptNumber) {
return attemptNumber >= 0 && attemptNumber <= maxAttemptNumber;
}
private static
|
ExecutionHistory
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/SleepJob.java
|
{
"start": 6372,
"end": 7845
}
|
// Reducer that performs no real work: it reads a total sleep duration from the
// reduce spec and sleeps for that long, simulating reduce-side load in Gridmix.
class ____
extends Reducer<GridmixKey, NullWritable, NullWritable, NullWritable> {
// Total time to sleep, in milliseconds, accumulated from the reduce spec values.
private long duration = 0L;
@Override
protected void setup(Context context)
throws IOException, InterruptedException {
// The first key must be the REDUCE_SPEC record that carries the duration.
if (!context.nextKey() ||
context.getCurrentKey().getType() != GridmixKey.REDUCE_SPEC) {
throw new IOException("Missing reduce spec");
}
// NOTE(review): getReduceOutputBytes() is presumably reused here to carry the
// sleep duration in ms — confirm against the spec encoding in SleepJob.
for (NullWritable ignored : context.getValues()) {
final GridmixKey spec = context.getCurrentKey();
duration += spec.getReduceOutputBytes();
}
long sleepInterval =
context.getConfiguration().getLong(GRIDMIX_SLEEP_INTERVAL, 5);
final long RINTERVAL =
TimeUnit.MILLISECONDS.convert(sleepInterval, TimeUnit.SECONDS);
//This is to stop accumulating deviation from expected sleep time
//over a period of time.
long start = Time.monotonicNow();
long slept = 0L;
long sleep = 0L;
// Sleep in bounded chunks, recomputing the remainder from the monotonic clock
// each pass so per-sleep jitter does not accumulate into drift.
while (slept < duration) {
final long rem = duration - slept;
sleep = Math.min(rem, RINTERVAL);
context.setStatus("Sleeping... " + rem + " ms left");
TimeUnit.MILLISECONDS.sleep(sleep);
slept = Time.monotonicNow() - start;
}
}
@Override
protected void cleanup(Context context)
throws IOException, InterruptedException {
// Surface how long this reducer actually slept, both in logs and task status.
final String msg = "Slept for " + duration;
LOG.info(msg);
context.setStatus(msg);
}
}
public static
|
SleepReducer
|
java
|
mockito__mockito
|
mockito-extensions/mockito-errorprone/src/test/java/org/mockito/errorprone/bugpatterns/MockitoAnyClassWithPrimitiveTypeTest.java
|
{
"start": 6216,
"end": 7028
}
|
class ____ {",
" int run(int arg) {",
" return 42;",
" }",
" int runWithBoth(String arg1, long arg2) {",
" return 42;",
" }",
" }",
"}")
.addOutputLines(
"Test.java",
"import static org.mockito.ArgumentMatchers.any;",
"import static org.mockito.ArgumentMatchers.anyInt;",
"import static org.mockito.ArgumentMatchers.anyLong;",
"import static org.mockito.Mockito.mock;",
"import static org.mockito.Mockito.when;",
"
|
Foo
|
java
|
spring-projects__spring-framework
|
spring-web/src/test/java/org/springframework/web/method/annotation/RequestParamMapMethodArgumentResolverTests.java
|
{
"start": 1947,
"end": 8971
}
|
// Tests for RequestParamMapMethodArgumentResolver: resolving @RequestParam-annotated
// Map / MultiValueMap parameters (without a name attribute) from request parameters,
// multipart files, and Servlet Parts.
class ____ {
private RequestParamMapMethodArgumentResolver resolver = new RequestParamMapMethodArgumentResolver();
private MockHttpServletRequest request = new MockHttpServletRequest();
private NativeWebRequest webRequest = new ServletWebRequest(request, new MockHttpServletResponse());
// Reflects over the handle(...) method below to obtain annotated MethodParameters.
private ResolvableMethod testMethod = ResolvableMethod.on(getClass()).named("handle").build();
// Only @RequestParam Map parameters WITHOUT an explicit name are supported.
@Test
void supportsParameter() {
MethodParameter param = this.testMethod.annot(requestParam().noName()).arg(Map.class, String.class, String.class);
assertThat(resolver.supportsParameter(param)).isTrue();
param = this.testMethod.annotPresent(RequestParam.class).arg(MultiValueMap.class, String.class, String.class);
assertThat(resolver.supportsParameter(param)).isTrue();
param = this.testMethod.annot(requestParam().name("name")).arg(Map.class, String.class, String.class);
assertThat(resolver.supportsParameter(param)).isFalse();
param = this.testMethod.annotNotPresent(RequestParam.class).arg(Map.class, String.class, String.class);
assertThat(resolver.supportsParameter(param)).isFalse();
}
// Plain Map<String, String>: one value per parameter name.
@Test
void resolveMapOfString() throws Exception {
String name = "foo";
String value = "bar";
request.addParameter(name, value);
Map<String, String> expected = Collections.singletonMap(name, value);
MethodParameter param = this.testMethod.annot(requestParam().noName()).arg(Map.class, String.class, String.class);
Object result = resolver.resolveArgument(param, null, webRequest, null);
boolean condition = result instanceof Map;
assertThat(condition).isTrue();
assertThat(result).as("Invalid result").isEqualTo(expected);
}
// MultiValueMap<String, String>: all values for a repeated parameter name.
@Test
void resolveMultiValueMapOfString() throws Exception {
String name = "foo";
String value1 = "bar";
String value2 = "baz";
request.addParameter(name, value1, value2);
MultiValueMap<String, String> expected = new LinkedMultiValueMap<>(1);
expected.add(name, value1);
expected.add(name, value2);
MethodParameter param = this.testMethod.annotPresent(RequestParam.class).arg(MultiValueMap.class, String.class, String.class);
Object result = resolver.resolveArgument(param, null, webRequest, null);
boolean condition = result instanceof MultiValueMap;
assertThat(condition).isTrue();
assertThat(result).as("Invalid result").isEqualTo(expected);
}
// Map<String, MultipartFile>: first file per part name from a multipart request.
@Test
@SuppressWarnings("unchecked")
public void resolveMapOfMultipartFile() throws Exception {
MockMultipartHttpServletRequest request = new MockMultipartHttpServletRequest();
MultipartFile expected1 = new MockMultipartFile("mfile", "Hello World".getBytes());
MultipartFile expected2 = new MockMultipartFile("other", "Hello World 3".getBytes());
request.addFile(expected1);
request.addFile(expected2);
webRequest = new ServletWebRequest(request);
MethodParameter param = this.testMethod.annot(requestParam().noName()).arg(Map.class, String.class, MultipartFile.class);
Object result = resolver.resolveArgument(param, null, webRequest, null);
boolean condition = result instanceof Map;
assertThat(condition).isTrue();
Map<String, MultipartFile> resultMap = (Map<String, MultipartFile>) result;
assertThat(resultMap).hasSize(2);
assertThat(resultMap.get("mfile")).isEqualTo(expected1);
assertThat(resultMap.get("other")).isEqualTo(expected2);
}
// MultiValueMap<String, MultipartFile>: all files per part name, in order.
@Test
@SuppressWarnings("unchecked")
public void resolveMultiValueMapOfMultipartFile() throws Exception {
MockMultipartHttpServletRequest request = new MockMultipartHttpServletRequest();
MultipartFile expected1 = new MockMultipartFile("mfilelist", "Hello World 1".getBytes());
MultipartFile expected2 = new MockMultipartFile("mfilelist", "Hello World 2".getBytes());
MultipartFile expected3 = new MockMultipartFile("other", "Hello World 3".getBytes());
request.addFile(expected1);
request.addFile(expected2);
request.addFile(expected3);
webRequest = new ServletWebRequest(request);
MethodParameter param = this.testMethod.annot(requestParam().noName()).arg(MultiValueMap.class, String.class, MultipartFile.class);
Object result = resolver.resolveArgument(param, null, webRequest, null);
boolean condition = result instanceof MultiValueMap;
assertThat(condition).isTrue();
MultiValueMap<String, MultipartFile> resultMap = (MultiValueMap<String, MultipartFile>) result;
assertThat(resultMap).hasSize(2);
assertThat(resultMap.get("mfilelist")).containsExactly(expected1, expected2);
assertThat(resultMap.get("other")).containsExactly(expected3);
}
// Map<String, Part>: first Servlet Part per name.
@Test
@SuppressWarnings("unchecked")
public void resolveMapOfPart() throws Exception {
MockHttpServletRequest request = new MockHttpServletRequest();
request.setContentType("multipart/form-data");
Part expected1 = new MockPart("mfile", "Hello World".getBytes());
Part expected2 = new MockPart("other", "Hello World 3".getBytes());
request.addPart(expected1);
request.addPart(expected2);
webRequest = new ServletWebRequest(request);
MethodParameter param = this.testMethod.annot(requestParam().noName()).arg(Map.class, String.class, Part.class);
Object result = resolver.resolveArgument(param, null, webRequest, null);
boolean condition = result instanceof Map;
assertThat(condition).isTrue();
Map<String, Part> resultMap = (Map<String, Part>) result;
assertThat(resultMap).hasSize(2);
assertThat(resultMap.get("mfile")).isEqualTo(expected1);
assertThat(resultMap.get("other")).isEqualTo(expected2);
}
// MultiValueMap<String, Part>: all Servlet Parts per name, in order.
@Test
@SuppressWarnings("unchecked")
public void resolveMultiValueMapOfPart() throws Exception {
MockHttpServletRequest request = new MockHttpServletRequest();
request.setContentType("multipart/form-data");
Part expected1 = new MockPart("mfilelist", "Hello World 1".getBytes());
Part expected2 = new MockPart("mfilelist", "Hello World 2".getBytes());
Part expected3 = new MockPart("other", "Hello World 3".getBytes());
request.addPart(expected1);
request.addPart(expected2);
request.addPart(expected3);
webRequest = new ServletWebRequest(request);
MethodParameter param = this.testMethod.annot(requestParam().noName()).arg(MultiValueMap.class, String.class, Part.class);
Object result = resolver.resolveArgument(param, null, webRequest, null);
boolean condition = result instanceof MultiValueMap;
assertThat(condition).isTrue();
MultiValueMap<String, Part> resultMap = (MultiValueMap<String, Part>) result;
assertThat(resultMap).hasSize(2);
assertThat(resultMap.get("mfilelist")).containsExactly(expected1, expected2);
assertThat(resultMap.get("other")).containsExactly(expected3);
}
// Fixture method: its parameters provide the annotated signatures resolved above.
public void handle(
@RequestParam Map<String, String> param1,
@RequestParam MultiValueMap<String, String> param2,
@RequestParam Map<String, MultipartFile> param3,
@RequestParam MultiValueMap<String, MultipartFile> param4,
@RequestParam Map<String, Part> param5,
@RequestParam MultiValueMap<String, Part> param6,
@RequestParam("name") Map<String, String> param7,
Map<String, String> param8) {
}
}
|
RequestParamMapMethodArgumentResolverTests
|
java
|
spring-projects__spring-framework
|
spring-messaging/src/test/java/org/springframework/messaging/simp/broker/DefaultSubscriptionRegistryTests.java
|
{
"start": 1301,
"end": 20789
}
|
class ____ {
private final DefaultSubscriptionRegistry registry = new DefaultSubscriptionRegistry();
// A subscribe message missing its session id, subscription id, or destination
// must be ignored by the registry.
@Test
void registerSubscriptionInvalidInput() {
    String sessionId = "sess01";
    String subscriptionId = "subs01";
    String destination = "/foo";
    this.registry.registerSubscription(subscribeMessage(null, subscriptionId, destination));
    assertThat(this.registry.findSubscriptions(createMessage(destination))).isEmpty();
    this.registry.registerSubscription(subscribeMessage(sessionId, null, destination));
    assertThat(this.registry.findSubscriptions(createMessage(destination))).isEmpty();
    this.registry.registerSubscription(subscribeMessage(sessionId, subscriptionId, null));
    assertThat(this.registry.findSubscriptions(createMessage(destination))).isEmpty();
}
// A valid subscription is found when resolving its exact destination.
@Test
void registerSubscription() {
    String sessionId = "sess01";
    String subscriptionId = "subs01";
    String destination = "/foo";
    this.registry.registerSubscription(subscribeMessage(sessionId, subscriptionId, destination));
    MultiValueMap<String, String> subscriptions = this.registry.findSubscriptions(createMessage(destination));
    assertThat(subscriptions).hasSize(1);
    assertThat(subscriptions.get(sessionId)).containsExactly(subscriptionId);
}
@Test
void registerSubscriptionOneSession() {
String sessId = "sess01";
List<String> subscriptionIds = List.of("subs01", "subs02", "subs03");
String dest = "/foo";
for (String subId : subscriptionIds) {
this.registry.registerSubscription(subscribeMessage(sessId, subId, dest));
}
MultiValueMap<String, String> actual = this.registry.findSubscriptions(createMessage(dest));
assertThat(actual).hasSize(1);
assertThat(sort(actual.get(sessId))).isEqualTo(subscriptionIds);
}
@Test
void registerSameSubscriptionTwice() {
String sessId = "sess01";
String subId = "subs01";
String dest = "/foo";
this.registry.registerSubscription(subscribeMessage(sessId, subId, dest));
this.registry.registerSubscription(subscribeMessage(sessId, subId, dest));
MultiValueMap<String, String> actual = this.registry.findSubscriptions(createMessage(dest));
assertThat(actual).hasSize(1);
assertThat(actual.get(sessId)).containsExactly(subId);
// Register more after destinationCache populated through findSubscriptions,
// and make sure it's still only one subscriptionId
this.registry.registerSubscription(subscribeMessage(sessId, subId, dest));
this.registry.registerSubscription(subscribeMessage(sessId, subId, dest));
actual = this.registry.findSubscriptions(createMessage(dest));
assertThat(actual).hasSize(1);
assertThat(actual.get(sessId)).containsExactly(subId);
}
@Test
void registerSubscriptionMultipleSessions() {
List<String> sessIds = List.of("sess01", "sess02", "sess03");
List<String> subscriptionIds = List.of("subs01", "subs02", "subs03");
String dest = "/foo";
for (String sessId : sessIds) {
for (String subsId : subscriptionIds) {
this.registry.registerSubscription(subscribeMessage(sessId, subsId, dest));
}
}
MultiValueMap<String, String> actual = this.registry.findSubscriptions(createMessage(dest));
assertThat(actual).hasSize(3);
assertThat(sort(actual.get(sessIds.get(0)))).isEqualTo(subscriptionIds);
assertThat(sort(actual.get(sessIds.get(1)))).isEqualTo(subscriptionIds);
assertThat(sort(actual.get(sessIds.get(2)))).isEqualTo(subscriptionIds);
}
@Test
void registerSubscriptionWithDestinationPattern() {
String sessId = "sess01";
String subsId = "subs01";
String destPattern = "/topic/PRICE.STOCK.*.IBM";
String dest = "/topic/PRICE.STOCK.NASDAQ.IBM";
this.registry.registerSubscription(subscribeMessage(sessId, subsId, destPattern));
MultiValueMap<String, String> actual = this.registry.findSubscriptions(createMessage(dest));
assertThat(actual).hasSize(1);
assertThat(actual.get(sessId)).containsExactly(subsId);
}
@Test // SPR-11657
void registerSubscriptionsWithSimpleAndPatternDestinations() {
String sess1 = "sess01";
String sess2 = "sess02";
String subs1 = "subs01";
String subs2 = "subs02";
String subs3 = "subs03";
String destNasdaqIbm = "/topic/PRICE.STOCK.NASDAQ.IBM";
Message<?> destNasdaqIbmMessage = createMessage(destNasdaqIbm);
this.registry.registerSubscription(subscribeMessage(sess1, subs2, destNasdaqIbm));
this.registry.registerSubscription(subscribeMessage(sess1, subs1, "/topic/PRICE.STOCK.*.IBM"));
MultiValueMap<String, String> actual = this.registry.findSubscriptions(destNasdaqIbmMessage);
assertThat(actual).hasSize(1);
assertThat(actual.get(sess1)).containsExactlyInAnyOrder(subs2, subs1);
this.registry.registerSubscription(subscribeMessage(sess2, subs1, destNasdaqIbm));
this.registry.registerSubscription(subscribeMessage(sess2, subs2, "/topic/PRICE.STOCK.NYSE.IBM"));
this.registry.registerSubscription(subscribeMessage(sess2, subs3, "/topic/PRICE.STOCK.NASDAQ.GOOG"));
actual = this.registry.findSubscriptions(destNasdaqIbmMessage);
assertThat(actual).hasSize(2);
assertThat(actual.get(sess1)).containsExactlyInAnyOrder(subs2, subs1);
assertThat(actual.get(sess2)).containsExactly(subs1);
this.registry.unregisterAllSubscriptions(sess1);
actual = this.registry.findSubscriptions(destNasdaqIbmMessage);
assertThat(actual).hasSize(1);
assertThat(actual.get(sess2)).containsExactly(subs1);
this.registry.registerSubscription(subscribeMessage(sess1, subs1, "/topic/PRICE.STOCK.*.IBM"));
this.registry.registerSubscription(subscribeMessage(sess1, subs2, destNasdaqIbm));
actual = this.registry.findSubscriptions(destNasdaqIbmMessage);
assertThat(actual).hasSize(2);
assertThat(actual.get(sess1)).containsExactlyInAnyOrder(subs1, subs2);
assertThat(actual.get(sess2)).containsExactly(subs1);
this.registry.unregisterSubscription(unsubscribeMessage(sess1, subs2));
actual = this.registry.findSubscriptions(destNasdaqIbmMessage);
assertThat(actual).hasSize(2);
assertThat(actual.get(sess1)).containsExactly(subs1);
assertThat(actual.get(sess2)).containsExactly(subs1);
this.registry.unregisterSubscription(unsubscribeMessage(sess1, subs1));
actual = this.registry.findSubscriptions(destNasdaqIbmMessage);
assertThat(actual).hasSize(1);
assertThat(actual.get(sess2)).containsExactly(subs1);
this.registry.unregisterSubscription(unsubscribeMessage(sess2, subs1));
actual = this.registry.findSubscriptions(destNasdaqIbmMessage);
assertThat(actual).isEmpty();
}
@Test // SPR-11755
void registerAndUnregisterMultipleDestinations() {
String sess1 = "sess01";
String sess2 = "sess02";
String subs1 = "subs01";
String subs2 = "subs02";
String subs3 = "subs03";
String subs4 = "subs04";
String subs5 = "subs05";
this.registry.registerSubscription(subscribeMessage(sess1, subs1, "/topic/PRICE.STOCK.NASDAQ.IBM"));
this.registry.registerSubscription(subscribeMessage(sess1, subs2, "/topic/PRICE.STOCK.NYSE.IBM"));
this.registry.registerSubscription(subscribeMessage(sess1, subs3, "/topic/PRICE.STOCK.NASDAQ.GOOG"));
this.registry.findSubscriptions(createMessage("/topic/PRICE.STOCK.NYSE.IBM"));
this.registry.findSubscriptions(createMessage("/topic/PRICE.STOCK.NASDAQ.GOOG"));
this.registry.findSubscriptions(createMessage("/topic/PRICE.STOCK.NASDAQ.IBM"));
this.registry.unregisterSubscription(unsubscribeMessage(sess1, subs1));
this.registry.unregisterSubscription(unsubscribeMessage(sess1, subs2));
this.registry.unregisterSubscription(unsubscribeMessage(sess1, subs3));
this.registry.registerSubscription(subscribeMessage(sess1, subs1, "/topic/PRICE.STOCK.NASDAQ.IBM"));
this.registry.registerSubscription(subscribeMessage(sess1, subs2, "/topic/PRICE.STOCK.NYSE.IBM"));
this.registry.registerSubscription(subscribeMessage(sess1, subs3, "/topic/PRICE.STOCK.NASDAQ.GOOG"));
this.registry.registerSubscription(subscribeMessage(sess1, subs4, "/topic/PRICE.STOCK.NYSE.IBM"));
this.registry.registerSubscription(subscribeMessage(sess2, subs5, "/topic/PRICE.STOCK.NASDAQ.GOOG"));
this.registry.unregisterAllSubscriptions(sess1);
this.registry.unregisterAllSubscriptions(sess2);
}
@Test
void registerSubscriptionWithDestinationPatternRegex() {
String sessId = "sess01";
String subsId = "subs01";
String destPattern = "/topic/PRICE.STOCK.*.{ticker:(IBM|MSFT)}";
this.registry.registerSubscription(subscribeMessage(sessId, subsId, destPattern));
Message<?> message = createMessage("/topic/PRICE.STOCK.NASDAQ.IBM");
MultiValueMap<String, String> actual = this.registry.findSubscriptions(message);
assertThat(actual).hasSize(1);
assertThat(actual.get(sessId)).containsExactly(subsId);
message = createMessage("/topic/PRICE.STOCK.NASDAQ.MSFT");
actual = this.registry.findSubscriptions(message);
assertThat(actual).hasSize(1);
assertThat(actual.get(sessId)).containsExactly(subsId);
message = createMessage("/topic/PRICE.STOCK.NASDAQ.VMW");
actual = this.registry.findSubscriptions(message);
assertThat(actual).isEmpty();
}
@Test
void registerSubscriptionWithSelectorHeaderEnabled() {
String sessionId1 = "sess01";
String sessionId2 = "sess02";
String sessionId3 = "sess03";
String subscriptionId1 = "subs01";
String subscriptionId2 = "subs02";
String subscriptionId3 = "subs02";
String destination = "/foo";
String selector1 = "headers.foo == 'bar'";
String selector2 = "headers.foo == 'enigma'";
// Explicitly enable selector support
this.registry.setSelectorHeaderName("selector");
// Register subscription with matching selector header
this.registry.registerSubscription(subscribeMessage(sessionId1, subscriptionId1, destination, selector1));
// Register subscription with non-matching selector header
this.registry.registerSubscription(subscribeMessage(sessionId2, subscriptionId2, destination, selector2));
// Register subscription without a selector header
this.registry.registerSubscription(subscribeMessage(sessionId3, subscriptionId3, destination, null));
// First, try with message WITH selected 'foo' header present
SimpMessageHeaderAccessor accessor = SimpMessageHeaderAccessor.create();
accessor.setDestination(destination);
accessor.setNativeHeader("foo", "bar");
Message<?> message = MessageBuilder.createMessage("", accessor.getMessageHeaders());
MultiValueMap<String, String> subscriptions = this.registry.findSubscriptions(message);
assertThat(subscriptions).hasSize(2);
// Subscription #1 has a 'selector' header that DOES match.
assertThat(subscriptions.get(sessionId1)).containsExactly(subscriptionId1);
// Subscription #2 has a 'selector' header that does NOT match.
assertThat(subscriptions.get(sessionId2)).isNull();
// Subscription #3 does NOT have a 'selector' header, so it matches anyway.
assertThat(subscriptions.get(sessionId3)).containsExactly(subscriptionId3);
// Then try with message WITHOUT selected 'foo' header present
subscriptions = this.registry.findSubscriptions(createMessage(destination));
assertThat(subscriptions).hasSize(1);
// Subscription #3 does NOT have a 'selector' header, so it matches anyway.
assertThat(subscriptions.get(sessionId3)).containsExactly(subscriptionId3);
}
@Test
void registerSubscriptionWithSelectorHeaderDisabledByDefault() {
String sessionId1 = "sess01";
String sessionId2 = "sess02";
String sessionId3 = "sess03";
String subscriptionId1 = "subs01";
String subscriptionId2 = "subs02";
String subscriptionId3 = "subs02";
String destination = "/foo";
String selector1 = "headers.foo == 'bar'";
String selector2 = "headers.foo == 'enigma'";
// Register subscription with matching selector header
this.registry.registerSubscription(subscribeMessage(sessionId1, subscriptionId1, destination, selector1));
// Register subscription with non-matching selector header
this.registry.registerSubscription(subscribeMessage(sessionId2, subscriptionId2, destination, selector2));
// Register subscription without a selector header
this.registry.registerSubscription(subscribeMessage(sessionId3, subscriptionId3, destination, null));
// First, try with message WITH selected 'foo' header present
SimpMessageHeaderAccessor accessor = SimpMessageHeaderAccessor.create();
accessor.setDestination(destination);
accessor.setNativeHeader("foo", "bar");
Message<?> message = MessageBuilder.createMessage("", accessor.getMessageHeaders());
MultiValueMap<String, String> subscriptions = this.registry.findSubscriptions(message);
// 'selector' header is ignored, so all 3 subscriptions should be found
assertThat(subscriptions).hasSize(3);
assertThat(subscriptions.get(sessionId1)).containsExactly(subscriptionId1);
assertThat(subscriptions.get(sessionId2)).containsExactly(subscriptionId2);
assertThat(subscriptions.get(sessionId3)).containsExactly(subscriptionId3);
// Then try with message WITHOUT selected 'foo' header present
subscriptions = this.registry.findSubscriptions(createMessage(destination));
// 'selector' header is ignored, so all 3 subscriptions should be found
assertThat(subscriptions).hasSize(3);
assertThat(subscriptions.get(sessionId1)).containsExactly(subscriptionId1);
assertThat(subscriptions.get(sessionId2)).containsExactly(subscriptionId2);
assertThat(subscriptions.get(sessionId3)).containsExactly(subscriptionId3);
}
@Test // SPR-11931
void registerSubscriptionTwiceAndUnregister() {
this.registry.registerSubscription(subscribeMessage("sess01", "subs01", "/foo"));
this.registry.registerSubscription(subscribeMessage("sess01", "subs02", "/foo"));
MultiValueMap<String, String> actual = this.registry.findSubscriptions(createMessage("/foo"));
assertThat(actual).hasSize(1);
assertThat(actual.get("sess01")).containsExactly("subs01", "subs02");
this.registry.unregisterSubscription(unsubscribeMessage("sess01", "subs01"));
actual = this.registry.findSubscriptions(createMessage("/foo"));
assertThat(actual).hasSize(1);
assertThat(actual.get("sess01")).containsExactly("subs02");
this.registry.unregisterSubscription(unsubscribeMessage("sess01", "subs02"));
actual = this.registry.findSubscriptions(createMessage("/foo"));
assertThat(actual).isEmpty();
}
@Test
void unregisterSubscription() {
List<String> sessIds = List.of("sess01", "sess02", "sess03");
List<String> subscriptionIds = List.of("subs01", "subs02", "subs03");
String dest = "/foo";
for (String sessId : sessIds) {
for (String subsId : subscriptionIds) {
this.registry.registerSubscription(subscribeMessage(sessId, subsId, dest));
}
}
this.registry.unregisterSubscription(unsubscribeMessage(sessIds.get(0), subscriptionIds.get(0)));
this.registry.unregisterSubscription(unsubscribeMessage(sessIds.get(0), subscriptionIds.get(1)));
this.registry.unregisterSubscription(unsubscribeMessage(sessIds.get(0), subscriptionIds.get(2)));
MultiValueMap<String, String> actual = this.registry.findSubscriptions(createMessage(dest));
assertThat(actual).hasSize(2);
assertThat(sort(actual.get(sessIds.get(1)))).isEqualTo(subscriptionIds);
assertThat(sort(actual.get(sessIds.get(2)))).isEqualTo(subscriptionIds);
}
@Test
void unregisterAllSubscriptions() {
List<String> sessIds = List.of("sess01", "sess02", "sess03");
List<String> subscriptionIds = List.of("subs01", "subs02", "subs03");
String dest = "/foo";
for (String sessId : sessIds) {
for (String subsId : subscriptionIds) {
this.registry.registerSubscription(subscribeMessage(sessId, subsId, dest));
}
}
this.registry.unregisterAllSubscriptions(sessIds.get(0));
this.registry.unregisterAllSubscriptions(sessIds.get(1));
MultiValueMap<String, String> actual = this.registry.findSubscriptions(createMessage(dest));
assertThat(actual).hasSize(1);
assertThat(sort(actual.get(sessIds.get(2)))).isEqualTo(subscriptionIds);
}
@Test
void unregisterAllSubscriptionsNoMatch() {
this.registry.unregisterAllSubscriptions("bogus");
// no exceptions
}
@Test
void findSubscriptionsNoMatches() {
MultiValueMap<String, String> actual = this.registry.findSubscriptions(createMessage("/foo"));
assertThat(actual).isEmpty();
}
@Test // SPR-12665
void findSubscriptionsReturnsMapSafeToIterate() {
this.registry.registerSubscription(subscribeMessage("sess1", "1", "/foo"));
this.registry.registerSubscription(subscribeMessage("sess2", "1", "/foo"));
MultiValueMap<String, String> subscriptions = this.registry.findSubscriptions(createMessage("/foo"));
assertThat(subscriptions).hasSize(2);
Iterator<Map.Entry<String, List<String>>> iterator = subscriptions.entrySet().iterator();
iterator.next();
this.registry.registerSubscription(subscribeMessage("sess3", "1", "/foo"));
iterator.next();
// no ConcurrentModificationException
}
@Test // SPR-13185
void findSubscriptionsReturnsMapSafeToIterateIncludingValues() {
this.registry.registerSubscription(subscribeMessage("sess1", "1", "/foo"));
this.registry.registerSubscription(subscribeMessage("sess1", "2", "/foo"));
MultiValueMap<String, String> allSubscriptions = this.registry.findSubscriptions(createMessage("/foo"));
assertThat(allSubscriptions).hasSize(1);
Iterator<String> iteratorValues = allSubscriptions.get("sess1").iterator();
iteratorValues.next();
this.registry.unregisterSubscription(unsubscribeMessage("sess1", "2"));
iteratorValues.next();
// no ConcurrentModificationException
}
@Test // SPR-13555
void cacheLimitExceeded() {
this.registry.setCacheLimit(1);
this.registry.registerSubscription(subscribeMessage("sess1", "1", "/foo"));
this.registry.registerSubscription(subscribeMessage("sess1", "2", "/bar"));
assertThat(this.registry.findSubscriptions(createMessage("/foo"))).hasSize(1);
assertThat(this.registry.findSubscriptions(createMessage("/bar"))).hasSize(1);
this.registry.registerSubscription(subscribeMessage("sess2", "1", "/foo"));
this.registry.registerSubscription(subscribeMessage("sess2", "2", "/bar"));
assertThat(this.registry.findSubscriptions(createMessage("/foo"))).hasSize(2);
assertThat(this.registry.findSubscriptions(createMessage("/bar"))).hasSize(2);
}
private Message<?> createMessage(String destination) {
SimpMessageHeaderAccessor accessor = SimpMessageHeaderAccessor.create();
accessor.setDestination(destination);
return MessageBuilder.createMessage("", accessor.getMessageHeaders());
}
private Message<?> subscribeMessage(String sessionId, String subscriptionId, String destination) {
return subscribeMessage(sessionId, subscriptionId, destination, null);
}
private Message<?> subscribeMessage(String sessionId, String subscriptionId, String dest, String selector) {
SimpMessageHeaderAccessor accessor = SimpMessageHeaderAccessor.create(SimpMessageType.SUBSCRIBE);
accessor.setSessionId(sessionId);
accessor.setSubscriptionId(subscriptionId);
if (dest != null) {
accessor.setDestination(dest);
}
if (selector != null) {
accessor.setNativeHeader("selector", selector);
}
return MessageBuilder.createMessage("", accessor.getMessageHeaders());
}
private Message<?> unsubscribeMessage(String sessionId, String subscriptionId) {
SimpMessageHeaderAccessor accessor = SimpMessageHeaderAccessor.create(SimpMessageType.UNSUBSCRIBE);
accessor.setSessionId(sessionId);
accessor.setSubscriptionId(subscriptionId);
return MessageBuilder.createMessage("", accessor.getMessageHeaders());
}
private List<String> sort(List<String> list) {
Collections.sort(list);
return list;
}
}
|
DefaultSubscriptionRegistryTests
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/sql/ast/tree/from/MutatingTableReferenceGroupWrapper.java
|
{
"start": 543,
"end": 2817
}
|
class ____ implements TableGroup {
private final NavigablePath navigablePath;
private final ModelPartContainer modelPart;
private final NamedTableReference mutatingTableReference;
public MutatingTableReferenceGroupWrapper(
NavigablePath navigablePath,
ModelPartContainer modelPart,
NamedTableReference mutatingTableReference) {
this.navigablePath = navigablePath;
this.modelPart = modelPart;
this.mutatingTableReference = mutatingTableReference;
}
@Override
public NavigablePath getNavigablePath() {
return navigablePath;
}
@Override
public ModelPart getExpressionType() {
return getModelPart();
}
@Override
public String getGroupAlias() {
return null;
}
@Override
public ModelPartContainer getModelPart() {
return modelPart;
}
@Override
public TableReference getPrimaryTableReference() {
return mutatingTableReference;
}
@Override
public TableReference getTableReference(
NavigablePath navigablePath,
String tableExpression,
boolean resolve) {
return mutatingTableReference.getTableReference( tableExpression );
}
@Override
public void applyAffectedTableNames(Consumer<String> nameCollector) {
mutatingTableReference.applyAffectedTableNames( nameCollector);
}
@Override
public String getSourceAlias() {
return null;
}
@Override
public List<TableGroupJoin> getTableGroupJoins() {
return Collections.emptyList();
}
@Override
public List<TableGroupJoin> getNestedTableGroupJoins() {
return Collections.emptyList();
}
@Override
public boolean canUseInnerJoins() {
return false;
}
@Override
public void addTableGroupJoin(TableGroupJoin join) {
throw new UnsupportedOperationException();
}
@Override
public void prependTableGroupJoin(NavigablePath navigablePath, TableGroupJoin join) {
throw new UnsupportedOperationException();
}
@Override
public void addNestedTableGroupJoin(TableGroupJoin join) {
throw new UnsupportedOperationException();
}
@Override
public void visitTableGroupJoins(Consumer<TableGroupJoin> consumer) {
}
@Override
public void visitNestedTableGroupJoins(Consumer<TableGroupJoin> consumer) {
}
@Override
public List<TableReferenceJoin> getTableReferenceJoins() {
return Collections.emptyList();
}
}
|
MutatingTableReferenceGroupWrapper
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/state/ArrayListSerializerUpgradeTest.java
|
{
"start": 3234,
"end": 4080
}
|
class ____
implements TypeSerializerUpgradeTestBase.UpgradeVerifier<ArrayList<String>> {
@Override
public TypeSerializer<ArrayList<String>> createUpgradedSerializer() {
return new ArrayListSerializer<>(StringSerializer.INSTANCE);
}
@Override
public Condition<ArrayList<String>> testDataCondition() {
ArrayList<String> data = new ArrayList<>(2);
data.add("Apache");
data.add("Flink");
return new Condition<>(data::equals, "value is equal to " + data);
}
@Override
public Condition<TypeSerializerSchemaCompatibility<ArrayList<String>>>
schemaCompatibilityCondition(FlinkVersion version) {
return TypeSerializerConditions.isCompatibleAsIs();
}
}
}
|
ArrayListSerializerVerifier
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/bugs/_631/Issue631Test.java
|
{
"start": 632,
"end": 1391
}
|
class ____ {
@ProcessorTest
@IssueKey("631")
@ExpectedCompilationOutcome(
value = CompilationResult.FAILED,
diagnostics = {
@Diagnostic(type = ErroneousSourceTargetMapper.class,
kind = Kind.ERROR,
line = 22,
message = "Can't generate mapping method for a generic type variable target."),
@Diagnostic(type = ErroneousSourceTargetMapper.class,
kind = Kind.ERROR,
line = 24,
message = "Can't generate mapping method for a generic type variable source.")
}
)
@WithClasses({ErroneousSourceTargetMapper.class, Base1.class, Base2.class})
public void showsCantMapPropertyError() {
}
}
|
Issue631Test
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/util/StateHandleStoreUtils.java
|
{
"start": 1321,
"end": 3120
}
|
class ____ {
/**
* Serializes the passed {@link StateObject} and discards the state in case of failure.
*
* @param stateObject the {@code StateObject} that shall be serialized.
* @return The serialized version of the passed {@code StateObject}.
* @throws Exception if an error occurred during the serialization. The corresponding {@code
* StateObject} will be discarded in that case.
*/
public static byte[] serializeOrDiscard(StateObject stateObject) throws Exception {
try {
return InstantiationUtil.serializeObject(stateObject);
} catch (Exception e) {
try {
stateObject.discardState();
} catch (Exception discardException) {
e.addSuppressed(discardException);
}
ExceptionUtils.rethrowException(e);
}
// will never happen but is added to please the compiler
return new byte[0];
}
/**
* Deserializes the passed data into a {@link RetrievableStateHandle}.
*
* @param data The data that shall be deserialized.
* @param <T> The type of data handled by the deserialized {@code RetrievableStateHandle}.
* @return The {@code RetrievableStateHandle} instance.
* @throws IOException Any of the usual Input/Output related exceptions.
* @throws ClassNotFoundException If the data couldn't be deserialized into a {@code
* RetrievableStateHandle} referring to the expected type {@code <T>}.
*/
public static <T extends Serializable> T deserialize(byte[] data)
throws IOException, ClassNotFoundException {
return InstantiationUtil.deserializeObject(
data, Thread.currentThread().getContextClassLoader());
}
}
|
StateHandleStoreUtils
|
java
|
junit-team__junit5
|
junit-platform-engine/src/main/java/org/junit/platform/engine/discovery/NestedMethodSelector.java
|
{
"start": 2538,
"end": 5034
}
|
class ____ implements DiscoverySelector {
private final NestedClassSelector nestedClassSelector;
private final MethodSelector methodSelector;
NestedMethodSelector(@Nullable ClassLoader classLoader, List<String> enclosingClassNames, String nestedClassName,
String methodName, String parameterTypeNames) {
this.nestedClassSelector = new NestedClassSelector(classLoader, enclosingClassNames, nestedClassName);
this.methodSelector = new MethodSelector(classLoader, nestedClassName, methodName, parameterTypeNames);
}
/**
* @since 1.10
*/
NestedMethodSelector(@Nullable ClassLoader classLoader, List<String> enclosingClassNames, String nestedClassName,
String methodName, Class<?>... parameterTypes) {
this.nestedClassSelector = new NestedClassSelector(classLoader, enclosingClassNames, nestedClassName);
this.methodSelector = new MethodSelector(classLoader, nestedClassName, methodName, parameterTypes);
}
NestedMethodSelector(List<Class<?>> enclosingClasses, Class<?> nestedClass, String methodName,
String parameterTypeNames) {
this.nestedClassSelector = new NestedClassSelector(enclosingClasses, nestedClass);
this.methodSelector = new MethodSelector(nestedClass, methodName, parameterTypeNames);
}
/**
* @since 1.10
*/
NestedMethodSelector(List<Class<?>> enclosingClasses, Class<?> nestedClass, String methodName,
Class<?>... parameterTypes) {
this.nestedClassSelector = new NestedClassSelector(enclosingClasses, nestedClass);
this.methodSelector = new MethodSelector(nestedClass, methodName, parameterTypes);
}
NestedMethodSelector(List<Class<?>> enclosingClasses, Class<?> nestedClass, Method method) {
this.nestedClassSelector = new NestedClassSelector(enclosingClasses, nestedClass);
this.methodSelector = new MethodSelector(nestedClass, method);
}
/**
* Get the {@link ClassLoader} used to load the nested class.
*
* @since 1.10
*/
@API(status = MAINTAINED, since = "1.13.3")
public @Nullable ClassLoader getClassLoader() {
return this.nestedClassSelector.getClassLoader();
}
/**
* Get the names of the classes enclosing the nested class
* containing the selected method.
*/
public List<String> getEnclosingClassNames() {
return this.nestedClassSelector.getEnclosingClassNames();
}
/**
* Get the list of {@link Class} enclosing the nested {@link Class}
* containing the selected {@link Method}.
*
* <p>If the {@link Class} were not provided, but only the name of the
* nested
|
NestedMethodSelector
|
java
|
spring-projects__spring-boot
|
module/spring-boot-micrometer-tracing-opentelemetry/src/test/java/org/springframework/boot/micrometer/tracing/opentelemetry/autoconfigure/CompositeTextMapPropagatorTests.java
|
{
"start": 5391,
"end": 6246
}
|
class ____ implements TextMapPropagator {
private final String field;
private final ContextKeyRegistry contextKeyRegistry;
private DummyTextMapPropagator(String field, ContextKeyRegistry contextKeyRegistry) {
this.field = field;
this.contextKeyRegistry = contextKeyRegistry;
}
@Override
public Collection<String> fields() {
return List.of(this.field);
}
@Override
public <C> void inject(Context context, @Nullable C carrier, TextMapSetter<C> setter) {
setter.set(carrier, this.field, this.field + "-value");
}
@Override
public <C> Context extract(Context context, @Nullable C carrier, TextMapGetter<C> getter) {
String value = getter.get(carrier, this.field);
if (value != null) {
return context.with(this.contextKeyRegistry.get(this.field), value);
}
return context;
}
}
}
|
DummyTextMapPropagator
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/ScopingValidationTest.java
|
{
"start": 25524,
"end": 25905
}
|
interface ____ {",
" SimpleType type();",
"}");
Source mediumLifetime1 =
CompilerTests.javaSource(
"test.ComponentMedium1",
"package test;",
"",
"import dagger.Component;",
"",
"@ScopeB",
"@Component(dependencies = ComponentLong.class)",
"
|
ComponentLong
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/pipes/BinaryProtocol.java
|
{
"start": 2591,
"end": 3663
}
|
enum ____ { START(0),
SET_JOB_CONF(1),
SET_INPUT_TYPES(2),
RUN_MAP(3),
MAP_ITEM(4),
RUN_REDUCE(5),
REDUCE_KEY(6),
REDUCE_VALUE(7),
CLOSE(8),
ABORT(9),
AUTHENTICATION_REQ(10),
OUTPUT(50),
PARTITIONED_OUTPUT(51),
STATUS(52),
PROGRESS(53),
DONE(54),
REGISTER_COUNTER(55),
INCREMENT_COUNTER(56),
AUTHENTICATION_RESP(57);
final int code;
MessageType(int code) {
this.code = code;
}
}
private static
|
MessageType
|
java
|
quarkusio__quarkus
|
independent-projects/arc/processor/src/main/java/io/quarkus/arc/processor/bcextensions/MessagesImpl.java
|
{
"start": 392,
"end": 2571
}
|
class ____ implements Messages {
private final SharedErrors errors;
private final Logger log;
MessagesImpl(SharedErrors errors, org.jboss.jandex.ClassInfo extensionClass) {
this.errors = errors;
this.log = Logger.getLogger(extensionClass.name().toString());
}
@Override
public void info(String message) {
log.info(message);
}
@Override
public void info(String message, AnnotationTarget relatedTo) {
log.info(message + " at " + relatedTo);
}
@Override
public void info(String message, BeanInfo relatedTo) {
log.info(message + " at " + relatedTo);
}
@Override
public void info(String message, ObserverInfo relatedTo) {
log.info(message + " at " + relatedTo);
}
@Override
public void warn(String message) {
log.warn(message);
}
@Override
public void warn(String message, AnnotationTarget relatedTo) {
log.warn(message + " at " + relatedTo);
}
@Override
public void warn(String message, BeanInfo relatedTo) {
log.warn(message + " at " + relatedTo);
}
@Override
public void warn(String message, ObserverInfo relatedTo) {
log.warn(message + " at " + relatedTo);
}
@Override
public void error(String message) {
log.error(message);
errors.add(new DeploymentException(message));
}
@Override
public void error(String message, AnnotationTarget relatedTo) {
log.error(message + " at " + relatedTo);
errors.add(new DeploymentException(message + " at " + relatedTo));
}
@Override
public void error(String message, BeanInfo relatedTo) {
log.error(message + " at " + relatedTo);
errors.add(new DeploymentException(message + " at " + relatedTo));
}
@Override
public void error(String message, ObserverInfo relatedTo) {
log.error(message + " at " + relatedTo);
errors.add(new DeploymentException(message + " at " + relatedTo));
}
@Override
public void error(Exception exception) {
log.error(exception.getMessage());
errors.add(exception);
}
}
|
MessagesImpl
|
java
|
apache__camel
|
components/camel-jms/src/test/java/org/apache/camel/component/jms/integration/spring/bind/JmsMessageBindIT.java
|
{
"start": 1537,
"end": 2863
}
|
class ____ extends AbstractSpringJMSITSupport {
@Test
public void testSendAMessageToBean() throws Exception {
MockEndpoint endpoint = getMockEndpoint("mock:result");
endpoint.expectedBodiesReceived("Completed");
Map<String, Object> headers = new HashMap<>();
headers.put("foo", "bar");
// this header should not be sent as its value cannot be serialized
headers.put("binding", new JmsBinding());
template.sendBodyAndHeaders("activemq:Test.BindingQueue", "SomeBody", headers);
// lets wait for the method to be invoked
MockEndpoint.assertIsSatisfied(context);
// now lets test that the bean is correct
MyBean bean = getMandatoryBean(MyBean.class, "myBean");
assertEquals("SomeBody", bean.getBody(), "body");
Map<?, ?> beanHeaders = bean.getHeaders();
assertNotNull(beanHeaders, "No headers!");
assertEquals("bar", beanHeaders.get("foo"), "foo header");
assertNull(beanHeaders.get("binding"), "Should get a null value");
}
@Override
protected ClassPathXmlApplicationContext createApplicationContext() {
return new ClassPathXmlApplicationContext(
"org/apache/camel/component/jms/integration/spring/bind/JmsMessageBindTest.xml");
}
}
|
JmsMessageBindIT
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/InvocationTargetExceptionTest.java
|
{
"start": 698,
"end": 1463
}
|
class ____ {
@Test
public void testProxiedInvocationException(SessionFactoryScope factoryScope) {
factoryScope.inTransaction( (s) -> {
Bean bean = new Bean();
bean.setSomeString( "my-bean" );
s.persist( bean );
} );
factoryScope.inTransaction( (s) -> {
Bean bean = s.getReference( Bean.class, "my-bean" );
assertThat( Hibernate.isInitialized( bean ) ).isFalse();
try {
bean.throwException();
fail( "exception not thrown" );
}
catch ( ParseException e ) {
// expected behavior
}
catch ( Throwable t ) {
fail( "unexpected exception type : " + t );
}
} );
}
@AfterEach
public void dropTestData(SessionFactoryScope factoryScope) throws Exception {
factoryScope.dropData();
}
}
|
InvocationTargetExceptionTest
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/highavailability/TestingClientHAServices.java
|
{
"start": 1095,
"end": 1804
}
|
class ____ implements ClientHighAvailabilityServices {
private final LeaderRetrievalService clusterRestEndpointLeaderRetriever;
private TestingClientHAServices(LeaderRetrievalService clusterRestEndpointLeaderRetriever) {
this.clusterRestEndpointLeaderRetriever = clusterRestEndpointLeaderRetriever;
}
@Override
public LeaderRetrievalService getClusterRestEndpointLeaderRetriever() {
return clusterRestEndpointLeaderRetriever;
}
@Override
public void close() throws Exception {}
public static TestingClientHAServices createClientHAServices() {
return new TestingClientHAServices(new SettableLeaderRetrievalService());
}
}
|
TestingClientHAServices
|
java
|
google__guice
|
core/test/com/google/inject/ScopesTest.java
|
{
"start": 2616,
"end": 7218
}
|
class ____ {
static final long DEADLOCK_TIMEOUT_SECONDS = 1;
private final AbstractModule singletonsModule =
new AbstractModule() {
@Override
protected void configure() {
bind(BoundAsSingleton.class).in(Scopes.SINGLETON);
bind(AnnotatedSingleton.class);
bind(EagerSingleton.class).asEagerSingleton();
bind(LinkedSingleton.class).to(RealLinkedSingleton.class);
bind(DependsOnJustInTimeSingleton.class);
bind(NotASingleton.class);
bind(ImplementedBySingleton.class).in(Scopes.SINGLETON);
bind(ProvidedBySingleton.class).in(Scopes.SINGLETON);
bind(ProvidedByAnnotatedSingleton.class);
}
};
@Before
public void setUp() throws Exception {
AnnotatedSingleton.nextInstanceId = 0;
BoundAsSingleton.nextInstanceId = 0;
EagerSingleton.nextInstanceId = 0;
RealLinkedSingleton.nextInstanceId = 0;
JustInTimeSingleton.nextInstanceId = 0;
NotASingleton.nextInstanceId = 0;
Implementation.nextInstanceId = 0;
ProvidedBySingleton.nextInstanceId = 0;
ThrowingSingleton.nextInstanceId = 0;
ProvidedByAnnotatedSingleton.nextInstanceId = 0;
}
@Test
public void testSingletons() {
Injector injector = Guice.createInjector(singletonsModule);
assertSame(
injector.getInstance(BoundAsSingleton.class), injector.getInstance(BoundAsSingleton.class));
assertSame(
injector.getInstance(AnnotatedSingleton.class),
injector.getInstance(AnnotatedSingleton.class));
assertSame(
injector.getInstance(EagerSingleton.class), injector.getInstance(EagerSingleton.class));
assertSame(
injector.getInstance(LinkedSingleton.class), injector.getInstance(LinkedSingleton.class));
assertSame(
injector.getInstance(JustInTimeSingleton.class),
injector.getInstance(JustInTimeSingleton.class));
assertNotSame(
injector.getInstance(NotASingleton.class), injector.getInstance(NotASingleton.class));
assertSame(
injector.getInstance(ImplementedBySingleton.class),
injector.getInstance(ImplementedBySingleton.class));
assertSame(
injector.getInstance(ProvidedBySingleton.class),
injector.getInstance(ProvidedBySingleton.class));
assertSame(
injector.getInstance(ProvidedByAnnotatedSingleton.class),
injector.getInstance(ProvidedByAnnotatedSingleton.class));
}
@Test
public void testJustInTimeAnnotatedSingleton() {
Injector injector = Guice.createInjector();
assertSame(
injector.getInstance(AnnotatedSingleton.class),
injector.getInstance(AnnotatedSingleton.class));
assertSame(
injector.getInstance(ProvidedByAnnotatedSingleton.class),
injector.getInstance(ProvidedByAnnotatedSingleton.class));
}
@Test
public void testSingletonIsPerInjector() {
assertNotSame(
Guice.createInjector().getInstance(AnnotatedSingleton.class),
Guice.createInjector().getInstance(AnnotatedSingleton.class));
assertNotSame(
Guice.createInjector().getInstance(ProvidedByAnnotatedSingleton.class),
Guice.createInjector().getInstance(ProvidedByAnnotatedSingleton.class));
}
@Test
public void testOverriddingAnnotation() {
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(AnnotatedSingleton.class).in(Scopes.NO_SCOPE);
bind(ProvidedByAnnotatedSingleton.class).in(Scopes.NO_SCOPE);
}
});
assertNotSame(
injector.getInstance(AnnotatedSingleton.class),
injector.getInstance(AnnotatedSingleton.class));
assertNotSame(
injector.getInstance(ProvidedByAnnotatedSingleton.class),
injector.getInstance(ProvidedByAnnotatedSingleton.class));
}
@Test
public void testScopingAnnotationsOnAbstractTypeViaBind() {
try {
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(A.class).to(AImpl.class);
}
});
fail();
} catch (CreationException expected) {
assertContains(
expected.getMessage(),
"ScopesTest$A is annotated with Singleton, but scope annotations are not supported for"
+ " abstract types.",
"at ScopesTest$A.class");
}
}
@SuppressWarnings("InjectScopeAnnotationOnInterfaceOrAbstractClass") // for testing
@Singleton
|
ScopesTest
|
java
|
elastic__elasticsearch
|
modules/data-streams/src/main/java/org/elasticsearch/datastreams/options/rest/RestPutDataStreamOptionsAction.java
|
{
"start": 1407,
"end": 2838
}
|
class ____ extends BaseRestHandler {
private static final Set<String> CAPABILITIES = Set.of(RestGetDataStreamsAction.FAILURES_LIFECYCLE_API_CAPABILITY);
@Override
public String getName() {
return "put_data_stream_options_action";
}
@Override
public List<Route> routes() {
return List.of(new Route(PUT, "/_data_stream/{name}/_options"));
}
@Override
protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
try (XContentParser parser = request.contentParser()) {
PutDataStreamOptionsAction.Request putOptionsRequest = PutDataStreamOptionsAction.Request.parseRequest(
parser,
(failureStore) -> new PutDataStreamOptionsAction.Request(
getMasterNodeTimeout(request),
getAckTimeout(request),
Strings.splitStringByCommaToArray(request.param("name")),
failureStore
)
);
putOptionsRequest.indicesOptions(IndicesOptions.fromRequest(request, putOptionsRequest.indicesOptions()));
return channel -> client.execute(PutDataStreamOptionsAction.INSTANCE, putOptionsRequest, new RestToXContentListener<>(channel));
}
}
@Override
public Set<String> supportedCapabilities() {
return CAPABILITIES;
}
}
|
RestPutDataStreamOptionsAction
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/cascade/circle/Route.java
|
{
"start": 226,
"end": 1962
}
|
/**
 * Test entity representing a route with its nodes and vehicles.
 * Mapped externally (the commented-out annotations mirror the XML mapping).
 * Collections use raw types to match the original mapping-driven test setup.
 */
class ____ {
    // @Id
    // @SequenceGenerator(name="ROUTE_SEQ", sequenceName="ROUTE_SEQ", initialValue=1, allocationSize=1)
    // @GeneratedValue(strategy=GenerationType.SEQUENCE, generator="ROUTE_SEQ")
    private Long routeID;
    private long version;
    /** A List of nodes contained in this route. */
    // @OneToMany(targetEntity=Node.class, fetch=FetchType.EAGER, cascade=CascadeType.ALL, mappedBy="route")
    private Set nodes = new HashSet();
    private Set vehicles = new HashSet();
    private String name;
    // @Transient — not persisted; used by the cascade tests to detect state copying.
    private String transientField = null;

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Set getNodes() {
        return nodes;
    }

    protected void setNodes(Set nodes) {
        this.nodes = nodes;
    }

    protected Set getVehicles() {
        return vehicles;
    }

    protected void setVehicles(Set vehicles) {
        this.vehicles = vehicles;
    }

    protected void setRouteID(Long routeID) {
        this.routeID = routeID;
    }

    public Long getRouteID() {
        return routeID;
    }

    public long getVersion() {
        return version;
    }

    protected void setVersion(long version) {
        this.version = version;
    }

    /**
     * Debug representation: header line followed by one entry per node and vehicle.
     * Uses chained StringBuilder appends instead of string concatenation inside
     * append(), which defeats the purpose of using a builder.
     */
    public String toString() {
        StringBuilder buffer = new StringBuilder();
        buffer.append("Route name: ").append(name)
                .append(" id: ").append(routeID)
                .append(" transientField: ").append(transientField)
                .append("\n");
        for (Object node : nodes) {
            buffer.append("Node: ").append(node);
        }
        for (Object vehicle : vehicles) {
            buffer.append("Vehicle: ").append(vehicle);
        }
        return buffer.toString();
    }

    public String getTransientField() {
        return transientField;
    }

    public void setTransientField(String transientField) {
        this.transientField = transientField;
    }
}
|
Route
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/any/xml/PropertyValue.java
|
{
"start": 212,
"end": 267
}
|
/**
 * A value that can render itself as a string; the common contract for
 * heterogeneous ("any"-mapped) property values in this test model.
 */
interface ____ {
    // 'public' is redundant on interface members and removed per convention.
    String asString();
}
|
PropertyValue
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/sql/dialect/mysql/ast/statement/MySqlShowSlowStatement.java
|
{
"start": 882,
"end": 2095
}
|
/**
 * AST node for MySQL's SHOW SLOW statement, carrying its optional
 * FULL/PHYSICAL flags and WHERE / ORDER BY / LIMIT clauses.
 */
class ____ extends MySqlStatementImpl implements MySqlShowStatement {
    private boolean physical;
    private SQLOrderBy orderBy;
    private SQLExpr where;
    private SQLLimit limit;
    private boolean full;

    public boolean isPhysical() {
        return this.physical;
    }

    public void setPhysical(boolean physical) {
        this.physical = physical;
    }

    public boolean isFull() {
        return this.full;
    }

    public void setFull(boolean full) {
        this.full = full;
    }

    public SQLExpr getWhere() {
        return this.where;
    }

    public void setWhere(SQLExpr where) {
        this.where = where;
    }

    public SQLOrderBy getOrderBy() {
        return this.orderBy;
    }

    public void setOrderBy(SQLOrderBy orderBy) {
        this.orderBy = orderBy;
    }

    public SQLLimit getLimit() {
        return this.limit;
    }

    public void setLimit(SQLLimit limit) {
        this.limit = limit;
    }

    /** Visitor dispatch: descend into child clauses only if the visitor opts in. */
    public void accept0(MySqlASTVisitor visitor) {
        if (visitor.visit(this)) {
            acceptChild(visitor, this.where);
            acceptChild(visitor, this.orderBy);
            acceptChild(visitor, this.limit);
        }
        visitor.endVisit(this);
    }
}
|
MySqlShowSlowStatement
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/SequenceFileRecordReader.java
|
{
"start": 1425,
"end": 3279
}
|
/**
 * A RecordReader over a split of a Hadoop SequenceFile. Records are read
 * from the first sync point at or after the split start, up to the first
 * sync point at or after the split end.
 */
class ____<K, V> extends RecordReader<K, V> {
    private SequenceFile.Reader in;
    private long start;          // position of the first record in this split (after sync)
    private long end;            // exclusive end offset of this split
    private boolean more = true; // false once we have passed the split boundary or hit EOF
    private K key = null;
    private V value = null;
    protected Configuration conf;

    /**
     * Opens the file backing the split and seeks to the first sync point
     * at or after the split start.
     */
    @Override
    public void initialize(InputSplit split,
                           TaskAttemptContext context
                           ) throws IOException, InterruptedException {
        FileSplit fileSplit = (FileSplit) split;
        conf = context.getConfiguration();
        Path path = fileSplit.getPath();
        FileSystem fs = path.getFileSystem(conf);
        this.in = new SequenceFile.Reader(fs, path, conf);
        this.end = fileSplit.getStart() + fileSplit.getLength();
        if (fileSplit.getStart() > in.getPosition()) {
            in.sync(fileSplit.getStart()); // sync to start
        }
        this.start = in.getPosition();
        more = start < end;
    }

    /**
     * Reads the next key/value pair into the current key/value fields.
     *
     * @return true if a pair was read; false at EOF or once the reader has
     *         crossed the split end at a sync mark
     */
    @Override
    @SuppressWarnings("unchecked")
    public boolean nextKeyValue() throws IOException, InterruptedException {
        if (!more) {
            return false;
        }
        long pos = in.getPosition();
        key = (K) in.next(key);
        if (key == null || (pos >= end && in.syncSeen())) {
            // EOF, or we started this read past the split end at a sync point.
            more = false;
            key = null;
            value = null;
        } else {
            value = (V) in.getCurrentValue(value);
        }
        return more;
    }

    @Override
    public K getCurrentKey() {
        return key;
    }

    @Override
    public V getCurrentValue() {
        return value;
    }

    /**
     * Return the progress within the input split
     * @return 0.0 to 1.0 of the input byte range
     */
    @Override // was missing: overrides RecordReader#getProgress, like the other methods
    public float getProgress() throws IOException {
        if (end == start) {
            return 0.0f;
        } else {
            return Math.min(1.0f, (in.getPosition() - start) / (float)(end - start));
        }
    }

    @Override // was missing: overrides RecordReader#close
    public synchronized void close() throws IOException { in.close(); }
}
|
SequenceFileRecordReader
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/short2darray/Short2DArrayAssert_hasDimensions_Test.java
|
{
"start": 924,
"end": 1272
}
|
/**
 * Delegation test for {@code Short2DArrayAssert#hasDimensions(int, int)}:
 * the base test invokes the API method and checks it forwarded to the
 * internal arrays helper.
 */
class ____ extends Short2DArrayAssertBaseTest {
    /** Calls the method under test with dimensions (1, 2). */
    @Override
    protected Short2DArrayAssert invoke_api_method() {
        return assertions.hasDimensions(1, 2);
    }

    /** Verifies the call was delegated to the internal arrays object with the same arguments. */
    @Override
    protected void verify_internal_effects() {
        verify(arrays).assertHasDimensions(getInfo(assertions), getActual(assertions), 1, 2);
    }
}
|
Short2DArrayAssert_hasDimensions_Test
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/processor/transformer/TransformerRouteTest.java
|
{
"start": 14460,
"end": 14793
}
|
/**
 * Test transformer converting an "Other" payload into an {@code XOrder}.
 * Asserts the incoming body is the expected flattened form before
 * replacing the message body with a new XOrder instance.
 */
class ____ extends Transformer {
    @Override
    public void transform(Message message, DataType from, DataType to) {
        // Guard: upstream step must have produced exactly this body.
        assertEquals("name=XOrder", message.getBody());
        LOG.info("Bean: Other -> XOrder");
        message.setBody(new XOrder());
    }
}
public static
|
OtherToXOrderTransformer
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/testutils/MiniClusterResourceConfiguration.java
|
{
"start": 1419,
"end": 3131
}
|
class ____ {
private final UnmodifiableConfiguration configuration;
private final int numberTaskManagers;
private final int numberSlotsPerTaskManager;
private final Duration shutdownTimeout;
private final RpcServiceSharing rpcServiceSharing;
private final MiniCluster.HaServices haServices;
/**
 * Creates an immutable mini-cluster configuration.
 * The Flink configuration is wrapped unmodifiable; all arguments except
 * {@code haServices} are null-checked.
 */
protected MiniClusterResourceConfiguration(
        Configuration configuration,
        int numberTaskManagers,
        int numberSlotsPerTaskManager,
        Duration shutdownTimeout,
        RpcServiceSharing rpcServiceSharing,
        MiniCluster.HaServices haServices) {
    this.configuration =
            new UnmodifiableConfiguration(Preconditions.checkNotNull(configuration));
    this.numberTaskManagers = numberTaskManagers;
    this.numberSlotsPerTaskManager = numberSlotsPerTaskManager;
    this.shutdownTimeout = Preconditions.checkNotNull(shutdownTimeout);
    this.rpcServiceSharing = Preconditions.checkNotNull(rpcServiceSharing);
    this.haServices = haServices;
}
/** Returns the (unmodifiable) Flink configuration. */
public Configuration getConfiguration() {
    return configuration;
}
/** Returns the number of task managers to start. */
public int getNumberTaskManagers() {
    return numberTaskManagers;
}
/** Returns the number of slots per task manager. */
public int getNumberSlotsPerTaskManager() {
    return numberSlotsPerTaskManager;
}
/** Returns the timeout applied when shutting the cluster down. */
public Duration getShutdownTimeout() {
    return shutdownTimeout;
}
/** Returns how RPC services are shared between cluster components. */
public RpcServiceSharing getRpcServiceSharing() {
    return rpcServiceSharing;
}
/** Returns the HA services mode; may be null (see constructor — not null-checked). */
public MiniCluster.HaServices getHaServices() {
    return haServices;
}
/** Builder for {@link MiniClusterResourceConfiguration}. */
public static final
|
MiniClusterResourceConfiguration
|
java
|
apache__rocketmq
|
client/src/test/java/org/apache/rocketmq/client/impl/admin/MqClientAdminImplTest.java
|
{
"start": 4634,
"end": 30733
}
|
/**
 * Unit tests for {@code MqClientAdminImpl}. Every admin call is exercised
 * twice: once with a mocked SUCCESS response and once with SYSTEM_ERROR.
 * The shared error-path assertions are factored into {@link #assertSystemError}.
 */
class ____ {
    @Mock
    private RemotingClient remotingClient;
    @Mock
    private RemotingCommand response;
    private MqClientAdminImpl mqClientAdminImpl;
    private final String defaultTopic = "defaultTopic";
    private final String defaultBrokerAddr = "127.0.0.1:10911";
    private final long defaultTimeout = 3000L;

    /** Wires the admin client so that every remoting invoke yields the mocked response. */
    @Before
    public void init() throws RemotingException, InterruptedException, MQClientException {
        mqClientAdminImpl = new MqClientAdminImpl(remotingClient);
        when(remotingClient.invoke(any(String.class), any(RemotingCommand.class), any(Long.class))).thenReturn(CompletableFuture.completedFuture(response));
    }

    @Test
    public void assertQueryMessageWithSuccess() throws Exception {
        setResponseSuccess(getMessageResult());
        QueryMessageRequestHeader requestHeader = mock(QueryMessageRequestHeader.class);
        when(requestHeader.getTopic()).thenReturn(defaultTopic);
        when(requestHeader.getKey()).thenReturn("keys");
        CompletableFuture<List<MessageExt>> actual = mqClientAdminImpl.queryMessage(defaultBrokerAddr, false, false, requestHeader, defaultTimeout);
        List<MessageExt> messageExtList = actual.get();
        assertNotNull(messageExtList);
        assertEquals(1, messageExtList.size());
    }

    @Test
    public void assertQueryMessageWithNotFound() throws Exception {
        // QUERY_NOT_FOUND must yield an empty list, not an exception.
        when(response.getCode()).thenReturn(ResponseCode.QUERY_NOT_FOUND);
        QueryMessageRequestHeader requestHeader = mock(QueryMessageRequestHeader.class);
        CompletableFuture<List<MessageExt>> actual = mqClientAdminImpl.queryMessage(defaultBrokerAddr, false, false, requestHeader, defaultTimeout);
        List<MessageExt> messageExtList = actual.get();
        assertNotNull(messageExtList);
        assertEquals(0, messageExtList.size());
    }

    @Test
    public void assertQueryMessageWithError() {
        setResponseError();
        QueryMessageRequestHeader requestHeader = mock(QueryMessageRequestHeader.class);
        assertSystemError(mqClientAdminImpl.queryMessage(defaultBrokerAddr, false, false, requestHeader, defaultTimeout));
    }

    @Test
    public void assertGetTopicStatsInfoWithSuccess() throws Exception {
        TopicStatsTable responseBody = new TopicStatsTable();
        setResponseSuccess(RemotingSerializable.encode(responseBody));
        GetTopicStatsInfoRequestHeader requestHeader = mock(GetTopicStatsInfoRequestHeader.class);
        CompletableFuture<TopicStatsTable> actual = mqClientAdminImpl.getTopicStatsInfo(defaultBrokerAddr, requestHeader, defaultTimeout);
        TopicStatsTable topicStatsTable = actual.get();
        assertNotNull(topicStatsTable);
        assertEquals(0, topicStatsTable.getOffsetTable().size());
    }

    @Test
    public void assertGetTopicStatsInfoWithError() {
        setResponseError();
        GetTopicStatsInfoRequestHeader requestHeader = mock(GetTopicStatsInfoRequestHeader.class);
        assertSystemError(mqClientAdminImpl.getTopicStatsInfo(defaultBrokerAddr, requestHeader, defaultTimeout));
    }

    @Test
    public void assertQueryConsumeTimeSpanWithSuccess() throws Exception {
        QueryConsumeTimeSpanBody responseBody = new QueryConsumeTimeSpanBody();
        setResponseSuccess(RemotingSerializable.encode(responseBody));
        QueryConsumeTimeSpanRequestHeader requestHeader = mock(QueryConsumeTimeSpanRequestHeader.class);
        CompletableFuture<List<QueueTimeSpan>> actual = mqClientAdminImpl.queryConsumeTimeSpan(defaultBrokerAddr, requestHeader, defaultTimeout);
        List<QueueTimeSpan> queueTimeSpans = actual.get();
        assertNotNull(queueTimeSpans);
        assertEquals(0, queueTimeSpans.size());
    }

    @Test
    public void assertQueryConsumeTimeSpanWithError() {
        setResponseError();
        QueryConsumeTimeSpanRequestHeader requestHeader = mock(QueryConsumeTimeSpanRequestHeader.class);
        assertSystemError(mqClientAdminImpl.queryConsumeTimeSpan(defaultBrokerAddr, requestHeader, defaultTimeout));
    }

    @Test
    public void assertUpdateOrCreateTopicWithSuccess() throws Exception {
        setResponseSuccess(null);
        CreateTopicRequestHeader requestHeader = mock(CreateTopicRequestHeader.class);
        CompletableFuture<Void> actual = mqClientAdminImpl.updateOrCreateTopic(defaultBrokerAddr, requestHeader, defaultTimeout);
        assertNull(actual.get());
    }

    @Test
    public void assertUpdateOrCreateTopicWithError() {
        setResponseError();
        CreateTopicRequestHeader requestHeader = mock(CreateTopicRequestHeader.class);
        assertSystemError(mqClientAdminImpl.updateOrCreateTopic(defaultBrokerAddr, requestHeader, defaultTimeout));
    }

    @Test
    public void assertUpdateOrCreateSubscriptionGroupWithSuccess() throws Exception {
        setResponseSuccess(null);
        SubscriptionGroupConfig config = mock(SubscriptionGroupConfig.class);
        CompletableFuture<Void> actual = mqClientAdminImpl.updateOrCreateSubscriptionGroup(defaultBrokerAddr, config, defaultTimeout);
        assertNull(actual.get());
    }

    @Test
    public void assertUpdateOrCreateSubscriptionGroupWithError() {
        setResponseError();
        SubscriptionGroupConfig config = mock(SubscriptionGroupConfig.class);
        assertSystemError(mqClientAdminImpl.updateOrCreateSubscriptionGroup(defaultBrokerAddr, config, defaultTimeout));
    }

    @Test
    public void assertDeleteTopicInBrokerWithSuccess() throws Exception {
        setResponseSuccess(null);
        DeleteTopicRequestHeader requestHeader = mock(DeleteTopicRequestHeader.class);
        CompletableFuture<Void> actual = mqClientAdminImpl.deleteTopicInBroker(defaultBrokerAddr, requestHeader, defaultTimeout);
        assertNull(actual.get());
    }

    @Test
    public void assertDeleteTopicInBrokerWithError() {
        setResponseError();
        DeleteTopicRequestHeader requestHeader = mock(DeleteTopicRequestHeader.class);
        assertSystemError(mqClientAdminImpl.deleteTopicInBroker(defaultBrokerAddr, requestHeader, defaultTimeout));
    }

    @Test
    public void assertDeleteTopicInNameserverWithSuccess() throws Exception {
        setResponseSuccess(null);
        DeleteTopicFromNamesrvRequestHeader requestHeader = mock(DeleteTopicFromNamesrvRequestHeader.class);
        CompletableFuture<Void> actual = mqClientAdminImpl.deleteTopicInNameserver(defaultBrokerAddr, requestHeader, defaultTimeout);
        assertNull(actual.get());
    }

    @Test
    public void assertDeleteTopicInNameserverWithError() {
        setResponseError();
        DeleteTopicFromNamesrvRequestHeader requestHeader = mock(DeleteTopicFromNamesrvRequestHeader.class);
        assertSystemError(mqClientAdminImpl.deleteTopicInNameserver(defaultBrokerAddr, requestHeader, defaultTimeout));
    }

    @Test
    public void assertDeleteKvConfigWithSuccess() throws Exception {
        setResponseSuccess(null);
        DeleteKVConfigRequestHeader requestHeader = mock(DeleteKVConfigRequestHeader.class);
        CompletableFuture<Void> actual = mqClientAdminImpl.deleteKvConfig(defaultBrokerAddr, requestHeader, defaultTimeout);
        assertNull(actual.get());
    }

    @Test
    public void assertDeleteKvConfigWithError() {
        setResponseError();
        DeleteKVConfigRequestHeader requestHeader = mock(DeleteKVConfigRequestHeader.class);
        assertSystemError(mqClientAdminImpl.deleteKvConfig(defaultBrokerAddr, requestHeader, defaultTimeout));
    }

    @Test
    public void assertDeleteSubscriptionGroupWithSuccess() throws Exception {
        setResponseSuccess(null);
        DeleteSubscriptionGroupRequestHeader requestHeader = mock(DeleteSubscriptionGroupRequestHeader.class);
        CompletableFuture<Void> actual = mqClientAdminImpl.deleteSubscriptionGroup(defaultBrokerAddr, requestHeader, defaultTimeout);
        assertNull(actual.get());
    }

    @Test
    public void assertDeleteSubscriptionGroupWithError() {
        setResponseError();
        DeleteSubscriptionGroupRequestHeader requestHeader = mock(DeleteSubscriptionGroupRequestHeader.class);
        assertSystemError(mqClientAdminImpl.deleteSubscriptionGroup(defaultBrokerAddr, requestHeader, defaultTimeout));
    }

    @Test
    public void assertInvokeBrokerToResetOffsetWithSuccess() throws Exception {
        ResetOffsetBody responseBody = new ResetOffsetBody();
        setResponseSuccess(RemotingSerializable.encode(responseBody));
        ResetOffsetRequestHeader requestHeader = mock(ResetOffsetRequestHeader.class);
        CompletableFuture<Map<MessageQueue, Long>> actual = mqClientAdminImpl.invokeBrokerToResetOffset(defaultBrokerAddr, requestHeader, defaultTimeout);
        assertEquals(0, actual.get().size());
    }

    @Test
    public void assertInvokeBrokerToResetOffsetWithError() {
        setResponseError();
        ResetOffsetRequestHeader requestHeader = mock(ResetOffsetRequestHeader.class);
        assertSystemError(mqClientAdminImpl.invokeBrokerToResetOffset(defaultBrokerAddr, requestHeader, defaultTimeout));
    }

    @Test
    public void assertViewMessageWithSuccess() throws Exception {
        setResponseSuccess(getMessageResult());
        ViewMessageRequestHeader requestHeader = mock(ViewMessageRequestHeader.class);
        CompletableFuture<MessageExt> actual = mqClientAdminImpl.viewMessage(defaultBrokerAddr, requestHeader, defaultTimeout);
        MessageExt result = actual.get();
        assertNotNull(result);
        assertEquals(defaultTopic, result.getTopic());
    }

    @Test
    public void assertViewMessageWithError() {
        setResponseError();
        ViewMessageRequestHeader requestHeader = mock(ViewMessageRequestHeader.class);
        assertSystemError(mqClientAdminImpl.viewMessage(defaultBrokerAddr, requestHeader, defaultTimeout));
    }

    @Test
    public void assertGetBrokerClusterInfoWithSuccess() throws Exception {
        ClusterInfo responseBody = new ClusterInfo();
        setResponseSuccess(RemotingSerializable.encode(responseBody));
        CompletableFuture<ClusterInfo> actual = mqClientAdminImpl.getBrokerClusterInfo(defaultBrokerAddr, defaultTimeout);
        ClusterInfo result = actual.get();
        assertNotNull(result);
    }

    @Test
    public void assertGetBrokerClusterInfoWithError() {
        setResponseError();
        assertSystemError(mqClientAdminImpl.getBrokerClusterInfo(defaultBrokerAddr, defaultTimeout));
    }

    @Test
    public void assertGetConsumerConnectionListWithSuccess() throws Exception {
        ConsumerConnection responseBody = new ConsumerConnection();
        setResponseSuccess(RemotingSerializable.encode(responseBody));
        GetConsumerConnectionListRequestHeader requestHeader = mock(GetConsumerConnectionListRequestHeader.class);
        CompletableFuture<ConsumerConnection> actual = mqClientAdminImpl.getConsumerConnectionList(defaultBrokerAddr, requestHeader, defaultTimeout);
        ConsumerConnection result = actual.get();
        assertNotNull(result);
        assertEquals(0, result.getConnectionSet().size());
    }

    @Test
    public void assertGetConsumerConnectionListWithError() {
        setResponseError();
        GetConsumerConnectionListRequestHeader requestHeader = mock(GetConsumerConnectionListRequestHeader.class);
        assertSystemError(mqClientAdminImpl.getConsumerConnectionList(defaultBrokerAddr, requestHeader, defaultTimeout));
    }

    @Test
    public void assertQueryTopicsByConsumerWithSuccess() throws Exception {
        TopicList responseBody = new TopicList();
        setResponseSuccess(RemotingSerializable.encode(responseBody));
        QueryTopicsByConsumerRequestHeader requestHeader = mock(QueryTopicsByConsumerRequestHeader.class);
        CompletableFuture<TopicList> actual = mqClientAdminImpl.queryTopicsByConsumer(defaultBrokerAddr, requestHeader, defaultTimeout);
        TopicList result = actual.get();
        assertNotNull(result);
        assertEquals(0, result.getTopicList().size());
    }

    @Test
    public void assertQueryTopicsByConsumerWithError() {
        setResponseError();
        QueryTopicsByConsumerRequestHeader requestHeader = mock(QueryTopicsByConsumerRequestHeader.class);
        assertSystemError(mqClientAdminImpl.queryTopicsByConsumer(defaultBrokerAddr, requestHeader, defaultTimeout));
    }

    @Test
    public void assertQuerySubscriptionByConsumerWithSuccess() throws Exception {
        SubscriptionData responseBody = new SubscriptionData();
        setResponseSuccess(RemotingSerializable.encode(responseBody));
        QuerySubscriptionByConsumerRequestHeader requestHeader = mock(QuerySubscriptionByConsumerRequestHeader.class);
        CompletableFuture<SubscriptionData> actual = mqClientAdminImpl.querySubscriptionByConsumer(defaultBrokerAddr, requestHeader, defaultTimeout);
        // The impl returns null for an empty subscription body.
        assertNull(actual.get());
    }

    @Test
    public void assertQuerySubscriptionByConsumerWithError() {
        setResponseError();
        QuerySubscriptionByConsumerRequestHeader requestHeader = mock(QuerySubscriptionByConsumerRequestHeader.class);
        assertSystemError(mqClientAdminImpl.querySubscriptionByConsumer(defaultBrokerAddr, requestHeader, defaultTimeout));
    }

    @Test
    public void assertGetConsumeStatsWithSuccess() throws Exception {
        ConsumeStats responseBody = new ConsumeStats();
        setResponseSuccess(RemotingSerializable.encode(responseBody));
        GetConsumeStatsRequestHeader requestHeader = mock(GetConsumeStatsRequestHeader.class);
        CompletableFuture<ConsumeStats> actual = mqClientAdminImpl.getConsumeStats(defaultBrokerAddr, requestHeader, defaultTimeout);
        ConsumeStats result = actual.get();
        assertNotNull(result);
        assertEquals(0, result.getOffsetTable().size());
    }

    @Test
    public void assertGetConsumeStatsWithError() {
        setResponseError();
        GetConsumeStatsRequestHeader requestHeader = mock(GetConsumeStatsRequestHeader.class);
        assertSystemError(mqClientAdminImpl.getConsumeStats(defaultBrokerAddr, requestHeader, defaultTimeout));
    }

    @Test
    public void assertQueryTopicConsumeByWhoWithSuccess() throws Exception {
        GroupList responseBody = new GroupList();
        setResponseSuccess(RemotingSerializable.encode(responseBody));
        QueryTopicConsumeByWhoRequestHeader requestHeader = mock(QueryTopicConsumeByWhoRequestHeader.class);
        CompletableFuture<GroupList> actual = mqClientAdminImpl.queryTopicConsumeByWho(defaultBrokerAddr, requestHeader, defaultTimeout);
        GroupList result = actual.get();
        assertNotNull(result);
        assertEquals(0, result.getGroupList().size());
    }

    @Test
    public void assertQueryTopicConsumeByWhoWithError() {
        setResponseError();
        QueryTopicConsumeByWhoRequestHeader requestHeader = mock(QueryTopicConsumeByWhoRequestHeader.class);
        assertSystemError(mqClientAdminImpl.queryTopicConsumeByWho(defaultBrokerAddr, requestHeader, defaultTimeout));
    }

    @Test
    public void assertGetConsumerRunningInfoWithSuccess() throws Exception {
        ConsumerRunningInfo responseBody = new ConsumerRunningInfo();
        setResponseSuccess(RemotingSerializable.encode(responseBody));
        GetConsumerRunningInfoRequestHeader requestHeader = mock(GetConsumerRunningInfoRequestHeader.class);
        CompletableFuture<ConsumerRunningInfo> actual = mqClientAdminImpl.getConsumerRunningInfo(defaultBrokerAddr, requestHeader, defaultTimeout);
        ConsumerRunningInfo result = actual.get();
        assertNotNull(result);
        assertEquals(0, result.getProperties().size());
    }

    @Test
    public void assertGetConsumerRunningInfoWithError() {
        setResponseError();
        GetConsumerRunningInfoRequestHeader requestHeader = mock(GetConsumerRunningInfoRequestHeader.class);
        assertSystemError(mqClientAdminImpl.getConsumerRunningInfo(defaultBrokerAddr, requestHeader, defaultTimeout));
    }

    @Test
    public void assertConsumeMessageDirectlyWithSuccess() throws Exception {
        ConsumeMessageDirectlyResult responseBody = new ConsumeMessageDirectlyResult();
        setResponseSuccess(RemotingSerializable.encode(responseBody));
        ConsumeMessageDirectlyResultRequestHeader requestHeader = mock(ConsumeMessageDirectlyResultRequestHeader.class);
        CompletableFuture<ConsumeMessageDirectlyResult> actual = mqClientAdminImpl.consumeMessageDirectly(defaultBrokerAddr, requestHeader, defaultTimeout);
        ConsumeMessageDirectlyResult result = actual.get();
        assertNotNull(result);
        assertTrue(result.isAutoCommit());
    }

    @Test
    public void assertConsumeMessageDirectlyWithError() {
        setResponseError();
        ConsumeMessageDirectlyResultRequestHeader requestHeader = mock(ConsumeMessageDirectlyResultRequestHeader.class);
        assertSystemError(mqClientAdminImpl.consumeMessageDirectly(defaultBrokerAddr, requestHeader, defaultTimeout));
    }

    /**
     * Asserts that the future fails with an {@code MQClientException} carrying
     * SYSTEM_ERROR — the shared expectation of every *WithError test above.
     */
    private void assertSystemError(CompletableFuture<?> actual) {
        Throwable thrown = assertThrows(ExecutionException.class, actual::get);
        assertTrue(thrown.getCause() instanceof MQClientException);
        MQClientException mqException = (MQClientException) thrown.getCause();
        assertEquals(ResponseCode.SYSTEM_ERROR, mqException.getResponseCode());
        assertTrue(mqException.getMessage().contains("CODE: 1 DESC: null"));
    }

    /** Encodes a single message as the wire body returned by query/view calls. */
    private byte[] getMessageResult() throws Exception {
        byte[] bytes = MessageDecoder.encode(createMessageExt(), false);
        ByteBuffer byteBuffer = ByteBuffer.allocate(bytes.length);
        byteBuffer.put(bytes);
        return byteBuffer.array();
    }

    /** Builds a fully-populated message fixture on defaultTopic. */
    private MessageExt createMessageExt() {
        MessageExt result = new MessageExt();
        result.setBody("body".getBytes(StandardCharsets.UTF_8));
        result.setTopic(defaultTopic);
        result.setBrokerName("defaultBroker");
        result.putUserProperty("key", "value");
        result.getProperties().put(MessageConst.PROPERTY_PRODUCER_GROUP, "defaultGroup");
        result.getProperties().put(MessageConst.PROPERTY_UNIQ_CLIENT_MESSAGE_ID_KEYIDX, "TX1");
        result.setKeys("keys");
        SocketAddress bornHost = new InetSocketAddress("127.0.0.1", 12911);
        SocketAddress storeHost = new InetSocketAddress("127.0.0.1", 10911);
        result.setBornHost(bornHost);
        result.setStoreHost(storeHost);
        return result;
    }

    /** Makes the mocked response report SUCCESS with the given (possibly null) body. */
    private void setResponseSuccess(byte[] body) {
        when(response.getCode()).thenReturn(ResponseCode.SUCCESS);
        when(response.getBody()).thenReturn(body);
    }

    /** Makes the mocked response report SYSTEM_ERROR. */
    private void setResponseError() {
        when(response.getCode()).thenReturn(ResponseCode.SYSTEM_ERROR);
    }
}
|
MqClientAdminImplTest
|
java
|
apache__flink
|
flink-connectors/flink-hadoop-compatibility/src/test/java/org/apache/flink/api/java/typeutils/runtime/WritableSerializerUpgradeTest.java
|
{
"start": 3623,
"end": 4128
}
|
/**
 * Pre-upgrade setup for the WritableSerializer compatibility test:
 * supplies the serializer as it existed before the upgrade, plus a datum
 * written with it.
 */
class ____
        implements TypeSerializerUpgradeTestBase.PreUpgradeSetup<WritableName> {
    /** Serializer from before the upgrade under test. */
    @Override
    public TypeSerializer<WritableName> createPriorSerializer() {
        return new WritableSerializer<>(WritableName.class);
    }

    /** Sample record to serialize with the prior serializer. */
    @Override
    public WritableName createTestData() {
        WritableName writable = new WritableName();
        writable.setName("flink");
        return writable;
    }
}
/**
* This
|
WritableSerializerSetup
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/DeleteUserRequestBuilder.java
|
{
"start": 498,
"end": 946
}
|
/**
 * Fluent builder for {@code DeleteUserRequest}.
 */
class ____ extends ActionRequestBuilder<DeleteUserRequest, DeleteUserResponse>
    implements
        WriteRequestBuilder<DeleteUserRequestBuilder> {

    public DeleteUserRequestBuilder(ElasticsearchClient client) {
        super(client, DeleteUserAction.INSTANCE, new DeleteUserRequest());
    }

    /** Sets the name of the user to delete; returns this builder for chaining. */
    public DeleteUserRequestBuilder username(String username) {
        request.username(username);
        return this;
    }
}
|
DeleteUserRequestBuilder
|
java
|
playframework__playframework
|
documentation/manual/working/commonGuide/pekko/code/javaguide/pekko/typed/PekkoTypedDocTest.java
|
{
"start": 482,
"end": 2138
}
|
/**
 * Documentation tests verifying that OO-style Pekko typed actors can be
 * obtained through both runtime (Guice) and compile-time dependency injection.
 */
class ____ {
    @Test
    public void runtime_DI_support_for_OO_style_typed_actors() {
        Module module = new javaguide.pekko.typed.oo.AppModule();
        GuiceApplicationBuilder builder = new GuiceApplicationBuilder().bindings(module);
        // "my.config" is required by the ConfiguredActor binding.
        Injector injector = builder.configure("my.config", "foo").injector();
        javaguide.pekko.typed.oo.Main main = injector.instanceOf(javaguide.pekko.typed.oo.Main.class);
        assertThat(main.helloActor).isNotNull();
        assertThat(main.configuredActor).isNotNull();
    }

    @Test
    public void runtime_DI_support_for_multi_instance_OO_style_typed_actors() {
        Module module = new javaguide.pekko.typed.oo.multi.AppModule();
        GuiceApplicationBuilder builder = new GuiceApplicationBuilder().bindings(module);
        Injector injector = builder.configure("my.config", "foo").injector();
        javaguide.pekko.typed.oo.multi.Main main =
            injector.instanceOf(javaguide.pekko.typed.oo.multi.Main.class);
        // Two independently-bound instances of each actor must be injected.
        assertThat(main.helloActor1).isNotNull();
        assertThat(main.helloActor2).isNotNull();
        assertThat(main.configuredActor1).isNotNull();
        assertThat(main.configuredActor2).isNotNull();
    }

    @Test
    public void compile_time_DI_without_support_works() {
        // A sanity-check of what compile-time DI looks like
        Environment environment = Environment.simple();
        ApplicationLoader.Context context =
            ApplicationLoader.create(environment, Collections.singletonMap("my.config", "foo"));
        javaguide.pekko.typed.oo.Main main = new javaguide.pekko.typed.oo.AppComponents(context).main;
        assertThat(main.helloActor).isNotNull();
        assertThat(main.configuredActor).isNotNull();
    }
}
|
PekkoTypedDocTest
|
java
|
alibaba__nacos
|
core/src/main/java/com/alibaba/nacos/core/controller/v3/NamespaceControllerV3.java
|
{
"start": 2290,
"end": 7855
}
|
class ____ {
private final NamespaceOperationService namespaceOperationService;
private final NamespacePersistService namespacePersistService;
public NamespaceControllerV3(NamespaceOperationService namespaceOperationService,
NamespacePersistService namespacePersistService) {
this.namespaceOperationService = namespaceOperationService;
this.namespacePersistService = namespacePersistService;
}
private final Pattern namespaceIdCheckPattern = Pattern.compile("^[\\w-]+");
private final Pattern namespaceNameCheckPattern = Pattern.compile("^[^@#$%^&*]+$");
private static final int NAMESPACE_ID_MAX_LENGTH = 128;
/**
* Get namespace list.
*
* @return namespace list
*/
@GetMapping("/list")
@Secured(resource = Commons.NACOS_ADMIN_CORE_CONTEXT_V3
+ "/namespace", action = ActionTypes.READ, signType = SignType.CONSOLE, apiType = ApiType.ADMIN_API)
public Result<List<Namespace>> getNamespaceList() {
return Result.success(namespaceOperationService.getNamespaceList());
}
/**
* get namespace all info by namespace id.
*
* @param namespaceId namespaceId
* @return namespace all info
*/
@GetMapping
@Secured(resource = Commons.NACOS_ADMIN_CORE_CONTEXT_V3
+ "namespaces", action = ActionTypes.READ, signType = SignType.CONSOLE, apiType = ApiType.ADMIN_API)
public Result<Namespace> getNamespace(@RequestParam("namespaceId") String namespaceId) throws NacosException {
return Result.success(namespaceOperationService.getNamespace(namespaceId));
}
/**
* create namespace.
*
* @param namespaceForm namespaceForm.
* @return whether create ok
*/
@PostMapping
@Secured(resource = Commons.NACOS_ADMIN_CORE_CONTEXT_V3
+ "namespaces", action = ActionTypes.WRITE, signType = SignType.CONSOLE, apiType = ApiType.ADMIN_API)
public Result<Boolean> createNamespace(NamespaceForm namespaceForm) throws Exception {
namespaceForm.validate();
String namespaceId = namespaceForm.getNamespaceId();
String namespaceName = namespaceForm.getNamespaceName();
String namespaceDesc = namespaceForm.getNamespaceDesc();
if (StringUtils.isBlank(namespaceId)) {
namespaceId = UUID.randomUUID().toString();
} else {
// TODO check should be parameter check filter.
namespaceId = namespaceId.trim();
if (!namespaceIdCheckPattern.matcher(namespaceId).matches()) {
throw new NacosApiException(HttpStatus.BAD_REQUEST.value(), ErrorCode.ILLEGAL_NAMESPACE,
"namespaceId [" + namespaceId + "] mismatch the pattern");
}
if (namespaceId.length() > NAMESPACE_ID_MAX_LENGTH) {
throw new NacosApiException(HttpStatus.BAD_REQUEST.value(), ErrorCode.ILLEGAL_NAMESPACE,
"too long namespaceId, over " + NAMESPACE_ID_MAX_LENGTH);
}
}
// contains illegal chars
if (!namespaceNameCheckPattern.matcher(namespaceName).matches()) {
throw new NacosApiException(HttpStatus.BAD_REQUEST.value(), ErrorCode.ILLEGAL_NAMESPACE,
"namespaceName [" + namespaceName + "] contains illegal char");
}
return Result.success(namespaceOperationService.createNamespace(namespaceId, namespaceName, namespaceDesc));
}
/**
* update namespace.
*
* @param namespaceForm namespace params
* @return whether edit ok
*/
@PutMapping
@Secured(resource = Commons.NACOS_ADMIN_CORE_CONTEXT_V3
+ "namespaces", action = ActionTypes.WRITE, signType = SignType.CONSOLE, apiType = ApiType.ADMIN_API)
public Result<Boolean> updateNamespace(NamespaceForm namespaceForm) throws NacosException {
namespaceForm.validate();
// contains illegal chars
if (!namespaceNameCheckPattern.matcher(namespaceForm.getNamespaceName()).matches()) {
throw new NacosApiException(HttpStatus.BAD_REQUEST.value(), ErrorCode.ILLEGAL_NAMESPACE,
"namespaceName [" + namespaceForm.getNamespaceName() + "] contains illegal char");
}
return Result.success(namespaceOperationService.editNamespace(namespaceForm.getNamespaceId(),
namespaceForm.getNamespaceName(), namespaceForm.getNamespaceDesc()));
}
/**
* delete namespace by id.
*
* @param namespaceId namespace ID
* @return whether delete ok
*/
@DeleteMapping
@Secured(resource = Commons.NACOS_ADMIN_CORE_CONTEXT_V3
+ "namespaces", action = ActionTypes.WRITE, signType = SignType.CONSOLE, apiType = ApiType.ADMIN_API)
public Result<Boolean> deleteNamespace(@RequestParam("namespaceId") String namespaceId) {
return Result.success(namespaceOperationService.removeNamespace(namespaceId));
}
/**
* check namespace id exist.
*
* @param namespaceId namespaceId
* @return whether exist
*/
@GetMapping("/check")
@Secured(resource = Commons.NACOS_ADMIN_CORE_CONTEXT_V3
+ "namespaces", action = ActionTypes.READ, signType = SignType.CONSOLE, apiType = ApiType.ADMIN_API)
public Result<Integer> checkNamespaceIdExist(@RequestParam("namespaceId") String namespaceId) {
return Result.success(namespacePersistService.tenantInfoCountByTenantId(namespaceId));
}
}
|
NamespaceControllerV3
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/processor/transformer/TransformerRouteTest.java
|
{
"start": 2186,
"end": 10280
}
|
class ____ extends ContextTestSupport {
protected static final Logger LOG = LoggerFactory.getLogger(TransformerRouteTest.class);
@Test
public void testJavaTransformer() throws Exception {
MockEndpoint abcresult = getMockEndpoint("mock:abcresult");
abcresult.expectedMessageCount(1);
abcresult.whenAnyExchangeReceived(new Processor() {
@Override
public void process(Exchange exchange) {
LOG.info("Asserting String -> XOrderResponse conversion");
assertEquals(XOrderResponse.class, exchange.getIn().getBody().getClass());
}
});
MockEndpoint xyzresult = getMockEndpoint("mock:xyzresult");
xyzresult.expectedMessageCount(1);
xyzresult.whenAnyExchangeReceived(new Processor() {
@Override
public void process(Exchange exchange) {
LOG.info("Asserting String -> XOrderResponse conversion is not yet performed");
assertEquals("response", exchange.getIn().getBody());
}
});
Exchange exchange = new DefaultExchange(context, ExchangePattern.InOut);
exchange.getIn().setBody(new AOrder());
Exchange answerEx = template.send("direct:abc", exchange);
if (answerEx.getException() != null) {
throw answerEx.getException();
}
assertEquals(AOrderResponse.class, answerEx.getMessage().getBody().getClass());
assertMockEndpointsSatisfied();
}
@Test
public void testDataFormatTransformer() throws Exception {
MockEndpoint xyzresult = getMockEndpoint("mock:xyzresult");
xyzresult.expectedMessageCount(1);
xyzresult.whenAnyExchangeReceived(new Processor() {
@Override
public void process(Exchange exchange) {
LOG.info("Asserting String -> XOrderResponse conversion is not yet performed");
assertEquals("response", exchange.getIn().getBody());
}
});
Exchange exchange = new DefaultExchange(context, ExchangePattern.InOut);
((DataTypeAware) exchange.getIn()).setBody("{name:XOrder}", new DataType("json:JsonXOrder"));
Exchange answerEx = template.send("direct:dataFormat", exchange);
if (answerEx.getException() != null) {
throw answerEx.getException();
}
assertEquals("{name:XOrderResponse}", answerEx.getMessage().getBody(String.class));
assertMockEndpointsSatisfied();
}
@Test
public void testEndpointTransformer() throws Exception {
MockEndpoint xyzresult = getMockEndpoint("mock:xyzresult");
xyzresult.expectedMessageCount(1);
xyzresult.whenAnyExchangeReceived(new Processor() {
@Override
public void process(Exchange exchange) {
LOG.info("Asserting String -> XOrderResponse conversion is not yet performed");
assertEquals("response", exchange.getIn().getBody());
}
});
Exchange exchange = new DefaultExchange(context, ExchangePattern.InOut);
exchange.getIn().setBody("<XOrder/>");
Exchange answerEx = template.send("direct:endpoint", exchange);
if (answerEx.getException() != null) {
throw answerEx.getException();
}
assertEquals("<XOrderResponse/>", answerEx.getMessage().getBody(String.class));
assertMockEndpointsSatisfied();
}
@Test
public void testCustomTransformer() throws Exception {
MockEndpoint xyzresult = getMockEndpoint("mock:xyzresult");
xyzresult.expectedMessageCount(1);
xyzresult.whenAnyExchangeReceived(new Processor() {
@Override
public void process(Exchange exchange) {
LOG.info("Asserting String -> XOrderResponse conversion is not yet performed");
assertEquals("response", exchange.getIn().getBody());
}
});
Exchange exchange = new DefaultExchange(context, ExchangePattern.InOut);
exchange.getIn().setBody("name=XOrder");
Exchange answerEx = template.send("direct:custom", exchange);
if (answerEx.getException() != null) {
throw answerEx.getException();
}
assertEquals("name=XOrderResponse", answerEx.getMessage().getBody(String.class));
assertMockEndpointsSatisfied();
}
@Test
void shouldKeepDataTypeAcrossRoutes() throws Exception {
MockEndpoint customDataTypeResult = getMockEndpoint("mock:testDataType");
customDataTypeResult.expectedMessageCount(1);
Exchange answerCustomDataType = template.send("direct:testDataType",
ex -> ((DataTypeAware) ex.getIn()).setBody("my fake content", new DataType("myDataType")));
if (answerCustomDataType.getException() != null) {
throw answerCustomDataType.getException();
}
assertIsInstanceOf(MyDataType.class, answerCustomDataType.getIn().getBody());
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
context.getTypeConverterRegistry().addTypeConverters(new MyTypeConverters());
from("direct:abc").inputType(AOrder.class).outputType(AOrderResponse.class).process(new Processor() {
public void process(Exchange exchange) {
LOG.info("Asserting input -> AOrder conversion");
assertEquals(AOrder.class, exchange.getIn().getBody().getClass());
}
}).to(ExchangePattern.InOut, "direct:xyz").to("mock:abcresult");
from("direct:xyz").inputType(XOrder.class).outputType(XOrderResponse.class).process(new Processor() {
public void process(Exchange exchange) {
LOG.info("Asserting input -> XOrder conversion");
assertEquals(XOrder.class, exchange.getIn().getBody().getClass());
exchange.getIn().setBody("response");
}
}).to("mock:xyzresult");
transformer().scheme("json").withDataFormat(new MyJsonDataFormatDefinition());
from("direct:dataFormat").inputType("json:JsonXOrder").outputType("json:JsonXOrderResponse")
.to(ExchangePattern.InOut, "direct:xyz");
context.addComponent("myxml", new MyXmlComponent());
transformer().fromType("xml:XmlXOrder").toType(XOrder.class).withUri("myxml:endpoint");
transformer().fromType(XOrderResponse.class).toType("xml:XmlXOrderResponse").withUri("myxml:endpoint");
from("direct:endpoint").inputType("xml:XmlXOrder").outputType("xml:XmlXOrderResponse").to(ExchangePattern.InOut,
"direct:xyz");
transformer().fromType("other:OtherXOrder").toType(XOrder.class).withJava(OtherToXOrderTransformer.class);
transformer().fromType(XOrderResponse.class).toType("other:OtherXOrderResponse")
.withJava(XOrderResponseToOtherTransformer.class);
from("direct:custom").inputType("other:OtherXOrder").outputType("other:OtherXOrderResponse")
.to(ExchangePattern.InOut, "direct:xyz");
transformer().name("myDataType").withDataFormat(new MyDataFormatDefinition());
from("direct:testDataType").inputTypeWithValidate("myDataType")
.to("direct:testDataTypeStep2");
from("direct:testDataTypeStep2").inputType(MyDataType.class)
.to("mock:testDataType");
validator().type("myDataType").withExpression(bodyAs(String.class).contains("fake"));
transformer().withDefaults();
transformer().scan("com.apache.camel.processor.transformer.custom");
}
};
}
public static
|
TransformerRouteTest
|
java
|
apache__rocketmq
|
client/src/test/java/org/apache/rocketmq/client/impl/consumer/RebalancePushImplTest.java
|
{
"start": 2220,
"end": 12219
}
|
class ____ {
@Spy
private DefaultMQPushConsumerImpl defaultMQPushConsumer = new DefaultMQPushConsumerImpl(new DefaultMQPushConsumer("RebalancePushImplTest"), null);
@Mock
private MQClientInstance mqClientInstance;
private OffsetStore offsetStore = mock(OffsetStore.class);
private String consumerGroup = "CID_RebalancePushImplTest";
private String topic = "TopicA";
private MessageQueue mq = new MessageQueue("topic1", "broker1", 0);
private MessageQueue retryMq = new MessageQueue(MixAll.RETRY_GROUP_TOPIC_PREFIX + "group", "broker1", 0);
private DefaultMQPushConsumerImpl consumerImpl = mock(DefaultMQPushConsumerImpl.class);
private RebalancePushImpl rebalanceImpl = new RebalancePushImpl(consumerImpl);
private DefaultMQPushConsumer consumer = new DefaultMQPushConsumer();
private MQClientInstance client = mock(MQClientInstance.class);
private MQAdminImpl admin = mock(MQAdminImpl.class);
public RebalancePushImplTest() {
when(consumerImpl.getDefaultMQPushConsumer()).thenReturn(consumer);
when(consumerImpl.getOffsetStore()).thenReturn(offsetStore);
rebalanceImpl.setmQClientFactory(client);
when(client.getMQAdminImpl()).thenReturn(admin);
}
@Test
public void testMessageQueueChanged_CountThreshold() {
RebalancePushImpl rebalancePush = new RebalancePushImpl(consumerGroup, MessageModel.CLUSTERING,
new AllocateMessageQueueAveragely(), mqClientInstance, defaultMQPushConsumer);
init(rebalancePush);
// Just set pullThresholdForQueue
defaultMQPushConsumer.getDefaultMQPushConsumer().setPullThresholdForQueue(1024);
Set<MessageQueue> allocateResultSet = new HashSet<>();
allocateResultSet.add(new MessageQueue(topic, "BrokerA", 0));
allocateResultSet.add(new MessageQueue(topic, "BrokerA", 1));
doRebalanceForcibly(rebalancePush, allocateResultSet);
assertThat(defaultMQPushConsumer.getDefaultMQPushConsumer().getPullThresholdForQueue()).isEqualTo(1024);
// Set pullThresholdForTopic
defaultMQPushConsumer.getDefaultMQPushConsumer().setPullThresholdForTopic(1024);
doRebalanceForcibly(rebalancePush, allocateResultSet);
assertThat(defaultMQPushConsumer.getDefaultMQPushConsumer().getPullThresholdForQueue()).isEqualTo(512);
// Change message queue allocate result
allocateResultSet.add(new MessageQueue(topic, "BrokerA", 2));
doRebalanceForcibly(rebalancePush, allocateResultSet);
assertThat(defaultMQPushConsumer.getDefaultMQPushConsumer().getPullThresholdForQueue()).isEqualTo(341);
}
private void doRebalanceForcibly(RebalancePushImpl rebalancePush, Set<MessageQueue> allocateResultSet) {
rebalancePush.topicSubscribeInfoTable.put(topic, allocateResultSet);
rebalancePush.doRebalance(false);
rebalancePush.messageQueueChanged(topic, allocateResultSet, allocateResultSet);
}
private void init(final RebalancePushImpl rebalancePush) {
rebalancePush.getSubscriptionInner().putIfAbsent(topic, new SubscriptionData());
rebalancePush.subscriptionInner.putIfAbsent(topic, new SubscriptionData());
when(mqClientInstance.findConsumerIdList(anyString(), anyString())).thenReturn(Collections.singletonList(consumerGroup));
when(mqClientInstance.getClientId()).thenReturn(consumerGroup);
when(defaultMQPushConsumer.getOffsetStore()).thenReturn(offsetStore);
}
@Test
public void testMessageQueueChanged_SizeThreshold() {
RebalancePushImpl rebalancePush = new RebalancePushImpl(consumerGroup, MessageModel.CLUSTERING,
new AllocateMessageQueueAveragely(), mqClientInstance, defaultMQPushConsumer);
init(rebalancePush);
// Just set pullThresholdSizeForQueue
defaultMQPushConsumer.getDefaultMQPushConsumer().setPullThresholdSizeForQueue(1024);
Set<MessageQueue> allocateResultSet = new HashSet<>();
allocateResultSet.add(new MessageQueue(topic, "BrokerA", 0));
allocateResultSet.add(new MessageQueue(topic, "BrokerA", 1));
doRebalanceForcibly(rebalancePush, allocateResultSet);
assertThat(defaultMQPushConsumer.getDefaultMQPushConsumer().getPullThresholdSizeForQueue()).isEqualTo(1024);
// Set pullThresholdSizeForTopic
defaultMQPushConsumer.getDefaultMQPushConsumer().setPullThresholdSizeForTopic(1024);
doRebalanceForcibly(rebalancePush, allocateResultSet);
assertThat(defaultMQPushConsumer.getDefaultMQPushConsumer().getPullThresholdSizeForQueue()).isEqualTo(512);
// Change message queue allocate result
allocateResultSet.add(new MessageQueue(topic, "BrokerA", 2));
doRebalanceForcibly(rebalancePush, allocateResultSet);
assertThat(defaultMQPushConsumer.getDefaultMQPushConsumer().getPullThresholdSizeForQueue()).isEqualTo(341);
}
@Test
public void testMessageQueueChanged_ConsumerRuntimeInfo() throws MQClientException {
RebalancePushImpl rebalancePush = new RebalancePushImpl(consumerGroup, MessageModel.CLUSTERING,
new AllocateMessageQueueAveragely(), mqClientInstance, defaultMQPushConsumer);
init(rebalancePush);
defaultMQPushConsumer.getDefaultMQPushConsumer().setPullThresholdSizeForQueue(1024);
defaultMQPushConsumer.getDefaultMQPushConsumer().setPullThresholdForQueue(1024);
Set<MessageQueue> allocateResultSet = new HashSet<>();
allocateResultSet.add(new MessageQueue(topic, "BrokerA", 0));
allocateResultSet.add(new MessageQueue(topic, "BrokerA", 1));
doRebalanceForcibly(rebalancePush, allocateResultSet);
defaultMQPushConsumer.setConsumeMessageService(new ConsumeMessageConcurrentlyService(defaultMQPushConsumer, null));
assertThat(defaultMQPushConsumer.consumerRunningInfo().getProperties().get("pullThresholdSizeForQueue")).isEqualTo("1024");
assertThat(defaultMQPushConsumer.consumerRunningInfo().getProperties().get("pullThresholdForQueue")).isEqualTo("1024");
assertThat(defaultMQPushConsumer.consumerRunningInfo().getProperties().get("pullThresholdSizeForTopic")).isEqualTo("-1");
assertThat(defaultMQPushConsumer.consumerRunningInfo().getProperties().get("pullThresholdForTopic")).isEqualTo("-1");
defaultMQPushConsumer.getDefaultMQPushConsumer().setPullThresholdSizeForTopic(1024);
defaultMQPushConsumer.getDefaultMQPushConsumer().setPullThresholdForTopic(1024);
doRebalanceForcibly(rebalancePush, allocateResultSet);
assertThat(defaultMQPushConsumer.consumerRunningInfo().getProperties().get("pullThresholdSizeForQueue")).isEqualTo("512");
assertThat(defaultMQPushConsumer.consumerRunningInfo().getProperties().get("pullThresholdForQueue")).isEqualTo("512");
assertThat(defaultMQPushConsumer.consumerRunningInfo().getProperties().get("pullThresholdSizeForTopic")).isEqualTo("1024");
assertThat(defaultMQPushConsumer.consumerRunningInfo().getProperties().get("pullThresholdForTopic")).isEqualTo("1024");
// Change message queue allocate result
allocateResultSet.add(new MessageQueue(topic, "BrokerA", 2));
doRebalanceForcibly(rebalancePush, allocateResultSet);
assertThat(defaultMQPushConsumer.consumerRunningInfo().getProperties().get("pullThresholdSizeForQueue")).isEqualTo("341");
assertThat(defaultMQPushConsumer.consumerRunningInfo().getProperties().get("pullThresholdForQueue")).isEqualTo("341");
assertThat(defaultMQPushConsumer.consumerRunningInfo().getProperties().get("pullThresholdSizeForTopic")).isEqualTo("1024");
assertThat(defaultMQPushConsumer.consumerRunningInfo().getProperties().get("pullThresholdForTopic")).isEqualTo("1024");
}
@Test
public void testComputePullFromWhereWithException_ne_minus1() throws MQClientException {
for (ConsumeFromWhere where : new ConsumeFromWhere[]{
ConsumeFromWhere.CONSUME_FROM_LAST_OFFSET,
ConsumeFromWhere.CONSUME_FROM_FIRST_OFFSET,
ConsumeFromWhere.CONSUME_FROM_TIMESTAMP}) {
consumer.setConsumeFromWhere(where);
when(offsetStore.readOffset(any(MessageQueue.class), any(ReadOffsetType.class))).thenReturn(0L);
assertEquals(0, rebalanceImpl.computePullFromWhereWithException(mq));
}
}
@Test
public void testComputePullFromWhereWithException_eq_minus1_last() throws MQClientException {
when(offsetStore.readOffset(any(MessageQueue.class), any(ReadOffsetType.class))).thenReturn(-1L);
consumer.setConsumeFromWhere(ConsumeFromWhere.CONSUME_FROM_LAST_OFFSET);
when(admin.maxOffset(any(MessageQueue.class))).thenReturn(12345L);
assertEquals(12345L, rebalanceImpl.computePullFromWhereWithException(mq));
assertEquals(0L, rebalanceImpl.computePullFromWhereWithException(retryMq));
}
@Test
public void testComputePullFromWhereWithException_eq_minus1_first() throws MQClientException {
when(offsetStore.readOffset(any(MessageQueue.class), any(ReadOffsetType.class))).thenReturn(-1L);
consumer.setConsumeFromWhere(ConsumeFromWhere.CONSUME_FROM_FIRST_OFFSET);
assertEquals(0, rebalanceImpl.computePullFromWhereWithException(mq));
}
@Test
public void testComputePullFromWhereWithException_eq_minus1_timestamp() throws MQClientException {
when(offsetStore.readOffset(any(MessageQueue.class), any(ReadOffsetType.class))).thenReturn(-1L);
consumer.setConsumeFromWhere(ConsumeFromWhere.CONSUME_FROM_TIMESTAMP);
when(admin.searchOffset(any(MessageQueue.class), anyLong())).thenReturn(12345L);
when(admin.maxOffset(any(MessageQueue.class))).thenReturn(23456L);
assertEquals(12345L, rebalanceImpl.computePullFromWhereWithException(mq));
assertEquals(23456L, rebalanceImpl.computePullFromWhereWithException(retryMq));
}
}
|
RebalancePushImplTest
|
java
|
apache__kafka
|
connect/runtime/src/main/java/org/apache/kafka/connect/runtime/WorkerConfig.java
|
{
"start": 4083,
"end": 4921
}
|
class ____ to convert between Kafka Connect format and the serialized form that is written to Kafka." +
" This controls the format of the keys in messages written to or read from Kafka, and since this is" +
" independent of connectors it allows any connector to work with any serialization format." +
" Examples of common formats include JSON and Avro.";
public static final String KEY_CONVERTER_VERSION = "key.converter." + PLUGIN_VERSION_SUFFIX;
public static final String KEY_CONVERTER_VERSION_DEFAULT = null;
public static final String KEY_CONVERTER_VERSION_DOC = "Version of the key converter.";
public static final String VALUE_CONVERTER_CLASS_CONFIG = "value.converter";
public static final String VALUE_CONVERTER_CLASS_DOC =
"Converter
|
used
|
java
|
apache__camel
|
components/camel-mllp/src/main/java/org/apache/camel/component/mllp/MllpWriteException.java
|
{
"start": 940,
"end": 1668
}
|
class ____ extends MllpException {
public MllpWriteException(String message, byte[] hl7Message, boolean logPhi) {
super(message, hl7Message, logPhi);
}
public MllpWriteException(String message, byte[] hl7Message, byte[] hl7Acknowledgement, boolean logPhi) {
super(message, hl7Message, hl7Acknowledgement, logPhi);
}
public MllpWriteException(String message, byte[] hl7Message, Throwable cause, boolean logPhi) {
super(message, hl7Message, cause, logPhi);
}
public MllpWriteException(String message, byte[] hl7Message, byte[] hl7Acknowledgement, Throwable cause, boolean logPhi) {
super(message, hl7Message, hl7Acknowledgement, cause, logPhi);
}
}
|
MllpWriteException
|
java
|
apache__camel
|
components/camel-jms/src/test/java/org/apache/camel/component/jms/integration/tx/JmsTransactedDeadLetterChannelHandlerRollbackOnExceptionIT.java
|
{
"start": 2109,
"end": 4118
}
|
class ____ {
@Handler
public void onException(Exchange exchange, Exception exception) {
throw new RuntimeCamelException("error in errorhandler");
}
}
protected final String testingEndpoint = "activemq:test." + getClass().getName();
protected boolean isHandleNew() {
return true;
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
// we use DLC to handle the exception but if it throw a new exception
// then the DLC handles that too (the transaction will always commit)
errorHandler(deadLetterChannel("bean:" + BadErrorHandler.class.getName())
.deadLetterHandleNewException(isHandleNew())
.logNewException(true));
from(testingEndpoint)
.log("Incoming JMS message ${body}")
.throwException(new RuntimeCamelException("bad error"));
}
};
}
@Test
public void shouldNotLoseMessagesOnExceptionInErrorHandler() {
template.sendBody(testingEndpoint, "Hello World");
// as we handle new exception, then the exception is ignored
// and causes the transaction to commit, so there is no message in the ActiveMQ DLQ queue
Object dlqBody = consumer.receiveBody("activemq:DLQ", 2000);
assertNull(dlqBody, "Should not rollback the transaction");
}
@Override
protected CamelContext createCamelContext() throws Exception {
CamelContext camelContext = super.createCamelContext();
// no redeliveries
ConnectionFactory connectionFactory = ConnectionFactoryHelper.createConnectionFactory(service, 0);
JmsComponent component = jmsComponentTransacted(connectionFactory);
camelContext.addComponent("activemq", component);
return camelContext;
}
}
|
BadErrorHandler
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/internal/util/ReflectHelper.java
|
{
"start": 5983,
"end": 6625
}
|
class ____.
*
* @throws ClassNotFoundException From {@link Class#forName(String)}.
*
* @deprecated Depending on context, either {@link ClassLoaderService}
* or {@link org.hibernate.boot.spi.ClassLoaderAccess} should be preferred
*/
@Deprecated
public static Class<?> classForName(String name) throws ClassNotFoundException {
try {
final var classLoader = currentThread().getContextClassLoader();
if ( classLoader != null ) {
return classLoader.loadClass(name);
}
}
catch ( Throwable ignore ) {
}
return Class.forName( name );
}
/**
* Is this member publicly accessible.
*
* @param clazz The
|
reference
|
java
|
quarkusio__quarkus
|
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/circular/CircularDependenciesChainTest.java
|
{
"start": 1536,
"end": 1963
}
|
class ____ {
private Comparator<String> comparator;
@ApplicationScoped
@Produces
Comparator<String> producedComparator = Comparator.naturalOrder();
@Inject
public void setComparator(Comparator<String> comparator) {
this.comparator = comparator;
}
public Comparator<String> getComparator() {
return comparator;
}
}
}
|
Producing
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/CompositeIdRowValueTest.java
|
{
"start": 571,
"end": 1031
}
|
class ____ {
@Test
public void testTupleAfterSubQuery(EntityManagerFactoryScope scope) {
scope.inTransaction(
entityManager -> {
Query q = entityManager.createQuery("SELECT e FROM EntityWithCompositeId e "
+ "WHERE EXISTS (SELECT 1 FROM EntityWithCompositeId) "
+ "AND e.id = :id");
q.setParameter("id", new CompositeId(1, 2));
assertThat(q.getResultList().size(), is(0));
}
);
}
}
|
CompositeIdRowValueTest
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/StaticQualifiedUsingExpressionTest.java
|
{
"start": 11679,
"end": 11889
}
|
interface ____ {
default void f() {}
}
}
""")
.expectUnchanged()
.addInputLines(
"in/Test.java",
"""
|
Builder
|
java
|
apache__flink
|
flink-python/src/main/java/org/apache/flink/client/cli/PythonProgramOptions.java
|
{
"start": 1652,
"end": 2111
}
|
class ____ extends ProgramOptions {
private final Configuration pythonConfiguration;
private final boolean isPythonEntryPoint;
public PythonProgramOptions(CommandLine line) throws CliArgsException {
super(line);
isPythonEntryPoint = isPythonEntryPoint(line);
pythonConfiguration = PythonDependencyUtils.parsePythonDependencyConfiguration(line);
// If the job is Python Shell job, the entry point
|
PythonProgramOptions
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/resourceplugin/gpu/NvidiaBinaryHelper.java
|
{
"start": 1379,
"end": 2314
}
|
class ____ {
/**
* @param pathOfGpuBinary The path of the binary
* @return the GpuDeviceInformation parsed from the nvidia-smi output
* @throws IOException if the binary output is not readable
* @throws YarnException if the pathOfGpuBinary is null,
* or the output parse failed
*/
synchronized GpuDeviceInformation getGpuDeviceInformation(
String pathOfGpuBinary, long discoveryTimeoutMs)
throws IOException, YarnException {
GpuDeviceInformationParser parser = new GpuDeviceInformationParser();
if (pathOfGpuBinary == null) {
throw new YarnException(
"Failed to find GPU discovery executable, please double check "
+ YarnConfiguration.NM_GPU_PATH_TO_EXEC + " setting.");
}
String output = Shell.execCommand(new HashMap<>(),
new String[]{pathOfGpuBinary, "-x", "-q"}, discoveryTimeoutMs);
return parser.parseXml(output);
}
}
|
NvidiaBinaryHelper
|
java
|
elastic__elasticsearch
|
x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/fst/Util.java
|
{
"start": 3517,
"end": 4822
}
|
class ____<T> {
/** Holds the last arc appended to this path */
public Arc<T> arc;
/** Holds cost plus any usage-specific output: */
public T output;
public final IntsRefBuilder input;
public final float boost;
public final CharSequence context;
// Custom int payload for consumers; the NRT suggester uses this to record if this path has
// already enumerated a surface form
public int payload;
FSTPath(T output, Arc<T> arc, IntsRefBuilder input, float boost, CharSequence context, int payload) {
this.arc = new Arc<T>().copyFrom(arc);
this.output = output;
this.input = input;
this.boost = boost;
this.context = context;
this.payload = payload;
}
FSTPath<T> newPath(T output, IntsRefBuilder input) {
return new FSTPath<>(output, this.arc, input, this.boost, this.context, this.payload);
}
@Override
public String toString() {
return "input=" + input.get() + " output=" + output + " context=" + context + " boost=" + boost + " payload=" + payload;
}
}
/** Compares first by the provided comparator, and then tie breaks by path.input. */
private static
|
FSTPath
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/processor/onexception/OnExceptionContinuePredicateTest.java
|
{
"start": 1158,
"end": 2721
}
|
class ____ extends OnExceptionContinueTest {
private final AtomicInteger predicateInvoked = new AtomicInteger();
private final AtomicInteger processorInvoked = new AtomicInteger();
@Override
@Test
public void testContinued() throws Exception {
getMockEndpoint("mock:me").expectedMessageCount(1);
super.testContinued();
assertEquals(1, predicateInvoked.get());
assertEquals(1, processorInvoked.get());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
// use a predicate instance
Predicate predicate = new Predicate() {
@Override
public boolean matches(Exchange exchange) {
predicateInvoked.incrementAndGet();
return true;
}
};
// tell Camel to handle and continue when this exception is
// thrown
onException(IllegalArgumentException.class).continued(predicate).process(new Processor() {
@Override
public void process(Exchange exchange) {
processorInvoked.incrementAndGet();
}
}).to("mock:me");
from("direct:start").to("mock:start").throwException(new IllegalArgumentException("Forced")).to("mock:result");
}
};
}
}
|
OnExceptionContinuePredicateTest
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java
|
{
"start": 2607,
"end": 34012
}
|
class ____ extends FieldTypeTestCase {
private static final long nowInMillis = 0;
public void testIsFieldWithinRangeEmptyReader() throws IOException {
QueryRewriteContext context = new QueryRewriteContext(parserConfig(), null, () -> nowInMillis);
IndexReader reader = new MultiReader();
DateFieldType ft = new DateFieldType("my_date");
assertEquals(
Relation.DISJOINT,
ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03", randomBoolean(), randomBoolean(), null, null, context)
);
}
public void testIsFieldWithinRangeOnlyDocValues() throws IOException {
QueryRewriteContext context = new QueryRewriteContext(parserConfig(), null, () -> nowInMillis);
IndexReader reader = new MultiReader();
DateFieldType ft = new DateFieldType("my_date", false);
// in case of only doc-values, we can't establish disjointness
assertEquals(
Relation.INTERSECTS,
ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03", randomBoolean(), randomBoolean(), null, null, context)
);
}
public void testIsFieldWithinQueryDateMillis() throws IOException {
DateFieldType ft = new DateFieldType("my_date");
isFieldWithinRangeTestCase(ft);
}
public void testIsFieldWithinQueryDateNanos() throws IOException {
DateFieldType ft = new DateFieldType("my_date", Resolution.NANOSECONDS);
isFieldWithinRangeTestCase(ft);
}
public void testIsFieldWithinQueryDateMillisDocValueSkipper() throws IOException {
DateFieldType ft = new DateFieldType(
"my_date",
IndexType.skippers(),
false,
DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER,
Resolution.MILLISECONDS,
null,
null,
Collections.emptyMap()
);
isFieldWithinRangeTestCase(ft);
}
public void testIsFieldWithinQueryDateNanosDocValueSkipper() throws IOException {
DateFieldType ft = new DateFieldType(
"my_date",
IndexType.skippers(),
false,
DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER,
Resolution.NANOSECONDS,
null,
null,
Collections.emptyMap()
);
isFieldWithinRangeTestCase(ft);
}
public void testIsFieldWithinQueryDocValueSkipperNotInAllSegments() throws IOException {
var ft = new DateFieldType(
"my_date",
IndexType.skippers(),
false,
DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER,
Resolution.NANOSECONDS,
null,
null,
Collections.emptyMap()
);
try (Directory dir = newDirectory()) {
try (IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null))) {
// Simulates one segment have no my_date field
LuceneDocument doc = new LuceneDocument();
doc.add(SortedNumericDocValuesField.indexedField("my_other_date", 123456789000L));
w.addDocument(doc);
w.flush();
doc = new LuceneDocument();
Field field = SortedNumericDocValuesField.indexedField("my_date", ft.parse("2015-10-12"));
doc.add(field);
w.addDocument(doc);
field.setLongValue(ft.parse("2016-04-03"));
w.addDocument(doc);
try (DirectoryReader reader = DirectoryReader.open(w)) {
DateMathParser alternateFormat = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.toDateMathParser();
doTestIsFieldWithinQuery(ft, reader, null, null);
doTestIsFieldWithinQuery(ft, reader, null, alternateFormat);
doTestIsFieldWithinQuery(ft, reader, ZoneOffset.UTC, null);
doTestIsFieldWithinQuery(ft, reader, ZoneOffset.UTC, alternateFormat);
}
}
}
}
public void isFieldWithinRangeTestCase(DateFieldType ft) throws IOException {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null));
LuceneDocument doc = new LuceneDocument();
Field field;
if (ft.indexType.hasDocValuesSkipper()) {
field = SortedNumericDocValuesField.indexedField("my_date", ft.parse("2015-10-12"));
} else {
field = new LongPoint("my_date", ft.parse("2015-10-12"));
}
doc.add(field);
w.addDocument(doc);
field.setLongValue(ft.parse("2016-04-03"));
w.addDocument(doc);
DirectoryReader reader = DirectoryReader.open(w);
DateMathParser alternateFormat = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.toDateMathParser();
doTestIsFieldWithinQuery(ft, reader, null, null);
doTestIsFieldWithinQuery(ft, reader, null, alternateFormat);
doTestIsFieldWithinQuery(ft, reader, ZoneOffset.UTC, null);
doTestIsFieldWithinQuery(ft, reader, ZoneOffset.UTC, alternateFormat);
QueryRewriteContext context = new QueryRewriteContext(parserConfig(), null, () -> nowInMillis);
// Fields with no value indexed.
DateFieldType ft2 = new DateFieldType("my_date2");
assertEquals(Relation.DISJOINT, ft2.isFieldWithinQuery(reader, "2015-10-09", "2016-01-02", false, false, null, null, context));
IOUtils.close(reader, w, dir);
}
private void doTestIsFieldWithinQuery(DateFieldType ft, DirectoryReader reader, ZoneId zone, DateMathParser alternateFormat)
throws IOException {
QueryRewriteContext context = new QueryRewriteContext(parserConfig(), null, () -> nowInMillis);
assertEquals(
Relation.INTERSECTS,
ft.isFieldWithinQuery(reader, "2015-10-09", "2016-01-02", randomBoolean(), randomBoolean(), zone, null, context)
);
assertEquals(
Relation.INTERSECTS,
ft.isFieldWithinQuery(reader, "2016-01-02", "2016-06-20", randomBoolean(), randomBoolean(), zone, null, context)
);
assertEquals(
Relation.INTERSECTS,
ft.isFieldWithinQuery(reader, "2016-01-02", "2016-02-12", randomBoolean(), randomBoolean(), zone, null, context)
);
assertEquals(
Relation.DISJOINT,
ft.isFieldWithinQuery(reader, "2014-01-02", "2015-02-12", randomBoolean(), randomBoolean(), zone, null, context)
);
assertEquals(
Relation.DISJOINT,
ft.isFieldWithinQuery(reader, "2016-05-11", "2016-08-30", randomBoolean(), randomBoolean(), zone, null, context)
);
assertEquals(
Relation.WITHIN,
ft.isFieldWithinQuery(reader, "2015-09-25", "2016-05-29", randomBoolean(), randomBoolean(), zone, null, context)
);
assertEquals(Relation.WITHIN, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03", true, true, zone, null, context));
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03", false, false, zone, null, context));
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03", false, true, zone, null, context));
assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03", true, false, zone, null, context));
// Bad dates
assertThrows(
ElasticsearchParseException.class,
() -> ft.isFieldWithinQuery(reader, "2015-00-01", "2016-04-03", randomBoolean(), randomBoolean(), zone, null, context)
);
assertThrows(
ElasticsearchParseException.class,
() -> ft.isFieldWithinQuery(reader, "2015-01-01", "2016-04-00", randomBoolean(), randomBoolean(), zone, null, context)
);
assertThrows(
ElasticsearchParseException.class,
() -> ft.isFieldWithinQuery(reader, "2015-22-01", "2016-04-00", randomBoolean(), randomBoolean(), zone, null, context)
);
assertThrows(
ElasticsearchParseException.class,
() -> ft.isFieldWithinQuery(reader, "2015-01-01", "2016-04-45", randomBoolean(), randomBoolean(), zone, null, context)
);
assertThrows(
ElasticsearchParseException.class,
() -> ft.isFieldWithinQuery(reader, "2015-01-01", "2016-04-01T25:00:00", randomBoolean(), randomBoolean(), zone, null, context)
);
if (ft.resolution().equals(Resolution.NANOSECONDS)) {
assertThrows(
IllegalArgumentException.class,
() -> ft.isFieldWithinQuery(reader, "-2016-04-01", "2016-04-01", randomBoolean(), randomBoolean(), zone, null, context)
);
assertThrows(
IllegalArgumentException.class,
() -> ft.isFieldWithinQuery(
reader,
"9223372036854775807",
"2016-04-01",
randomBoolean(),
randomBoolean(),
zone,
null,
context
)
);
}
}
public void testValueFormat() {
MappedFieldType ft = new DateFieldType("field");
long instant = DateFormatters.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse("2015-10-12T14:10:55"))
.toInstant()
.toEpochMilli();
assertEquals("2015-10-12T14:10:55.000Z", ft.docValueFormat(null, ZoneOffset.UTC).format(instant));
assertEquals("2015-10-12T15:10:55.000+01:00", ft.docValueFormat(null, ZoneOffset.ofHours(1)).format(instant));
assertEquals("2015", new DateFieldType("field").docValueFormat("YYYY", ZoneOffset.UTC).format(instant));
assertEquals(instant, ft.docValueFormat(null, ZoneOffset.UTC).parseLong("2015-10-12T14:10:55", false, null));
assertEquals(instant + 999, ft.docValueFormat(null, ZoneOffset.UTC).parseLong("2015-10-12T14:10:55", true, null));
long i = DateFormatters.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse("2015-10-13")).toInstant().toEpochMilli();
assertEquals(i - 1, ft.docValueFormat(null, ZoneOffset.UTC).parseLong("2015-10-12||/d", true, null));
}
public void testValueForSearch() {
MappedFieldType ft = new DateFieldType("field");
String date = "2015-10-12T12:09:55.000Z";
long instant = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis(date);
assertEquals(date, ft.valueForDisplay(instant));
}
/**
* If the term field is a string of date-time format with exact seconds (no sub-seconds), any data within a 1second range will match.
*/
public void testTermQuery() {
SearchExecutionContext context = prepareIndexForTermQuery();
MappedFieldType ft = new DateFieldType("field");
String date = "2015-10-12T14:10:55";
long instant = DateFormatters.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date)).toInstant().toEpochMilli();
Query expected = new IndexOrDocValuesQuery(
LongPoint.newRangeQuery("field", instant, instant + 999),
SortedNumericDocValuesField.newSlowRangeQuery("field", instant, instant + 999)
);
assertEquals(expected, ft.termQuery(date, context));
ft = new DateFieldType("field", false);
expected = SortedNumericDocValuesField.newSlowRangeQuery("field", instant, instant + 999);
assertEquals(expected, ft.termQuery(date, context));
assertIndexUnsearchable(Resolution.MILLISECONDS, (unsearchable) -> unsearchable.termQuery(date, context));
}
/**
* If the term field is a string of date-time format with sub-seconds, only data with exact ms precision will match.
*/
public void testTermQuerySubseconds() {
SearchExecutionContext context = prepareIndexForTermQuery();
MappedFieldType ft = new DateFieldType("field");
String date = "2015-10-12T14:10:55.01";
long instant = DateFormatters.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date)).toInstant().toEpochMilli();
Query expected = new IndexOrDocValuesQuery(
LongPoint.newRangeQuery("field", instant, instant),
SortedNumericDocValuesField.newSlowRangeQuery("field", instant, instant)
);
assertEquals(expected, ft.termQuery(date, context));
ft = new DateFieldType("field", false);
expected = SortedNumericDocValuesField.newSlowRangeQuery("field", instant, instant);
assertEquals(expected, ft.termQuery(date, context));
assertIndexUnsearchable(Resolution.MILLISECONDS, (unsearchable) -> unsearchable.termQuery(date, context));
}
/**
* If the term field is a string of the long value (ms since epoch), only data with exact ms precision will match.
*/
public void testTermQueryMillis() {
SearchExecutionContext context = prepareIndexForTermQuery();
MappedFieldType ft = new DateFieldType("field");
String date = "2015-10-12T14:10:55";
long instant = DateFormatters.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date)).toInstant().toEpochMilli();
Query expected = new IndexOrDocValuesQuery(
LongPoint.newRangeQuery("field", instant, instant),
SortedNumericDocValuesField.newSlowRangeQuery("field", instant, instant)
);
assertEquals(expected, ft.termQuery(instant, context));
ft = new DateFieldType("field", false);
expected = SortedNumericDocValuesField.newSlowRangeQuery("field", instant, instant);
assertEquals(expected, ft.termQuery(instant, context));
assertIndexUnsearchable(Resolution.MILLISECONDS, (unsearchable) -> unsearchable.termQuery(instant, context));
}
/**
* This query has similar behaviour to passing a String containing a long to termQuery, only data with exact ms precision will match.
*/
public void testEqualityQuery() {
SearchExecutionContext context = prepareIndexForTermQuery();
DateFieldType ft = new DateFieldType("field");
String date = "2015-10-12T14:10:55";
long instant = DateFormatters.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date)).toInstant().toEpochMilli();
Query expected = new IndexOrDocValuesQuery(
LongPoint.newRangeQuery("field", instant, instant),
SortedNumericDocValuesField.newSlowRangeQuery("field", instant, instant)
);
assertEquals(expected, ft.equalityQuery(instant, context));
ft = new DateFieldType("field", false);
expected = SortedNumericDocValuesField.newSlowRangeQuery("field", instant, instant);
assertEquals(expected, ft.equalityQuery(instant, context));
assertIndexUnsearchable(Resolution.MILLISECONDS, (unsearchable) -> unsearchable.equalityQuery(instant, context));
}
/**
* This query supports passing a ns value, and only data with exact ns precision will match.
*/
public void testEqualityNanosQuery() {
SearchExecutionContext context = prepareIndexForTermQuery();
DateFieldType ft = new DateFieldType("field", Resolution.NANOSECONDS);
String date = "2015-10-12T14:10:55";
long instant = DateFormatters.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date)).toInstant().toEpochMilli() * 1000000L;
Query expected = new IndexOrDocValuesQuery(
LongPoint.newRangeQuery("field", instant, instant),
SortedNumericDocValuesField.newSlowRangeQuery("field", instant, instant)
);
assertEquals(expected, ft.equalityQuery(instant, context));
ft = new DateFieldType("field", false);
expected = SortedNumericDocValuesField.newSlowRangeQuery("field", instant, instant);
assertEquals(expected, ft.equalityQuery(instant, context));
assertIndexUnsearchable(Resolution.NANOSECONDS, (unsearchable) -> unsearchable.equalityQuery(instant, context));
}
/**
* If the term fields are strings of date-time format with exact seconds (no sub-seconds),
* the second field will be rounded up to the next second.
*/
public void testRangeQuery() throws IOException {
SearchExecutionContext context = prepareIndexForRangeQuery();
MappedFieldType ft = new DateFieldType("field");
String date1 = "2015-10-12T14:10:55";
String date2 = "2016-04-28T11:33:52";
long instant1 = DateFormatters.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date1)).toInstant().toEpochMilli();
long instant2 = DateFormatters.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date2)).toInstant().toEpochMilli() + 999;
Query expected = new IndexOrDocValuesQuery(
LongPoint.newRangeQuery("field", instant1, instant2),
SortedNumericDocValuesField.newSlowRangeQuery("field", instant1, instant2)
).rewrite(newSearcher(new MultiReader()));
assertEquals(expected, ft.rangeQuery(date1, date2, true, true, null, null, null, context).rewrite(newSearcher(new MultiReader())));
MappedFieldType ft2 = new DateFieldType("field", false);
Query expected2 = SortedNumericDocValuesField.newSlowRangeQuery("field", instant1, instant2);
assertEquals(expected2, ft2.rangeQuery(date1, date2, true, true, null, null, null, context));
instant1 = nowInMillis;
instant2 = instant1 + 100;
expected = new DateRangeIncludingNowQuery(
new IndexOrDocValuesQuery(
LongPoint.newRangeQuery("field", instant1, instant2),
SortedNumericDocValuesField.newSlowRangeQuery("field", instant1, instant2)
)
);
assertEquals(expected, ft.rangeQuery("now", instant2, true, true, null, null, null, context));
expected2 = new DateRangeIncludingNowQuery(SortedNumericDocValuesField.newSlowRangeQuery("field", instant1, instant2));
assertEquals(expected2, ft2.rangeQuery("now", instant2, true, true, null, null, null, context));
assertIndexUnsearchable(
Resolution.MILLISECONDS,
(unsearchable) -> unsearchable.rangeQuery(date1, date2, true, true, null, null, null, context)
);
}
/**
* If the term fields are strings of date-time format with sub-seconds,
* the lower and upper values will be matched inclusively to the ms.
*/
public void testRangeQuerySubseconds() throws IOException {
SearchExecutionContext context = prepareIndexForRangeQuery();
MappedFieldType ft = new DateFieldType("field");
String date1 = "2015-10-12T14:10:55.01";
String date2 = "2016-04-28T11:33:52.01";
long instant1 = DateFormatters.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date1)).toInstant().toEpochMilli();
long instant2 = DateFormatters.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date2)).toInstant().toEpochMilli();
Query expected = new IndexOrDocValuesQuery(
LongPoint.newRangeQuery("field", instant1, instant2),
SortedNumericDocValuesField.newSlowRangeQuery("field", instant1, instant2)
).rewrite(newSearcher(new MultiReader()));
assertEquals(expected, ft.rangeQuery(date1, date2, true, true, null, null, null, context).rewrite(newSearcher(new MultiReader())));
MappedFieldType ft2 = new DateFieldType("field", false);
Query expected2 = SortedNumericDocValuesField.newSlowRangeQuery("field", instant1, instant2);
assertEquals(expected2, ft2.rangeQuery(date1, date2, true, true, null, null, null, context));
instant1 = nowInMillis;
instant2 = instant1 + 100;
expected = new DateRangeIncludingNowQuery(
new IndexOrDocValuesQuery(
LongPoint.newRangeQuery("field", instant1, instant2),
SortedNumericDocValuesField.newSlowRangeQuery("field", instant1, instant2)
)
);
assertEquals(expected, ft.rangeQuery("now", instant2, true, true, null, null, null, context));
expected2 = new DateRangeIncludingNowQuery(SortedNumericDocValuesField.newSlowRangeQuery("field", instant1, instant2));
assertEquals(expected2, ft2.rangeQuery("now", instant2, true, true, null, null, null, context));
assertIndexUnsearchable(
Resolution.MILLISECONDS,
(unsearchable) -> unsearchable.rangeQuery(date1, date2, true, true, null, null, null, context)
);
}
/**
* If the term fields are strings of long ms, the lower and upper values will be matched inclusively to the ms.
*/
public void testRangeQueryMillis() throws IOException {
SearchExecutionContext context = prepareIndexForRangeQuery();
DateFieldType ft = new DateFieldType("field");
String date1 = "2015-10-12T14:10:55.01";
String date2 = "2016-04-28T11:33:52.01";
long instant1 = DateFormatters.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date1)).toInstant().toEpochMilli();
long instant2 = DateFormatters.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date2)).toInstant().toEpochMilli();
Query expected = new IndexOrDocValuesQuery(
LongPoint.newRangeQuery("field", instant1, instant2),
SortedNumericDocValuesField.newSlowRangeQuery("field", instant1, instant2)
).rewrite(newSearcher(new MultiReader()));
assertEquals(expected, ft.rangeQuery(instant1, instant2, true, true, context).rewrite(newSearcher(new MultiReader())));
DateFieldType ft2 = new DateFieldType("field", false);
Query expected2 = SortedNumericDocValuesField.newSlowRangeQuery("field", instant1, instant2);
assertEquals(expected2, ft2.rangeQuery(instant1, instant2, true, true, context));
assertIndexUnsearchable(
Resolution.MILLISECONDS,
(unsearchable) -> unsearchable.rangeQuery(instant1, instant2, true, true, context)
);
}
/**
* If the term fields are strings of long ns, the lower and upper values will be matched inclusively to the ns.
*/
public void testRangeQueryNanos() throws IOException {
SearchExecutionContext context = prepareIndexForRangeQuery();
DateFieldType ft = new DateFieldType("field", Resolution.NANOSECONDS);
String date1 = "2015-10-12T14:10:55.01";
String date2 = "2016-04-28T11:33:52.01";
long instant1 = DateFormatters.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date1)).toInstant().toEpochMilli() * 1000000L;
long instant2 = DateFormatters.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date2)).toInstant().toEpochMilli() * 1000000L;
Query expected = new IndexOrDocValuesQuery(
LongPoint.newRangeQuery("field", instant1, instant2),
SortedNumericDocValuesField.newSlowRangeQuery("field", instant1, instant2)
).rewrite(newSearcher(new MultiReader()));
assertEquals(expected, ft.rangeQuery(instant1, instant2, true, true, context).rewrite(newSearcher(new MultiReader())));
DateFieldType ft2 = new DateFieldType("field", false, Resolution.NANOSECONDS);
Query expected2 = SortedNumericDocValuesField.newSlowRangeQuery("field", instant1, instant2);
assertEquals(expected2, ft2.rangeQuery(date1, date2, true, true, null, null, null, context));
assertIndexUnsearchable(Resolution.NANOSECONDS, (unsearchable) -> unsearchable.rangeQuery(instant1, instant2, true, true, context));
}
public void testRangeQueryWithIndexSort() {
Settings settings = indexSettings(IndexVersion.current(), 1, 1).put("index.sort.field", "field").build();
IndexMetadata indexMetadata = new IndexMetadata.Builder("index").settings(settings).build();
IndexSettings indexSettings = new IndexSettings(indexMetadata, settings);
SearchExecutionContext context = SearchExecutionContextHelper.createSimple(indexSettings, parserConfig(), writableRegistry());
MappedFieldType ft = new DateFieldType("field");
String date1 = "2015-10-12T14:10:55";
String date2 = "2016-04-28T11:33:52";
long instant1 = DateFormatters.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date1)).toInstant().toEpochMilli();
long instant2 = DateFormatters.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(date2)).toInstant().toEpochMilli() + 999;
Query pointQuery = LongPoint.newRangeQuery("field", instant1, instant2);
Query dvQuery = SortedNumericDocValuesField.newSlowRangeQuery("field", instant1, instant2);
Query expected = new IndexSortSortedNumericDocValuesRangeQuery(
"field",
instant1,
instant2,
new IndexOrDocValuesQuery(pointQuery, dvQuery)
);
assertEquals(expected, ft.rangeQuery(date1, date2, true, true, null, null, null, context));
ft = new DateFieldType("field", false);
expected = new IndexSortSortedNumericDocValuesRangeQuery("field", instant1, instant2, dvQuery);
assertEquals(expected, ft.rangeQuery(date1, date2, true, true, null, null, null, context));
}
public void testDateNanoDocValues() throws IOException {
// Create an index with some docValues
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null));
LuceneDocument doc = new LuceneDocument();
NumericDocValuesField docValuesField = new NumericDocValuesField("my_date", 1444608000000L);
doc.add(docValuesField);
w.addDocument(doc);
docValuesField.setLongValue(1459641600000L);
w.addDocument(doc);
// Create the doc values reader
SortedNumericIndexFieldData fieldData = new SortedNumericIndexFieldData(
"my_date",
IndexNumericFieldData.NumericType.DATE_NANOSECONDS,
CoreValuesSourceType.DATE,
DateNanosDocValuesField::new,
IndexType.NONE
);
// Read index and check the doc values
DirectoryReader reader = DirectoryReader.open(w);
assertTrue(reader.leaves().size() > 0);
LeafNumericFieldData a = fieldData.load(reader.leaves().get(0).reader().getContext());
SortedNumericLongValues docValues = a.getLongValues();
assertTrue(docValues.advanceExact(0));
assertTrue(docValues.advanceExact(1));
reader.close();
w.close();
dir.close();
}
private static DateFieldType fieldType(Resolution resolution, String format, String nullValue) {
DateFormatter formatter = DateFormatter.forPattern(format);
return new DateFieldType(
"field",
IndexType.points(true, true),
false,
true,
formatter,
resolution,
nullValue,
null,
Collections.emptyMap()
);
}
public void testFetchSourceValue() throws IOException {
MappedFieldType fieldType = new DateFieldType("field", Resolution.MILLISECONDS);
String date = "2020-05-15T21:33:02.000Z";
assertEquals(List.of(date), fetchSourceValue(fieldType, date));
assertEquals(List.of(date), fetchSourceValue(fieldType, 1589578382000L));
MappedFieldType fieldWithFormat = fieldType(Resolution.MILLISECONDS, "yyyy/MM/dd||epoch_millis", null);
String dateInFormat = "1990/12/29";
assertEquals(List.of(dateInFormat), fetchSourceValue(fieldWithFormat, dateInFormat));
assertEquals(List.of(dateInFormat), fetchSourceValue(fieldWithFormat, 662428800000L));
MappedFieldType millis = fieldType(Resolution.MILLISECONDS, "epoch_millis", null);
String dateInMillis = "662428800000";
assertEquals(List.of(dateInMillis), fetchSourceValue(millis, dateInMillis));
assertEquals(List.of(dateInMillis), fetchSourceValue(millis, 662428800000L));
String nullValueDate = "2020-05-15T21:33:02.000Z";
MappedFieldType nullFieldType = fieldType(Resolution.MILLISECONDS, "strict_date_time", nullValueDate);
assertEquals(List.of(nullValueDate), fetchSourceValue(nullFieldType, null));
}
public void testParseSourceValueWithFormat() throws IOException {
MappedFieldType mapper = fieldType(Resolution.NANOSECONDS, "strict_date_time", "1970-12-29T00:00:00.000Z");
String date = "1990-12-29T00:00:00.000Z";
assertEquals(List.of("1990/12/29"), fetchSourceValue(mapper, date, "yyyy/MM/dd"));
assertEquals(List.of("662428800000"), fetchSourceValue(mapper, date, "epoch_millis"));
assertEquals(List.of("1970/12/29"), fetchSourceValue(mapper, null, "yyyy/MM/dd"));
}
public void testParseSourceValueNanos() throws IOException {
MappedFieldType mapper = fieldType(Resolution.NANOSECONDS, "strict_date_time||epoch_millis", null);
String date = "2020-05-15T21:33:02.123456789Z";
assertEquals(List.of("2020-05-15T21:33:02.123456789Z"), fetchSourceValue(mapper, date));
assertEquals(List.of("2020-05-15T21:33:02.123Z"), fetchSourceValue(mapper, 1589578382123L));
String nullValueDate = "2020-05-15T21:33:02.123456789Z";
MappedFieldType nullValueMapper = fieldType(Resolution.NANOSECONDS, "strict_date_time||epoch_millis", nullValueDate);
assertEquals(List.of(nullValueDate), fetchSourceValue(nullValueMapper, null));
}
private SearchExecutionContext prepareIndexForTermQuery() {
Settings indexSettings = indexSettings(IndexVersion.current(), 1, 1).build();
return SearchExecutionContextHelper.createSimple(
new IndexSettings(IndexMetadata.builder("foo").settings(indexSettings).build(), indexSettings),
parserConfig(),
writableRegistry()
);
}
private SearchExecutionContext prepareIndexForRangeQuery() {
Settings indexSettings = indexSettings(IndexVersion.current(), 1, 1).build();
return new SearchExecutionContext(
0,
0,
new IndexSettings(IndexMetadata.builder("foo").settings(indexSettings).build(), indexSettings),
null,
null,
null,
MappingLookup.EMPTY,
null,
null,
parserConfig(),
writableRegistry(),
null,
null,
() -> nowInMillis,
null,
null,
() -> true,
null,
Collections.emptyMap(),
MapperMetrics.NOOP
);
}
private void assertIndexUnsearchable(Resolution resolution, ThrowingConsumer<DateFieldType> runnable) {
DateFieldType unsearchable = new DateFieldType(
"field",
IndexType.NONE,
false,
false,
DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER,
resolution,
null,
null,
Collections.emptyMap()
);
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> runnable.accept(unsearchable));
assertEquals("Cannot search on field [field] since it is not indexed nor has doc values.", e.getMessage());
}
}
|
DateFieldTypeTests
|
java
|
google__guava
|
guava/src/com/google/common/util/concurrent/AbstractService.java
|
{
"start": 5717,
"end": 19050
}
|
class ____ extends Guard {
IsStoppedGuard() {
super(AbstractService.this.monitor);
}
@Override
public boolean isSatisfied() {
return state().compareTo(TERMINATED) >= 0;
}
}
/** The listeners to notify during a state transition. */
private final ListenerCallQueue<Listener> listeners = new ListenerCallQueue<>();
/**
* The current state of the service. This should be written with the lock held but can be read
* without it because it is an immutable object in a volatile field. This is desirable so that
* methods like {@link #state}, {@link #failureCause} and notably {@link #toString} can be run
* without grabbing the lock.
*
* <p>To update this field correctly the lock must be held to guarantee that the state is
* consistent.
*/
private volatile StateSnapshot snapshot = new StateSnapshot(NEW);
/** Constructor for use by subclasses. */
protected AbstractService() {}
/**
* This method is called by {@link #startAsync} to initiate service startup. The invocation of
* this method should cause a call to {@link #notifyStarted()}, either during this method's run,
* or after it has returned. If startup fails, the invocation should cause a call to {@link
* #notifyFailed(Throwable)} instead.
*
* <p>This method should return promptly; prefer to do work on a different thread where it is
* convenient. It is invoked exactly once on service startup, even when {@link #startAsync} is
* called multiple times.
*/
@ForOverride
protected abstract void doStart();
/**
* This method should be used to initiate service shutdown. The invocation of this method should
* cause a call to {@link #notifyStopped()}, either during this method's run, or after it has
* returned. If shutdown fails, the invocation should cause a call to {@link
* #notifyFailed(Throwable)} instead.
*
* <p>This method should return promptly; prefer to do work on a different thread where it is
* convenient. It is invoked exactly once on service shutdown, even when {@link #stopAsync} is
* called multiple times.
*
* <p>If {@link #stopAsync} is called on a {@link State#STARTING} service, this method is not
* invoked immediately. Instead, it will be deferred until after the service is {@link
* State#RUNNING}. Services that need to cancel startup work can override {@link #doCancelStart}.
*/
@ForOverride
protected abstract void doStop();
/**
* This method is called by {@link #stopAsync} when the service is still starting (i.e. {@link
* #startAsync} has been called but {@link #notifyStarted} has not). Subclasses can override the
* method to cancel pending work and then call {@link #notifyStopped} to stop the service.
*
* <p>This method should return promptly; prefer to do work on a different thread where it is
* convenient. It is invoked exactly once on service shutdown, even when {@link #stopAsync} is
* called multiple times.
*
* <p>When this method is called {@link #state()} will return {@link State#STOPPING}, which is the
* external state observable by the caller of {@link #stopAsync}.
*
* @since 27.0
*/
@ForOverride
protected void doCancelStart() {}
@CanIgnoreReturnValue
@Override
public final Service startAsync() {
if (monitor.enterIf(isStartable)) {
try {
snapshot = new StateSnapshot(STARTING);
enqueueStartingEvent();
doStart();
} catch (Throwable startupFailure) {
restoreInterruptIfIsInterruptedException(startupFailure);
notifyFailed(startupFailure);
} finally {
monitor.leave();
dispatchListenerEvents();
}
} else {
throw new IllegalStateException("Service " + this + " has already been started");
}
return this;
}
@CanIgnoreReturnValue
@Override
public final Service stopAsync() {
if (monitor.enterIf(isStoppable)) {
try {
State previous = state();
switch (previous) {
case NEW:
snapshot = new StateSnapshot(TERMINATED);
enqueueTerminatedEvent(NEW);
break;
case STARTING:
snapshot = new StateSnapshot(STARTING, true, null);
enqueueStoppingEvent(STARTING);
doCancelStart();
break;
case RUNNING:
snapshot = new StateSnapshot(STOPPING);
enqueueStoppingEvent(RUNNING);
doStop();
break;
case STOPPING:
case TERMINATED:
case FAILED:
// These cases are impossible due to the if statement above.
throw new AssertionError("isStoppable is incorrectly implemented, saw: " + previous);
}
} catch (Throwable shutdownFailure) {
restoreInterruptIfIsInterruptedException(shutdownFailure);
notifyFailed(shutdownFailure);
} finally {
monitor.leave();
dispatchListenerEvents();
}
}
return this;
}
@Override
public final void awaitRunning() {
monitor.enterWhenUninterruptibly(hasReachedRunning);
try {
checkCurrentState(RUNNING);
} finally {
monitor.leave();
}
}
/**
* @since 28.0
*/
@Override
public final void awaitRunning(Duration timeout) throws TimeoutException {
Service.super.awaitRunning(timeout);
}
@Override
public final void awaitRunning(long timeout, TimeUnit unit) throws TimeoutException {
if (monitor.enterWhenUninterruptibly(hasReachedRunning, timeout, unit)) {
try {
checkCurrentState(RUNNING);
} finally {
monitor.leave();
}
} else {
// It is possible due to races that we are currently in the expected state even though we
// timed out. e.g. if we weren't event able to grab the lock within the timeout we would never
// even check the guard. I don't think we care too much about this use case but it could lead
// to a confusing error message.
throw new TimeoutException("Timed out waiting for " + this + " to reach the RUNNING state.");
}
}
@Override
public final void awaitTerminated() {
monitor.enterWhenUninterruptibly(isStopped);
try {
checkCurrentState(TERMINATED);
} finally {
monitor.leave();
}
}
/**
* @since 28.0
*/
@Override
public final void awaitTerminated(Duration timeout) throws TimeoutException {
Service.super.awaitTerminated(timeout);
}
@Override
public final void awaitTerminated(long timeout, TimeUnit unit) throws TimeoutException {
if (monitor.enterWhenUninterruptibly(isStopped, timeout, unit)) {
try {
checkCurrentState(TERMINATED);
} finally {
monitor.leave();
}
} else {
// It is possible due to races that we are currently in the expected state even though we
// timed out. e.g. if we weren't event able to grab the lock within the timeout we would never
// even check the guard. I don't think we care too much about this use case but it could lead
// to a confusing error message.
throw new TimeoutException(
"Timed out waiting for "
+ this
+ " to reach a terminal state. "
+ "Current state: "
+ state());
}
}
/** Checks that the current state is equal to the expected state. */
@GuardedBy("monitor")
private void checkCurrentState(State expected) {
State actual = state();
if (actual != expected) {
if (actual == FAILED) {
// Handle this specially so that we can include the failureCause, if there is one.
throw new IllegalStateException(
"Expected the service " + this + " to be " + expected + ", but the service has FAILED",
failureCause());
}
throw new IllegalStateException(
"Expected the service " + this + " to be " + expected + ", but was " + actual);
}
}
/**
* Implementing classes should invoke this method once their service has started. It will cause
* the service to transition from {@link State#STARTING} to {@link State#RUNNING}.
*
* @throws IllegalStateException if the service is not {@link State#STARTING}.
*/
protected final void notifyStarted() {
monitor.enter();
try {
// We have to examine the internal state of the snapshot here to properly handle the stop
// while starting case.
if (snapshot.state != STARTING) {
IllegalStateException failure =
new IllegalStateException(
"Cannot notifyStarted() when the service is " + snapshot.state);
notifyFailed(failure);
throw failure;
}
if (snapshot.shutdownWhenStartupFinishes) {
snapshot = new StateSnapshot(STOPPING);
// We don't call listeners here because we already did that when we set the
// shutdownWhenStartupFinishes flag.
doStop();
} else {
snapshot = new StateSnapshot(RUNNING);
enqueueRunningEvent();
}
} finally {
monitor.leave();
dispatchListenerEvents();
}
}
/**
* Implementing classes should invoke this method once their service has stopped. It will cause
* the service to transition from {@link State#STARTING} or {@link State#STOPPING} to {@link
* State#TERMINATED}.
*
* @throws IllegalStateException if the service is not one of {@link State#STOPPING}, {@link
* State#STARTING}, or {@link State#RUNNING}.
*/
protected final void notifyStopped() {
monitor.enter();
try {
State previous = state();
switch (previous) {
case NEW:
case TERMINATED:
case FAILED:
throw new IllegalStateException("Cannot notifyStopped() when the service is " + previous);
case RUNNING:
case STARTING:
case STOPPING:
snapshot = new StateSnapshot(TERMINATED);
enqueueTerminatedEvent(previous);
break;
}
} finally {
monitor.leave();
dispatchListenerEvents();
}
}
/**
* Invoke this method to transition the service to the {@link State#FAILED}. The service will
* <b>not be stopped</b> if it is running. Invoke this method when a service has failed critically
* or otherwise cannot be started nor stopped.
*/
protected final void notifyFailed(Throwable cause) {
checkNotNull(cause);
monitor.enter();
try {
State previous = state();
switch (previous) {
case NEW:
case TERMINATED:
throw new IllegalStateException("Failed while in state:" + previous, cause);
case RUNNING:
case STARTING:
case STOPPING:
snapshot = new StateSnapshot(FAILED, false, cause);
enqueueFailedEvent(previous, cause);
break;
case FAILED:
// Do nothing
break;
}
} finally {
monitor.leave();
dispatchListenerEvents();
}
}
@Override
public final boolean isRunning() {
return state() == RUNNING;
}
@Override
public final State state() {
return snapshot.externalState();
}
/**
* @since 14.0
*/
@Override
public final Throwable failureCause() {
return snapshot.failureCause();
}
/**
* @since 13.0
*/
@Override
public final void addListener(Listener listener, Executor executor) {
listeners.addListener(listener, executor);
}
@Override
public String toString() {
return getClass().getSimpleName() + " [" + state() + "]";
}
/**
* Attempts to execute all the listeners in {@link #listeners} while not holding the {@link
* #monitor}.
*/
private void dispatchListenerEvents() {
if (!monitor.isOccupiedByCurrentThread()) {
listeners.dispatch();
}
}
private void enqueueStartingEvent() {
listeners.enqueue(STARTING_EVENT);
}
private void enqueueRunningEvent() {
listeners.enqueue(RUNNING_EVENT);
}
private void enqueueStoppingEvent(State from) {
if (from == State.STARTING) {
listeners.enqueue(STOPPING_FROM_STARTING_EVENT);
} else if (from == State.RUNNING) {
listeners.enqueue(STOPPING_FROM_RUNNING_EVENT);
} else {
throw new AssertionError();
}
}
private void enqueueTerminatedEvent(State from) {
switch (from) {
case NEW:
listeners.enqueue(TERMINATED_FROM_NEW_EVENT);
break;
case STARTING:
listeners.enqueue(TERMINATED_FROM_STARTING_EVENT);
break;
case RUNNING:
listeners.enqueue(TERMINATED_FROM_RUNNING_EVENT);
break;
case STOPPING:
listeners.enqueue(TERMINATED_FROM_STOPPING_EVENT);
break;
case TERMINATED:
case FAILED:
throw new AssertionError();
}
}
private void enqueueFailedEvent(State from, Throwable cause) {
// can't memoize this one due to the exception
listeners.enqueue(
new ListenerCallQueue.Event<Listener>() {
@Override
public void call(Listener listener) {
listener.failed(from, cause);
}
@Override
public String toString() {
return "failed({from = " + from + ", cause = " + cause + "})";
}
});
}
/**
* An immutable snapshot of the current state of the service. This
|
IsStoppedGuard
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/sql/dialect/oracle/ast/stmt/OracleMultiInsertStatement.java
|
{
"start": 3642,
"end": 4393
}
|
class ____ extends OracleSQLObjectImpl {
private SQLExpr when;
private InsertIntoClause then;
public SQLExpr getWhen() {
return when;
}
public void setWhen(SQLExpr when) {
this.when = when;
}
public InsertIntoClause getThen() {
return then;
}
public void setThen(InsertIntoClause then) {
this.then = then;
}
@Override
public void accept0(OracleASTVisitor visitor) {
if (visitor.visit(this)) {
acceptChild(visitor, when);
acceptChild(visitor, then);
}
visitor.endVisit(this);
}
}
public static
|
ConditionalInsertClauseItem
|
java
|
quarkusio__quarkus
|
integration-tests/hibernate-validator-resteasy-reactive/src/main/java/io/quarkus/it/hibernate/validator/HibernateValidatorTestResource.java
|
{
"start": 3856,
"end": 5699
}
|
class ____ {
private String name;
@Email
private String email;
private List<@Email String> additionalEmails;
@DecimalMin("0")
private Double score;
private Map<@Length(min = 3) String, List<@Email String>> categorizedEmails;
@Valid
private NestedBeanWithoutConstraints nestedBeanWithoutConstraints;
public MyBean(String name, String email, List<String> additionalEmails, Double score,
Map<String, List<String>> categorizedEmails) {
this.name = name;
this.email = email;
this.additionalEmails = additionalEmails;
this.score = score;
this.categorizedEmails = categorizedEmails;
this.nestedBeanWithoutConstraints = new NestedBeanWithoutConstraints();
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getEmail() {
return email;
}
public void setEmail(String email) {
this.email = email;
}
public List<String> getAdditionalEmails() {
return additionalEmails;
}
public void setAdditionalEmails(List<String> additionalEmails) {
this.additionalEmails = additionalEmails;
}
public Double getScore() {
return score;
}
public void setScore(Double score) {
this.score = score;
}
public Map<String, List<String>> getCategorizedEmails() {
return categorizedEmails;
}
public void setCategorizedEmails(Map<String, List<String>> categorizedEmails) {
this.categorizedEmails = categorizedEmails;
}
}
private static
|
MyBean
|
java
|
apache__camel
|
core/camel-management-api/src/main/java/org/apache/camel/api/management/mbean/ComponentVerifierExtension.java
|
{
"start": 16364,
"end": 16527
}
|
class ____ extends ErrorAttribute implements GroupAttribute {
GroupErrorAttribute(String name) {
super(name);
}
}
}
|
GroupErrorAttribute
|
java
|
apache__flink
|
flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/extraction/ExtractionUtilsTest.java
|
{
"start": 1389,
"end": 7229
}
|
class ____ {
@Test
void testAutoboxing() {
assertThat(ExtractionUtils.isAssignable(int.class, Integer.class, Autoboxing.STRICT))
.isTrue();
// In strict autoboxing this is not allowed
assertThat(ExtractionUtils.isAssignable(Integer.class, int.class, Autoboxing.STRICT))
.isFalse();
assertThat(ExtractionUtils.isAssignable(Integer.class, int.class, Autoboxing.JVM)).isTrue();
assertThat(ExtractionUtils.isAssignable(Integer.class, Number.class, Autoboxing.STRICT))
.isTrue();
}
@Test
void testResolveParameters() {
List<Method> methods = ExtractionUtils.collectMethods(LongClass.class, "method");
Method method = methods.get(0);
Type longType =
ExtractionUtils.resolveVariableWithClassContext(
LongClass.class, method.getGenericParameterTypes()[0]);
Type futureType =
ExtractionUtils.resolveVariableWithClassContext(
LongClass.class, method.getGenericParameterTypes()[1]);
Type listOfFutures =
ExtractionUtils.resolveVariableWithClassContext(
LongClass.class, method.getGenericParameterTypes()[2]);
Type arrayType =
ExtractionUtils.resolveVariableWithClassContext(
LongClass.class, method.getGenericParameterTypes()[3]);
assertThat(longType).isEqualTo(Long.class);
assertThat(futureType).isInstanceOf(ParameterizedType.class);
assertThat(((ParameterizedType) futureType).getRawType())
.isEqualTo(CompletableFuture.class);
assertThat(((ParameterizedType) futureType).getActualTypeArguments()[0])
.isEqualTo(Long.class);
assertThat(listOfFutures).isInstanceOf(ParameterizedType.class);
assertThat(((ParameterizedType) listOfFutures).getRawType()).isEqualTo(List.class);
assertThat(((ParameterizedType) listOfFutures).getActualTypeArguments()[0])
.isInstanceOf(ParameterizedType.class);
ParameterizedType innerFuture =
((ParameterizedType)
((ParameterizedType) listOfFutures).getActualTypeArguments()[0]);
assertThat(innerFuture.getRawType()).isEqualTo(CompletableFuture.class);
assertThat(innerFuture.getActualTypeArguments()[0]).isEqualTo(Long.class);
assertThat(arrayType).isInstanceOf(GenericArrayType.class);
assertThat(((GenericArrayType) arrayType).getGenericComponentType()).isEqualTo(Long.class);
}
@Test
void testResolveParametersDeeper() {
List<Method> methods = ExtractionUtils.collectMethods(FutureClass.class, "method");
Method method = methods.get(0);
Type futureType =
ExtractionUtils.resolveVariableWithClassContext(
FutureClass.class, method.getGenericParameterTypes()[0]);
Type listOfFutures =
ExtractionUtils.resolveVariableWithClassContext(
FutureClass.class, method.getGenericParameterTypes()[1]);
assertThat(futureType).isInstanceOf(ParameterizedType.class);
assertThat(((ParameterizedType) futureType).getRawType())
.isEqualTo(CompletableFuture.class);
assertThat(((ParameterizedType) futureType).getActualTypeArguments()[0])
.isEqualTo(Long.class);
assertThat(listOfFutures).isInstanceOf(ParameterizedType.class);
assertThat(((ParameterizedType) listOfFutures).getRawType()).isEqualTo(List.class);
assertThat(((ParameterizedType) listOfFutures).getActualTypeArguments()[0])
.isInstanceOf(ParameterizedType.class);
ParameterizedType innerFuture =
((ParameterizedType)
((ParameterizedType) listOfFutures).getActualTypeArguments()[0]);
assertThat(innerFuture.getRawType()).isEqualTo(CompletableFuture.class);
assertThat(innerFuture.getActualTypeArguments()[0]).isEqualTo(Long.class);
}
@Test
void testExtractExecutableNamesWithMultiLocalVariableBlocks() {
List<String> expectedParameterNames =
ImmutableList.of("generic", "genericFuture", "listOfGenericFuture", "array");
// test the local variable is not initialized at first
List<Method> methods =
ExtractionUtils.collectMethods(
MultiLocalVariableWithoutInitializationClass.class, "method");
Method method = methods.get(0);
List<String> parameterNames = ExtractionUtils.extractExecutableNames(method);
assertThat(parameterNames).isEqualTo(expectedParameterNames);
// test the local variable is initialized at first
methods =
ExtractionUtils.collectMethods(
MultiLocalVariableBlocksWithInitializationClass.class, "method");
method = methods.get(0);
parameterNames = ExtractionUtils.extractExecutableNames(method);
assertThat(parameterNames).isEqualTo(expectedParameterNames);
}
@Test
void testExtractExecutableNamesWithParameterNameShadowed() {
List<String> expectedParameterNames =
ImmutableList.of(
"generic", "result", "genericFuture", "listOfGenericFuture", "array");
// test the local variable is not initialized at first
List<Method> methods =
ExtractionUtils.collectMethods(ParameterNameShadowedClass.class, "method");
Method method = methods.get(0);
List<String> parameterNames = ExtractionUtils.extractExecutableNames(method);
assertThat(parameterNames).isEqualTo(expectedParameterNames);
}
/** Test function. */
public static
|
ExtractionUtilsTest
|
java
|
spring-projects__spring-framework
|
spring-beans/src/main/java/org/springframework/beans/factory/xml/AbstractSingleBeanDefinitionParser.java
|
{
"start": 1134,
"end": 1873
}
|
class ____ you want to create a single bean definition
* from an arbitrarily complex XML element. You may wish to consider extending
* the {@link AbstractSimpleBeanDefinitionParser} when you want to create a
* single bean definition from a relatively simple custom XML element.
*
* <p>The resulting {@code BeanDefinition} will be automatically registered
* with the {@link org.springframework.beans.factory.support.BeanDefinitionRegistry}.
* Your job simply is to {@link #doParse parse} the custom XML {@link Element}
* into a single {@code BeanDefinition}.
*
* @author Rob Harrop
* @author Juergen Hoeller
* @author Rick Evans
* @since 2.0
* @see #getBeanClass
* @see #getBeanClassName
* @see #doParse
*/
public abstract
|
when
|
java
|
apache__commons-lang
|
src/test/java/org/apache/commons/lang3/builder/EqualsBuilderTest.java
|
{
"start": 1608,
"end": 2328
}
|
class ____ {
private final int a;
TestACanEqualB(final int a) {
this.a = a;
}
@Override
public boolean equals(final Object o) {
if (o == this) {
return true;
}
if (o instanceof TestACanEqualB) {
return this.a == ((TestACanEqualB) o).getA();
}
if (o instanceof TestBCanEqualA) {
return this.a == ((TestBCanEqualA) o).getB();
}
return false;
}
public int getA() {
return this.a;
}
@Override
public int hashCode() {
return a;
}
}
public static
|
TestACanEqualB
|
java
|
mybatis__mybatis-3
|
src/test/java/org/apache/ibatis/submitted/collection_in_constructor/CollectionInConstructorTest.java
|
{
"start": 1318,
"end": 10085
}
|
class ____ {
private static SqlSessionFactory sqlSessionFactory;
@BeforeAll
static void setUp() throws Exception {
// create an SqlSessionFactory
try (Reader reader = Resources
.getResourceAsReader("org/apache/ibatis/submitted/collection_in_constructor/mybatis-config.xml")) {
sqlSessionFactory = new SqlSessionFactoryBuilder().build(reader);
}
// populate in-memory database
BaseDataTest.runScript(sqlSessionFactory.getConfiguration().getEnvironment().getDataSource(),
"org/apache/ibatis/submitted/collection_in_constructor/CreateDB.sql");
}
@Test
void testSimple() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
Mapper mapper = sqlSession.getMapper(Mapper.class);
Store store = mapper.getAStore(1);
List<Aisle> aisles = store.getAisles();
Assertions.assertIterableEquals(
Arrays.asList(new Aisle(101, "Aisle 101"), new Aisle(102, "Aisle 102"), new Aisle(103, "Aisle 103")), aisles);
}
}
@Test
void testSimpleList() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
Mapper mapper = sqlSession.getMapper(Mapper.class);
List<Store> stores = mapper.getStores();
Assertions.assertIterableEquals(
Arrays.asList(new Aisle(101, "Aisle 101"), new Aisle(102, "Aisle 102"), new Aisle(103, "Aisle 103")),
stores.get(0).getAisles());
Assertions.assertTrue(stores.get(1).getAisles().isEmpty());
Assertions.assertIterableEquals(Arrays.asList(new Aisle(104, "Aisle 104"), new Aisle(105, "Aisle 105")),
stores.get(2).getAisles());
}
}
@Test
void shouldEmptyListBeReturned() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
Mapper mapper = sqlSession.getMapper(Mapper.class);
Assertions.assertTrue(mapper.getAStore(2).getAisles().isEmpty());
}
}
@Test
void testTwoLists() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
Mapper mapper = sqlSession.getMapper(Mapper.class);
Store2 store = mapper.getAStore2(1);
List<Clerk> clerks = store.getClerks();
List<Aisle> aisles = store.getAisles();
Assertions.assertIterableEquals(Arrays.asList(new Clerk(1001, "Clerk 1001"), new Clerk(1002, "Clerk 1002"),
new Clerk(1003, "Clerk 1003"), new Clerk(1004, "Clerk 1004"), new Clerk(1005, "Clerk 1005")), clerks);
Assertions.assertIterableEquals(
Arrays.asList(new Aisle(101, "Aisle 101"), new Aisle(102, "Aisle 102"), new Aisle(103, "Aisle 103")), aisles);
}
}
@Test
void testListOfStrings() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
Mapper mapper = sqlSession.getMapper(Mapper.class);
Store3 store = mapper.getAStore3(1);
List<String> aisleNames = store.getAisleNames();
Assertions.assertEquals(3, aisleNames.size());
Assertions.assertIterableEquals(Arrays.asList("Aisle 101", "Aisle 102", "Aisle 103"), aisleNames);
}
}
@Test
void testObjectWithBuilder() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
Mapper mapper = sqlSession.getMapper(Mapper.class);
Store4 store = mapper.getAStore4(1);
List<Aisle> aisles = store.getAisles();
Assertions.assertIterableEquals(
Arrays.asList(new Aisle(101, "Aisle 101"), new Aisle(102, "Aisle 102"), new Aisle(103, "Aisle 103")), aisles);
}
}
@Test
void testTwoListsOfSameResultMap() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
Mapper mapper = sqlSession.getMapper(Mapper.class);
Store5 store = mapper.getAStore5(1);
List<Clerk> clerks = store.getClerks();
List<Clerk> managers = store.getManagers();
Assertions.assertIterableEquals(Arrays.asList(new Clerk(1001, "Clerk 1001"), new Clerk(1002, "Clerk 1002"),
new Clerk(1003, "Clerk 1003"), new Clerk(1004, "Clerk 1004"), new Clerk(1005, "Clerk 1005")), clerks);
Assertions.assertIterableEquals(Arrays.asList(new Clerk(1002, "Clerk 1002"), new Clerk(1005, "Clerk 1005")),
managers);
}
}
@Disabled("Not sure if there is a need for this usage.")
@Test
void testPartiallyImmutableObject() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
Mapper mapper = sqlSession.getMapper(Mapper.class);
Store6 store = mapper.getAStore6(1);
List<Aisle> aisles = store.getAisles();
Assertions.assertEquals("Store 1", store.getName());
Assertions.assertEquals(3, aisles.size());
}
}
@Test
void testTwoListsOfString() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
Mapper mapper = sqlSession.getMapper(Mapper.class);
Store7 store = mapper.getAStore7(1);
List<String> aisleNames = store.getAisleNames();
List<String> clerkNames = store.getClerkNames();
Assertions.assertIterableEquals(Arrays.asList("Aisle 101", "Aisle 102", "Aisle 103"), aisleNames);
Assertions.assertIterableEquals(
Arrays.asList("Clerk 1001", "Clerk 1002", "Clerk 1003", "Clerk 1004", "Clerk 1005"), clerkNames);
}
}
@Test
void testCollectionArgWithTypeHandler() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
Mapper mapper = sqlSession.getMapper(Mapper.class);
Store8 store = mapper.getAStore8(1);
Assertions.assertIterableEquals(Arrays.asList("a", "b", "c"), store.getStrings());
}
}
@Test
void testCollectionArgWithNestedAndTypeHandler() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
Mapper mapper = sqlSession.getMapper(Mapper.class);
List<Store10> stores10 = mapper.getStores10();
assertThat(stores10).isNotNull().hasSize(3)
.extracting(Store10::getId, Store10::getName, store -> store.getClerks().size(), Store10::getStrings)
.containsExactly(tuple(1, "Store 1", 5, List.of("a", "b", "c", "1")),
tuple(2, "Store 2", 0, List.of("a", "b", "c", "2")), tuple(3, "Store 3", 0, List.of("a", "b", "c", "3")));
assertThat(stores10.get(0).getClerks()).extracting(Clerk::getId, Clerk::getName).containsExactly(
tuple(1001, "Clerk 1001"), tuple(1002, "Clerk 1002"), tuple(1003, "Clerk 1003"), tuple(1004, "Clerk 1004"),
tuple(1005, "Clerk 1005"));
}
}
@Test
void testImmutableNestedObjects() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
Mapper mapper = sqlSession.getMapper(Mapper.class);
Container container = mapper.getAContainer();
Assertions
.assertEquals(
Arrays.asList(
new Store(1, "Store 1",
Arrays.asList(new Aisle(101, "Aisle 101"), new Aisle(102, "Aisle 102"),
new Aisle(103, "Aisle 103"))),
new Store(2, "Store 2", Collections.emptyList()),
new Store(3, "Store 3", Arrays.asList(new Aisle(104, "Aisle 104"), new Aisle(105, "Aisle 105")))),
container.getStores());
}
}
@Test
void testImmutableNestedObjectsWithBadEquals() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
Mapper mapper = sqlSession.getMapper(Mapper.class);
List<Container1> containers = mapper.getContainers();
Container1 expectedContainer1 = new Container1();
expectedContainer1.setNum(1);
expectedContainer1.setType("storesWithClerks");
expectedContainer1.setStores(Arrays.asList(
new Store9(1, "Store 1",
Arrays.asList(new Clerk(1001, "Clerk 1001"), new Clerk(1003, "Clerk 1003"),
new Clerk(1004, "Clerk 1004"))),
new Store9(2, "Store 2", Arrays.asList()), new Store9(3, "Store 3", Arrays.asList())));
Container1 expectedContainer2 = new Container1();
expectedContainer2.setNum(1);
expectedContainer2.setType("storesWithManagers");
expectedContainer2.setStores(Arrays.asList(
new Store9(1, "Store 1", Arrays.asList(new Clerk(1002, "Clerk 1002"), new Clerk(1005, "Clerk 1005")))));
// cannot use direct equals as we overwrote it with a bad impl on purpose
assertThat(containers).isNotNull().hasSize(2);
assertContainer1(containers.get(0), expectedContainer1);
assertContainer1(containers.get(1), expectedContainer2);
}
}
private static void assertContainer1(Container1 container1, Container1 expectedContainer1) {
assertThat(container1).isNotNull().satisfies(c -> {
assertThat(c.getNum()).isEqualTo(expectedContainer1.getNum());
assertThat(c.getType()).isEqualTo(expectedContainer1.getType());
assertThat(c.getStores()).isEqualTo(expectedContainer1.getStores());
});
}
}
|
CollectionInConstructorTest
|
java
|
FasterXML__jackson-databind
|
src/main/java/tools/jackson/databind/cfg/HandlerInstantiator.java
|
{
"start": 3967,
"end": 4714
}
|
class ____ builder to construct (to allow
* implementation use information from other annotations)
* @param builderClass Class of builder instance to return
*
* @return TypeResolverBuilder instance to use
*/
public abstract TypeResolverBuilder<?> typeResolverBuilderInstance(MapperConfig<?> config,
Annotated annotated, Class<?> builderClass);
/**
* Method called to get an instance of TypeIdResolver of specified type.
*
* @param config Mapper configuration in effect (either SerializationConfig or
* DeserializationConfig, depending on when instance is being constructed)
* @param annotated annotated Element (Class, Method, Field) that
* had annotation defining
|
of
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/transaction/manager/LookUpTxMgrNonTransactionalTests.java
|
{
"start": 1727,
"end": 1862
}
|
class ____ {
@Bean
PlatformTransactionManager transactionManager() {
return new CallCountingTransactionManager();
}
}
}
|
Config
|
java
|
spring-projects__spring-boot
|
buildSrc/src/test/java/org/springframework/boot/build/bom/bomr/ReleaseScheduleTests.java
|
{
"start": 1401,
"end": 2354
}
|
class ____ {
private final RestTemplate rest = new RestTemplate();
private final ReleaseSchedule releaseSchedule = new ReleaseSchedule(this.rest);
private final MockRestServiceServer server = MockRestServiceServer.bindTo(this.rest).build();
@Test
void releasesBetween() {
this.server
.expect(requestTo("https://calendar.spring.io/releases?start=2023-09-01T00:00Z&end=2023-09-21T23:59Z"))
.andRespond(withSuccess(new ClassPathResource("releases.json"), MediaType.APPLICATION_JSON));
Map<String, List<Release>> releases = this.releaseSchedule
.releasesBetween(OffsetDateTime.parse("2023-09-01T00:00Z"), OffsetDateTime.parse("2023-09-21T23:59Z"));
assertThat(releases).hasSize(23);
assertThat(releases.get("Spring Framework")).hasSize(3);
assertThat(releases.get("Spring Boot")).hasSize(4);
assertThat(releases.get("Spring Modulith")).hasSize(1);
assertThat(releases.get("spring graphql")).hasSize(3);
}
}
|
ReleaseScheduleTests
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/test/java/org/springframework/boot/SpringApplicationAotProcessorTests.java
|
{
"start": 1235,
"end": 5494
}
|
class ____ {
private static final ApplicationInvoker invoker = new ApplicationInvoker();
@BeforeEach
void setup() {
invoker.clean();
}
@Test
void processApplicationInvokesMainMethod(@TempDir Path directory) {
String[] arguments = new String[] { "1", "2" };
SpringApplicationAotProcessor processor = new SpringApplicationAotProcessor(PublicMainMethod.class,
settings(directory), arguments);
processor.process();
assertThat(ApplicationInvoker.argsHolder).isEqualTo(arguments);
assertThat(ApplicationInvoker.postRunInvoked).isFalse();
}
@Test
void processApplicationWithMainMethodThatDoesNotRun(@TempDir Path directory) {
SpringApplicationAotProcessor processor = new SpringApplicationAotProcessor(BrokenApplication.class,
settings(directory), new String[0]);
assertThatIllegalStateException().isThrownBy(processor::process)
.withMessageContaining("Does it run a SpringApplication?");
assertThat(directory).isEmptyDirectory();
}
@Test
void invokeMainParsesArgumentsAndInvokesMainMethod(@TempDir Path directory) throws Exception {
String[] mainArguments = new String[] { PublicMainMethod.class.getName(),
directory.resolve("source").toString(), directory.resolve("resource").toString(),
directory.resolve("class").toString(), "com.example", "example", "1", "2" };
SpringApplicationAotProcessor.main(mainArguments);
assertThat(ApplicationInvoker.argsHolder).containsExactly("1", "2");
assertThat(ApplicationInvoker.postRunInvoked).isFalse();
}
@Test
void invokeMainParsesArgumentsAndInvokesPackagePrivateMainMethod(@TempDir Path directory) throws Exception {
String[] mainArguments = new String[] { PackagePrivateMainMethod.class.getName(),
directory.resolve("source").toString(), directory.resolve("resource").toString(),
directory.resolve("class").toString(), "com.example", "example", "1", "2" };
SpringApplicationAotProcessor.main(mainArguments);
assertThat(ApplicationInvoker.argsHolder).containsExactly("1", "2");
assertThat(ApplicationInvoker.postRunInvoked).isFalse();
}
@Test
void invokeMainParsesArgumentsAndInvokesParameterLessMainMethod(@TempDir Path directory) throws Exception {
String[] mainArguments = new String[] { PublicParameterlessMainMethod.class.getName(),
directory.resolve("source").toString(), directory.resolve("resource").toString(),
directory.resolve("class").toString(), "com.example", "example", "1", "2" };
SpringApplicationAotProcessor.main(mainArguments);
assertThat(ApplicationInvoker.argsHolder).isNull();
assertThat(ApplicationInvoker.postRunInvoked).isFalse();
}
@Test
void invokeMainParsesArgumentsAndInvokesPackagePrivateRunMethod(@TempDir Path directory) throws Exception {
String[] mainArguments = new String[] { PackagePrivateParameterlessMainMethod.class.getName(),
directory.resolve("source").toString(), directory.resolve("resource").toString(),
directory.resolve("class").toString(), "com.example", "example", "1", "2" };
SpringApplicationAotProcessor.main(mainArguments);
assertThat(ApplicationInvoker.argsHolder).isNull();
assertThat(ApplicationInvoker.postRunInvoked).isFalse();
}
@Test
void invokeMainParsesArgumentsAndInvokesRunMethodWithoutGroupId(@TempDir Path directory) throws Exception {
String[] mainArguments = new String[] { PublicMainMethod.class.getName(),
directory.resolve("source").toString(), directory.resolve("resource").toString(),
directory.resolve("class").toString(), "", "example", "1", "2" };
SpringApplicationAotProcessor.main(mainArguments);
assertThat(ApplicationInvoker.argsHolder).containsExactly("1", "2");
assertThat(ApplicationInvoker.postRunInvoked).isFalse();
}
@Test
void invokeMainWithMissingArguments() {
assertThatIllegalStateException().isThrownBy(() -> SpringApplicationAotProcessor.main(new String[] { "Test" }))
.withMessageContaining("Usage:");
}
private Settings settings(Path directory) {
return Settings.builder()
.sourceOutput(directory.resolve("source"))
.resourceOutput(directory.resolve("resource"))
.classOutput(directory.resolve("class"))
.groupId("com.example")
.artifactId("example")
.build();
}
@Configuration(proxyBeanMethods = false)
public static
|
SpringApplicationAotProcessorTests
|
java
|
alibaba__nacos
|
naming/src/main/java/com/alibaba/nacos/naming/healthcheck/extend/AbstractHealthCheckProcessorExtend.java
|
{
"start": 1072,
"end": 1567
}
|
class ____ implements BeanFactoryAware {
protected SingletonBeanRegistry registry;
/**
* Add HealthCheckProcessorV2.
*
* @param origin Origin Checker Type
* @return Extend Processor Type
*/
abstract Set<String> addProcessor(Set<String> origin);
protected String lowerFirstChar(String simpleName) {
if (StringUtils.isBlank(simpleName)) {
throw new IllegalArgumentException("can't find extend processor
|
AbstractHealthCheckProcessorExtend
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.