language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/FunctionalInterfaceMethodChangedTest.java
|
{
"start": 4951,
"end": 5350
}
|
interface ____ extends SuperFI, OtherSuperFI {
void subSam();
@Override
default void superSam() {
subSam();
}
@Override
default void otherSuperSam() {
subSam();
}
}
@FunctionalInterface
|
MultipleInheritanceSubFI
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest-jsonb/deployment/src/test/java/io/quarkus/resteasy/reactive/jsonb/deployment/test/VertxJsonEndpoint.java
|
{
"start": 344,
"end": 1111
}
|
class ____ {
@POST
@Path("jsonObject")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public JsonObject jsonObject(JsonObject input) {
JsonObject result = new JsonObject();
result.put("name", input.getString("name"));
result.put("age", 50);
result.put("nested", new JsonObject(Collections.singletonMap("foo", "bar")));
result.put("bools", new JsonArray().add(true));
return result;
}
@POST
@Path("jsonArray")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public JsonArray jsonArray(JsonArray input) {
JsonArray result = input.copy();
result.add("last");
return result;
}
}
|
VertxJsonEndpoint
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/usertype/UserType.java
|
{
"start": 4129,
"end": 4627
}
|
class ____ implements AttributeConverter<Period,String> {
* @Override
* public String convertToDatabaseColumn(Period period) {
* return period.toString();
* }
*
* @Override
* public Period convertToEntityAttribute(String string) {
* return Period.parse(string);
* }
* }
* </pre>
* <p>
* A {@code UserType} is much more useful when the persistent attribute
* type is mutable. For example:
* <p>
* <pre>
* public
|
PeriodToStringConverter
|
java
|
micronaut-projects__micronaut-core
|
inject-java/src/test/groovy/io/micronaut/inject/configproperties/MyConfigWithConstructorConfigurationInject.java
|
{
"start": 3064,
"end": 3270
}
|
class ____ {
String name;
public void setName(String name) {
this.name = name;
}
public String getName() {
return name;
}
}
@ConfigurationProperties("abc")
|
CI_OtherConfig
|
java
|
square__retrofit
|
retrofit-adapters/rxjava3/src/test/java/retrofit2/adapter/rxjava3/ObservableThrowingTest.java
|
{
"start": 1670,
"end": 10034
}
|
interface ____ {
@GET("/")
Observable<String> body();
@GET("/")
Observable<Response<String>> response();
@GET("/")
Observable<Result<String>> result();
}
private Service service;
@Before
public void setUp() {
Retrofit retrofit =
new Retrofit.Builder()
.baseUrl(server.url("/"))
.addConverterFactory(new StringConverterFactory())
.addCallAdapterFactory(RxJava3CallAdapterFactory.createSynchronous())
.build();
service = retrofit.create(Service.class);
}
@Test
public void bodyThrowingInOnNextDeliveredToError() {
server.enqueue(new MockResponse());
RecordingObserver<String> observer = subscriberRule.create();
final RuntimeException e = new RuntimeException();
service
.body()
.subscribe(
new ForwardingObserver<String>(observer) {
@Override
public void onNext(String value) {
throw e;
}
});
observer.assertError(e);
}
@Test
public void bodyThrowingInOnCompleteDeliveredToPlugin() {
server.enqueue(new MockResponse());
final AtomicReference<Throwable> throwableRef = new AtomicReference<>();
RxJavaPlugins.setErrorHandler(
throwable -> {
if (!throwableRef.compareAndSet(null, throwable)) {
throw Exceptions.propagate(throwable);
}
});
RecordingObserver<String> observer = subscriberRule.create();
final RuntimeException e = new RuntimeException();
service
.body()
.subscribe(
new ForwardingObserver<String>(observer) {
@Override
public void onComplete() {
throw e;
}
});
observer.assertAnyValue();
assertThat(throwableRef.get()).hasCauseThat().isSameInstanceAs(e);
}
@Test
public void bodyThrowingInOnErrorDeliveredToPlugin() {
server.enqueue(new MockResponse().setResponseCode(404));
final AtomicReference<Throwable> throwableRef = new AtomicReference<>();
RxJavaPlugins.setErrorHandler(
throwable -> {
if (!throwableRef.compareAndSet(null, throwable)) {
throw Exceptions.propagate(throwable);
}
});
RecordingObserver<String> observer = subscriberRule.create();
final AtomicReference<Throwable> errorRef = new AtomicReference<>();
final RuntimeException e = new RuntimeException();
service
.body()
.subscribe(
new ForwardingObserver<String>(observer) {
@Override
public void onError(Throwable throwable) {
if (!errorRef.compareAndSet(null, throwable)) {
throw Exceptions.propagate(throwable);
}
throw e;
}
});
//noinspection ThrowableResultOfMethodCallIgnored
CompositeException composite = (CompositeException) throwableRef.get();
assertThat(composite.getExceptions()).containsExactly(errorRef.get(), e);
}
@Test
public void responseThrowingInOnNextDeliveredToError() {
server.enqueue(new MockResponse());
RecordingObserver<Response<String>> observer = subscriberRule.create();
final RuntimeException e = new RuntimeException();
service
.response()
.subscribe(
new ForwardingObserver<Response<String>>(observer) {
@Override
public void onNext(Response<String> value) {
throw e;
}
});
observer.assertError(e);
}
@Test
public void responseThrowingInOnCompleteDeliveredToPlugin() {
server.enqueue(new MockResponse());
final AtomicReference<Throwable> throwableRef = new AtomicReference<>();
RxJavaPlugins.setErrorHandler(
throwable -> {
if (!throwableRef.compareAndSet(null, throwable)) {
throw Exceptions.propagate(throwable);
}
});
RecordingObserver<Response<String>> observer = subscriberRule.create();
final RuntimeException e = new RuntimeException();
service
.response()
.subscribe(
new ForwardingObserver<Response<String>>(observer) {
@Override
public void onComplete() {
throw e;
}
});
observer.assertAnyValue();
assertThat(throwableRef.get()).hasCauseThat().isSameInstanceAs(e);
}
@Test
public void responseThrowingInOnErrorDeliveredToPlugin() {
server.enqueue(new MockResponse().setSocketPolicy(DISCONNECT_AFTER_REQUEST));
final AtomicReference<Throwable> throwableRef = new AtomicReference<>();
RxJavaPlugins.setErrorHandler(
throwable -> {
if (!throwableRef.compareAndSet(null, throwable)) {
throw Exceptions.propagate(throwable);
}
});
RecordingObserver<Response<String>> observer = subscriberRule.create();
final AtomicReference<Throwable> errorRef = new AtomicReference<>();
final RuntimeException e = new RuntimeException();
service
.response()
.subscribe(
new ForwardingObserver<Response<String>>(observer) {
@Override
public void onError(Throwable throwable) {
if (!errorRef.compareAndSet(null, throwable)) {
throw Exceptions.propagate(throwable);
}
throw e;
}
});
//noinspection ThrowableResultOfMethodCallIgnored
CompositeException composite = (CompositeException) throwableRef.get();
assertThat(composite.getExceptions()).containsExactly(errorRef.get(), e);
}
@Test
public void resultThrowingInOnNextDeliveredToError() {
server.enqueue(new MockResponse());
RecordingObserver<Result<String>> observer = subscriberRule.create();
final RuntimeException e = new RuntimeException();
service
.result()
.subscribe(
new ForwardingObserver<Result<String>>(observer) {
@Override
public void onNext(Result<String> value) {
throw e;
}
});
observer.assertError(e);
}
@Test
public void resultThrowingInOnCompletedDeliveredToPlugin() {
server.enqueue(new MockResponse());
final AtomicReference<Throwable> throwableRef = new AtomicReference<>();
RxJavaPlugins.setErrorHandler(
throwable -> {
if (!throwableRef.compareAndSet(null, throwable)) {
throw Exceptions.propagate(throwable);
}
});
RecordingObserver<Result<String>> observer = subscriberRule.create();
final RuntimeException e = new RuntimeException();
service
.result()
.subscribe(
new ForwardingObserver<Result<String>>(observer) {
@Override
public void onComplete() {
throw e;
}
});
observer.assertAnyValue();
assertThat(throwableRef.get()).hasCauseThat().isSameInstanceAs(e);
}
@Test
public void resultThrowingInOnErrorDeliveredToPlugin() {
server.enqueue(new MockResponse());
final AtomicReference<Throwable> throwableRef = new AtomicReference<>();
RxJavaPlugins.setErrorHandler(
throwable -> {
if (!throwableRef.compareAndSet(null, throwable)) {
throw Exceptions.propagate(throwable);
}
});
RecordingObserver<Result<String>> observer = subscriberRule.create();
final RuntimeException first = new RuntimeException();
final RuntimeException second = new RuntimeException();
service
.result()
.subscribe(
new ForwardingObserver<Result<String>>(observer) {
@Override
public void onNext(Result<String> value) {
// The only way to trigger onError for a result is if onNext throws.
throw first;
}
@Override
public void onError(Throwable throwable) {
throw second;
}
});
//noinspection ThrowableResultOfMethodCallIgnored
CompositeException composite = (CompositeException) throwableRef.get();
assertThat(composite.getExceptions()).containsExactly(first, second);
}
private abstract static
|
Service
|
java
|
reactor__reactor-core
|
reactor-core/src/main/java/reactor/core/publisher/FluxCreate.java
|
{
"start": 1728,
"end": 3427
}
|
enum ____ {
PUSH_ONLY, PUSH_PULL
}
final Consumer<? super FluxSink<T>> source;
final OverflowStrategy backpressure;
final CreateMode createMode;
FluxCreate(Consumer<? super FluxSink<T>> source,
FluxSink.OverflowStrategy backpressure,
CreateMode createMode) {
this.source = Objects.requireNonNull(source, "source");
this.backpressure = Objects.requireNonNull(backpressure, "backpressure");
this.createMode = createMode;
}
static <T> BaseSink<T> createSink(CoreSubscriber<? super T> t,
OverflowStrategy backpressure) {
switch (backpressure) {
case IGNORE: {
return new IgnoreSink<>(t);
}
case ERROR: {
return new ErrorAsyncSink<>(t);
}
case DROP: {
return new DropAsyncSink<>(t);
}
case LATEST: {
return new LatestAsyncSink<>(t);
}
default: {
return new BufferAsyncSink<>(t, Queues.SMALL_BUFFER_SIZE);
}
}
}
@Override
public void subscribe(CoreSubscriber<? super T> actual) {
CoreSubscriber<? super T> wrapped =
Operators.restoreContextOnSubscriberIfAutoCPEnabled(this, actual);
BaseSink<T> sink = createSink(wrapped, backpressure);
wrapped.onSubscribe(sink);
try {
source.accept(
createMode == CreateMode.PUSH_PULL ? new SerializedFluxSink<>(sink) :
sink);
}
catch (Throwable ex) {
Exceptions.throwIfFatal(ex);
sink.error(Operators.onOperatorError(ex, wrapped.currentContext()));
}
}
@Override
public @Nullable Object scanUnsafe(Attr key) {
if (key == Attr.RUN_STYLE) return Attr.RunStyle.ASYNC;
return SourceProducer.super.scanUnsafe(key);
}
/**
* Serializes calls to onNext, onError and onComplete.
*
* @param <T> the value type
*/
static final
|
CreateMode
|
java
|
apache__commons-lang
|
src/main/java/org/apache/commons/lang3/SystemUtils.java
|
{
"start": 19327,
"end": 19907
}
|
class ____ loaded, the value will be out of sync with that System property.
* </p>
*
* @see SystemProperties#getJavaVersion()
* @since Java 1.1
*/
public static final String JAVA_VERSION = SystemProperties.getJavaVersion();
/**
* A constant for the System Property {@code java.vm.info}. Java Virtual Machine implementation info.
*
* <p>
* Defaults to {@code null} if the runtime does not have security access to read this property or the property does not exist.
* </p>
* <p>
* This value is initialized when the
|
is
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/sql/mysql/createTable/MySqlCreateTableTest161_storage_policy.java
|
{
"start": 278,
"end": 3229
}
|
class ____ extends MysqlTest {
public void test_0() throws Exception {
//for ADB
String sql = "create table event_log(log_id bigint, log_time datetime)\n" +
"distribute by hash(log_id)\n" +
"partition by value(date_format('%Y%m%d')) lifecycle 180\n" +
"storage_policy = 'HOT'";
MySqlCreateTableStatement stmt = (MySqlCreateTableStatement) SQLUtils.parseSingleMysqlStatement(sql);
assertEquals("CREATE TABLE event_log (\n" +
"\tlog_id bigint,\n" +
"\tlog_time datetime\n" +
") STORAGE_POLICY = 'HOT'\n" +
"DISTRIBUTE BY HASH(log_id)\n" +
"PARTITION BY VALUE (date_format('%Y%m%d'))\n" +
"LIFECYCLE 180", stmt.toString());
}
public void test_1() throws Exception {
//for ADB
String sql = "create table event_log(log_id bigint, log_time datetime)\n" +
"distribute by hash(log_id)\n" +
"partition by value(date_format('%Y%m%d')) lifecycle 180\n" +
"storage_policy = 'COLD';";
MySqlCreateTableStatement stmt = (MySqlCreateTableStatement) SQLUtils.parseSingleMysqlStatement(sql);
assertEquals("CREATE TABLE event_log (\n" +
"\tlog_id bigint,\n" +
"\tlog_time datetime\n" +
") STORAGE_POLICY = 'COLD'\n" +
"DISTRIBUTE BY HASH(log_id)\n" +
"PARTITION BY VALUE (date_format('%Y%m%d'))\n" +
"LIFECYCLE 180;", stmt.toString());
}
public void test_2() throws Exception {
//for ADB
String sql = "create table event_log(log_id bigint, log_time datetime)\n" +
"distribute by hash(log_id)\n" +
"partition by value(date_format('%Y%m%d')) lifecycle 180\n" +
"storage_policy = 'MIXED' hot_partition_count = 10;";
MySqlCreateTableStatement stmt = (MySqlCreateTableStatement) SQLUtils.parseSingleMysqlStatement(sql);
assertEquals("CREATE TABLE event_log (\n" +
"\tlog_id bigint,\n" +
"\tlog_time datetime\n" +
") STORAGE_POLICY = 'MIXED' HOT_PARTITION_COUNT = 10\n" +
"DISTRIBUTE BY HASH(log_id)\n" +
"PARTITION BY VALUE (date_format('%Y%m%d'))\n" +
"LIFECYCLE 180;", stmt.toString());
}
public void test_3() throws Exception {
//for ADB
String sql = "create table event_log(log_id bigint, log_time datetime)\n" +
"distribute by hash(log_id)\n" +
"partition by value(date_format('%Y%m%d')) lifecycle 180\n" +
"storage_policy = 'MIXED' hot_partition_count = 0.1;";
try {
SQLUtils.parseSingleMysqlStatement(sql);
fail();
} catch (ParserException e) {
}
}
}
|
MySqlCreateTableTest161_storage_policy
|
java
|
spring-projects__spring-framework
|
spring-test/src/main/java/org/springframework/test/context/web/WebAppConfiguration.java
|
{
"start": 1724,
"end": 1925
}
|
class ____.
*
* <p>This annotation may be used as a <em>meta-annotation</em> to create custom
* <em>composed annotations</em>.
*
* <p>This annotation will be inherited from an enclosing test
|
hierarchy
|
java
|
spring-projects__spring-framework
|
spring-jms/src/main/java/org/springframework/jms/core/JmsClient.java
|
{
"start": 5467,
"end": 6342
}
|
interface ____ {
/**
* Add a {@code MessageConverter} to use for converting payload objects to/from messages.
* Message converters will be considered in order of registration.
* @param messageConverter the message converter for payload objects
* @return this builder
*/
Builder messageConverter(MessageConverter messageConverter);
/**
* Add a {@link MessagePostProcessor} to use for modifying {@code Message} instances before sending.
* Post-processors will be executed in order of registration.
* @param messagePostProcessor the post-processor to use for outgoing messages
* @return this builder
*/
Builder messagePostProcessor(MessagePostProcessor messagePostProcessor);
/**
* Build the {@code JmsClient} instance.
*/
JmsClient build();
}
/**
* Common JMS send and receive operations with various settings.
*/
|
Builder
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/cfg/MultiTenancySettings.java
|
{
"start": 2232,
"end": 2823
}
|
class ____ implements {@code TenantSchemaMapper}.
* </ul>
* When a tenant schema mapper is set, {@link java.sql.Connection#setSchema(String)}}
* is called on newly acquired JDBC connections with the schema name returned by
* {@link org.hibernate.context.spi.TenantSchemaMapper#schemaName}.
* <p>
* By default, there is no tenant schema mapper.
*
* @see org.hibernate.context.spi.TenantSchemaMapper
* @see org.hibernate.boot.SessionFactoryBuilder#applyTenantSchemaMapper
*
* @since 7.1
*/
String MULTI_TENANT_SCHEMA_MAPPER = "hibernate.multi_tenant.schema_mapper";
}
|
that
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/junit/jupiter/nested/ConstructorInjectionTestMethodScopedExtensionContextNestedTests.java
|
{
"start": 3618,
"end": 4108
}
|
class ____ {
final String bar;
final int answer;
SpelConstructorParameterTests(@Autowired String bar, TestInfo testInfo, @Value("#{ 6 * 7 }") int answer) {
this.bar = bar;
this.answer = answer;
}
@Test
void nestedTest() {
assertThat(foo).isEqualTo("bar");
assertThat(bar).isEqualTo("bar");
assertThat(answer).isEqualTo(42);
}
}
// -------------------------------------------------------------------------
@Configuration
static
|
SpelConstructorParameterTests
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/annotation/method/configuration/ReactiveMethodSecurityConfigurationTests.java
|
{
"start": 14594,
"end": 14705
}
|
class ____ {
public void bar(String param) {
}
}
@EnableReactiveMethodSecurity
@Configuration
static
|
Foo
|
java
|
google__guice
|
core/test/com/google/inject/errors/ChildBindingAlreadySetErrorTest.java
|
{
"start": 1262,
"end": 1997
}
|
class ____ extends PrivateModule {
@Override
protected void configure() {
bind(Foo.class).to(SubFoo.class);
}
// Expose _something_ so that the child/private injector doesn't immediately get GC'd.
@Provides
@Named("ChildModule")
@Exposed
String provideExposed() {
return "a";
}
}
@Test
public void childBindingAlreadySetError() throws Exception {
Injector injector = Guice.createInjector(new ChildModule());
ConfigurationException exception =
assertThrows(ConfigurationException.class, () -> injector.getInstance(Foo.class));
assertGuiceErrorEqualsIgnoreLineNumber(
exception.getMessage(), "child_binding_already_set_error.txt");
}
static
|
ChildModule
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/sql/dialect/oracle/ast/stmt/OracleUpdateStatement.java
|
{
"start": 1083,
"end": 5328
}
|
class ____ extends SQLUpdateStatement implements OracleStatement, SQLReplaceable {
private List<SQLHint> hints = new ArrayList<SQLHint>(1);
private boolean only;
private String alias;
private final List<SQLExpr> returningInto = new ArrayList<SQLExpr>();
public OracleUpdateStatement() {
super(DbType.oracle);
}
public List<SQLExpr> getReturningInto() {
return returningInto;
}
public void addReturningInto(SQLExpr returningInto) {
if (returningInto == null) {
return;
}
returningInto.setParent(this);
this.returningInto.add(returningInto);
}
@Override
protected void accept0(SQLASTVisitor visitor) {
if (visitor instanceof OracleASTVisitor) {
accept0((OracleASTVisitor) visitor);
return;
}
super.accept(visitor);
}
@Override
public void accept0(OracleASTVisitor visitor) {
if (visitor.visit(this)) {
acceptChild(visitor, this.hints);
acceptChild(visitor, tableSource);
acceptChild(visitor, items);
acceptChild(visitor, where);
acceptChild(visitor, returning);
acceptChild(visitor, returningInto);
}
visitor.endVisit(this);
}
@Override
public boolean replace(SQLExpr expr, SQLExpr target) {
boolean replace = super.replace(expr, target);
if (replace) {
return true;
}
for (int i = returningInto.size() - 1; i >= 0; i--) {
if (returningInto.get(i) == expr) {
target.setParent(this);
returningInto.set(i, target);
return true;
}
}
return false;
}
public String getAlias() {
return this.alias;
}
public void setAlias(String alias) {
this.alias = alias;
}
public boolean isOnly() {
return this.only;
}
public void setOnly(boolean only) {
this.only = only;
}
public int getHintsSize() {
if (hints == null) {
return 0;
}
return hints.size();
}
public List<SQLHint> getHints() {
if (hints == null) {
hints = new ArrayList<SQLHint>(2);
}
return hints;
}
public void setHints(List<SQLHint> hints) {
this.hints = hints;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
OracleUpdateStatement that = (OracleUpdateStatement) o;
if (with != null ? !with.equals(that.getWith()) : that.getWith() != null) {
return false;
}
if (!items.equals(that.getItems())) {
return false;
}
if (where != null ? !where.equals(that.getWhere()) : that.getWhere() != null) {
return false;
}
if (from != null ? !from.equals(that.getFrom()) : that.getFrom() != null) {
return false;
}
if (hints != null ? !hints.equals(that.hints) : that.hints != null) {
return false;
}
if (tableSource != null ? !tableSource.equals(that.tableSource) : that.tableSource != null) {
return false;
}
if (returning != null ? !returning.equals(that.returning) : that.returning != null) {
return false;
}
return orderBy != null ? orderBy.equals(that.orderBy) : that.orderBy == null;
}
@Override
public int hashCode() {
int result = with != null ? with.hashCode() : 0;
result = 31 * result + items.hashCode();
result = 31 * result + (where != null ? where.hashCode() : 0);
result = 31 * result + (from != null ? from.hashCode() : 0);
result = 31 * result + (tableSource != null ? tableSource.hashCode() : 0);
result = 31 * result + (returning != null ? returning.hashCode() : 0);
result = 31 * result + (orderBy != null ? orderBy.hashCode() : 0);
result = 31 * result + (hints != null ? hints.hashCode() : 0);
return result;
}
}
|
OracleUpdateStatement
|
java
|
apache__flink
|
flink-rpc/flink-rpc-core/src/main/java/org/apache/flink/runtime/concurrent/ClassLoadingUtils.java
|
{
"start": 2588,
"end": 3130
}
|
class ____
*/
public static <T extends Throwable> void runWithContextClassLoader(
ThrowingRunnable<T> runnable, ClassLoader contextClassLoader) throws T {
try (TemporaryClassLoaderContext ignored =
TemporaryClassLoaderContext.of(contextClassLoader)) {
runnable.run();
}
}
/**
* Runs the given supplier in a {@link TemporaryClassLoaderContext} based on the given
* classloader.
*
* @param supplier supplier to run
* @param contextClassLoader
|
loader
|
java
|
google__guava
|
android/guava-tests/test/com/google/common/collect/IteratorsTest.java
|
{
"start": 3837,
"end": 21537
}
|
class ____ extends TestCase {
@J2ktIncompatible
@GwtIncompatible // suite
@AndroidIncompatible // test-suite builders
public static Test suite() {
TestSuite suite = new TestSuite(IteratorsTest.class.getSimpleName());
suite.addTest(testsForRemoveAllAndRetainAll());
suite.addTestSuite(IteratorsTest.class);
return suite;
}
@SuppressWarnings("DoNotCall")
public void testEmptyIterator() {
Iterator<String> iterator = emptyIterator();
assertFalse(iterator.hasNext());
assertThrows(NoSuchElementException.class, () -> iterator.next());
assertThrows(UnsupportedOperationException.class, () -> iterator.remove());
}
@SuppressWarnings("DoNotCall")
public void testEmptyListIterator() {
ListIterator<String> iterator = Iterators.emptyListIterator();
assertFalse(iterator.hasNext());
assertFalse(iterator.hasPrevious());
assertEquals(0, iterator.nextIndex());
assertEquals(-1, iterator.previousIndex());
assertThrows(NoSuchElementException.class, () -> iterator.next());
assertThrows(NoSuchElementException.class, () -> iterator.previous());
assertThrows(UnsupportedOperationException.class, () -> iterator.remove());
assertThrows(UnsupportedOperationException.class, () -> iterator.set("a"));
assertThrows(UnsupportedOperationException.class, () -> iterator.add("a"));
}
public void testEmptyModifiableIterator() {
Iterator<String> iterator = Iterators.emptyModifiableIterator();
assertFalse(iterator.hasNext());
assertThrows(NoSuchElementException.class, () -> iterator.next());
assertThrows(IllegalStateException.class, () -> iterator.remove());
}
public void testSize0() {
Iterator<String> iterator = emptyIterator();
assertEquals(0, Iterators.size(iterator));
}
public void testSize1() {
Iterator<Integer> iterator = singleton(0).iterator();
assertEquals(1, Iterators.size(iterator));
}
public void testSize_partiallyConsumed() {
Iterator<Integer> iterator = asList(1, 2, 3, 4, 5).iterator();
iterator.next();
iterator.next();
assertEquals(3, Iterators.size(iterator));
}
public void test_contains_nonnull_yes() {
Iterator<@Nullable String> set = Arrays.<@Nullable String>asList("a", null, "b").iterator();
assertTrue(Iterators.contains(set, "b"));
}
public void test_contains_nonnull_no() {
Iterator<String> set = asList("a", "b").iterator();
assertFalse(Iterators.contains(set, "c"));
}
public void test_contains_null_yes() {
Iterator<@Nullable String> set = Arrays.<@Nullable String>asList("a", null, "b").iterator();
assertTrue(Iterators.contains(set, null));
}
public void test_contains_null_no() {
Iterator<String> set = asList("a", "b").iterator();
assertFalse(Iterators.contains(set, null));
}
public void testGetOnlyElement_noDefault_valid() {
Iterator<String> iterator = singletonList("foo").iterator();
assertEquals("foo", getOnlyElement(iterator));
}
public void testGetOnlyElement_noDefault_empty() {
Iterator<String> iterator = emptyIterator();
assertThrows(NoSuchElementException.class, () -> getOnlyElement(iterator));
}
public void testGetOnlyElement_noDefault_moreThanOneLessThanFiveElements() {
Iterator<String> iterator = asList("one", "two").iterator();
IllegalArgumentException expected =
assertThrows(IllegalArgumentException.class, () -> getOnlyElement(iterator));
assertThat(expected).hasMessageThat().isEqualTo("expected one element but was: <one, two>");
}
public void testGetOnlyElement_noDefault_fiveElements() {
Iterator<String> iterator = asList("one", "two", "three", "four", "five").iterator();
IllegalArgumentException expected =
assertThrows(IllegalArgumentException.class, () -> getOnlyElement(iterator));
assertThat(expected)
.hasMessageThat()
.isEqualTo("expected one element but was: <one, two, three, four, five>");
}
public void testGetOnlyElement_noDefault_moreThanFiveElements() {
Iterator<String> iterator = asList("one", "two", "three", "four", "five", "six").iterator();
IllegalArgumentException expected =
assertThrows(IllegalArgumentException.class, () -> getOnlyElement(iterator));
assertThat(expected)
.hasMessageThat()
.isEqualTo("expected one element but was: <one, two, three, four, five, ...>");
}
public void testGetOnlyElement_withDefault_singleton() {
Iterator<String> iterator = singletonList("foo").iterator();
assertEquals("foo", getOnlyElement(iterator, "bar"));
}
public void testGetOnlyElement_withDefault_empty() {
Iterator<String> iterator = emptyIterator();
assertEquals("bar", getOnlyElement(iterator, "bar"));
}
public void testGetOnlyElement_withDefault_empty_null() {
Iterator<String> iterator = emptyIterator();
assertThat(Iterators.<@Nullable String>getOnlyElement(iterator, null)).isNull();
}
public void testGetOnlyElement_withDefault_two() {
Iterator<String> iterator = asList("foo", "bar").iterator();
IllegalArgumentException expected =
assertThrows(IllegalArgumentException.class, () -> getOnlyElement(iterator, "x"));
assertThat(expected).hasMessageThat().isEqualTo("expected one element but was: <foo, bar>");
}
@GwtIncompatible // Iterators.toArray(Iterator, Class)
public void testToArrayEmpty() {
Iterator<String> iterator = Collections.<String>emptyList().iterator();
String[] array = Iterators.toArray(iterator, String.class);
assertThat(array).isEmpty();
}
@GwtIncompatible // Iterators.toArray(Iterator, Class)
public void testToArraySingleton() {
Iterator<String> iterator = singletonList("a").iterator();
String[] array = Iterators.toArray(iterator, String.class);
assertThat(array).isEqualTo(new String[] {"a"});
}
@GwtIncompatible // Iterators.toArray(Iterator, Class)
public void testToArray() {
String[] sourceArray = new String[] {"a", "b", "c"};
Iterator<String> iterator = asList(sourceArray).iterator();
String[] newArray = Iterators.toArray(iterator, String.class);
assertThat(newArray).isEqualTo(sourceArray);
}
public void testFilterSimple() {
Iterator<String> unfiltered = Lists.newArrayList("foo", "bar").iterator();
Iterator<String> filtered = filter(unfiltered, equalTo("foo"));
List<String> expected = singletonList("foo");
List<String> actual = Lists.newArrayList(filtered);
assertEquals(expected, actual);
}
public void testFilterNoMatch() {
Iterator<String> unfiltered = Lists.newArrayList("foo", "bar").iterator();
Iterator<String> filtered = filter(unfiltered, Predicates.alwaysFalse());
List<String> expected = emptyList();
List<String> actual = Lists.newArrayList(filtered);
assertEquals(expected, actual);
}
public void testFilterMatchAll() {
Iterator<String> unfiltered = Lists.newArrayList("foo", "bar").iterator();
Iterator<String> filtered = filter(unfiltered, Predicates.alwaysTrue());
List<String> expected = Lists.newArrayList("foo", "bar");
List<String> actual = Lists.newArrayList(filtered);
assertEquals(expected, actual);
}
public void testFilterNothing() {
Iterator<String> unfiltered = Collections.<String>emptyList().iterator();
Iterator<String> filtered =
filter(
unfiltered,
new Predicate<String>() {
@Override
public boolean apply(String s) {
throw new AssertionFailedError("Should never be evaluated");
}
});
List<String> expected = emptyList();
List<String> actual = Lists.newArrayList(filtered);
assertEquals(expected, actual);
}
@GwtIncompatible // unreasonably slow
public void testFilterUsingIteratorTester() {
List<Integer> list = asList(1, 2, 3, 4, 5);
Predicate<Integer> isEven =
new Predicate<Integer>() {
@Override
public boolean apply(Integer integer) {
return integer % 2 == 0;
}
};
new IteratorTester<Integer>(
5, UNMODIFIABLE, asList(2, 4), IteratorTester.KnownOrder.KNOWN_ORDER) {
@Override
protected Iterator<Integer> newTargetIterator() {
return filter(list.iterator(), isEven);
}
}.test();
}
public void testAny() {
List<String> list = new ArrayList<>();
Predicate<String> predicate = equalTo("pants");
assertFalse(any(list.iterator(), predicate));
list.add("cool");
assertFalse(any(list.iterator(), predicate));
list.add("pants");
assertTrue(any(list.iterator(), predicate));
}
public void testAll() {
List<String> list = new ArrayList<>();
Predicate<String> predicate = equalTo("cool");
assertTrue(all(list.iterator(), predicate));
list.add("cool");
assertTrue(all(list.iterator(), predicate));
list.add("pants");
assertFalse(all(list.iterator(), predicate));
}
public void testFind_firstElement() {
Iterable<String> list = Lists.newArrayList("cool", "pants");
Iterator<String> iterator = list.iterator();
assertEquals("cool", find(iterator, equalTo("cool")));
assertEquals("pants", iterator.next());
}
public void testFind_lastElement() {
Iterable<String> list = Lists.newArrayList("cool", "pants");
Iterator<String> iterator = list.iterator();
assertEquals("pants", find(iterator, equalTo("pants")));
assertFalse(iterator.hasNext());
}
public void testFind_notPresent() {
Iterable<String> list = Lists.newArrayList("cool", "pants");
Iterator<String> iterator = list.iterator();
assertThrows(NoSuchElementException.class, () -> find(iterator, Predicates.alwaysFalse()));
assertFalse(iterator.hasNext());
}
public void testFind_matchAlways() {
Iterable<String> list = Lists.newArrayList("cool", "pants");
Iterator<String> iterator = list.iterator();
assertEquals("cool", find(iterator, Predicates.alwaysTrue()));
}
public void testFind_withDefault_first() {
Iterable<String> list = Lists.newArrayList("cool", "pants");
Iterator<String> iterator = list.iterator();
assertEquals("cool", find(iterator, equalTo("cool"), "woot"));
assertEquals("pants", iterator.next());
}
public void testFind_withDefault_last() {
Iterable<String> list = Lists.newArrayList("cool", "pants");
Iterator<String> iterator = list.iterator();
assertEquals("pants", find(iterator, equalTo("pants"), "woot"));
assertFalse(iterator.hasNext());
}
public void testFind_withDefault_notPresent() {
Iterable<String> list = Lists.newArrayList("cool", "pants");
Iterator<String> iterator = list.iterator();
assertEquals("woot", find(iterator, Predicates.alwaysFalse(), "woot"));
assertFalse(iterator.hasNext());
}
public void testFind_withDefault_notPresent_nullReturn() {
Iterable<String> list = Lists.newArrayList("cool", "pants");
Iterator<String> iterator = list.iterator();
assertThat(find(iterator, Predicates.alwaysFalse(), null)).isNull();
assertFalse(iterator.hasNext());
}
public void testFind_withDefault_matchAlways() {
Iterable<String> list = Lists.newArrayList("cool", "pants");
Iterator<String> iterator = list.iterator();
assertEquals("cool", find(iterator, Predicates.alwaysTrue(), "woot"));
assertEquals("pants", iterator.next());
}
public void testTryFind_firstElement() {
Iterable<String> list = Lists.newArrayList("cool", "pants");
Iterator<String> iterator = list.iterator();
assertThat(tryFind(iterator, equalTo("cool"))).hasValue("cool");
}
public void testTryFind_lastElement() {
Iterable<String> list = Lists.newArrayList("cool", "pants");
Iterator<String> iterator = list.iterator();
assertThat(tryFind(iterator, equalTo("pants"))).hasValue("pants");
}
public void testTryFind_alwaysTrue() {
Iterable<String> list = Lists.newArrayList("cool", "pants");
Iterator<String> iterator = list.iterator();
assertThat(tryFind(iterator, Predicates.alwaysTrue())).hasValue("cool");
}
public void testTryFind_alwaysFalse_orDefault() {
Iterable<String> list = Lists.newArrayList("cool", "pants");
Iterator<String> iterator = list.iterator();
assertEquals("woot", tryFind(iterator, Predicates.alwaysFalse()).or("woot"));
assertFalse(iterator.hasNext());
}
public void testTryFind_alwaysFalse_isPresent() {
Iterable<String> list = Lists.newArrayList("cool", "pants");
Iterator<String> iterator = list.iterator();
assertThat(tryFind(iterator, Predicates.alwaysFalse())).isAbsent();
assertFalse(iterator.hasNext());
}
public void testTransform() {
Iterator<String> input = asList("1", "2", "3").iterator();
Iterator<Integer> result =
Iterators.transform(
input,
new Function<String, Integer>() {
@Override
public Integer apply(String from) {
return Integer.valueOf(from);
}
});
List<Integer> actual = Lists.newArrayList(result);
List<Integer> expected = asList(1, 2, 3);
assertEquals(expected, actual);
}
public void testTransformRemove() {
List<String> list = Lists.newArrayList("1", "2", "3");
Iterator<String> input = list.iterator();
Iterator<Integer> iterator =
Iterators.transform(
input,
new Function<String, Integer>() {
@Override
public Integer apply(String from) {
return Integer.valueOf(from);
}
});
assertEquals(Integer.valueOf(1), iterator.next());
assertEquals(Integer.valueOf(2), iterator.next());
iterator.remove();
assertEquals(asList("1", "3"), list);
}
public void testPoorlyBehavedTransform() {
Iterator<String> input = asList("1", "not a number", "3").iterator();
Iterator<Integer> result =
Iterators.transform(
input,
new Function<String, Integer>() {
@Override
public Integer apply(String from) {
return Integer.valueOf(from);
}
});
result.next();
assertThrows(NumberFormatException.class, () -> result.next());
}
public void testNullFriendlyTransform() {
Iterator<@Nullable Integer> input = Arrays.<@Nullable Integer>asList(1, 2, null, 3).iterator();
Iterator<String> result =
Iterators.transform(
input,
new Function<@Nullable Integer, String>() {
@Override
public String apply(@Nullable Integer from) {
return String.valueOf(from);
}
});
List<String> actual = Lists.newArrayList(result);
List<String> expected = asList("1", "2", "null", "3");
assertEquals(expected, actual);
}
public void testCycleOfEmpty() {
// "<String>" for javac 1.5.
Iterator<String> cycle = Iterators.<String>cycle();
assertFalse(cycle.hasNext());
}
public void testCycleOfOne() {
Iterator<String> cycle = Iterators.cycle("a");
for (int i = 0; i < 3; i++) {
assertTrue(cycle.hasNext());
assertEquals("a", cycle.next());
}
}
public void testCycleOfOneWithRemove() {
Iterable<String> iterable = Lists.newArrayList("a");
Iterator<String> cycle = Iterators.cycle(iterable);
assertTrue(cycle.hasNext());
assertEquals("a", cycle.next());
cycle.remove();
assertEquals(emptyList(), iterable);
assertFalse(cycle.hasNext());
}
public void testCycleOfTwo() {
Iterator<String> cycle = Iterators.cycle("a", "b");
for (int i = 0; i < 3; i++) {
assertTrue(cycle.hasNext());
assertEquals("a", cycle.next());
assertTrue(cycle.hasNext());
assertEquals("b", cycle.next());
}
}
public void testCycleOfTwoWithRemove() {
Iterable<String> iterable = Lists.newArrayList("a", "b");
Iterator<String> cycle = Iterators.cycle(iterable);
assertTrue(cycle.hasNext());
assertEquals("a", cycle.next());
assertTrue(cycle.hasNext());
assertEquals("b", cycle.next());
assertTrue(cycle.hasNext());
assertEquals("a", cycle.next());
cycle.remove();
assertEquals(singletonList("b"), iterable);
assertTrue(cycle.hasNext());
assertEquals("b", cycle.next());
assertTrue(cycle.hasNext());
assertEquals("b", cycle.next());
cycle.remove();
assertEquals(emptyList(), iterable);
assertFalse(cycle.hasNext());
}
public void testCycleRemoveWithoutNext() {
Iterator<String> cycle = Iterators.cycle("a", "b");
assertTrue(cycle.hasNext());
assertThrows(IllegalStateException.class, () -> cycle.remove());
}
public void testCycleRemoveSameElementTwice() {
Iterator<String> cycle = Iterators.cycle("a", "b");
cycle.next();
cycle.remove();
assertThrows(IllegalStateException.class, () -> cycle.remove());
}
public void testCycleWhenRemoveIsNotSupported() {
Iterable<String> iterable = asList("a", "b");
Iterator<String> cycle = Iterators.cycle(iterable);
cycle.next();
assertThrows(UnsupportedOperationException.class, () -> cycle.remove());
}
public void testCycleRemoveAfterHasNext() {
Iterable<String> iterable = Lists.newArrayList("a");
Iterator<String> cycle = Iterators.cycle(iterable);
assertTrue(cycle.hasNext());
assertEquals("a", cycle.next());
assertTrue(cycle.hasNext());
cycle.remove();
assertEquals(emptyList(), iterable);
assertFalse(cycle.hasNext());
}
/** An Iterable whose Iterator is rigorous in checking for concurrent modification. */
private static final
|
IteratorsTest
|
java
|
spring-projects__spring-framework
|
spring-tx/src/test/java/org/springframework/transaction/config/SynchTransactionManager.java
|
{
"start": 912,
"end": 986
}
|
class ____ extends CallCountingTransactionManager {
}
|
SynchTransactionManager
|
java
|
elastic__elasticsearch
|
modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java
|
{
"start": 4308,
"end": 57340
}
|
class ____ extends DateHistogramAggregatorTestCase {
    // Mapped date field that most tests in this class aggregate on.
    private static final String DATE_FIELD = "date";
    // Long field holding the same instants as DATE_FIELD; used for range
    // queries over epoch millis (see testIntervalYear).
    private static final String INSTANT_FIELD = "instant";
    // NOTE(review): NUMERIC_FIELD and IP_FIELD are not referenced in this part
    // of the file; presumably used by tests further down -- verify before removing.
    private static final String NUMERIC_FIELD = "numeric";
    private static final String IP_FIELD = "ip";
    // Ten fixed timestamps spanning 2010-2017 (no documents in 2011 or 2014),
    // shared by several test cases.
    private static final List<ZonedDateTime> DATES_WITH_TIME = Arrays.asList(
        ZonedDateTime.of(2010, 3, 12, 1, 7, 45, 0, ZoneOffset.UTC),
        ZonedDateTime.of(2010, 4, 27, 3, 43, 34, 0, ZoneOffset.UTC),
        ZonedDateTime.of(2012, 5, 18, 4, 11, 0, 0, ZoneOffset.UTC),
        ZonedDateTime.of(2013, 5, 29, 5, 11, 31, 0, ZoneOffset.UTC),
        ZonedDateTime.of(2013, 10, 31, 8, 24, 5, 0, ZoneOffset.UTC),
        ZonedDateTime.of(2015, 2, 13, 13, 9, 32, 0, ZoneOffset.UTC),
        ZonedDateTime.of(2015, 6, 24, 13, 47, 43, 0, ZoneOffset.UTC),
        ZonedDateTime.of(2015, 11, 13, 16, 14, 34, 0, ZoneOffset.UTC),
        ZonedDateTime.of(2016, 3, 4, 17, 9, 50, 0, ZoneOffset.UTC),
        ZonedDateTime.of(2017, 12, 12, 22, 55, 46, 0, ZoneOffset.UTC)
    );
    // Matches every indexed document.
    private static final Query DEFAULT_QUERY = new MatchAllDocsQuery();
    // TODO: remove when moving DateHistogramAggregatorTestCase to aggregations module
    // Registers the aggregations plugin so the auto_date_histogram aggregation
    // can be resolved by the test harness.
    @Override
    protected List<SearchPlugin> getSearchPlugins() {
        return List.of(new AggregationsPlugin());
    }
public void testMatchNoDocs() throws IOException {
testSearchCase(
new MatchNoDocsQuery(),
DATES_WITH_TIME,
aggregation -> aggregation.setNumBuckets(10).field(DATE_FIELD),
histogram -> {
assertEquals(0, histogram.getBuckets().size());
assertFalse(AggregationInspectionHelper.hasValue(histogram));
}
);
}
public void testMatchAllDocs() throws IOException {
Map<String, Integer> expectedDocCount = new TreeMap<>();
expectedDocCount.put("2010-01-01T00:00:00.000Z", 2);
expectedDocCount.put("2012-01-01T00:00:00.000Z", 1);
expectedDocCount.put("2013-01-01T00:00:00.000Z", 2);
expectedDocCount.put("2015-01-01T00:00:00.000Z", 3);
expectedDocCount.put("2016-01-01T00:00:00.000Z", 1);
expectedDocCount.put("2017-01-01T00:00:00.000Z", 1);
expectedDocCount.put("2011-01-01T00:00:00.000Z", 0);
expectedDocCount.put("2014-01-01T00:00:00.000Z", 0);
testSearchCase(
DEFAULT_QUERY,
DATES_WITH_TIME,
aggregation -> aggregation.setNumBuckets(8).field(DATE_FIELD),
result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount))
);
}
    public void testSubAggregations() throws IOException {
        // Buckets the shared 2010-2017 dataset into 8 yearly buckets and checks
        // that a `stats` sub-aggregation is computed per bucket -- including
        // infinite min/max and zero count for the empty years 2011 and 2014.
        testSearchCase(
            DEFAULT_QUERY,
            DATES_WITH_TIME,
            aggregation -> aggregation.setNumBuckets(8)
                .field(DATE_FIELD)
                .subAggregation(AggregationBuilders.stats("stats").field(DATE_FIELD)),
            histogram -> {
                assertTrue(AggregationInspectionHelper.hasValue(histogram));
                final List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
                assertEquals(8, buckets.size());
                // 2010: two documents.
                Histogram.Bucket bucket = buckets.get(0);
                assertEquals("2010-01-01T00:00:00.000Z", bucket.getKeyAsString());
                assertEquals(2, bucket.getDocCount());
                InternalStats stats = bucket.getAggregations().get("stats");
                assertEquals("2010-03-12T01:07:45.000Z", stats.getMinAsString());
                assertEquals("2010-04-27T03:43:34.000Z", stats.getMaxAsString());
                assertEquals(2L, stats.getCount());
                assertTrue(AggregationInspectionHelper.hasValue(stats));
                // 2011: empty bucket -> infinite min/max, zero count.
                bucket = buckets.get(1);
                assertEquals("2011-01-01T00:00:00.000Z", bucket.getKeyAsString());
                assertEquals(0, bucket.getDocCount());
                stats = bucket.getAggregations().get("stats");
                assertTrue(Double.isInfinite(stats.getMin()));
                assertTrue(Double.isInfinite(stats.getMax()));
                assertEquals(0L, stats.getCount());
                assertFalse(AggregationInspectionHelper.hasValue(stats));
                // 2012: one document.
                bucket = buckets.get(2);
                assertEquals("2012-01-01T00:00:00.000Z", bucket.getKeyAsString());
                assertEquals(1, bucket.getDocCount());
                stats = bucket.getAggregations().get("stats");
                assertEquals("2012-05-18T04:11:00.000Z", stats.getMinAsString());
                assertEquals("2012-05-18T04:11:00.000Z", stats.getMaxAsString());
                assertEquals(1L, stats.getCount());
                assertTrue(AggregationInspectionHelper.hasValue(stats));
                // 2013: two documents.
                bucket = buckets.get(3);
                assertEquals("2013-01-01T00:00:00.000Z", bucket.getKeyAsString());
                assertEquals(2, bucket.getDocCount());
                stats = bucket.getAggregations().get("stats");
                assertEquals("2013-05-29T05:11:31.000Z", stats.getMinAsString());
                assertEquals("2013-10-31T08:24:05.000Z", stats.getMaxAsString());
                assertEquals(2L, stats.getCount());
                assertTrue(AggregationInspectionHelper.hasValue(stats));
                // 2014: empty bucket.
                bucket = buckets.get(4);
                assertEquals("2014-01-01T00:00:00.000Z", bucket.getKeyAsString());
                assertEquals(0, bucket.getDocCount());
                stats = bucket.getAggregations().get("stats");
                assertTrue(Double.isInfinite(stats.getMin()));
                assertTrue(Double.isInfinite(stats.getMax()));
                assertEquals(0L, stats.getCount());
                assertFalse(AggregationInspectionHelper.hasValue(stats));
                // 2015: three documents.
                bucket = buckets.get(5);
                assertEquals("2015-01-01T00:00:00.000Z", bucket.getKeyAsString());
                assertEquals(3, bucket.getDocCount());
                stats = bucket.getAggregations().get("stats");
                assertEquals("2015-02-13T13:09:32.000Z", stats.getMinAsString());
                assertEquals("2015-11-13T16:14:34.000Z", stats.getMaxAsString());
                assertEquals(3L, stats.getCount());
                assertTrue(AggregationInspectionHelper.hasValue(stats));
                // 2016: one document.
                bucket = buckets.get(6);
                assertEquals("2016-01-01T00:00:00.000Z", bucket.getKeyAsString());
                assertEquals(1, bucket.getDocCount());
                stats = bucket.getAggregations().get("stats");
                assertEquals("2016-03-04T17:09:50.000Z", stats.getMinAsString());
                assertEquals("2016-03-04T17:09:50.000Z", stats.getMaxAsString());
                assertEquals(1L, stats.getCount());
                assertTrue(AggregationInspectionHelper.hasValue(stats));
                // 2017: one document.
                bucket = buckets.get(7);
                assertEquals("2017-01-01T00:00:00.000Z", bucket.getKeyAsString());
                assertEquals(1, bucket.getDocCount());
                stats = bucket.getAggregations().get("stats");
                assertEquals("2017-12-12T22:55:46.000Z", stats.getMinAsString());
                assertEquals("2017-12-12T22:55:46.000Z", stats.getMaxAsString());
                assertEquals(1L, stats.getCount());
                assertTrue(AggregationInspectionHelper.hasValue(stats));
            }
        );
    }
    public void testAsSubAgg() throws IOException {
        // Runs the auto date histogram (with a max sub-agg) under a terms
        // aggregation, first one level deep, then nested under a second terms agg.
        AggregationBuilder builder = new TermsAggregationBuilder("k1").field("k1")
            .subAggregation(
                new AutoDateHistogramAggregationBuilder("dh").field(AGGREGABLE_DATE)
                    .setNumBuckets(3)
                    .subAggregation(new MaxAggregationBuilder("max").field("n"))
            );
        asSubAggTestCase(builder, (StringTerms terms) -> {
            // Terms bucket "a": yearly date buckets with per-bucket max of "n".
            StringTerms.Bucket a = terms.getBucketByKey("a");
            InternalAutoDateHistogram adh = a.getAggregations().get("dh");
            Map<String, Integer> expectedDocCount = new TreeMap<>();
            expectedDocCount.put("2020-01-01T00:00:00.000Z", 2);
            expectedDocCount.put("2021-01-01T00:00:00.000Z", 2);
            assertThat(bucketCountsAsMap(adh), equalTo(expectedDocCount));
            Map<String, Double> expectedMax = new TreeMap<>();
            expectedMax.put("2020-01-01T00:00:00.000Z", 2.0);
            expectedMax.put("2021-01-01T00:00:00.000Z", 4.0);
            assertThat(maxAsMap(adh), equalTo(expectedMax));
            // Terms bucket "b": a single monthly bucket.
            StringTerms.Bucket b = terms.getBucketByKey("b");
            InternalAutoDateHistogram bdh = b.getAggregations().get("dh");
            expectedDocCount.clear();
            expectedDocCount.put("2020-02-01T00:00:00.000Z", 1);
            assertThat(bucketCountsAsMap(bdh), equalTo(expectedDocCount));
            expectedMax.clear();
            expectedMax.put("2020-02-01T00:00:00.000Z", 5.0);
            assertThat(maxAsMap(bdh), equalTo(expectedMax));
        });
        // Same histogram two terms levels deep (k2 -> k1 -> dh).
        builder = new TermsAggregationBuilder("k2").field("k2").subAggregation(builder);
        asSubAggTestCase(builder, (StringTerms terms) -> {
            StringTerms.Bucket a = terms.getBucketByKey("a");
            StringTerms ak1 = a.getAggregations().get("k1");
            StringTerms.Bucket ak1a = ak1.getBucketByKey("a");
            InternalAutoDateHistogram ak1adh = ak1a.getAggregations().get("dh");
            Map<String, Integer> expectedDocCount = new TreeMap<>();
            expectedDocCount.put("2020-01-01T00:00:00.000Z", 2);
            expectedDocCount.put("2021-01-01T00:00:00.000Z", 1);
            assertThat(bucketCountsAsMap(ak1adh), equalTo(expectedDocCount));
            Map<String, Double> expectedMax = new TreeMap<>();
            expectedMax.put("2020-01-01T00:00:00.000Z", 2.0);
            expectedMax.put("2021-01-01T00:00:00.000Z", 3.0);
            assertThat(maxAsMap(ak1adh), equalTo(expectedMax));
            StringTerms.Bucket b = terms.getBucketByKey("b");
            StringTerms bk1 = b.getAggregations().get("k1");
            StringTerms.Bucket bk1a = bk1.getBucketByKey("a");
            InternalAutoDateHistogram bk1adh = bk1a.getAggregations().get("dh");
            expectedDocCount.clear();
            expectedDocCount.put("2021-03-01T00:00:00.000Z", 1);
            assertThat(bucketCountsAsMap(bk1adh), equalTo(expectedDocCount));
            expectedMax.clear();
            expectedMax.put("2021-03-01T00:00:00.000Z", 4.0);
            assertThat(maxAsMap(bk1adh), equalTo(expectedMax));
            StringTerms.Bucket bk1b = bk1.getBucketByKey("b");
            InternalAutoDateHistogram bk1bdh = bk1b.getAggregations().get("dh");
            expectedDocCount.clear();
            expectedDocCount.put("2020-02-01T00:00:00.000Z", 1);
            assertThat(bucketCountsAsMap(bk1bdh), equalTo(expectedDocCount));
            expectedMax.clear();
            expectedMax.put("2020-02-01T00:00:00.000Z", 5.0);
            assertThat(maxAsMap(bk1bdh), equalTo(expectedMax));
        });
    }
    public void testAsSubAggWithIncreasedRounding() throws IOException {
        // Indexes one document per hour for all of 2020, tagged "a" for the whole
        // year and "b"/"c" for the first/second half, then checks that the
        // histogram under a terms agg settles on quarterly buckets.
        CheckedBiConsumer<RandomIndexWriter, DateFieldMapper.DateFieldType, IOException> buildIndex = (iw, dft) -> {
            long start = dft.parse("2020-01-01T00:00:00Z");
            long end = dft.parse("2021-01-01T00:00:00Z");
            long useC = dft.parse("2020-07-01T00:00Z");
            long anHour = dft.resolution().convert(Instant.ofEpochSecond(TimeUnit.HOURS.toSeconds(1)));
            List<List<IndexableField>> docs = new ArrayList<>();
            BytesRef aBytes = new BytesRef("a");
            BytesRef bBytes = new BytesRef("b");
            BytesRef cBytes = new BytesRef("c");
            int n = 0;
            for (long d = start; d < end; d += anHour) {
                docs.add(
                    List.of(
                        new SortedNumericDocValuesField(AGGREGABLE_DATE, d),
                        new Field("k1", aBytes, KeywordFieldMapper.Defaults.FIELD_TYPE),
                        new Field("k1", d < useC ? bBytes : cBytes, KeywordFieldMapper.Defaults.FIELD_TYPE),
                        new SortedNumericDocValuesField("n", n++)
                    )
                );
            }
            /*
             * Intentionally add all documents at once to put them on the
             * same shard to make the reduce behavior consistent.
             */
            iw.addDocuments(docs);
        };
        AggregationBuilder builder = new TermsAggregationBuilder("k1").field("k1")
            .subAggregation(
                new AutoDateHistogramAggregationBuilder("dh").field(AGGREGABLE_DATE)
                    .setNumBuckets(4)
                    .subAggregation(new MaxAggregationBuilder("max").field("n"))
            );
        asSubAggTestCase(builder, buildIndex, (StringTerms terms) -> {
            // "a" covers the whole year: four quarterly buckets.
            StringTerms.Bucket a = terms.getBucketByKey("a");
            InternalAutoDateHistogram adh = a.getAggregations().get("dh");
            Map<String, Integer> expectedDocCount = new TreeMap<>();
            expectedDocCount.put("2020-01-01T00:00:00.000Z", 2184);
            expectedDocCount.put("2020-04-01T00:00:00.000Z", 2184);
            expectedDocCount.put("2020-07-01T00:00:00.000Z", 2208);
            expectedDocCount.put("2020-10-01T00:00:00.000Z", 2208);
            assertThat(bucketCountsAsMap(adh), equalTo(expectedDocCount));
            Map<String, Double> expectedMax = new TreeMap<>();
            expectedMax.put("2020-01-01T00:00:00.000Z", 2183.0);
            expectedMax.put("2020-04-01T00:00:00.000Z", 4367.0);
            expectedMax.put("2020-07-01T00:00:00.000Z", 6575.0);
            expectedMax.put("2020-10-01T00:00:00.000Z", 8783.0);
            assertThat(maxAsMap(adh), equalTo(expectedMax));
            // "b" covers only the first half of the year.
            StringTerms.Bucket b = terms.getBucketByKey("b");
            InternalAutoDateHistogram bdh = b.getAggregations().get("dh");
            expectedDocCount.clear();
            expectedDocCount.put("2020-01-01T00:00:00.000Z", 2184);
            expectedDocCount.put("2020-04-01T00:00:00.000Z", 2184);
            assertThat(bucketCountsAsMap(bdh), equalTo(expectedDocCount));
            expectedMax.clear();
            expectedMax.put("2020-01-01T00:00:00.000Z", 2183.0);
            expectedMax.put("2020-04-01T00:00:00.000Z", 4367.0);
            assertThat(maxAsMap(bdh), equalTo(expectedMax));
            // "c" covers only the second half of the year.
            StringTerms.Bucket c = terms.getBucketByKey("c");
            InternalAutoDateHistogram cdh = c.getAggregations().get("dh");
            expectedDocCount.clear();
            expectedDocCount.put("2020-07-01T00:00:00.000Z", 2208);
            expectedDocCount.put("2020-10-01T00:00:00.000Z", 2208);
            assertThat(bucketCountsAsMap(cdh), equalTo(expectedDocCount));
            expectedMax.clear();
            expectedMax.put("2020-07-01T00:00:00.000Z", 6575.0);
            expectedMax.put("2020-10-01T00:00:00.000Z", 8783.0);
            assertThat(maxAsMap(cdh), equalTo(expectedMax));
        });
    }
    public void testAsSubAggInManyBuckets() throws IOException {
        // Indexes one document per hour for all of 2020 with "n" cycling 0..99,
        // then runs the date histogram under a 100-bucket numeric histogram and
        // checks quarterly buckets and the per-bucket max inside every parent bucket.
        CheckedBiConsumer<RandomIndexWriter, DateFieldMapper.DateFieldType, IOException> buildIndex = (iw, dft) -> {
            long start = dft.parse("2020-01-01T00:00:00Z");
            long end = dft.parse("2021-01-01T00:00:00Z");
            long anHour = dft.resolution().convert(Instant.ofEpochSecond(TimeUnit.HOURS.toSeconds(1)));
            List<List<IndexableField>> docs = new ArrayList<>();
            int n = 0;
            for (long d = start; d < end; d += anHour) {
                docs.add(List.of(new SortedNumericDocValuesField(AGGREGABLE_DATE, d), new SortedNumericDocValuesField("n", n % 100)));
                n++;
            }
            /*
             * Intentionally add all documents at once to put them on the
             * same shard to make the reduce behavior consistent.
             */
            iw.addDocuments(docs);
        };
        AggregationBuilder builder = new HistogramAggregationBuilder("n").field("n")
            .interval(1)
            .subAggregation(
                new AutoDateHistogramAggregationBuilder("dh").field(AGGREGABLE_DATE)
                    .setNumBuckets(4)
                    .subAggregation(new MaxAggregationBuilder("max").field("n"))
            );
        asSubAggTestCase(builder, buildIndex, (InternalHistogram histo) -> {
            assertThat(histo.getBuckets(), hasSize(100));
            for (int n = 0; n < 100; n++) {
                InternalHistogram.Bucket b = histo.getBuckets().get(n);
                InternalAutoDateHistogram dh = b.getAggregations().get("dh");
                // Counts per quarter vary by one depending on how the cycle lines up.
                assertThat(bucketCountsAsMap(dh), hasEntry(equalTo("2020-01-01T00:00:00.000Z"), either(equalTo(21)).or(equalTo(22))));
                assertThat(bucketCountsAsMap(dh), hasEntry(equalTo("2020-04-01T00:00:00.000Z"), either(equalTo(21)).or(equalTo(22))));
                assertThat(bucketCountsAsMap(dh), hasEntry(equalTo("2020-07-01T00:00:00.000Z"), either(equalTo(22)).or(equalTo(23))));
                assertThat(bucketCountsAsMap(dh), hasEntry(equalTo("2020-10-01T00:00:00.000Z"), either(equalTo(22)).or(equalTo(23))));
                // Every date bucket inside parent bucket n has max("n") == n.
                Map<String, Double> expectedMax = new TreeMap<>();
                expectedMax.put("2020-01-01T00:00:00.000Z", (double) n);
                expectedMax.put("2020-04-01T00:00:00.000Z", (double) n);
                expectedMax.put("2020-07-01T00:00:00.000Z", (double) n);
                expectedMax.put("2020-10-01T00:00:00.000Z", (double) n);
                assertThat(maxAsMap(dh), equalTo(expectedMax));
            }
        });
    }
public void testNoDocs() throws IOException {
final List<ZonedDateTime> dates = Collections.emptyList();
final Consumer<AutoDateHistogramAggregationBuilder> aggregation = agg -> agg.setNumBuckets(10).field(DATE_FIELD);
testSearchCase(DEFAULT_QUERY, dates, aggregation, histogram -> {
assertEquals(0, histogram.getBuckets().size());
assertFalse(AggregationInspectionHelper.hasValue(histogram));
});
testSearchCase(DEFAULT_QUERY, dates, aggregation, histogram -> {
assertEquals(0, histogram.getBuckets().size());
assertFalse(AggregationInspectionHelper.hasValue(histogram));
});
}
public void testAggregateWrongField() throws IOException {
AutoDateHistogramAggregationBuilder aggregation = new AutoDateHistogramAggregationBuilder("_name").setNumBuckets(10)
.field("bogus_bogus");
final DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType("date_field");
testCase(iw -> {}, (Consumer<InternalAutoDateHistogram>) histogram -> {
assertEquals(0, histogram.getBuckets().size());
assertFalse(AggregationInspectionHelper.hasValue(histogram));
}, new AggTestConfig(aggregation, fieldType).withQuery(DEFAULT_QUERY));
}
    public void testBooleanFieldDeprecated() throws IOException {
        // Running the aggregation on a boolean-mapped field must emit a
        // deprecation warning; the exact warning text is asserted below.
        final String fieldName = "bogusBoolean";
        testCase(iw -> {
            Document d = new Document();
            d.add(new SortedNumericDocValuesField(fieldName, 0));
            iw.addDocument(d);
        },
            a -> {},
            new AggTestConfig(
                new AutoDateHistogramAggregationBuilder("name").field(fieldName),
                new BooleanFieldMapper.BooleanFieldType(fieldName)
            )
        );
        assertWarnings("Running AutoIntervalDateHistogram aggregations on [boolean] fields is deprecated");
    }
public void testUnmappedMissing() throws IOException {
AutoDateHistogramAggregationBuilder aggregation = new AutoDateHistogramAggregationBuilder("_name").setNumBuckets(10)
.field("bogus_bogus")
.missing("2017-12-12");
final DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType("date_field");
testCase(iw -> {}, (Consumer<InternalAutoDateHistogram>) histogram -> {
assertEquals(0, histogram.getBuckets().size());
assertFalse(AggregationInspectionHelper.hasValue(histogram));
}, new AggTestConfig(aggregation, fieldType).withQuery(DEFAULT_QUERY));
}
    public void testIntervalYear() throws IOException {
        // Restricts the shared dataset to 2015-2017 via a range query on the
        // long INSTANT_FIELD and expects one yearly bucket per remaining year.
        final long start = LocalDate.of(2015, 1, 1).atStartOfDay(ZoneOffset.UTC).toInstant().toEpochMilli();
        final long end = LocalDate.of(2017, 12, 31).atStartOfDay(ZoneOffset.UTC).toInstant().toEpochMilli();
        final Query rangeQuery = LongPoint.newRangeQuery(INSTANT_FIELD, start, end);
        testSearchCase(rangeQuery, DATES_WITH_TIME, aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD), histogram -> {
            final ZonedDateTime startDate = ZonedDateTime.of(2015, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
            final Map<ZonedDateTime, Integer> expectedDocCount = new HashMap<>();
            expectedDocCount.put(startDate, 3);
            expectedDocCount.put(startDate.plusYears(1), 1);
            expectedDocCount.put(startDate.plusYears(2), 1);
            final List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
            assertEquals(expectedDocCount.size(), buckets.size());
            buckets.forEach(bucket -> assertEquals(expectedDocCount.getOrDefault(bucket.getKey(), 0).longValue(), bucket.getDocCount()));
            assertTrue(AggregationInspectionHelper.hasValue(histogram));
        });
    }
public void testIntervalMonth() throws IOException {
final List<ZonedDateTime> datesForMonthInterval = Arrays.asList(
ZonedDateTime.of(2017, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 2, 0, 0, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 3, 4, 0, 0, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 3, 5, 0, 0, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 3, 6, 0, 0, 0, 0, ZoneOffset.UTC)
);
Map<String, Integer> expectedDocCount = new TreeMap<>();
expectedDocCount.put("2017-01-01T00:00:00.000Z", 1);
expectedDocCount.put("2017-02-01T00:00:00.000Z", 2);
expectedDocCount.put("2017-03-01T00:00:00.000Z", 3);
testSearchCase(
DEFAULT_QUERY,
datesForMonthInterval,
aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD),
result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount))
);
}
public void testWithLargeNumberOfBuckets() {
final IllegalArgumentException exception = expectThrows(
IllegalArgumentException.class,
() -> testSearchCase(
DEFAULT_QUERY,
DATES_WITH_TIME,
aggregation -> aggregation.setNumBuckets(MultiBucketConsumerService.DEFAULT_MAX_BUCKETS + 1).field(DATE_FIELD),
// since an exception is thrown, this assertion won't be invoked.
histogram -> fail()
)
);
assertThat(exception.getMessage(), Matchers.containsString("must be less than"));
}
public void testIntervalDay() throws IOException {
final List<ZonedDateTime> datesForDayInterval = Arrays.asList(
ZonedDateTime.of(2017, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 2, 0, 0, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 2, 0, 0, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 5, 0, 0, 0, 0, ZoneOffset.UTC)
);
Map<String, Integer> expectedDocCount = new TreeMap<>();
expectedDocCount.put("2017-02-01T00:00:00.000Z", 1);
expectedDocCount.put("2017-02-02T00:00:00.000Z", 2);
expectedDocCount.put("2017-02-03T00:00:00.000Z", 3);
expectedDocCount.put("2017-02-05T00:00:00.000Z", 1);
expectedDocCount.put("2017-02-04T00:00:00.000Z", 0);
testSearchCase(
DEFAULT_QUERY,
datesForDayInterval,
aggregation -> aggregation.setNumBuckets(5).field(DATE_FIELD),
result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount))
);
}
public void testIntervalDayWithTZ() throws IOException {
final List<ZonedDateTime> datesForDayInterval = Arrays.asList(
ZonedDateTime.of(2017, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 2, 0, 0, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 2, 0, 0, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 3, 0, 0, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 5, 0, 0, 0, 0, ZoneOffset.UTC)
);
Map<String, Integer> expectedDocCount = new TreeMap<>();
expectedDocCount.put("2017-01-31T00:00:00.000-01:00", 1);
expectedDocCount.put("2017-02-01T00:00:00.000-01:00", 2);
expectedDocCount.put("2017-02-02T00:00:00.000-01:00", 3);
expectedDocCount.put("2017-02-04T00:00:00.000-01:00", 1);
expectedDocCount.put("2017-02-03T00:00:00.000-01:00", 0);
testSearchCase(
DEFAULT_QUERY,
datesForDayInterval,
aggregation -> aggregation.setNumBuckets(5).field(DATE_FIELD).timeZone(ZoneOffset.ofHours(-1)),
result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount))
);
}
public void testIntervalHour() throws IOException {
final List<ZonedDateTime> datesForHourInterval = Arrays.asList(
ZonedDateTime.of(2017, 2, 1, 9, 2, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 9, 35, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 10, 15, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 13, 6, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 14, 4, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 14, 5, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 15, 59, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 16, 6, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 16, 48, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 16, 59, 0, 0, ZoneOffset.UTC)
);
Map<String, Integer> expectedDocCount = new TreeMap<>();
expectedDocCount.put("2017-02-01T09:00:00.000Z", 2);
expectedDocCount.put("2017-02-01T10:00:00.000Z", 1);
expectedDocCount.put("2017-02-01T13:00:00.000Z", 1);
expectedDocCount.put("2017-02-01T14:00:00.000Z", 2);
expectedDocCount.put("2017-02-01T15:00:00.000Z", 1);
expectedDocCount.put("2017-02-01T15:00:00.000Z", 1);
expectedDocCount.put("2017-02-01T16:00:00.000Z", 3);
expectedDocCount.put("2017-02-01T11:00:00.000Z", 0);
expectedDocCount.put("2017-02-01T12:00:00.000Z", 0);
testSearchCase(
DEFAULT_QUERY,
datesForHourInterval,
aggregation -> aggregation.setNumBuckets(10).field(DATE_FIELD),
result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount))
);
expectedDocCount.clear();
expectedDocCount.put("2017-02-01T09:00:00.000Z", 3);
expectedDocCount.put("2017-02-01T12:00:00.000Z", 3);
expectedDocCount.put("2017-02-01T15:00:00.000Z", 4);
testSearchCase(
DEFAULT_QUERY,
datesForHourInterval,
aggregation -> aggregation.setNumBuckets(6).field(DATE_FIELD),
result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount))
);
}
public void testIntervalHourWithTZ() throws IOException {
List<ZonedDateTime> datesForHourInterval = Arrays.asList(
ZonedDateTime.of(2017, 2, 1, 9, 2, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 9, 35, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 10, 15, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 13, 6, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 14, 4, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 14, 5, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 15, 59, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 16, 6, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 16, 48, 0, 0, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 16, 59, 0, 0, ZoneOffset.UTC)
);
Map<String, Integer> expectedDocCount = new TreeMap<>();
expectedDocCount.put("2017-02-01T08:00:00.000-01:00", 2);
expectedDocCount.put("2017-02-01T09:00:00.000-01:00", 1);
expectedDocCount.put("2017-02-01T12:00:00.000-01:00", 1);
expectedDocCount.put("2017-02-01T13:00:00.000-01:00", 2);
expectedDocCount.put("2017-02-01T14:00:00.000-01:00", 1);
expectedDocCount.put("2017-02-01T15:00:00.000-01:00", 3);
expectedDocCount.put("2017-02-01T10:00:00.000-01:00", 0);
expectedDocCount.put("2017-02-01T11:00:00.000-01:00", 0);
testSearchCase(
DEFAULT_QUERY,
datesForHourInterval,
aggregation -> aggregation.setNumBuckets(10).field(DATE_FIELD).timeZone(ZoneOffset.ofHours(-1)),
result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount))
);
}
    public void testRandomSecondIntervals() throws IOException {
        // 120 documents one second apart. For a randomly chosen target bucket
        // count, the aggregation should pick a second-based interval such that
        // each bucket holds the mapped number of documents.
        final int length = 120;
        final List<ZonedDateTime> dataset = new ArrayList<>(length);
        final ZonedDateTime startDate = ZonedDateTime.of(2017, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
        for (int i = 0; i < length; i++) {
            final ZonedDateTime date = startDate.plusSeconds(i);
            dataset.add(date);
        }
        // Map of requested bucket count -> expected docs per resulting bucket.
        final Map<Integer, Integer> bucketsToExpectedDocCountMap = new HashMap<>();
        bucketsToExpectedDocCountMap.put(120, 1);
        bucketsToExpectedDocCountMap.put(60, 5);
        bucketsToExpectedDocCountMap.put(20, 10);
        bucketsToExpectedDocCountMap.put(10, 30);
        bucketsToExpectedDocCountMap.put(3, 60);
        final Map.Entry<Integer, Integer> randomEntry = randomFrom(bucketsToExpectedDocCountMap.entrySet());
        testSearchCase(
            DEFAULT_QUERY,
            dataset,
            aggregation -> aggregation.setNumBuckets(randomEntry.getKey()).field(DATE_FIELD),
            histogram -> {
                final List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
                final int expectedDocCount = randomEntry.getValue();
                final int expectedSize = length / expectedDocCount;
                assertEquals(expectedSize, buckets.size());
                // Spot-check one random bucket's key and count.
                final int randomIndex = randomInt(expectedSize - 1);
                final Histogram.Bucket bucket = buckets.get(randomIndex);
                assertEquals(startDate.plusSeconds(randomIndex * expectedDocCount), bucket.getKey());
                assertEquals(expectedDocCount, bucket.getDocCount());
            }
        );
    }
public void testRandomMinuteIntervals() throws IOException {
final int length = 120;
final List<ZonedDateTime> dataset = new ArrayList<>(length);
final ZonedDateTime startDate = ZonedDateTime.of(2017, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
for (int i = 0; i < length; i++) {
final ZonedDateTime date = startDate.plusMinutes(i);
dataset.add(date);
}
final Map<Integer, Integer> bucketsToExpectedDocCountMap = new HashMap<>();
bucketsToExpectedDocCountMap.put(120, 1);
bucketsToExpectedDocCountMap.put(60, 5);
bucketsToExpectedDocCountMap.put(20, 10);
bucketsToExpectedDocCountMap.put(10, 30);
bucketsToExpectedDocCountMap.put(3, 60);
final Map.Entry<Integer, Integer> randomEntry = randomFrom(bucketsToExpectedDocCountMap.entrySet());
testSearchCase(
DEFAULT_QUERY,
dataset,
aggregation -> aggregation.setNumBuckets(randomEntry.getKey()).field(DATE_FIELD),
histogram -> {
final List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
final int expectedDocCount = randomEntry.getValue();
final int expectedSize = length / expectedDocCount;
assertEquals(expectedSize, buckets.size());
final int randomIndex = randomInt(expectedSize - 1);
final Histogram.Bucket bucket = buckets.get(randomIndex);
assertEquals(startDate.plusMinutes(randomIndex * expectedDocCount), bucket.getKey());
assertEquals(expectedDocCount, bucket.getDocCount());
}
);
}
public void testRandomHourIntervals() throws IOException {
final int length = 72;
final List<ZonedDateTime> dataset = new ArrayList<>(length);
final ZonedDateTime startDate = ZonedDateTime.of(2017, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
for (int i = 0; i < length; i++) {
final ZonedDateTime date = startDate.plusHours(i);
dataset.add(date);
}
final Map<Integer, Integer> bucketsToExpectedDocCountMap = new HashMap<>();
bucketsToExpectedDocCountMap.put(72, 1);
bucketsToExpectedDocCountMap.put(36, 3);
bucketsToExpectedDocCountMap.put(12, 12);
bucketsToExpectedDocCountMap.put(3, 24);
final Map.Entry<Integer, Integer> randomEntry = randomFrom(bucketsToExpectedDocCountMap.entrySet());
testSearchCase(
DEFAULT_QUERY,
dataset,
aggregation -> aggregation.setNumBuckets(randomEntry.getKey()).field(DATE_FIELD),
histogram -> {
final List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
final int expectedDocCount = randomEntry.getValue();
final int expectedSize = length / expectedDocCount;
assertEquals(expectedSize, buckets.size());
final int randomIndex = randomInt(expectedSize - 1);
final Histogram.Bucket bucket = buckets.get(randomIndex);
assertEquals(startDate.plusHours(randomIndex * expectedDocCount), bucket.getKey());
assertEquals(expectedDocCount, bucket.getDocCount());
}
);
}
    public void testRandomDayIntervals() throws IOException {
        // 140 consecutive daily documents starting 2017-01-01.
        final int length = 140;
        final List<ZonedDateTime> dataset = new ArrayList<>(length);
        final ZonedDateTime startDate = ZonedDateTime.of(2017, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
        for (int i = 0; i < length; i++) {
            final ZonedDateTime date = startDate.plusDays(i);
            dataset.add(date);
        }
        // Randomly exercise one of three target bucket counts, each expected to
        // produce a different rounding (daily, weekly, monthly buckets).
        final int randomChoice = randomIntBetween(1, 3);
        if (randomChoice == 1) {
            // numBuckets=140: one bucket per day, one document each.
            testSearchCase(DEFAULT_QUERY, dataset, aggregation -> aggregation.setNumBuckets(length).field(DATE_FIELD), histogram -> {
                final List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
                assertEquals(length, buckets.size());
                final int randomIndex = randomInt(length - 1);
                final Histogram.Bucket bucket = buckets.get(randomIndex);
                assertEquals(startDate.plusDays(randomIndex), bucket.getKey());
                assertEquals(1, bucket.getDocCount());
            });
        } else if (randomChoice == 2) {
            // numBuckets=60: 7-day buckets, so 140 days collapse to 20 buckets of 7 docs.
            testSearchCase(DEFAULT_QUERY, dataset, aggregation -> aggregation.setNumBuckets(60).field(DATE_FIELD), histogram -> {
                final List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
                final int expectedDocCount = 7;
                assertEquals(20, buckets.size());
                final int randomIndex = randomInt(19);
                final Histogram.Bucket bucket = buckets.get(randomIndex);
                assertEquals(startDate.plusDays(randomIndex * expectedDocCount), bucket.getKey());
                assertEquals(expectedDocCount, bucket.getDocCount());
            });
        } else if (randomChoice == 3) {
            // numBuckets=6: monthly buckets; 140 days span 5 calendar months. Each
            // checked bucket holds exactly its month's number of days; only the first
            // three (complete) months are spot-checked since the last month is partial.
            testSearchCase(DEFAULT_QUERY, dataset, aggregation -> aggregation.setNumBuckets(6).field(DATE_FIELD), histogram -> {
                final List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
                assertEquals(5, buckets.size());
                final int randomIndex = randomInt(2);
                final Histogram.Bucket bucket = buckets.get(randomIndex);
                assertEquals(startDate.plusMonths(randomIndex), bucket.getKey());
                assertEquals(YearMonth.from(startDate.plusMonths(randomIndex)).lengthOfMonth(), bucket.getDocCount());
            });
        }
    }
public void testRandomMonthIntervals() throws IOException {
final int length = 60;
final List<ZonedDateTime> dataset = new ArrayList<>(length);
final ZonedDateTime startDate = ZonedDateTime.of(2017, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
for (int i = 0; i < length; i++) {
final ZonedDateTime date = startDate.plusMonths(i);
dataset.add(date);
}
final Map<Integer, Integer> bucketsToExpectedDocCountMap = new HashMap<>();
bucketsToExpectedDocCountMap.put(60, 1);
bucketsToExpectedDocCountMap.put(30, 3);
bucketsToExpectedDocCountMap.put(6, 12);
final Map.Entry<Integer, Integer> randomEntry = randomFrom(bucketsToExpectedDocCountMap.entrySet());
testSearchCase(
DEFAULT_QUERY,
dataset,
aggregation -> aggregation.setNumBuckets(randomEntry.getKey()).field(DATE_FIELD),
histogram -> {
final List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
final int expectedDocCount = randomEntry.getValue();
final int expectedSize = length / expectedDocCount;
assertEquals(expectedSize, buckets.size());
final int randomIndex = randomInt(expectedSize - 1);
final Histogram.Bucket bucket = buckets.get(randomIndex);
assertEquals(startDate.plusMonths(randomIndex * expectedDocCount), bucket.getKey());
assertEquals(expectedDocCount, bucket.getDocCount());
}
);
}
public void testRandomYearIntervals() throws IOException {
final int length = 300;
final List<ZonedDateTime> dataset = new ArrayList<>(length);
final ZonedDateTime startDate = ZonedDateTime.of(2017, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
for (int i = 0; i < length; i++) {
final ZonedDateTime date = startDate.plusYears(i);
dataset.add(date);
}
final Map<Integer, Integer> bucketsToExpectedDocCountMap = new HashMap<>();
bucketsToExpectedDocCountMap.put(300, 1);
bucketsToExpectedDocCountMap.put(150, 5);
bucketsToExpectedDocCountMap.put(50, 10);
bucketsToExpectedDocCountMap.put(25, 20);
bucketsToExpectedDocCountMap.put(10, 50);
bucketsToExpectedDocCountMap.put(5, 100);
final Map.Entry<Integer, Integer> randomEntry = randomFrom(bucketsToExpectedDocCountMap.entrySet());
testSearchCase(
DEFAULT_QUERY,
dataset,
aggregation -> aggregation.setNumBuckets(randomEntry.getKey()).field(DATE_FIELD),
histogram -> {
final List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
final int expectedDocCount = randomEntry.getValue();
final int expectedSize = length / expectedDocCount;
assertEquals(expectedSize, buckets.size());
final int randomIndex = randomInt(expectedSize - 1);
final Histogram.Bucket bucket = buckets.get(randomIndex);
assertEquals(startDate.plusYears(randomIndex * expectedDocCount), bucket.getKey());
assertEquals(expectedDocCount, bucket.getDocCount());
}
);
}
    public void testIntervalMinute() throws IOException {
        // Five documents in three distinct minutes: 09:02 (x2), 09:15 and 09:16 (x2).
        final List<ZonedDateTime> datesForMinuteInterval = Arrays.asList(
            ZonedDateTime.of(2017, 2, 1, 9, 2, 35, 0, ZoneOffset.UTC),
            ZonedDateTime.of(2017, 2, 1, 9, 2, 59, 0, ZoneOffset.UTC),
            ZonedDateTime.of(2017, 2, 1, 9, 15, 37, 0, ZoneOffset.UTC),
            ZonedDateTime.of(2017, 2, 1, 9, 16, 4, 0, ZoneOffset.UTC),
            ZonedDateTime.of(2017, 2, 1, 9, 16, 42, 0, ZoneOffset.UTC)
        );
        // Counts for just the non-empty minutes; reused below to build the full
        // expectation once the empty in-between minutes are added.
        Map<String, Integer> skeletonDocCount = new TreeMap<>();
        skeletonDocCount.put("2017-02-01T09:02:00.000Z", 2);
        skeletonDocCount.put("2017-02-01T09:15:00.000Z", 1);
        skeletonDocCount.put("2017-02-01T09:16:00.000Z", 2);
        // With 4 target buckets the expected keys sit on 5-minute boundaries:
        // 09:02 -> 2 docs, 09:07 -> 0, 09:12 -> 3 (09:15 and 09:16 share a bucket).
        Map<String, Integer> fullDocCount = new TreeMap<>();
        fullDocCount.put("2017-02-01T09:02:00.000Z", 2);
        fullDocCount.put("2017-02-01T09:07:00.000Z", 0);
        fullDocCount.put("2017-02-01T09:12:00.000Z", 3);
        testSearchCase(
            DEFAULT_QUERY,
            datesForMinuteInterval,
            aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD),
            result -> assertThat(bucketCountsAsMap(result), equalTo(fullDocCount))
        );
        // With 15 target buckets one-minute keys are expected: reuse the skeleton
        // and add a zero-count entry for every empty minute between 09:03 and 09:14.
        fullDocCount.clear();
        fullDocCount.putAll(skeletonDocCount);
        for (int minute = 3; minute < 15; minute++) {
            fullDocCount.put(Strings.format("2017-02-01T09:%02d:00.000Z", minute), 0);
        }
        testSearchCase(
            DEFAULT_QUERY,
            datesForMinuteInterval,
            aggregation -> aggregation.setNumBuckets(15).field(DATE_FIELD),
            result -> assertThat(bucketCountsAsMap(result), equalTo(fullDocCount))
        );
    }
public void testIntervalSecond() throws IOException {
final List<ZonedDateTime> datesForSecondInterval = Arrays.asList(
ZonedDateTime.of(2017, 2, 1, 0, 0, 5, 15, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 0, 0, 7, 299, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 0, 0, 7, 74, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 0, 0, 11, 688, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 0, 0, 11, 210, ZoneOffset.UTC),
ZonedDateTime.of(2017, 2, 1, 0, 0, 11, 380, ZoneOffset.UTC)
);
Map<String, Integer> expectedDocCount = new TreeMap<>();
expectedDocCount.put("2017-02-01T00:00:05.000Z", 1);
expectedDocCount.put("2017-02-01T00:00:07.000Z", 2);
expectedDocCount.put("2017-02-01T00:00:11.000Z", 3);
expectedDocCount.put("2017-02-01T00:00:06.000Z", 0);
expectedDocCount.put("2017-02-01T00:00:08.000Z", 0);
expectedDocCount.put("2017-02-01T00:00:09.000Z", 0);
expectedDocCount.put("2017-02-01T00:00:10.000Z", 0);
testSearchCase(
DEFAULT_QUERY,
datesForSecondInterval,
aggregation -> aggregation.setNumBuckets(7).field(DATE_FIELD),
result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount))
);
}
    public void testWithPipelineReductions() throws IOException {
        // A single auto bucket holding all 10 sample docs; inside it a numeric
        // histogram carrying a max metric plus a derivative pipeline on that max.
        testSearchCase(
            DEFAULT_QUERY,
            DATES_WITH_TIME,
            aggregation -> aggregation.setNumBuckets(1)
                .field(DATE_FIELD)
                .subAggregation(
                    AggregationBuilders.histogram("histo")
                        .field(NUMERIC_FIELD)
                        .interval(1)
                        .subAggregation(AggregationBuilders.max("max").field(NUMERIC_FIELD))
                        .subAggregation(new DerivativePipelineAggregationBuilder("deriv", "max"))
                ),
            histogram -> {
                assertTrue(AggregationInspectionHelper.hasValue(histogram));
                final List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
                assertEquals(1, buckets.size());
                Histogram.Bucket bucket = buckets.get(0);
                assertEquals("2010-01-01T00:00:00.000Z", bucket.getKeyAsString());
                assertEquals(10, bucket.getDocCount());
                assertThat(bucket.getAggregations().asList().size(), equalTo(1));
                InternalHistogram histo = (InternalHistogram) bucket.getAggregations().asList().get(0);
                assertThat(histo.getBuckets().size(), equalTo(10));
                // NUMERIC_FIELD holds the doc index, so each sub-bucket i has max == i
                // and (from bucket 1 on) a derivative of exactly 1.0.
                for (int i = 0; i < 10; i++) {
                    assertThat(histo.getBuckets().get(i).getKey(), equalTo((double) i));
                    assertThat(((Max) histo.getBuckets().get(i).getAggregations().get("max")).value(), equalTo((double) i));
                    if (i > 0) {
                        assertThat(
                            ((InternalSimpleValue) histo.getBuckets().get(i).getAggregations().get("deriv")).getValue(),
                            equalTo(1.0)
                        );
                    }
                }
            }
        );
    }
    // Runs the shared sub-`range` assertion over the full sample dataset; expects
    // 2 documents in the first range of the first date bucket.
    public void testSubNumericRange() throws IOException {
        assertSubNumericRange(DATES_WITH_TIME, 2);
    }
    /**
     * Tests very few documents with a sub {@code range} agg which causes
     * us to collect in a very tight time range and then merge many of those
     * ranges together, thus merging unmapped {@code range} aggs with mapped
     * ones.
     *
     * <p>Only the first two sample dates are indexed, so a single document is
     * expected in the first range of the first bucket.
     */
    public void testSmallSubNumericRange() throws IOException {
        assertSubNumericRange(DATES_WITH_TIME.subList(0, 2), 1);
    }
private void assertSubNumericRange(List<ZonedDateTime> dates, long firstBucketIpCount) throws IOException {
MappedFieldType dateFieldType = new DateFieldMapper.DateFieldType(DATE_FIELD);
MappedFieldType numericFieldType = new NumberFieldMapper.NumberFieldType(NUMERIC_FIELD, NumberFieldMapper.NumberType.LONG);
AutoDateHistogramAggregationBuilder b = new AutoDateHistogramAggregationBuilder("a").field(DATE_FIELD)
.subAggregation(new RangeAggregationBuilder("r").field(NUMERIC_FIELD).addRange(0, 2).addRange(3, 4));
testCase(iw -> indexSampleData(dates, iw), (InternalAutoDateHistogram h) -> {
InternalAutoDateHistogram.Bucket bucket = h.getBuckets().get(0);
InternalRange<?, ?> range = bucket.getAggregations().get("r");
assertMap(
range.getBuckets().stream().map(InternalRange.Bucket::getKeyAsString).toList(),
matchesList().item("0.0-2.0").item("3.0-4.0")
);
assertMap(
range.getBuckets().stream().map(InternalRange.Bucket::getDocCount).toList(),
matchesList().item(firstBucketIpCount).item(0L)
);
}, new AggTestConfig(b, dateFieldType, numericFieldType));
}
    // Runs the shared sub-`ip_range` assertion over the full sample dataset; expects
    // 2 documents in the first range of the first date bucket.
    public void testSubIpRange() throws IOException {
        assertSubIpRange(DATES_WITH_TIME, 2);
    }
    /**
     * Tests very few documents with a sub {@code ip_range} agg which causes
     * us to collect in a very tight time range and then merge many of those
     * ranges together, thus merging unmapped {@code ip_range} aggs with mapped
     * ones.
     *
     * <p>Only the first two sample dates are indexed, so a single document is
     * expected in the first range of the first bucket.
     */
    public void testSmallSubIpRange() throws IOException {
        assertSubIpRange(DATES_WITH_TIME.subList(0, 2), 1);
    }
private void assertSubIpRange(List<ZonedDateTime> dates, long firstBucketIpCount) throws IOException {
MappedFieldType dateFieldType = new DateFieldMapper.DateFieldType(DATE_FIELD);
MappedFieldType ipFieldType = new IpFieldMapper.IpFieldType(IP_FIELD);
AutoDateHistogramAggregationBuilder b = new AutoDateHistogramAggregationBuilder("a").field(DATE_FIELD)
.subAggregation(
new IpRangeAggregationBuilder("r").field(IP_FIELD)
.addRange("192.168.0.0", "192.168.0.2")
.addRange("192.168.0.3", "192.168.0.4")
);
testCase(iw -> indexSampleData(dates, iw), (InternalAutoDateHistogram h) -> {
InternalAutoDateHistogram.Bucket bucket = h.getBuckets().get(0);
InternalBinaryRange range = bucket.getAggregations().get("r");
assertMap(
range.getBuckets().stream().map(InternalBinaryRange.Bucket::getKeyAsString).toList(),
matchesList().item("192.168.0.0-192.168.0.2").item("192.168.0.3-192.168.0.4")
);
assertMap(
range.getBuckets().stream().map(InternalBinaryRange.Bucket::getDocCount).toList(),
matchesList().item(firstBucketIpCount).item(0L)
);
}, new AggTestConfig(b, dateFieldType, ipFieldType));
}
    @Override
    protected IndexSettings createIndexSettings() {
        // Raise "search.max_buckets" to 25000 — presumably so tests that materialize
        // many (including zero-count) buckets don't trip the bucket-count limit;
        // confirm against MultiBucketConsumerService defaults.
        final Settings nodeSettings = Settings.builder().put("search.max_buckets", 25000).build();
        return new IndexSettings(
            IndexMetadata.builder("_index")
                .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()))
                .numberOfShards(1)
                .numberOfReplicas(0)
                .creationDate(System.currentTimeMillis())
                .build(),
            nodeSettings
        );
    }
    /**
     * Indexes {@code dataset} into a fresh in-memory Lucene index, runs an
     * auto_date_histogram (customized via {@code configure}, may be null) under
     * {@code query}, and hands the reduced result to {@code verify}.
     */
    private void testSearchCase(
        final Query query,
        final List<ZonedDateTime> dataset,
        final Consumer<AutoDateHistogramAggregationBuilder> configure,
        final Consumer<InternalAutoDateHistogram> verify
    ) throws IOException {
        try (Directory directory = newDirectory()) {
            // The writer must be closed before the reader is opened below.
            try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
                indexSampleData(dataset, indexWriter);
            }
            try (DirectoryReader indexReader = DirectoryReader.open(directory)) {
                final AutoDateHistogramAggregationBuilder aggregationBuilder = new AutoDateHistogramAggregationBuilder("_name");
                if (configure != null) {
                    configure.accept(aggregationBuilder);
                }
                // Field types must match what indexSampleData writes for each field.
                final DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType(aggregationBuilder.field());
                MappedFieldType instantFieldType = new NumberFieldMapper.NumberFieldType(INSTANT_FIELD, NumberFieldMapper.NumberType.LONG);
                MappedFieldType numericFieldType = new NumberFieldMapper.NumberFieldType(NUMERIC_FIELD, NumberFieldMapper.NumberType.LONG);
                final InternalAutoDateHistogram histogram = searchAndReduce(
                    indexReader,
                    new AggTestConfig(aggregationBuilder, fieldType, instantFieldType, numericFieldType).withQuery(query)
                );
                verify.accept(histogram);
            }
        }
    }
private void indexSampleData(List<ZonedDateTime> dataset, RandomIndexWriter indexWriter) throws IOException {
final Document document = new Document();
int i = 0;
for (final ZonedDateTime date : dataset) {
final long instant = date.toInstant().toEpochMilli();
document.add(new SortedNumericDocValuesField(DATE_FIELD, instant));
document.add(new LongPoint(INSTANT_FIELD, instant));
document.add(new SortedNumericDocValuesField(NUMERIC_FIELD, i));
document.add(
new SortedSetDocValuesField(
IP_FIELD,
new BytesRef(InetAddressPoint.encode(InetAddresses.forString("192.168.0." + (i % 256))))
)
);
indexWriter.addDocument(document);
document.clear();
i += 1;
}
}
private Map<String, Integer> bucketCountsAsMap(InternalAutoDateHistogram result) {
Map<String, Integer> map = Maps.newLinkedHashMapWithExpectedSize(result.getBuckets().size());
result.getBuckets().forEach(b -> {
Object old = map.put(b.getKeyAsString(), Math.toIntExact(b.getDocCount()));
assertNull(old);
});
return map;
}
private Map<String, Double> maxAsMap(InternalAutoDateHistogram result) {
Map<String, Double> map = Maps.newLinkedHashMapWithExpectedSize(result.getBuckets().size());
result.getBuckets().forEach(b -> {
Max max = b.getAggregations().get("max");
Object old = map.put(b.getKeyAsString(), max.value());
assertNull(old);
});
return map;
}
    @Override
    public void doAssertReducedMultiBucketConsumer(Aggregation agg, MultiBucketConsumerService.MultiBucketConsumer bucketConsumer) {
        /*
         * Intentionally a no-op: this test opts out of the base class's
         * multi-bucket-consumer assertion. NOTE(review): presumably because
         * auto_date_histogram controls its own bucket budget — confirm.
         */
    }
}
|
AutoDateHistogramAggregatorTests
|
java
|
spring-projects__spring-framework
|
spring-context/src/main/java/org/springframework/context/Lifecycle.java
|
{
"start": 2309,
"end": 3944
}
|
// Common contract for components with explicit start/stop lifecycle control;
// see SmartLifecycle for the extended (auto-startup / async-stop) contract.
interface ____ {
    /**
     * Start this component.
     * <p>Should not throw an exception if the component is already running.
     * <p>In the case of a container, this will propagate a hard start signal to all
     * components that apply, even to non-auto-startup components.
     * @see SmartLifecycle#isAutoStartup()
     */
    void start();
    /**
     * Stop this component, typically in a synchronous fashion, such that the component is
     * fully stopped upon return of this method. Consider implementing {@link SmartLifecycle}
     * and its {@code stop(Runnable)} variant when asynchronous stop behavior is necessary.
     * <p>Note that this stop notification is not guaranteed to come before destruction:
     * On regular shutdown, {@code Lifecycle} beans will first receive a stop notification
     * before the general destruction callbacks are being propagated; however, on hot
     * refresh during a context's lifetime or on aborted refresh attempts, a given bean's
     * destroy method will be called without any consideration of stop signals upfront.
     * <p>Should not throw an exception if the component is not running (not started yet).
     * <p>In the case of a container, this will propagate the stop signal to all components
     * that apply.
     * @see SmartLifecycle#stop(Runnable)
     * @see org.springframework.beans.factory.DisposableBean#destroy()
     */
    void stop();
    /**
     * Check whether this component is currently running.
     * <p>In the case of a container, this will return {@code true} only if <i>all</i>
     * components that apply are currently running.
     * @return whether the component is currently running
     */
    boolean isRunning();
}
|
Lifecycle
|
java
|
playframework__playframework
|
documentation/manual/working/javaGuide/main/http/code/javaguide/http/routing/controllers/Clients.java
|
{
"start": 261,
"end": 550
}
|
class ____ extends Controller {
// #clients-show-action
public Result show(Long id) {
Client client = clientService.findById(id);
return ok(views.html.Client.show(client));
}
// #clients-show-action
public Result list() {
return ok("all clients");
}
static
|
Clients
|
java
|
elastic__elasticsearch
|
x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramParser.java
|
{
"start": 665,
"end": 6373
}
|
class ____ {
    private static final ParseField COUNTS_FIELD = new ParseField("counts");
    private static final ParseField VALUES_FIELD = new ParseField("values");
    private static final Set<String> ROOT_FIELD_NAMES = Set.of(COUNTS_FIELD.getPreferredName(), VALUES_FIELD.getPreferredName());
    /** Returns true if {@code subFieldName} is one of the histogram's own sub-fields ("counts" or "values"). */
    public static boolean isHistogramSubFieldName(String subFieldName) {
        return ROOT_FIELD_NAMES.contains(subFieldName);
    }
    /**
     * A parsed histogram field, can represent either a T-Digest or a HDR histogram.
     * @param values the centroids, guaranteed to be in non-decreasing order (the parser
     *               only rejects strictly decreasing values, so equal consecutive
     *               values are accepted here)
     * @param counts the counts, guaranteed to be non-negative and of the same length as values
     */
    public record ParsedHistogram(List<Double> values, List<Long> counts) {}
    /**
     * Parses an XContent object into a histogram.
     * The parser is expected to point at the next token after {@link XContentParser.Token#START_OBJECT}.
     *
     * @param mappedFieldName the name of the field being parsed, used for error messages
     * @param parser the parser to use
     * @return the parsed histogram
     */
    public static ParsedHistogram parse(String mappedFieldName, XContentParser parser) throws IOException {
        ArrayList<Double> values = null;
        ArrayList<Long> counts = null;
        XContentParser.Token token = parser.currentToken();
        while (token != XContentParser.Token.END_OBJECT) {
            // should be a field
            ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser);
            String fieldName = parser.currentName();
            if (fieldName.equals(VALUES_FIELD.getPreferredName())) {
                token = parser.nextToken();
                // should be an array
                ensureExpectedToken(XContentParser.Token.START_ARRAY, token, parser);
                values = new ArrayList<>();
                token = parser.nextToken();
                double previousVal = -Double.MAX_VALUE;
                while (token != XContentParser.Token.END_ARRAY) {
                    // should be a number
                    ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser);
                    double val = parser.doubleValue();
                    // NOTE(review): only strictly decreasing values are rejected, so
                    // duplicate consecutive values pass despite the "increasing order"
                    // wording in the error message.
                    if (val < previousVal) {
                        // values must be in increasing order
                        throw new DocumentParsingException(
                            parser.getTokenLocation(),
                            "error parsing field ["
                                + mappedFieldName
                                + "], ["
                                + VALUES_FIELD
                                + "] values must be in increasing order, got ["
                                + val
                                + "] but previous value was ["
                                + previousVal
                                + "]"
                        );
                    }
                    values.add(val);
                    previousVal = val;
                    token = parser.nextToken();
                }
            } else if (fieldName.equals(COUNTS_FIELD.getPreferredName())) {
                token = parser.nextToken();
                // should be an array
                ensureExpectedToken(XContentParser.Token.START_ARRAY, token, parser);
                counts = new ArrayList<>();
                token = parser.nextToken();
                while (token != XContentParser.Token.END_ARRAY) {
                    // should be a number
                    ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser);
                    long count = parser.longValue();
                    // negative counts are invalid
                    if (count < 0) {
                        throw new DocumentParsingException(
                            parser.getTokenLocation(),
                            "error parsing field [" + mappedFieldName + "], [" + COUNTS_FIELD + "] elements must be >= 0 but got " + count
                        );
                    }
                    counts.add(count);
                    token = parser.nextToken();
                }
            } else {
                // any field other than "values"/"counts" is rejected
                throw new DocumentParsingException(
                    parser.getTokenLocation(),
                    "error parsing field [" + mappedFieldName + "], with unknown parameter [" + fieldName + "]"
                );
            }
            token = parser.nextToken();
        }
        // both sub-fields are mandatory and must have matching lengths
        if (values == null) {
            throw new DocumentParsingException(
                parser.getTokenLocation(),
                "error parsing field [" + mappedFieldName + "], expected field called [" + VALUES_FIELD.getPreferredName() + "]"
            );
        }
        if (counts == null) {
            throw new DocumentParsingException(
                parser.getTokenLocation(),
                "error parsing field [" + mappedFieldName + "], expected field called [" + COUNTS_FIELD.getPreferredName() + "]"
            );
        }
        if (values.size() != counts.size()) {
            throw new DocumentParsingException(
                parser.getTokenLocation(),
                "error parsing field ["
                    + mappedFieldName
                    + "], expected same length from ["
                    + VALUES_FIELD.getPreferredName()
                    + "] and "
                    + "["
                    + COUNTS_FIELD.getPreferredName()
                    + "] but got ["
                    + values.size()
                    + " != "
                    + counts.size()
                    + "]"
            );
        }
        return new ParsedHistogram(values, counts);
    }
}
|
HistogramParser
|
java
|
apache__camel
|
components/camel-clickup/src/test/java/org/apache/camel/component/clickup/ClickUpWebhookRegistrationTest.java
|
{
"start": 1682,
"end": 6917
}
|
// Verifies that starting/stopping a Camel context with a ClickUp webhook route
// automatically registers (POST team/{id}/webhook) and unregisters (DELETE webhook/)
// the webhook against a mocked ClickUp API.
class ____ extends ClickUpTestSupport {
    private final static Long WORKSPACE_ID = 12345L;
    private final static String AUTHORIZATION_TOKEN = "mock-authorization-token";
    private final static String WEBHOOK_SECRET = "mock-webhook-secret";
    private final static Set<String> EVENTS = new HashSet<>(List.of("taskTimeTrackedUpdated"));
    private static final ObjectMapper MAPPER = new ObjectMapper();
    public static final String WEBHOOK_CREATED_JSON = "messages/webhook-created.json";
    @Override
    public void configureTest(TestExecutionConfiguration testExecutionConfiguration) {
        super.configureTest(testExecutionConfiguration);
        // Routes are added manually in each test rather than via a shared builder.
        testExecutionConfiguration.withUseRouteBuilder(false);
    }
    @Test
    public void testAutomaticRegistration() throws Exception {
        final ClickUpMockRoutes.MockProcessor<String> mockProcessor
                = getMockRoutes().getMock("POST", "team/" + WORKSPACE_ID + "/webhook");
        mockProcessor.clearRecordedMessages();
        try (final DefaultCamelContext mockContext = new DefaultCamelContext()) {
            mockContext.addRoutes(getMockRoutes());
            mockContext.start();
            waitForClickUpMockAPI();
            setupContextRoutes();
            // Starting the context should trigger exactly one webhook creation call.
            context().start();
            final List<String> recordedMessages = mockProcessor.awaitRecordedMessages(1, 5000);
            assertEquals(1, recordedMessages.size());
            String recordedMessage = recordedMessages.get(0);
            try {
                // The recorded request body must deserialize as a creation command.
                WebhookCreationCommand command = MAPPER.readValue(recordedMessage, WebhookCreationCommand.class);
                assertInstanceOf(WebhookCreationCommand.class, command);
            } catch (IOException e) {
                fail(e);
            }
            mockProcessor.clearRecordedMessages();
            context().stop();
        }
    }
    @Test
    public void testAutomaticUnregistration() throws Exception {
        final ClickUpMockRoutes.MockProcessor<String> mockProcessor = getMockRoutes().getMock("DELETE", "webhook/");
        mockProcessor.clearRecordedMessages();
        try (final DefaultCamelContext mockContext = new DefaultCamelContext()) {
            mockContext.addRoutes(getMockRoutes());
            mockContext.start();
            waitForClickUpMockAPI();
            setupContextRoutes();
            context().start();
            // Stopping the context should trigger exactly one webhook deletion call.
            context().stop();
            {
                final List<String> readRecordedMessages = mockProcessor.awaitRecordedMessages(1, 5000);
                assertEquals(1, readRecordedMessages.size());
                String webhookDeleteMessage = readRecordedMessages.get(0);
                // DELETE carries no request body.
                assertEquals("", webhookDeleteMessage);
                mockProcessor.clearRecordedMessages();
            }
        }
    }
    private static void waitForClickUpMockAPI() {
        /* Make sure the ClickUp mock API is up and running */
        Awaitility.await()
                .atMost(5, TimeUnit.SECONDS)
                .until(() -> {
                    HttpClient client = HttpClient.newBuilder().build();
                    HttpRequest request = HttpRequest.newBuilder()
                            .uri(URI.create("http://localhost:" + port + "/clickup-api-mock/health")).GET().build();
                    final HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
                    return response.statusCode() == 200;
                });
    }
    // Adds a webhook consumer route pointed at the mock API, with auto-registration on.
    private void setupContextRoutes() throws Exception {
        context().addRoutes(new RouteBuilder() {
            @Override
            public void configure() {
                String apiMockBaseUrl = "http://localhost:" + port + "/clickup-api-mock";
                from("webhook:clickup:" + WORKSPACE_ID + "?authorizationToken=" + AUTHORIZATION_TOKEN + "&webhookSecret="
                     + WEBHOOK_SECRET + "&events=" + String.join(",", EVENTS) + "&webhookAutoRegister=true&baseUrl="
                     + apiMockBaseUrl)
                        .id("webhook")
                        .to("mock:endpoint");
            }
        });
    }
    // Builds the mock ClickUp API: a health endpoint, a canned webhook-created response
    // for registration, and an empty-JSON response for deletion.
    @Override
    protected ClickUpMockRoutes createMockRoutes() {
        ClickUpMockRoutes clickUpMockRoutes = new ClickUpMockRoutes(port);
        clickUpMockRoutes.addEndpoint(
                "health",
                "GET",
                true,
                String.class,
                () -> "");
        try (InputStream content = getClass().getClassLoader().getResourceAsStream(WEBHOOK_CREATED_JSON)) {
            assert content != null;
            String responseBody = new String(content.readAllBytes());
            clickUpMockRoutes.addEndpoint(
                    "team/" + WORKSPACE_ID + "/webhook",
                    "POST",
                    true,
                    String.class,
                    () -> responseBody);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        clickUpMockRoutes.addEndpoint(
                "webhook/",
                "DELETE",
                false,
                String.class,
                () -> "{}");
        return clickUpMockRoutes;
    }
}
|
ClickUpWebhookRegistrationTest
|
java
|
junit-team__junit5
|
jupiter-tests/src/test/java/org/junit/jupiter/engine/extension/BeforeAndAfterTestExecutionCallbackTests.java
|
{
"start": 14322,
"end": 14704
}
|
// Test extension that records each of its callback invocations in the shared
// callSequence list, so tests can assert the wrapping order of extensions.
class ____ implements BeforeTestExecutionCallback, AfterTestExecutionCallback {
    @Override
    public void beforeTestExecution(ExtensionContext context) {
        callSequence.add("fizzBeforeTestExecutionCallback");
    }
    @Override
    public void afterTestExecution(ExtensionContext context) {
        callSequence.add("fizzAfterTestExecutionCallback");
    }
}
static
|
FizzTestExecutionCallbacks
|
java
|
apache__camel
|
core/camel-core-model/src/main/java/org/apache/camel/model/rest/PutDefinition.java
|
{
"start": 1163,
"end": 1282
}
|
// REST DSL verb model; asVerb() supplies the lowercase HTTP method name.
class ____ extends VerbDefinition {
    @Override
    public String asVerb() {
        return "put";
    }
}
|
PutDefinition
|
java
|
redisson__redisson
|
redisson/src/main/java/org/redisson/api/map/RetryableMapWriterAsync.java
|
{
"start": 970,
"end": 4255
}
|
// Decorates a MapWriterAsync so that failed write/delete operations are retried
// a configurable number of times, each attempt scheduled via the ServiceManager
// after the configured retry interval.
class ____<K, V> implements MapWriterAsync<K, V> {
    private static final Logger log = LoggerFactory.getLogger(RetryableMapWriterAsync.class);
    // Supplies the retry attempt count and the interval between attempts.
    private final MapOptions<K, V> options;
    // The wrapped writer whose failures are retried.
    private final MapWriterAsync<K, V> mapWriterAsync;
    // Used to schedule retries; while null, a failure is propagated without retrying.
    private ServiceManager serviceManager;
    public void setServiceManager(ServiceManager serviceManager) {
        this.serviceManager = serviceManager;
    }
    public RetryableMapWriterAsync(MapOptions<K, V> options, MapWriterAsync<K, V> mapWriterAsync) {
        this.options = options;
        this.mapWriterAsync = mapWriterAsync;
    }
    @Override
    public CompletionStage<Void> write(Map<K, V> addedMap) {
        CompletableFuture<Void> result = new CompletableFuture<>();
        // At least one attempt is always made, even if retry attempts is <= 0.
        retryWrite(Math.max(1, options.getWriterRetryAttempts()), addedMap, result);
        return result;
    }
    // Performs one write attempt; on failure schedules the next attempt until
    // leftAttempts is exhausted.
    // NOTE(review): retryWrite/retryDelete share the same retry skeleton and could
    // be unified into a single generic helper.
    private void retryWrite(int leftAttempts, Map<K, V> addedMap, CompletableFuture<Void> result) {
        mapWriterAsync.write(addedMap).whenComplete((x, e) -> {
            if (e == null) {
                result.complete(null);
                return;
            }
            if (leftAttempts - 1 <= 0) {
                // attempts exhausted: surface the last failure
                result.completeExceptionally(e);
                return;
            }
            if (serviceManager == null) {
                log.warn("The serviceManager is null, so cannot retry writing keys: {}", addedMap);
                result.completeExceptionally(e);
                return;
            }
            log.warn("Unable to add keys: {}, will retry after {}ms", addedMap, options.getWriterRetryInterval(), e);
            serviceManager.newTimeout(t -> retryWrite(leftAttempts - 1, addedMap, result),
                    options.getWriterRetryInterval(), TimeUnit.MILLISECONDS);
            }
        );
    }
    @Override
    public CompletionStage<Void> delete(Collection<K> keys) {
        CompletableFuture<Void> result = new CompletableFuture<>();
        // At least one attempt is always made, even if retry attempts is <= 0.
        retryDelete(Math.max(1, options.getWriterRetryAttempts()), keys, result);
        return result;
    }
    // Performs one delete attempt; on failure schedules the next attempt until
    // leftAttempts is exhausted. Mirrors retryWrite.
    private void retryDelete(int leftAttempts, Collection<K> keys, CompletableFuture<Void> result) {
        mapWriterAsync.delete(keys).whenComplete((x, e) -> {
            if (e == null) {
                result.complete(null);
                return;
            }
            if (leftAttempts - 1 <= 0) {
                // attempts exhausted: surface the last failure
                result.completeExceptionally(e);
                return;
            }
            if (serviceManager == null) {
                log.warn("The serviceManager is null so cannot retry deleting keys: {}", keys);
                result.completeExceptionally(e);
                return;
            }
            log.warn("Unable to delete keys: {}, will retry after {}ms", keys, options.getWriterRetryInterval(), e);
            serviceManager.newTimeout(t -> retryDelete(leftAttempts - 1, keys, result),
                    options.getWriterRetryInterval(), TimeUnit.MILLISECONDS);
            }
        );
    }
}
|
RetryableMapWriterAsync
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/annotation/ConfigurationClassPostProcessorTests.java
|
{
"start": 69966,
"end": 70235
}
|
// Test configuration exposing a FactoryBean-returning @Bean method plus a @Bean
// method that deliberately returns null — presumably to exercise the container's
// handling of null-returning factory-bean methods; confirm against the test using it.
class ____ {
    @Bean
    public RepositoryFactoryBean<Object> repoFactoryBean() {
        return new RepositoryFactoryBean<>();
    }
    @Bean
    public FactoryBean<Object> nullFactoryBean() {
        return null;
    }
}
@Configuration
public static
|
RepositoryFactoryBeanConfiguration
|
java
|
apache__maven
|
impl/maven-core/src/main/java/org/apache/maven/project/RepositorySessionDecorator.java
|
{
"start": 1032,
"end": 1143
}
|
interface ____ part of work in progress and can be changed or removed without notice.
* @since 3.2.4
*/
public
|
is
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/qa/server/mixed-cluster/src/yamlRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/EsqlClientYamlIT.java
|
{
"start": 715,
"end": 1373
}
|
// Runs the shared client YAML REST test suite against a mixed-version cluster.
class ____ extends ESClientYamlSuiteTestCase {
    @ClassRule
    public static ElasticsearchCluster cluster = Clusters.mixedVersionCluster();
    @Override
    protected String getTestRestCluster() {
        return cluster.getHttpAddresses();
    }
    public EsqlClientYamlIT(final ClientYamlTestCandidate testCandidate) {
        super(testCandidate);
    }
    @ParametersFactory
    public static Iterable<Object[]> parameters() throws Exception {
        return createParameters();
    }
    // Runs both before and after each test: the request circuit breaker must be
    // fully released around every YAML test.
    @Before
    @After
    public void assertRequestBreakerEmpty() throws Exception {
        EsqlSpecTestCase.assertRequestBreakerEmpty();
    }
}
|
EsqlClientYamlIT
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/array/ArrayOfArraysTest.java
|
{
"start": 3405,
"end": 3557
}
|
class ____ {
@Id
@GeneratedValue
private Long id;
@JdbcTypeCode( SqlTypes.ARRAY )
private Integer[][] integers;
}
}
|
EntityWithDoubleIntegerArray
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/XAttrStorage.java
|
{
"start": 1170,
"end": 2745
}
|
class ____ {
/**
* Reads the extended attribute of an inode by name with prefix.
* <p>
*
* @param inode INode to read
* @param snapshotId the snapshotId of the requested path
* @param prefixedName xAttr name with prefix
* @return the xAttr
*/
public static XAttr readINodeXAttrByPrefixedName(INode inode, int snapshotId,
String prefixedName) {
XAttrFeature f = inode.getXAttrFeature(snapshotId);
return f == null ? null : f.getXAttr(prefixedName);
}
/**
* Reads the existing extended attributes of an inode.
* <p>
* Must be called while holding the FSDirectory read lock.
*
* @param inodeAttr INodeAttributes to read.
* @return {@code XAttr} list.
*/
public static List<XAttr> readINodeXAttrs(INodeAttributes inodeAttr) {
XAttrFeature f = inodeAttr.getXAttrFeature();
return f == null ? new ArrayList<XAttr>(0) : f.getXAttrs();
}
/**
* Update xattrs of inode.
* <p>
* Must be called while holding the FSDirectory write lock.
*
* @param inode INode to update
* @param xAttrs to update xAttrs.
* @param snapshotId id of the latest snapshot of the inode
*/
public static void updateINodeXAttrs(INode inode,
List<XAttr> xAttrs, int snapshotId) throws QuotaExceededException {
if (inode.getXAttrFeature() != null) {
inode.removeXAttrFeature(snapshotId);
}
if (xAttrs == null || xAttrs.isEmpty()) {
return;
}
inode.addXAttrFeature(new XAttrFeature(xAttrs), snapshotId);
}
}
|
XAttrStorage
|
java
|
apache__kafka
|
streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamKStreamJoinTest.java
|
{
"start": 22236,
"end": 123594
}
|
class ____ extends BuiltInDslStoreSuppliers.RocksDBDslStoreSuppliers {
final AtomicReference<WindowBytesStoreSupplier> capture = new AtomicReference<>();
@Override
public WindowBytesStoreSupplier windowStore(final DslWindowParams params) {
final WindowBytesStoreSupplier store = super.windowStore(params);
capture.set(store);
return store;
}
}
@Test
public void shouldJoinWithNonTimestampedStore() {
final JoinWindows joinWindows = JoinWindows.ofTimeDifferenceWithNoGrace(ofMillis(100L));
final CapturingStoreSuppliers storeSuppliers = new CapturingStoreSuppliers();
final StreamJoined<String, Integer, Integer> streamJoined =
StreamJoined.with(Serdes.String(), Serdes.Integer(), Serdes.Integer())
.withDslStoreSuppliers(storeSuppliers);
runJoin(streamJoined, joinWindows);
assertThat("Expected stream joined to supply builders that create non-timestamped stores",
!WrappedStateStore.isTimestamped(storeSuppliers.capture.get().get()));
}
@Test
public void shouldThrottleEmitNonJoinedOuterRecordsEvenWhenClockDrift() {
/*
* This test is testing something internal to [[KStreamKStreamJoin]], so we had to setup low-level api manually.
*/
final KStreamImplJoin.TimeTrackerSupplier tracker = new KStreamImplJoin.TimeTrackerSupplier();
final WindowStoreBuilder<String, String> otherStoreBuilder = new WindowStoreBuilder<>(
new InMemoryWindowBytesStoreSupplier(
"other",
1000L,
100,
false),
Serdes.String(),
Serdes.String(),
new MockTime());
final KeyValueStoreBuilder<TimestampedKeyAndJoinSide<String>, LeftOrRightValue<String, String>> outerStoreBuilder = new KeyValueStoreBuilder<>(
new InMemoryKeyValueBytesStoreSupplier("outer"),
new TimestampedKeyAndJoinSideSerde<>(Serdes.String()),
new LeftOrRightValueSerde<>(Serdes.String(), Serdes.String()),
new MockTime()
);
final KStreamKStreamJoinRightSide<String, String, String, String> join = new KStreamKStreamJoinRightSide<>(
new JoinWindowsInternal(JoinWindows.ofTimeDifferenceWithNoGrace(ofMillis(1000))),
(key, v1, v2) -> v1 + v2,
true,
tracker,
StoreBuilderWrapper.wrapStoreBuilder(otherStoreBuilder),
Optional.of(StoreBuilderWrapper.wrapStoreBuilder(outerStoreBuilder)));
final Processor<String, String, String, String> joinProcessor = join.get();
final MockInternalProcessorContext<String, String> procCtx = new MockInternalProcessorContext<>();
final WindowStore<String, String> otherStore = otherStoreBuilder.build();
final KeyValueStore<TimestampedKeyAndJoinSide<String>, LeftOrRightValue<String, String>> outerStore =
Mockito.spy(outerStoreBuilder.build());
final GenericInMemoryKeyValueStore<String, String> rootStore = new GenericInMemoryKeyValueStore<>("root");
otherStore.init(procCtx, rootStore);
procCtx.addStateStore(otherStore);
outerStore.init(procCtx, rootStore);
procCtx.addStateStore(outerStore);
joinProcessor.init(procCtx);
final Record<String, String> record1 = new Record<>("key1", "value1", 10000L);
final Record<String, String> record2 = new Record<>("key2", "value2", 13000L);
final Record<String, String> record3 = new Record<>("key3", "value3", 15000L);
final Record<String, String> record4 = new Record<>("key4", "value4", 17000L);
procCtx.setSystemTimeMs(1000L);
joinProcessor.process(record1);
procCtx.setSystemTimeMs(2100L);
joinProcessor.process(record2);
procCtx.setSystemTimeMs(2500L);
joinProcessor.process(record3);
// being throttled, so the older value still exists
assertEquals(2, iteratorToList(outerStore.all()).size());
procCtx.setSystemTimeMs(4000L);
joinProcessor.process(record4);
assertEquals(1, iteratorToList(outerStore.all()).size());
}
private <T> List<T> iteratorToList(final Iterator<T> iterator) {
return StreamSupport.stream(
Spliterators.spliteratorUnknownSize(iterator, Spliterator.ORDERED), false)
.collect(Collectors.toList());
}
private void runJoin(final StreamJoined<String, Integer, Integer> streamJoined,
final JoinWindows joinWindows) {
final StreamsBuilder builder = new StreamsBuilder();
final KStream<String, Integer> left = builder.stream("left", Consumed.with(Serdes.String(), Serdes.Integer()));
final KStream<String, Integer> right = builder.stream("right", Consumed.with(Serdes.String(), Serdes.Integer()));
final MockApiProcessorSupplier<String, Integer, Void, Void> supplier = new MockApiProcessorSupplier<>();
final KStream<String, Integer> joinedStream;
joinedStream = left.join(
right,
Integer::sum,
joinWindows,
streamJoined
);
joinedStream.process(supplier);
try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
final TestInputTopic<String, Integer> inputTopicLeft =
driver.createInputTopic("left", new StringSerializer(), new IntegerSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
final TestInputTopic<String, Integer> inputTopicRight =
driver.createInputTopic("right", new StringSerializer(), new IntegerSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
final MockApiProcessor<String, Integer, Void, Void> processor = supplier.theCapturedProcessor();
inputTopicLeft.pipeInput("A", 1, 1L);
inputTopicLeft.pipeInput("B", 1, 2L);
inputTopicRight.pipeInput("A", 1, 1L);
inputTopicRight.pipeInput("B", 2, 2L);
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>("A", 2, 1L),
new KeyValueTimestamp<>("B", 3, 2L)
);
}
}
@Test
public void testJoin() {
final StreamsBuilder builder = new StreamsBuilder();
final int[] expectedKeys = new int[] {0, 1, 2, 3};
final KStream<Integer, String> stream1;
final KStream<Integer, String> stream2;
final KStream<Integer, String> joined;
final MockApiProcessorSupplier<Integer, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
stream1 = builder.stream(topic1, consumed);
stream2 = builder.stream(topic2, consumed);
joined = stream1.join(
stream2,
MockValueJoiner.TOSTRING_JOINER,
JoinWindows.ofTimeDifferenceWithNoGrace(ofMillis(100L)),
StreamJoined.with(Serdes.Integer(), Serdes.String(), Serdes.String())
);
joined.process(supplier);
final Collection<Set<String>> copartitionGroups =
TopologyWrapper.getInternalTopologyBuilder(builder.build()).copartitionGroups();
assertEquals(1, copartitionGroups.size());
assertEquals(Set.of(topic1, topic2), copartitionGroups.iterator().next());
try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
final TestInputTopic<Integer, String> inputTopic1 =
driver.createInputTopic(topic1, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
final TestInputTopic<Integer, String> inputTopic2 =
driver.createInputTopic(topic2, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
final MockApiProcessor<Integer, String, Void, Void> processor = supplier.theCapturedProcessor();
// push two items to the primary stream; the other window is empty
// w1 = {}
// w2 = {}
// --> w1 = { 0:A0, 1:A1 }
// w2 = {}
for (int i = 0; i < 2; i++) {
inputTopic1.pipeInput(expectedKeys[i], "A" + expectedKeys[i]);
}
processor.checkAndClearProcessResult();
// push two items to the other stream; this should produce two items
// w1 = { 0:A0, 1:A1 }
// w2 = {}
// --> w1 = { 0:A0, 1:A1 }
// w2 = { 0:a0, 1:a1 }
for (int i = 0; i < 2; i++) {
inputTopic2.pipeInput(expectedKeys[i], "a" + expectedKeys[i]);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(0, "A0+a0", 0L),
new KeyValueTimestamp<>(1, "A1+a1", 0L)
);
// push all four items to the primary stream; this should produce two items
// w1 = { 0:A0, 1:A1 }
// w2 = { 0:a0, 1:a1 }
// --> w1 = { 0:A0, 1:A1, 0:B0, 1:B1, 2:B2, 3:B3 }
// w2 = { 0:a0, 1:a1 }
for (final int expectedKey : expectedKeys) {
inputTopic1.pipeInput(expectedKey, "B" + expectedKey);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(0, "B0+a0", 0L),
new KeyValueTimestamp<>(1, "B1+a1", 0L)
);
// push all items to the other stream; this should produce six items
// w1 = { 0:A0, 1:A1, 0:B0, 1:B1, 2:B2, 3:B3 }
// w2 = { 0:a0, 1:a1 }
// --> w1 = { 0:A0, 1:A1, 0:B0, 1:B1, 2:B2, 3:B3 }
// w2 = { 0:a0, 1:a1, 0:b0, 1:b1, 2:b2, 3:b3 }
for (final int expectedKey : expectedKeys) {
inputTopic2.pipeInput(expectedKey, "b" + expectedKey);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(0, "A0+b0", 0L),
new KeyValueTimestamp<>(0, "B0+b0", 0L),
new KeyValueTimestamp<>(1, "A1+b1", 0L),
new KeyValueTimestamp<>(1, "B1+b1", 0L),
new KeyValueTimestamp<>(2, "B2+b2", 0L),
new KeyValueTimestamp<>(3, "B3+b3", 0L)
);
// push all four items to the primary stream; this should produce six items
// w1 = { 0:A0, 1:A1, 0:B0, 1:B1, 2:B2, 3:B3 }
// w2 = { 0:a0, 1:a1, 0:b0, 1:b1, 2:b2, 3:b3 }
// --> w1 = { 0:A0, 1:A1, 0:B0, 1:B1, 2:B2, 3:B3, 0:C0, 1:C1, 2:C2, 3:C3 }
// w2 = { 0:a0, 1:a1, 0:b0, 1:b1, 2:b2, 3:b3 }
for (final int expectedKey : expectedKeys) {
inputTopic1.pipeInput(expectedKey, "C" + expectedKey);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(0, "C0+a0", 0L),
new KeyValueTimestamp<>(0, "C0+b0", 0L),
new KeyValueTimestamp<>(1, "C1+a1", 0L),
new KeyValueTimestamp<>(1, "C1+b1", 0L),
new KeyValueTimestamp<>(2, "C2+b2", 0L),
new KeyValueTimestamp<>(3, "C3+b3", 0L)
);
// push two items to the other stream; this should produce six items
// w1 = { 0:A0, 1:A1, 0:B0, 1:B1, 2:B2, 3:B3, 0:C0, 1:C1, 2:C2, 3:C3 }
// w2 = { 0:a0, 1:a1, 0:b0, 1:b1, 2:b2, 3:b3 }
// --> w1 = { 0:A0, 1:A1, 0:B0, 1:B1, 2:B2, 3:B3, 0:C0, 1:C1, 2:C2, 3:C3 }
// w2 = { 0:a0, 1:a1, 0:b0, 1:b1, 2:b2, 3:b3, 0:c0, 1:c1 }
for (int i = 0; i < 2; i++) {
inputTopic2.pipeInput(expectedKeys[i], "c" + expectedKeys[i]);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(0, "A0+c0", 0L),
new KeyValueTimestamp<>(0, "B0+c0", 0L),
new KeyValueTimestamp<>(0, "C0+c0", 0L),
new KeyValueTimestamp<>(1, "A1+c1", 0L),
new KeyValueTimestamp<>(1, "B1+c1", 0L),
new KeyValueTimestamp<>(1, "C1+c1", 0L)
);
}
}
@Test
public void testOuterJoin() {
final StreamsBuilder builder = new StreamsBuilder();
final int[] expectedKeys = new int[] {0, 1, 2, 3};
final KStream<Integer, String> stream1;
final KStream<Integer, String> stream2;
final KStream<Integer, String> joined;
final MockApiProcessorSupplier<Integer, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
stream1 = builder.stream(topic1, consumed);
stream2 = builder.stream(topic2, consumed);
joined = stream1.outerJoin(
stream2,
MockValueJoiner.TOSTRING_JOINER,
JoinWindows.ofTimeDifferenceAndGrace(ofMillis(100L), ofHours(24L)),
StreamJoined.with(Serdes.Integer(), Serdes.String(), Serdes.String())
);
joined.process(supplier);
final Collection<Set<String>> copartitionGroups =
TopologyWrapper.getInternalTopologyBuilder(builder.build()).copartitionGroups();
assertEquals(1, copartitionGroups.size());
assertEquals(Set.of(topic1, topic2), copartitionGroups.iterator().next());
try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
final TestInputTopic<Integer, String> inputTopic1 =
driver.createInputTopic(topic1, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
final TestInputTopic<Integer, String> inputTopic2 =
driver.createInputTopic(topic2, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
final MockApiProcessor<Integer, String, Void, Void> processor = supplier.theCapturedProcessor();
// push two items to the primary stream; the other window is empty; this should not produce items yet
// w1 = {}
// w2 = {}
// --> w1 = { 0:A0, 1:A1 }
// w2 = {}
for (int i = 0; i < 2; i++) {
inputTopic1.pipeInput(expectedKeys[i], "A" + expectedKeys[i]);
}
processor.checkAndClearProcessResult();
// push two items to the other stream; this should produce two items
// w1 = { 0:A0, 1:A1 }
// w2 = {}
// --> w1 = { 0:A0, 1:A1 }
// w2 = { 0:a0, 1:a1 }
for (int i = 0; i < 2; i++) {
inputTopic2.pipeInput(expectedKeys[i], "a" + expectedKeys[i]);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(0, "A0+a0", 0L),
new KeyValueTimestamp<>(1, "A1+a1", 0L)
);
// push all four items to the primary stream; this should produce two items
// w1 = { 0:A0, 1:A1 }
// w2 = { 0:a0, 1:a1 }
// --> w1 = { 0:A0, 1:A1, 0:B0, 1:B1, 2:B2, 3:B3 }
// w2 = { 0:a0, 1:a1 }
for (final int expectedKey : expectedKeys) {
inputTopic1.pipeInput(expectedKey, "B" + expectedKey);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(0, "B0+a0", 0L),
new KeyValueTimestamp<>(1, "B1+a1", 0L)
);
// push all items to the other stream; this should produce six items
// w1 = { 0:A0, 1:A1, 0:B0, 1:B1, 2:B2, 3:B3 }
// w2 = { 0:a0, 1:a1 }
// --> w1 = { 0:A0, 1:A1, 0:B0, 1:B1, 2:B2, 3:B3 }
// w2 = { 0:a0, 1:a1, 0:b0, 0:b0, 1:b1, 2:b2, 3:b3 }
for (final int expectedKey : expectedKeys) {
inputTopic2.pipeInput(expectedKey, "b" + expectedKey);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(0, "A0+b0", 0L),
new KeyValueTimestamp<>(0, "B0+b0", 0L),
new KeyValueTimestamp<>(1, "A1+b1", 0L),
new KeyValueTimestamp<>(1, "B1+b1", 0L),
new KeyValueTimestamp<>(2, "B2+b2", 0L),
new KeyValueTimestamp<>(3, "B3+b3", 0L)
);
// push all four items to the primary stream; this should produce six items
// w1 = { 0:A0, 1:A1, 0:B0, 1:B1, 2:B2, 3:B3 }
// w2 = { 0:a0, 1:a1, 0:b0, 0:b0, 1:b1, 2:b2, 3:b3 }
// --> w1 = { 0:A0, 1:A1, 0:B0, 1:B1, 2:B2, 3:B3, 0:C0, 1:C1, 2:C2, 3:C3 }
// w2 = { 0:a0, 1:a1, 0:b0, 0:b0, 1:b1, 2:b2, 3:b3 }
for (final int expectedKey : expectedKeys) {
inputTopic1.pipeInput(expectedKey, "C" + expectedKey);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(0, "C0+a0", 0L),
new KeyValueTimestamp<>(0, "C0+b0", 0L),
new KeyValueTimestamp<>(1, "C1+a1", 0L),
new KeyValueTimestamp<>(1, "C1+b1", 0L),
new KeyValueTimestamp<>(2, "C2+b2", 0L),
new KeyValueTimestamp<>(3, "C3+b3", 0L)
);
// push two items to the other stream; this should produce six items
// w1 = { 0:A0, 1:A1, 0:B0, 1:B1, 2:B2, 3:B3, 0:C0, 1:C1, 2:C2, 3:C3 }
// w2 = { 0:a0, 1:a1, 0:b0, 0:b0, 1:b1, 2:b2, 3:b3 }
// --> w1 = { 0:A0, 1:A1, 0:B0, 1:B1, 2:B2, 3:B3, 0:C0, 1:C1, 2:C2, 3:C3 }
// w2 = { 0:a0, 1:a1, 0:b0, 0:b0, 1:b1, 2:b2, 3:b3, 0:c0, 1:c1 }
for (int i = 0; i < 2; i++) {
inputTopic2.pipeInput(expectedKeys[i], "c" + expectedKeys[i]);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(0, "A0+c0", 0L),
new KeyValueTimestamp<>(0, "B0+c0", 0L),
new KeyValueTimestamp<>(0, "C0+c0", 0L),
new KeyValueTimestamp<>(1, "A1+c1", 0L),
new KeyValueTimestamp<>(1, "B1+c1", 0L),
new KeyValueTimestamp<>(1, "C1+c1", 0L)
);
}
}
@Test
public void testWindowing() {
final StreamsBuilder builder = new StreamsBuilder();
final int[] expectedKeys = new int[] {0, 1, 2, 3};
final KStream<Integer, String> stream1;
final KStream<Integer, String> stream2;
final KStream<Integer, String> joined;
final MockApiProcessorSupplier<Integer, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
stream1 = builder.stream(topic1, consumed);
stream2 = builder.stream(topic2, consumed);
joined = stream1.join(
stream2,
MockValueJoiner.TOSTRING_JOINER,
JoinWindows.ofTimeDifferenceWithNoGrace(ofMillis(100L)),
StreamJoined.with(Serdes.Integer(), Serdes.String(), Serdes.String())
);
joined.process(supplier);
final Collection<Set<String>> copartitionGroups =
TopologyWrapper.getInternalTopologyBuilder(builder.build()).copartitionGroups();
assertEquals(1, copartitionGroups.size());
assertEquals(Set.of(topic1, topic2), copartitionGroups.iterator().next());
try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
final TestInputTopic<Integer, String> inputTopic1 =
driver.createInputTopic(topic1, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
final TestInputTopic<Integer, String> inputTopic2 =
driver.createInputTopic(topic2, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
final MockApiProcessor<Integer, String, Void, Void> processor = supplier.theCapturedProcessor();
long time = 0L;
// push two items to the primary stream; the other window is empty; this should produce no items
// w1 = {}
// w2 = {}
// --> w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0) }
// w2 = {}
for (int i = 0; i < 2; i++) {
inputTopic1.pipeInput(expectedKeys[i], "A" + expectedKeys[i], time);
}
processor.checkAndClearProcessResult();
// push two items to the other stream; this should produce two items
// w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0) }
// w2 = {}
// --> w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0) }
// w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0) }
for (int i = 0; i < 2; i++) {
inputTopic2.pipeInput(expectedKeys[i], "a" + expectedKeys[i], time);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(0, "A0+a0", 0L),
new KeyValueTimestamp<>(1, "A1+a1", 0L)
);
// push four items to the primary stream with larger and increasing timestamp; this should produce no items
// w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0) }
// w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0) }
// --> w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0),
// 0:B0 (ts: 1000), 1:B1 (ts: 1001), 2:B2 (ts: 1002), 3:B3 (ts: 1003) }
// w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0) }
time = 1000L;
for (int i = 0; i < expectedKeys.length; i++) {
inputTopic1.pipeInput(expectedKeys[i], "B" + expectedKeys[i], time + i);
}
processor.checkAndClearProcessResult();
// push four items to the other stream with fixed larger timestamp; this should produce four items
// w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0),
// 0:B0 (ts: 1000), 1:B1 (ts: 1001), 2:B2 (ts: 1002), 3:B3 (ts: 1003) }
// w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0) }
// --> w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0),
// 0:B0 (ts: 1000), 1:B1 (ts: 1001), 2:B2 (ts: 1002), 3:B3 (ts: 1003) }
// w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0),
// 0:b0 (ts: 1100), 1:b1 (ts: 1100), 2:b2 (ts: 1100), 3:b3 (ts: 1100) }
time += 100L;
for (final int expectedKey : expectedKeys) {
inputTopic2.pipeInput(expectedKey, "b" + expectedKey, time);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(0, "B0+b0", 1100L),
new KeyValueTimestamp<>(1, "B1+b1", 1100L),
new KeyValueTimestamp<>(2, "B2+b2", 1100L),
new KeyValueTimestamp<>(3, "B3+b3", 1100L)
);
// push four items to the other stream with incremented timestamp; this should produce three items
// w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0),
// 0:B0 (ts: 1000), 1:B1 (ts: 1001), 2:B2 (ts: 1002), 3:B3 (ts: 1003) }
// w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0),
// 0:b0 (ts: 1100), 1:b1 (ts: 1100), 2:b2 (ts: 1100), 3:b3 (ts: 1100) }
// --> w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0),
// 0:B0 (ts: 1000), 1:B1 (ts: 1001), 2:B2 (ts: 1002), 3:B3 (ts: 1003) }
// w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0),
// 0:b0 (ts: 1100), 1:b1 (ts: 1100), 2:b2 (ts: 1100), 3:b3 (ts: 1100),
// 0:c0 (ts: 1101), 1:c1 (ts: 1101), 2:c2 (ts: 1101), 3:c3 (ts: 1101) }
time += 1L;
for (final int expectedKey : expectedKeys) {
inputTopic2.pipeInput(expectedKey, "c" + expectedKey, time);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(1, "B1+c1", 1101L),
new KeyValueTimestamp<>(2, "B2+c2", 1101L),
new KeyValueTimestamp<>(3, "B3+c3", 1101L)
);
// push four items to the other stream with incremented timestamp; this should produce two items
// w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0),
// 0:B0 (ts: 1000), 1:B1 (ts: 1001), 2:B2 (ts: 1002), 3:B3 (ts: 1003) }
// w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0),
// 0:b0 (ts: 1100), 1:b1 (ts: 1100), 2:b2 (ts: 1100), 3:b3 (ts: 1100),
// 0:c0 (ts: 1101), 1:c1 (ts: 1101), 2:c2 (ts: 1101), 3:c3 (ts: 1101) }
// --> w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0),
// 0:B0 (ts: 1000), 1:B1 (ts: 1001), 2:B2 (ts: 1002), 3:B3 (ts: 1003) }
// w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0),
// 0:b0 (ts: 1100), 1:b1 (ts: 1100), 2:b2 (ts: 1100), 3:b3 (ts: 1100),
// 0:c0 (ts: 1101), 1:c1 (ts: 1101), 2:c2 (ts: 1101), 3:c3 (ts: 1101),
// 0:d0 (ts: 1102), 1:d1 (ts: 1102), 2:d2 (ts: 1102), 3:d3 (ts: 1102) }
time += 1L;
for (final int expectedKey : expectedKeys) {
inputTopic2.pipeInput(expectedKey, "d" + expectedKey, time);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(2, "B2+d2", 1102L),
new KeyValueTimestamp<>(3, "B3+d3", 1102L)
);
// push four items to the other stream with incremented timestamp; this should produce one item
// w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0),
// 0:B0 (ts: 1000), 1:B1 (ts: 1001), 2:B2 (ts: 1002), 3:B3 (ts: 1003) }
// w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0),
// 0:b0 (ts: 1100), 1:b1 (ts: 1100), 2:b2 (ts: 1100), 3:b3 (ts: 1100),
// 0:c0 (ts: 1101), 1:c1 (ts: 1101), 2:c2 (ts: 1101), 3:c3 (ts: 1101),
// 0:d0 (ts: 1102), 1:d1 (ts: 1102), 2:d2 (ts: 1102), 3:d3 (ts: 1102) }
// --> w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0),
// 0:B0 (ts: 1000), 1:B1 (ts: 1001), 2:B2 (ts: 1002), 3:B3 (ts: 1003) }
// w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0),
// 0:b0 (ts: 1100), 1:b1 (ts: 1100), 2:b2 (ts: 1100), 3:b3 (ts: 1100),
// 0:c0 (ts: 1101), 1:c1 (ts: 1101), 2:c2 (ts: 1101), 3:c3 (ts: 1101),
// 0:d0 (ts: 1102), 1:d1 (ts: 1102), 2:d2 (ts: 1102), 3:d3 (ts: 1102),
// 0:e0 (ts: 1103), 1:e1 (ts: 1103), 2:e2 (ts: 1103), 3:e3 (ts: 1103) }
time += 1L;
for (final int expectedKey : expectedKeys) {
inputTopic2.pipeInput(expectedKey, "e" + expectedKey, time);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(3, "B3+e3", 1103L)
);
// push four items to the other stream with incremented timestamp; this should produce no items
// w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0),
// 0:B0 (ts: 1000), 1:B1 (ts: 1001), 2:B2 (ts: 1002), 3:B3 (ts: 1003) }
// w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0),
// 0:b0 (ts: 1100), 1:b1 (ts: 1100), 2:b2 (ts: 1100), 3:b3 (ts: 1100),
// 0:c0 (ts: 1101), 1:c1 (ts: 1101), 2:c2 (ts: 1101), 3:c3 (ts: 1101),
// 0:d0 (ts: 1102), 1:d1 (ts: 1102), 2:d2 (ts: 1102), 3:d3 (ts: 1102),
// 0:e0 (ts: 1103), 1:e1 (ts: 1103), 2:e2 (ts: 1103), 3:e3 (ts: 1103) }
// --> w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0),
// 0:B0 (ts: 1000), 1:B1 (ts: 1001), 2:B2 (ts: 1002), 3:B3 (ts: 1003) }
// w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0),
// 0:b0 (ts: 1100), 1:b1 (ts: 1100), 2:b2 (ts: 1100), 3:b3 (ts: 1100),
// 0:c0 (ts: 1101), 1:c1 (ts: 1101), 2:c2 (ts: 1101), 3:c3 (ts: 1101),
// 0:d0 (ts: 1102), 1:d1 (ts: 1102), 2:d2 (ts: 1102), 3:d3 (ts: 1102),
// 0:e0 (ts: 1103), 1:e1 (ts: 1103), 2:e2 (ts: 1103), 3:e3 (ts: 1103),
// 0:f0 (ts: 1104), 1:f1 (ts: 1104), 2:f2 (ts: 1104), 3:f3 (ts: 1104) }
time += 1L;
for (final int expectedKey : expectedKeys) {
inputTopic2.pipeInput(expectedKey, "f" + expectedKey, time);
}
processor.checkAndClearProcessResult();
// push four items to the other stream with timestamp before the window bound; this should produce no items
// w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0),
// 0:B0 (ts: 1000), 1:B1 (ts: 1001), 2:B2 (ts: 1002), 3:B3 (ts: 1003) }
// w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0),
// 0:b0 (ts: 1100), 1:b1 (ts: 1100), 2:b2 (ts: 1100), 3:b3 (ts: 1100),
// 0:c0 (ts: 1101), 1:c1 (ts: 1101), 2:c2 (ts: 1101), 3:c3 (ts: 1101),
// 0:d0 (ts: 1102), 1:d1 (ts: 1102), 2:d2 (ts: 1102), 3:d3 (ts: 1102),
// 0:e0 (ts: 1103), 1:e1 (ts: 1103), 2:e2 (ts: 1103), 3:e3 (ts: 1103),
// 0:f0 (ts: 1104), 1:f1 (ts: 1104), 2:f2 (ts: 1104), 3:f3 (ts: 1104) }
// --> w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0),
// 0:B0 (ts: 1000), 1:B1 (ts: 1001), 2:B2 (ts: 1002), 3:B3 (ts: 1003) }
// w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0),
// 0:b0 (ts: 1100), 1:b1 (ts: 1100), 2:b2 (ts: 1100), 3:b3 (ts: 1100),
// 0:c0 (ts: 1101), 1:c1 (ts: 1101), 2:c2 (ts: 1101), 3:c3 (ts: 1101),
// 0:d0 (ts: 1102), 1:d1 (ts: 1102), 2:d2 (ts: 1102), 3:d3 (ts: 1102),
// 0:e0 (ts: 1103), 1:e1 (ts: 1103), 2:e2 (ts: 1103), 3:e3 (ts: 1103),
// 0:f0 (ts: 1104), 1:f1 (ts: 1104), 2:f2 (ts: 1104), 3:f3 (ts: 1104),
// 0:g0 (ts: 899), 1:g1 (ts: 899), 2:g2 (ts: 899), 3:g3 (ts: 899) }
time = 1000L - 100L - 1L;
for (final int expectedKey : expectedKeys) {
inputTopic2.pipeInput(expectedKey, "g" + expectedKey, time);
}
processor.checkAndClearProcessResult();
// push four items to the other stream with with incremented timestamp; this should produce one item
// w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0),
// 0:B0 (ts: 1000), 1:B1 (ts: 1001), 2:B2 (ts: 1002), 3:B3 (ts: 1003) }
// w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0),
// 0:b0 (ts: 1100), 1:b1 (ts: 1100), 2:b2 (ts: 1100), 3:b3 (ts: 1100),
// 0:c0 (ts: 1101), 1:c1 (ts: 1101), 2:c2 (ts: 1101), 3:c3 (ts: 1101),
// 0:d0 (ts: 1102), 1:d1 (ts: 1102), 2:d2 (ts: 1102), 3:d3 (ts: 1102),
// 0:e0 (ts: 1103), 1:e1 (ts: 1103), 2:e2 (ts: 1103), 3:e3 (ts: 1103),
// 0:f0 (ts: 1104), 1:f1 (ts: 1104), 2:f2 (ts: 1104), 3:f3 (ts: 1104),
// 0:g0 (ts: 899), 1:g1 (ts: 899), 2:g2 (ts: 899), 3:g3 (ts: 899) }
// --> w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0),
// 0:B0 (ts: 1000), 1:B1 (ts: 1001), 2:B2 (ts: 1002), 3:B3 (ts: 1003) }
// w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0),
// 0:b0 (ts: 1100), 1:b1 (ts: 1100), 2:b2 (ts: 1100), 3:b3 (ts: 1100),
// 0:c0 (ts: 1101), 1:c1 (ts: 1101), 2:c2 (ts: 1101), 3:c3 (ts: 1101),
// 0:d0 (ts: 1102), 1:d1 (ts: 1102), 2:d2 (ts: 1102), 3:d3 (ts: 1102),
// 0:e0 (ts: 1103), 1:e1 (ts: 1103), 2:e2 (ts: 1103), 3:e3 (ts: 1103),
// 0:f0 (ts: 1104), 1:f1 (ts: 1104), 2:f2 (ts: 1104), 3:f3 (ts: 1104),
// 0:g0 (ts: 899), 1:g1 (ts: 899), 2:g2 (ts: 899), 3:g3 (ts: 899),
// 0:h0 (ts: 900), 1:h1 (ts: 900), 2:h2 (ts: 900), 3:h3 (ts: 900) }
time += 1L;
for (final int expectedKey : expectedKeys) {
inputTopic2.pipeInput(expectedKey, "h" + expectedKey, time);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(0, "B0+h0", 1000L)
);
// push four items to the other stream with with incremented timestamp; this should produce two items
// w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0),
// 0:B0 (ts: 1000), 1:B1 (ts: 1001), 2:B2 (ts: 1002), 3:B3 (ts: 1003) }
// w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0),
// 0:b0 (ts: 1100), 1:b1 (ts: 1100), 2:b2 (ts: 1100), 3:b3 (ts: 1100),
// 0:c0 (ts: 1101), 1:c1 (ts: 1101), 2:c2 (ts: 1101), 3:c3 (ts: 1101),
// 0:d0 (ts: 1102), 1:d1 (ts: 1102), 2:d2 (ts: 1102), 3:d3 (ts: 1102),
// 0:e0 (ts: 1103), 1:e1 (ts: 1103), 2:e2 (ts: 1103), 3:e3 (ts: 1103),
// 0:f0 (ts: 1104), 1:f1 (ts: 1104), 2:f2 (ts: 1104), 3:f3 (ts: 1104),
// 0:g0 (ts: 899), 1:g1 (ts: 899), 2:g2 (ts: 899), 3:g3 (ts: 899),
// 0:h0 (ts: 900), 1:h1 (ts: 900), 2:h2 (ts: 900), 3:h3 (ts: 900) }
// --> w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0),
// 0:B0 (ts: 1000), 1:B1 (ts: 1001), 2:B2 (ts: 1002), 3:B3 (ts: 1003) }
// w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0),
// 0:b0 (ts: 1100), 1:b1 (ts: 1100), 2:b2 (ts: 1100), 3:b3 (ts: 1100),
// 0:c0 (ts: 1101), 1:c1 (ts: 1101), 2:c2 (ts: 1101), 3:c3 (ts: 1101),
// 0:d0 (ts: 1102), 1:d1 (ts: 1102), 2:d2 (ts: 1102), 3:d3 (ts: 1102),
// 0:e0 (ts: 1103), 1:e1 (ts: 1103), 2:e2 (ts: 1103), 3:e3 (ts: 1103),
// 0:f0 (ts: 1104), 1:f1 (ts: 1104), 2:f2 (ts: 1104), 3:f3 (ts: 1104),
// 0:g0 (ts: 899), 1:g1 (ts: 899), 2:g2 (ts: 899), 3:g3 (ts: 899),
// 0:h0 (ts: 900), 1:h1 (ts: 900), 2:h2 (ts: 900), 3:h3 (ts: 900),
// 0:i0 (ts: 901), 1:i1 (ts: 901), 2:i2 (ts: 901), 3:i3 (ts: 901) }
time += 1L;
for (final int expectedKey : expectedKeys) {
inputTopic2.pipeInput(expectedKey, "i" + expectedKey, time);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(0, "B0+i0", 1000L),
new KeyValueTimestamp<>(1, "B1+i1", 1001L)
);
// push four items to the other stream with with incremented timestamp; this should produce three items
// w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0),
// 0:B0 (ts: 1000), 1:B1 (ts: 1001), 2:B2 (ts: 1002), 3:B3 (ts: 1003) }
// w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0),
// 0:b0 (ts: 1100), 1:b1 (ts: 1100), 2:b2 (ts: 1100), 3:b3 (ts: 1100),
// 0:c0 (ts: 1101), 1:c1 (ts: 1101), 2:c2 (ts: 1101), 3:c3 (ts: 1101),
// 0:d0 (ts: 1102), 1:d1 (ts: 1102), 2:d2 (ts: 1102), 3:d3 (ts: 1102),
// 0:e0 (ts: 1103), 1:e1 (ts: 1103), 2:e2 (ts: 1103), 3:e3 (ts: 1103),
// 0:f0 (ts: 1104), 1:f1 (ts: 1104), 2:f2 (ts: 1104), 3:f3 (ts: 1104),
// 0:g0 (ts: 899), 1:g1 (ts: 899), 2:g2 (ts: 899), 3:g3 (ts: 899),
// 0:h0 (ts: 900), 1:h1 (ts: 900), 2:h2 (ts: 900), 3:h3 (ts: 900),
// 0:i0 (ts: 901), 1:i1 (ts: 901), 2:i2 (ts: 901), 3:i3 (ts: 901) }
// --> w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0),
// 0:B0 (ts: 1000), 1:B1 (ts: 1001), 2:B2 (ts: 1002), 3:B3 (ts: 1003) }
// w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0),
// 0:b0 (ts: 1100), 1:b1 (ts: 1100), 2:b2 (ts: 1100), 3:b3 (ts: 1100),
// 0:c0 (ts: 1101), 1:c1 (ts: 1101), 2:c2 (ts: 1101), 3:c3 (ts: 1101),
// 0:d0 (ts: 1102), 1:d1 (ts: 1102), 2:d2 (ts: 1102), 3:d3 (ts: 1102),
// 0:e0 (ts: 1103), 1:e1 (ts: 1103), 2:e2 (ts: 1103), 3:e3 (ts: 1103),
// 0:f0 (ts: 1104), 1:f1 (ts: 1104), 2:f2 (ts: 1104), 3:f3 (ts: 1104),
// 0:g0 (ts: 899), 1:g1 (ts: 899), 2:g2 (ts: 899), 3:g3 (ts: 899),
// 0:h0 (ts: 900), 1:h1 (ts: 900), 2:h2 (ts: 900), 3:h3 (ts: 900),
// 0:i0 (ts: 901), 1:i1 (ts: 901), 2:i2 (ts: 901), 3:i3 (ts: 901),
// 0:j0 (ts: 902), 1:j1 (ts: 902), 2:j2 (ts: 902), 3:j3 (ts: 902) }
time += 1;
for (final int expectedKey : expectedKeys) {
inputTopic2.pipeInput(expectedKey, "j" + expectedKey, time);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(0, "B0+j0", 1000L),
new KeyValueTimestamp<>(1, "B1+j1", 1001L),
new KeyValueTimestamp<>(2, "B2+j2", 1002L)
);
// push four items to the other stream with with incremented timestamp; this should produce four items
// w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0),
// 0:B0 (ts: 1000), 1:B1 (ts: 1001), 2:B2 (ts: 1002), 3:B3 (ts: 1003) }
// w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0),
// 0:b0 (ts: 1100), 1:b1 (ts: 1100), 2:b2 (ts: 1100), 3:b3 (ts: 1100),
// 0:c0 (ts: 1101), 1:c1 (ts: 1101), 2:c2 (ts: 1101), 3:c3 (ts: 1101),
// 0:d0 (ts: 1102), 1:d1 (ts: 1102), 2:d2 (ts: 1102), 3:d3 (ts: 1102),
// 0:e0 (ts: 1103), 1:e1 (ts: 1103), 2:e2 (ts: 1103), 3:e3 (ts: 1103),
// 0:f0 (ts: 1104), 1:f1 (ts: 1104), 2:f2 (ts: 1104), 3:f3 (ts: 1104),
// 0:g0 (ts: 899), 1:g1 (ts: 899), 2:g2 (ts: 899), 3:g3 (ts: 899),
// 0:h0 (ts: 900), 1:h1 (ts: 900), 2:h2 (ts: 900), 3:h3 (ts: 900),
// 0:i0 (ts: 901), 1:i1 (ts: 901), 2:i2 (ts: 901), 3:i3 (ts: 901),
// 0:j0 (ts: 902), 1:j1 (ts: 902), 2:j2 (ts: 902), 3:j3 (ts: 902) }
// --> w1 = { 0:A0 (ts: 0), 1:A1 (ts: 0),
// 0:B0 (ts: 1000), 1:B1 (ts: 1001), 2:B2 (ts: 1002), 3:B3 (ts: 1003) }
// w2 = { 0:a0 (ts: 0), 1:a1 (ts: 0),
// 0:b0 (ts: 1100), 1:b1 (ts: 1100), 2:b2 (ts: 1100), 3:b3 (ts: 1100),
// 0:c0 (ts: 1101), 1:c1 (ts: 1101), 2:c2 (ts: 1101), 3:c3 (ts: 1101),
// 0:d0 (ts: 1102), 1:d1 (ts: 1102), 2:d2 (ts: 1102), 3:d3 (ts: 1102),
// 0:e0 (ts: 1103), 1:e1 (ts: 1103), 2:e2 (ts: 1103), 3:e3 (ts: 1103),
// 0:f0 (ts: 1104), 1:f1 (ts: 1104), 2:f2 (ts: 1104), 3:f3 (ts: 1104),
// 0:g0 (ts: 899), 1:g1 (ts: 899), 2:g2 (ts: 899), 3:g3 (ts: 899),
// 0:h0 (ts: 900), 1:h1 (ts: 900), 2:h2 (ts: 900), 3:h3 (ts: 900),
// 0:i0 (ts: 901), 1:i1 (ts: 901), 2:i2 (ts: 901), 3:i3 (ts: 901),
// 0:j0 (ts: 902), 1:j1 (ts: 902), 2:j2 (ts: 902), 3:j3 (ts: 902) }
// 0:k0 (ts: 903), 1:k1 (ts: 903), 2:k2 (ts: 903), 3:k3 (ts: 903) }
time += 1;
for (final int expectedKey : expectedKeys) {
inputTopic2.pipeInput(expectedKey, "k" + expectedKey, time);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(0, "B0+k0", 1000L),
new KeyValueTimestamp<>(1, "B1+k1", 1001L),
new KeyValueTimestamp<>(2, "B2+k2", 1002L),
new KeyValueTimestamp<>(3, "B3+k3", 1003L)
);
            // advance time to not join with existing data
            // we omit the above existing data, even if it's still in the window
//
// push four items with increasing timestamps to the other stream. the primary window is empty; this should produce no items
// w1 = {}
// w2 = {}
// --> w1 = {}
// w2 = { 0:l0 (ts: 2000), 1:l1 (ts: 2001), 2:l2 (ts: 2002), 3:l3 (ts: 2003) }
time = 2000L;
for (int i = 0; i < expectedKeys.length; i++) {
inputTopic2.pipeInput(expectedKeys[i], "l" + expectedKeys[i], time + i);
}
processor.checkAndClearProcessResult();
// push four items with larger timestamps to the primary stream; this should produce four items
// w1 = {}
// w2 = { 0:l0 (ts: 2000), 1:l1 (ts: 2001), 2:l2 (ts: 2002), 3:l3 (ts: 2003) }
// --> w1 = { 0:C0 (ts: 2100), 1:C1 (ts: 2100), 2:C2 (ts: 2100), 3:C3 (ts: 2100) }
// w2 = { 0:l0 (ts: 2000), 1:l1 (ts: 2001), 2:l2 (ts: 2002), 3:l3 (ts: 2003) }
time = 2000L + 100L;
for (final int expectedKey : expectedKeys) {
inputTopic1.pipeInput(expectedKey, "C" + expectedKey, time);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(0, "C0+l0", 2100L),
new KeyValueTimestamp<>(1, "C1+l1", 2100L),
new KeyValueTimestamp<>(2, "C2+l2", 2100L),
new KeyValueTimestamp<>(3, "C3+l3", 2100L)
);
            // push four items with increased timestamps to the primary stream; this should produce three items
// w1 = { 0:C0 (ts: 2100), 1:C1 (ts: 2100), 2:C2 (ts: 2100), 3:C3 (ts: 2100) }
// w2 = { 0:l0 (ts: 2000), 1:l1 (ts: 2001), 2:l2 (ts: 2002), 3:l3 (ts: 2003) }
// --> w1 = { 0:C0 (ts: 2100), 1:C1 (ts: 2100), 2:C2 (ts: 2100), 3:C3 (ts: 2100),
// 0:D0 (ts: 2101), 1:D1 (ts: 2101), 2:D2 (ts: 2101), 3:D3 (ts: 2101) }
// w2 = { 0:l0 (ts: 2000), 1:l1 (ts: 2001), 2:l2 (ts: 2002), 3:l3 (ts: 2003) }
time += 1L;
for (final int expectedKey : expectedKeys) {
inputTopic1.pipeInput(expectedKey, "D" + expectedKey, time);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(1, "D1+l1", 2101L),
new KeyValueTimestamp<>(2, "D2+l2", 2101L),
new KeyValueTimestamp<>(3, "D3+l3", 2101L)
);
            // push four items with increased timestamps to the primary stream; this should produce two items
// w1 = { 0:C0 (ts: 2100), 1:C1 (ts: 2100), 2:C2 (ts: 2100), 3:C3 (ts: 2100),
// 0:D0 (ts: 2101), 1:D1 (ts: 2101), 2:D2 (ts: 2101), 3:D3 (ts: 2101) }
// w2 = { 0:l0 (ts: 2000), 1:l1 (ts: 2001), 2:l2 (ts: 2002), 3:l3 (ts: 2003) }
// --> w1 = { 0:C0 (ts: 2100), 1:C1 (ts: 2100), 2:C2 (ts: 2100), 3:C3 (ts: 2100),
// 0:D0 (ts: 2101), 1:D1 (ts: 2101), 2:D2 (ts: 2101), 3:D3 (ts: 2101),
// 0:E0 (ts: 2102), 1:E1 (ts: 2102), 2:E2 (ts: 2102), 3:E3 (ts: 2102) }
// w2 = { 0:l0 (ts: 2000), 1:l1 (ts: 2001), 2:l2 (ts: 2002), 3:l3 (ts: 2003) }
time += 1L;
for (final int expectedKey : expectedKeys) {
inputTopic1.pipeInput(expectedKey, "E" + expectedKey, time);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(2, "E2+l2", 2102L),
new KeyValueTimestamp<>(3, "E3+l3", 2102L)
);
            // push four items with increased timestamps to the primary stream; this should produce one item
// w1 = { 0:C0 (ts: 2100), 1:C1 (ts: 2100), 2:C2 (ts: 2100), 3:C3 (ts: 2100),
// 0:D0 (ts: 2101), 1:D1 (ts: 2101), 2:D2 (ts: 2101), 3:D3 (ts: 2101),
// 0:E0 (ts: 2102), 1:E1 (ts: 2102), 2:E2 (ts: 2102), 3:E3 (ts: 2102) }
// w2 = { 0:l0 (ts: 2000), 1:l1 (ts: 2001), 2:l2 (ts: 2002), 3:l3 (ts: 2003) }
// --> w1 = { 0:C0 (ts: 2100), 1:C1 (ts: 2100), 2:C2 (ts: 2100), 3:C3 (ts: 2100),
// 0:D0 (ts: 2101), 1:D1 (ts: 2101), 2:D2 (ts: 2101), 3:D3 (ts: 2101),
// 0:E0 (ts: 2102), 1:E1 (ts: 2102), 2:E2 (ts: 2102), 3:E3 (ts: 2102),
// 0:F0 (ts: 2103), 1:F1 (ts: 2103), 2:F2 (ts: 2103), 3:F3 (ts: 2103) }
// w2 = { 0:l0 (ts: 2000), 1:l1 (ts: 2001), 2:l2 (ts: 2002), 3:l3 (ts: 2003) }
time += 1L;
for (final int expectedKey : expectedKeys) {
inputTopic1.pipeInput(expectedKey, "F" + expectedKey, time);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(3, "F3+l3", 2103L)
);
            // push four items with increased timestamps (now out of window) to the primary stream; this should produce no items
// w1 = { 0:C0 (ts: 2100), 1:C1 (ts: 2100), 2:C2 (ts: 2100), 3:C3 (ts: 2100),
// 0:D0 (ts: 2101), 1:D1 (ts: 2101), 2:D2 (ts: 2101), 3:D3 (ts: 2101),
// 0:E0 (ts: 2102), 1:E1 (ts: 2102), 2:E2 (ts: 2102), 3:E3 (ts: 2102),
// 0:F0 (ts: 2103), 1:F1 (ts: 2103), 2:F2 (ts: 2103), 3:F3 (ts: 2103) }
// w2 = { 0:l0 (ts: 2000), 1:l1 (ts: 2001), 2:l2 (ts: 2002), 3:l3 (ts: 2003) }
// --> w1 = { 0:C0 (ts: 2100), 1:C1 (ts: 2100), 2:C2 (ts: 2100), 3:C3 (ts: 2100),
// 0:D0 (ts: 2101), 1:D1 (ts: 2101), 2:D2 (ts: 2101), 3:D3 (ts: 2101),
// 0:E0 (ts: 2102), 1:E1 (ts: 2102), 2:E2 (ts: 2102), 3:E3 (ts: 2102),
// 0:F0 (ts: 2103), 1:F1 (ts: 2103), 2:F2 (ts: 2103), 3:F3 (ts: 2103),
// 0:G0 (ts: 2104), 1:G1 (ts: 2104), 2:G2 (ts: 2104), 3:G3 (ts: 2104) }
// w2 = { 0:l0 (ts: 2000), 1:l1 (ts: 2001), 2:l2 (ts: 2002), 3:l3 (ts: 2003) }
time += 1L;
for (final int expectedKey : expectedKeys) {
inputTopic1.pipeInput(expectedKey, "G" + expectedKey, time);
}
processor.checkAndClearProcessResult();
// push four items with smaller timestamps (before window) to the primary stream; this should produce no items
// w1 = { 0:C0 (ts: 2100), 1:C1 (ts: 2100), 2:C2 (ts: 2100), 3:C3 (ts: 2100),
// 0:D0 (ts: 2101), 1:D1 (ts: 2101), 2:D2 (ts: 2101), 3:D3 (ts: 2101),
// 0:E0 (ts: 2102), 1:E1 (ts: 2102), 2:E2 (ts: 2102), 3:E3 (ts: 2102),
// 0:F0 (ts: 2103), 1:F1 (ts: 2103), 2:F2 (ts: 2103), 3:F3 (ts: 2103),
// 0:G0 (ts: 2104), 1:G1 (ts: 2104), 2:G2 (ts: 2104), 3:G3 (ts: 2104) }
// w2 = { 0:l0 (ts: 2000), 1:l1 (ts: 2001), 2:l2 (ts: 2002), 3:l3 (ts: 2003) }
// --> w1 = { 0:C0 (ts: 2100), 1:C1 (ts: 2100), 2:C2 (ts: 2100), 3:C3 (ts: 2100),
// 0:D0 (ts: 2101), 1:D1 (ts: 2101), 2:D2 (ts: 2101), 3:D3 (ts: 2101),
// 0:E0 (ts: 2102), 1:E1 (ts: 2102), 2:E2 (ts: 2102), 3:E3 (ts: 2102),
// 0:F0 (ts: 2103), 1:F1 (ts: 2103), 2:F2 (ts: 2103), 3:F3 (ts: 2103),
// 0:G0 (ts: 2104), 1:G1 (ts: 2104), 2:G2 (ts: 2104), 3:G3 (ts: 2104),
// 0:H0 (ts: 1899), 1:H1 (ts: 1899), 2:H2 (ts: 1899), 3:H3 (ts: 1899) }
// w2 = { 0:l0 (ts: 2000), 1:l1 (ts: 2001), 2:l2 (ts: 2002), 3:l3 (ts: 2003) }
time = 2000L - 100L - 1L;
for (final int expectedKey : expectedKeys) {
inputTopic1.pipeInput(expectedKey, "H" + expectedKey, time);
}
processor.checkAndClearProcessResult();
// push four items with increased timestamps to the primary stream; this should produce one item
// w1 = { 0:C0 (ts: 2100), 1:C1 (ts: 2100), 2:C2 (ts: 2100), 3:C3 (ts: 2100),
// 0:D0 (ts: 2101), 1:D1 (ts: 2101), 2:D2 (ts: 2101), 3:D3 (ts: 2101),
// 0:E0 (ts: 2102), 1:E1 (ts: 2102), 2:E2 (ts: 2102), 3:E3 (ts: 2102),
// 0:F0 (ts: 2103), 1:F1 (ts: 2103), 2:F2 (ts: 2103), 3:F3 (ts: 2103),
// 0:G0 (ts: 2104), 1:G1 (ts: 2104), 2:G2 (ts: 2104), 3:G3 (ts: 2104),
// 0:H0 (ts: 1899), 1:H1 (ts: 1899), 2:H2 (ts: 1899), 3:H3 (ts: 1899) }
// w2 = { 0:l0 (ts: 2000), 1:l1 (ts: 2001), 2:l2 (ts: 2002), 3:l3 (ts: 2003) }
// --> w1 = { 0:C0 (ts: 2100), 1:C1 (ts: 2100), 2:C2 (ts: 2100), 3:C3 (ts: 2100),
// 0:D0 (ts: 2101), 1:D1 (ts: 2101), 2:D2 (ts: 2101), 3:D3 (ts: 2101),
// 0:E0 (ts: 2102), 1:E1 (ts: 2102), 2:E2 (ts: 2102), 3:E3 (ts: 2102),
// 0:F0 (ts: 2103), 1:F1 (ts: 2103), 2:F2 (ts: 2103), 3:F3 (ts: 2103),
// 0:G0 (ts: 2104), 1:G1 (ts: 2104), 2:G2 (ts: 2104), 3:G3 (ts: 2104),
// 0:H0 (ts: 1899), 1:H1 (ts: 1899), 2:H2 (ts: 1899), 3:H3 (ts: 1899),
// 0:I0 (ts: 1900), 1:I1 (ts: 1900), 2:I2 (ts: 1900), 3:I3 (ts: 1900) }
// w2 = { 0:l0 (ts: 2000), 1:l1 (ts: 2001), 2:l2 (ts: 2002), 3:l3 (ts: 2003) }
time += 1L;
for (final int expectedKey : expectedKeys) {
inputTopic1.pipeInput(expectedKey, "I" + expectedKey, time);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(0, "I0+l0", 2000L)
);
// push four items with increased timestamps to the primary stream; this should produce two items
// w1 = { 0:C0 (ts: 2100), 1:C1 (ts: 2100), 2:C2 (ts: 2100), 3:C3 (ts: 2100),
// 0:D0 (ts: 2101), 1:D1 (ts: 2101), 2:D2 (ts: 2101), 3:D3 (ts: 2101),
// 0:E0 (ts: 2102), 1:E1 (ts: 2102), 2:E2 (ts: 2102), 3:E3 (ts: 2102),
// 0:F0 (ts: 2103), 1:F1 (ts: 2103), 2:F2 (ts: 2103), 3:F3 (ts: 2103),
// 0:G0 (ts: 2104), 1:G1 (ts: 2104), 2:G2 (ts: 2104), 3:G3 (ts: 2104),
// 0:H0 (ts: 1899), 1:H1 (ts: 1899), 2:H2 (ts: 1899), 3:H3 (ts: 1899),
// 0:I0 (ts: 1900), 1:I1 (ts: 1900), 2:I2 (ts: 1900), 3:I3 (ts: 1900) }
// w2 = { 0:l0 (ts: 2000), 1:l1 (ts: 2001), 2:l2 (ts: 2002), 3:l3 (ts: 2003) }
// --> w1 = { 0:C0 (ts: 2100), 1:C1 (ts: 2100), 2:C2 (ts: 2100), 3:C3 (ts: 2100),
// 0:D0 (ts: 2101), 1:D1 (ts: 2101), 2:D2 (ts: 2101), 3:D3 (ts: 2101),
// 0:E0 (ts: 2102), 1:E1 (ts: 2102), 2:E2 (ts: 2102), 3:E3 (ts: 2102),
// 0:F0 (ts: 2103), 1:F1 (ts: 2103), 2:F2 (ts: 2103), 3:F3 (ts: 2103),
// 0:G0 (ts: 2104), 1:G1 (ts: 2104), 2:G2 (ts: 2104), 3:G3 (ts: 2104),
// 0:H0 (ts: 1899), 1:H1 (ts: 1899), 2:H2 (ts: 1899), 3:H3 (ts: 1899),
// 0:I0 (ts: 1900), 1:I1 (ts: 1900), 2:I2 (ts: 1900), 3:I3 (ts: 1900),
// 0:J0 (ts: 1901), 1:J1 (ts: 1901), 2:J2 (ts: 1901), 3:J3 (ts: 1901) }
// w2 = { 0:l0 (ts: 2000), 1:l1 (ts: 2001), 2:l2 (ts: 2002), 3:l3 (ts: 2003) }
time += 1L;
for (final int expectedKey : expectedKeys) {
inputTopic1.pipeInput(expectedKey, "J" + expectedKey, time);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(0, "J0+l0", 2000L),
new KeyValueTimestamp<>(1, "J1+l1", 2001L)
);
// push four items with increased timestamps to the primary stream; this should produce three items
// w1 = { 0:C0 (ts: 2100), 1:C1 (ts: 2100), 2:C2 (ts: 2100), 3:C3 (ts: 2100),
// 0:D0 (ts: 2101), 1:D1 (ts: 2101), 2:D2 (ts: 2101), 3:D3 (ts: 2101),
// 0:E0 (ts: 2102), 1:E1 (ts: 2102), 2:E2 (ts: 2102), 3:E3 (ts: 2102),
// 0:F0 (ts: 2103), 1:F1 (ts: 2103), 2:F2 (ts: 2103), 3:F3 (ts: 2103),
// 0:G0 (ts: 2104), 1:G1 (ts: 2104), 2:G2 (ts: 2104), 3:G3 (ts: 2104),
// 0:H0 (ts: 1899), 1:H1 (ts: 1899), 2:H2 (ts: 1899), 3:H3 (ts: 1899),
// 0:I0 (ts: 1900), 1:I1 (ts: 1900), 2:I2 (ts: 1900), 3:I3 (ts: 1900),
// 0:J0 (ts: 1901), 1:J1 (ts: 1901), 2:J2 (ts: 1901), 3:J3 (ts: 1901) }
// w2 = { 0:l0 (ts: 2000), 1:l1 (ts: 2001), 2:l2 (ts: 2002), 3:l3 (ts: 2003) }
// --> w1 = { 0:C0 (ts: 2100), 1:C1 (ts: 2100), 2:C2 (ts: 2100), 3:C3 (ts: 2100),
// 0:D0 (ts: 2101), 1:D1 (ts: 2101), 2:D2 (ts: 2101), 3:D3 (ts: 2101),
// 0:E0 (ts: 2102), 1:E1 (ts: 2102), 2:E2 (ts: 2102), 3:E3 (ts: 2102),
// 0:F0 (ts: 2103), 1:F1 (ts: 2103), 2:F2 (ts: 2103), 3:F3 (ts: 2103),
// 0:G0 (ts: 2104), 1:G1 (ts: 2104), 2:G2 (ts: 2104), 3:G3 (ts: 2104),
// 0:H0 (ts: 1899), 1:H1 (ts: 1899), 2:H2 (ts: 1899), 3:H3 (ts: 1899),
// 0:I0 (ts: 1900), 1:I1 (ts: 1900), 2:I2 (ts: 1900), 3:I3 (ts: 1900),
// 0:J0 (ts: 1901), 1:J1 (ts: 1901), 2:J2 (ts: 1901), 3:J3 (ts: 1901),
// 0:K0 (ts: 1902), 1:K1 (ts: 1902), 2:K2 (ts: 1902), 3:K3 (ts: 1902) }
// w2 = { 0:l0 (ts: 2000), 1:l1 (ts: 2001), 2:l2 (ts: 2002), 3:l3 (ts: 2003) }
time += 1L;
for (final int expectedKey : expectedKeys) {
inputTopic1.pipeInput(expectedKey, "K" + expectedKey, time);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(0, "K0+l0", 2000L),
new KeyValueTimestamp<>(1, "K1+l1", 2001L),
new KeyValueTimestamp<>(2, "K2+l2", 2002L)
);
// push four items with increased timestamps to the primary stream; this should produce four items
// w1 = { 0:C0 (ts: 2100), 1:C1 (ts: 2100), 2:C2 (ts: 2100), 3:C3 (ts: 2100),
// 0:D0 (ts: 2101), 1:D1 (ts: 2101), 2:D2 (ts: 2101), 3:D3 (ts: 2101),
// 0:E0 (ts: 2102), 1:E1 (ts: 2102), 2:E2 (ts: 2102), 3:E3 (ts: 2102),
// 0:F0 (ts: 2103), 1:F1 (ts: 2103), 2:F2 (ts: 2103), 3:F3 (ts: 2103),
// 0:G0 (ts: 2104), 1:G1 (ts: 2104), 2:G2 (ts: 2104), 3:G3 (ts: 2104),
// 0:H0 (ts: 1899), 1:H1 (ts: 1899), 2:H2 (ts: 1899), 3:H3 (ts: 1899),
// 0:I0 (ts: 1900), 1:I1 (ts: 1900), 2:I2 (ts: 1900), 3:I3 (ts: 1900),
// 0:J0 (ts: 1901), 1:J1 (ts: 1901), 2:J2 (ts: 1901), 3:J3 (ts: 1901) }
// 0:K0 (ts: 1902), 1:K1 (ts: 1902), 2:K2 (ts: 1902), 3:K3 (ts: 1902) }
// w2 = { 0:l0 (ts: 2000), 1:l1 (ts: 2001), 2:l2 (ts: 2002), 3:l3 (ts: 2003) }
// --> w1 = { 0:C0 (ts: 2100), 1:C1 (ts: 2100), 2:C2 (ts: 2100), 3:C3 (ts: 2100),
// 0:D0 (ts: 2101), 1:D1 (ts: 2101), 2:D2 (ts: 2101), 3:D3 (ts: 2101),
// 0:E0 (ts: 2102), 1:E1 (ts: 2102), 2:E2 (ts: 2102), 3:E3 (ts: 2102),
// 0:F0 (ts: 2103), 1:F1 (ts: 2103), 2:F2 (ts: 2103), 3:F3 (ts: 2103),
// 0:G0 (ts: 2104), 1:G1 (ts: 2104), 2:G2 (ts: 2104), 3:G3 (ts: 2104),
// 0:H0 (ts: 1899), 1:H1 (ts: 1899), 2:H2 (ts: 1899), 3:H3 (ts: 1899),
// 0:I0 (ts: 1900), 1:I1 (ts: 1900), 2:I2 (ts: 1900), 3:I3 (ts: 1900),
// 0:J0 (ts: 1901), 1:J1 (ts: 1901), 2:J2 (ts: 1901), 3:J3 (ts: 1901),
// 0:K0 (ts: 1902), 1:K1 (ts: 1902), 2:K2 (ts: 1902), 3:K3 (ts: 1902),
// 0:L0 (ts: 1903), 1:L1 (ts: 1903), 2:L2 (ts: 1903), 3:L3 (ts: 1903) }
// w2 = { 0:l0 (ts: 2000), 1:l1 (ts: 2001), 2:l2 (ts: 2002), 3:l3 (ts: 2003) }
time += 1L;
for (final int expectedKey : expectedKeys) {
inputTopic1.pipeInput(expectedKey, "L" + expectedKey, time);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(0, "L0+l0", 2000L),
new KeyValueTimestamp<>(1, "L1+l1", 2001L),
new KeyValueTimestamp<>(2, "L2+l2", 2002L),
new KeyValueTimestamp<>(3, "L3+l3", 2003L)
);
}
}
@Test
public void testAsymmetricWindowingAfter() {
final StreamsBuilder builder = new StreamsBuilder();
final int[] expectedKeys = new int[] {0, 1, 2, 3};
final KStream<Integer, String> stream1;
final KStream<Integer, String> stream2;
final KStream<Integer, String> joined;
final MockApiProcessorSupplier<Integer, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
stream1 = builder.stream(topic1, consumed);
stream2 = builder.stream(topic2, consumed);
joined = stream1.join(
stream2,
MockValueJoiner.TOSTRING_JOINER,
JoinWindows.ofTimeDifferenceWithNoGrace(ofMillis(0)).after(ofMillis(100)),
StreamJoined.with(Serdes.Integer(),
Serdes.String(),
Serdes.String())
);
joined.process(supplier);
final Collection<Set<String>> copartitionGroups =
TopologyWrapper.getInternalTopologyBuilder(builder.build()).copartitionGroups();
assertEquals(1, copartitionGroups.size());
assertEquals(Set.of(topic1, topic2), copartitionGroups.iterator().next());
try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
final TestInputTopic<Integer, String> inputTopic1 =
driver.createInputTopic(topic1, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
final TestInputTopic<Integer, String> inputTopic2 =
driver.createInputTopic(topic2, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
final MockApiProcessor<Integer, String, Void, Void> processor = supplier.theCapturedProcessor();
long time = 1000L;
// push four items with increasing timestamps to the primary stream; the other window is empty; this should produce no items
// w1 = {}
// w2 = {}
// --> w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = {}
for (int i = 0; i < expectedKeys.length; i++) {
inputTopic1.pipeInput(expectedKeys[i], "A" + expectedKeys[i], time + i);
}
processor.checkAndClearProcessResult();
// push four items smaller timestamps (out of window) to the secondary stream; this should produce no items
// w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = {}
// --> w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = { 0:a0 (ts: 999), 1:a1 (ts: 999), 2:a2 (ts: 999), 3:a3 (ts: 999) }
time = 1000L - 1L;
for (final int expectedKey : expectedKeys) {
inputTopic2.pipeInput(expectedKey, "a" + expectedKey, time);
}
processor.checkAndClearProcessResult();
// push four items with increased timestamps to the secondary stream; this should produce one item
// w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = { 0:a0 (ts: 999), 1:a1 (ts: 999), 2:a2 (ts: 999), 3:a3 (ts: 999) }
// --> w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = { 0:a0 (ts: 999), 1:a1 (ts: 999), 2:a2 (ts: 999), 3:a3 (ts: 999),
// 0:b0 (ts: 1000), 1:b1 (ts: 1000), 2:b2 (ts: 1000), 3:b3 (ts: 1000) }
time += 1L;
for (final int expectedKey : expectedKeys) {
inputTopic2.pipeInput(expectedKey, "b" + expectedKey, time);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(0, "A0+b0", 1000L)
);
// push four items with increased timestamps to the secondary stream; this should produce two items
// w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = { 0:a0 (ts: 999), 1:a1 (ts: 999), 2:a2 (ts: 999), 3:a3 (ts: 999),
// 0:b0 (ts: 1000), 1:b1 (ts: 1000), 2:b2 (ts: 1000), 3:b3 (ts: 1000) }
// --> w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = { 0:a0 (ts: 999), 1:a1 (ts: 999), 2:a2 (ts: 999), 3:a3 (ts: 999),
// 0:b0 (ts: 1000), 1:b1 (ts: 1000), 2:b2 (ts: 1000), 3:b3 (ts: 1000),
// 0:c0 (ts: 1001), 1:c1 (ts: 1001), 2:c2 (ts: 1001), 3:c3 (ts: 1001) }
time += 1L;
for (final int expectedKey : expectedKeys) {
inputTopic2.pipeInput(expectedKey, "c" + expectedKey, time);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(0, "A0+c0", 1001L),
new KeyValueTimestamp<>(1, "A1+c1", 1001L)
);
// push four items with increased timestamps to the secondary stream; this should produce three items
// w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = { 0:a0 (ts: 999), 1:a1 (ts: 999), 2:a2 (ts: 999), 3:a3 (ts: 999),
// 0:b0 (ts: 1000), 1:b1 (ts: 1000), 2:b2 (ts: 1000), 3:b3 (ts: 1000),
// 0:c0 (ts: 1001), 1:c1 (ts: 1001), 2:c2 (ts: 1001), 3:c3 (ts: 1001) }
// --> w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = { 0:a0 (ts: 999), 1:a1 (ts: 999), 2:a2 (ts: 999), 3:a3 (ts: 999),
// 0:b0 (ts: 1000), 1:b1 (ts: 1000), 2:b2 (ts: 1000), 3:b3 (ts: 1000),
// 0:c0 (ts: 1001), 1:c1 (ts: 1001), 2:c2 (ts: 1001), 3:c3 (ts: 1001),
// 0:d0 (ts: 1002), 1:d1 (ts: 1002), 2:d2 (ts: 1002), 3:d3 (ts: 1002) }
time += 1L;
for (final int expectedKey : expectedKeys) {
inputTopic2.pipeInput(expectedKey, "d" + expectedKey, time);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(0, "A0+d0", 1002L),
new KeyValueTimestamp<>(1, "A1+d1", 1002L),
new KeyValueTimestamp<>(2, "A2+d2", 1002L)
);
// push four items with increased timestamps to the secondary stream; this should produce four items
// w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = { 0:a0 (ts: 999), 1:a1 (ts: 999), 2:a2 (ts: 999), 3:a3 (ts: 999),
// 0:b0 (ts: 1000), 1:b1 (ts: 1000), 2:b2 (ts: 1000), 3:b3 (ts: 1000),
// 0:c0 (ts: 1001), 1:c1 (ts: 1001), 2:c2 (ts: 1001), 3:c3 (ts: 1001),
// 0:d0 (ts: 1002), 1:d1 (ts: 1002), 2:d2 (ts: 1002), 3:d3 (ts: 1002) }
// --> w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = { 0:a0 (ts: 999), 1:a1 (ts: 999), 2:a2 (ts: 999), 3:a3 (ts: 999),
// 0:b0 (ts: 1000), 1:b1 (ts: 1000), 2:b2 (ts: 1000), 3:b3 (ts: 1000),
// 0:c0 (ts: 1001), 1:c1 (ts: 1001), 2:c2 (ts: 1001), 3:c3 (ts: 1001),
// 0:d0 (ts: 1002), 1:d1 (ts: 1002), 2:d2 (ts: 1002), 3:d3 (ts: 1002),
// 0:e0 (ts: 1003), 1:e1 (ts: 1003), 2:e2 (ts: 1003), 3:e3 (ts: 1003) }
time += 1L;
for (final int expectedKey : expectedKeys) {
inputTopic2.pipeInput(expectedKey, "e" + expectedKey, time);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(0, "A0+e0", 1003L),
new KeyValueTimestamp<>(1, "A1+e1", 1003L),
new KeyValueTimestamp<>(2, "A2+e2", 1003L),
new KeyValueTimestamp<>(3, "A3+e3", 1003L)
);
// push four items with larger timestamps to the secondary stream; this should produce four items
// w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = { 0:a0 (ts: 999), 1:a1 (ts: 999), 2:a2 (ts: 999), 3:a3 (ts: 999),
// 0:b0 (ts: 1000), 1:b1 (ts: 1000), 2:b2 (ts: 1000), 3:b3 (ts: 1000),
// 0:c0 (ts: 1001), 1:c1 (ts: 1001), 2:c2 (ts: 1001), 3:c3 (ts: 1001),
// 0:d0 (ts: 1002), 1:d1 (ts: 1002), 2:d2 (ts: 1002), 3:d3 (ts: 1002),
// 0:e0 (ts: 1003), 1:e1 (ts: 1003), 2:e2 (ts: 1003), 3:e3 (ts: 1003) }
// --> w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = { 0:a0 (ts: 999), 1:a1 (ts: 999), 2:a2 (ts: 999), 3:a3 (ts: 999),
// 0:b0 (ts: 1000), 1:b1 (ts: 1000), 2:b2 (ts: 1000), 3:b3 (ts: 1000),
// 0:c0 (ts: 1001), 1:c1 (ts: 1001), 2:c2 (ts: 1001), 3:c3 (ts: 1001),
// 0:d0 (ts: 1002), 1:d1 (ts: 1002), 2:d2 (ts: 1002), 3:d3 (ts: 1002),
// 0:e0 (ts: 1003), 1:e1 (ts: 1003), 2:e2 (ts: 1003), 3:e3 (ts: 1003),
// 0:f0 (ts: 1100), 1:f1 (ts: 1100), 2:f2 (ts: 1100), 3:f3 (ts: 1100) }
time = 1000 + 100L;
for (final int expectedKey : expectedKeys) {
inputTopic2.pipeInput(expectedKey, "f" + expectedKey, time);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(0, "A0+f0", 1100L),
new KeyValueTimestamp<>(1, "A1+f1", 1100L),
new KeyValueTimestamp<>(2, "A2+f2", 1100L),
new KeyValueTimestamp<>(3, "A3+f3", 1100L)
);
// push four items with increased timestamps to the secondary stream; this should produce three items
// w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = { 0:a0 (ts: 999), 1:a1 (ts: 999), 2:a2 (ts: 999), 3:a3 (ts: 999),
// 0:b0 (ts: 1000), 1:b1 (ts: 1000), 2:b2 (ts: 1000), 3:b3 (ts: 1000),
// 0:c0 (ts: 1001), 1:c1 (ts: 1001), 2:c2 (ts: 1001), 3:c3 (ts: 1001),
// 0:d0 (ts: 1002), 1:d1 (ts: 1002), 2:d2 (ts: 1002), 3:d3 (ts: 1002),
// 0:e0 (ts: 1003), 1:e1 (ts: 1003), 2:e2 (ts: 1003), 3:e3 (ts: 1003),
// 0:f0 (ts: 1100), 1:f1 (ts: 1100), 2:f2 (ts: 1100), 3:f3 (ts: 1100) }
// --> w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = { 0:a0 (ts: 999), 1:a1 (ts: 999), 2:a2 (ts: 999), 3:a3 (ts: 999),
// 0:b0 (ts: 1000), 1:b1 (ts: 1000), 2:b2 (ts: 1000), 3:b3 (ts: 1000),
// 0:c0 (ts: 1001), 1:c1 (ts: 1001), 2:c2 (ts: 1001), 3:c3 (ts: 1001),
// 0:d0 (ts: 1002), 1:d1 (ts: 1002), 2:d2 (ts: 1002), 3:d3 (ts: 1002),
// 0:e0 (ts: 1003), 1:e1 (ts: 1003), 2:e2 (ts: 1003), 3:e3 (ts: 1003),
// 0:f0 (ts: 1100), 1:f1 (ts: 1100), 2:f2 (ts: 1100), 3:f3 (ts: 1100),
// 0:g0 (ts: 1101), 1:g1 (ts: 1101), 2:g2 (ts: 1101), 3:g3 (ts: 1101) }
time += 1L;
for (final int expectedKey : expectedKeys) {
inputTopic2.pipeInput(expectedKey, "g" + expectedKey, time);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(1, "A1+g1", 1101L),
new KeyValueTimestamp<>(2, "A2+g2", 1101L),
new KeyValueTimestamp<>(3, "A3+g3", 1101L)
);
// push four items with increased timestamps to the secondary stream; this should produce two items
// w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = { 0:a0 (ts: 999), 1:a1 (ts: 999), 2:a2 (ts: 999), 3:a3 (ts: 999),
// 0:b0 (ts: 1000), 1:b1 (ts: 1000), 2:b2 (ts: 1000), 3:b3 (ts: 1000),
// 0:c0 (ts: 1001), 1:c1 (ts: 1001), 2:c2 (ts: 1001), 3:c3 (ts: 1001),
// 0:d0 (ts: 1002), 1:d1 (ts: 1002), 2:d2 (ts: 1002), 3:d3 (ts: 1002),
// 0:e0 (ts: 1003), 1:e1 (ts: 1003), 2:e2 (ts: 1003), 3:e3 (ts: 1003),
// 0:f0 (ts: 1100), 1:f1 (ts: 1100), 2:f2 (ts: 1100), 3:f3 (ts: 1100),
// 0:g0 (ts: 1101), 1:g1 (ts: 1101), 2:g2 (ts: 1101), 3:g3 (ts: 1101) }
// --> w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = { 0:a0 (ts: 999), 1:a1 (ts: 999), 2:a2 (ts: 999), 3:a3 (ts: 999),
// 0:b0 (ts: 1000), 1:b1 (ts: 1000), 2:b2 (ts: 1000), 3:b3 (ts: 1000),
// 0:c0 (ts: 1001), 1:c1 (ts: 1001), 2:c2 (ts: 1001), 3:c3 (ts: 1001),
// 0:d0 (ts: 1002), 1:d1 (ts: 1002), 2:d2 (ts: 1002), 3:d3 (ts: 1002),
// 0:e0 (ts: 1003), 1:e1 (ts: 1003), 2:e2 (ts: 1003), 3:e3 (ts: 1003),
// 0:f0 (ts: 1100), 1:f1 (ts: 1100), 2:f2 (ts: 1100), 3:f3 (ts: 1100),
// 0:g0 (ts: 1101), 1:g1 (ts: 1101), 2:g2 (ts: 1101), 3:g3 (ts: 1101),
// 0:h0 (ts: 1102), 1:h1 (ts: 1102), 2:h2 (ts: 1102), 3:h3 (ts: 1102) }
time += 1L;
for (final int expectedKey : expectedKeys) {
inputTopic2.pipeInput(expectedKey, "h" + expectedKey, time);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(2, "A2+h2", 1102L),
new KeyValueTimestamp<>(3, "A3+h3", 1102L)
);
// push four items with increased timestamps to the secondary stream; this should produce one item
// w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = { 0:a0 (ts: 999), 1:a1 (ts: 999), 2:a2 (ts: 999), 3:a3 (ts: 999),
// 0:b0 (ts: 1000), 1:b1 (ts: 1000), 2:b2 (ts: 1000), 3:b3 (ts: 1000),
// 0:c0 (ts: 1001), 1:c1 (ts: 1001), 2:c2 (ts: 1001), 3:c3 (ts: 1001),
// 0:d0 (ts: 1002), 1:d1 (ts: 1002), 2:d2 (ts: 1002), 3:d3 (ts: 1002),
// 0:e0 (ts: 1003), 1:e1 (ts: 1003), 2:e2 (ts: 1003), 3:e3 (ts: 1003),
// 0:f0 (ts: 1100), 1:f1 (ts: 1100), 2:f2 (ts: 1100), 3:f3 (ts: 1100),
// 0:g0 (ts: 1101), 1:g1 (ts: 1101), 2:g2 (ts: 1101), 3:g3 (ts: 1101),
// 0:h0 (ts: 1102), 1:h1 (ts: 1102), 2:h2 (ts: 1102), 3:h3 (ts: 1102) }
// --> w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = { 0:a0 (ts: 999), 1:a1 (ts: 999), 2:a2 (ts: 999), 3:a3 (ts: 999),
// 0:b0 (ts: 1000), 1:b1 (ts: 1000), 2:b2 (ts: 1000), 3:b3 (ts: 1000),
// 0:c0 (ts: 1001), 1:c1 (ts: 1001), 2:c2 (ts: 1001), 3:c3 (ts: 1001),
// 0:d0 (ts: 1002), 1:d1 (ts: 1002), 2:d2 (ts: 1002), 3:d3 (ts: 1002),
// 0:e0 (ts: 1003), 1:e1 (ts: 1003), 2:e2 (ts: 1003), 3:e3 (ts: 1003),
// 0:f0 (ts: 1100), 1:f1 (ts: 1100), 2:f2 (ts: 1100), 3:f3 (ts: 1100),
// 0:g0 (ts: 1101), 1:g1 (ts: 1101), 2:g2 (ts: 1101), 3:g3 (ts: 1101),
// 0:h0 (ts: 1102), 1:h1 (ts: 1102), 2:h2 (ts: 1102), 3:h3 (ts: 1102),
// 0:i0 (ts: 1103), 1:i1 (ts: 1103), 2:i2 (ts: 1103), 3:i3 (ts: 1103) }
time += 1L;
for (final int expectedKey : expectedKeys) {
inputTopic2.pipeInput(expectedKey, "i" + expectedKey, time);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(3, "A3+i3", 1103L)
);
// push four items with increased timestamps (no out of window) to the secondary stream; this should produce no items
// w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = { 0:a0 (ts: 999), 1:a1 (ts: 999), 2:a2 (ts: 999), 3:a3 (ts: 999),
// 0:b0 (ts: 1000), 1:b1 (ts: 1000), 2:b2 (ts: 1000), 3:b3 (ts: 1000),
// 0:c0 (ts: 1001), 1:c1 (ts: 1001), 2:c2 (ts: 1001), 3:c3 (ts: 1001),
// 0:d0 (ts: 1002), 1:d1 (ts: 1002), 2:d2 (ts: 1002), 3:d3 (ts: 1002),
// 0:e0 (ts: 1003), 1:e1 (ts: 1003), 2:e2 (ts: 1003), 3:e3 (ts: 1003),
// 0:f0 (ts: 1100), 1:f1 (ts: 1100), 2:f2 (ts: 1100), 3:f3 (ts: 1100),
// 0:g0 (ts: 1101), 1:g1 (ts: 1101), 2:g2 (ts: 1101), 3:g3 (ts: 1101),
// 0:h0 (ts: 1102), 1:h1 (ts: 1102), 2:h2 (ts: 1102), 3:h3 (ts: 1102),
// 0:i0 (ts: 1103), 1:i1 (ts: 1103), 2:i2 (ts: 1103), 3:i3 (ts: 1103) }
// --> w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = { 0:a0 (ts: 999), 1:a1 (ts: 999), 2:a2 (ts: 999), 3:a3 (ts: 999),
// 0:b0 (ts: 1000), 1:b1 (ts: 1000), 2:b2 (ts: 1000), 3:b3 (ts: 1000),
// 0:c0 (ts: 1001), 1:c1 (ts: 1001), 2:c2 (ts: 1001), 3:c3 (ts: 1001),
// 0:d0 (ts: 1002), 1:d1 (ts: 1002), 2:d2 (ts: 1002), 3:d3 (ts: 1002),
// 0:e0 (ts: 1003), 1:e1 (ts: 1003), 2:e2 (ts: 1003), 3:e3 (ts: 1003),
// 0:f0 (ts: 1100), 1:f1 (ts: 1100), 2:f2 (ts: 1100), 3:f3 (ts: 1100),
// 0:g0 (ts: 1101), 1:g1 (ts: 1101), 2:g2 (ts: 1101), 3:g3 (ts: 1101),
// 0:h0 (ts: 1102), 1:h1 (ts: 1102), 2:h2 (ts: 1102), 3:h3 (ts: 1102),
// 0:i0 (ts: 1103), 1:i1 (ts: 1103), 2:i2 (ts: 1103), 3:i3 (ts: 1103),
// 0:j0 (ts: 1104), 1:j1 (ts: 1104), 2:j2 (ts: 1104), 3:j3 (ts: 1104) }
time += 1L;
for (final int expectedKey : expectedKeys) {
inputTopic2.pipeInput(expectedKey, "j" + expectedKey, time);
}
processor.checkAndClearProcessResult();
}
}
@Test
public void testAsymmetricWindowingBefore() {
final StreamsBuilder builder = new StreamsBuilder();
final int[] expectedKeys = new int[] {0, 1, 2, 3};
final KStream<Integer, String> stream1;
final KStream<Integer, String> stream2;
final KStream<Integer, String> joined;
final MockApiProcessorSupplier<Integer, String, Void, Void> supplier = new MockApiProcessorSupplier<>();
stream1 = builder.stream(topic1, consumed);
stream2 = builder.stream(topic2, consumed);
joined = stream1.join(
stream2,
MockValueJoiner.TOSTRING_JOINER,
JoinWindows.ofTimeDifferenceWithNoGrace(ofMillis(0)).before(ofMillis(100)),
StreamJoined.with(Serdes.Integer(), Serdes.String(), Serdes.String())
);
joined.process(supplier);
final Collection<Set<String>> copartitionGroups =
TopologyWrapper.getInternalTopologyBuilder(builder.build()).copartitionGroups();
assertEquals(1, copartitionGroups.size());
assertEquals(Set.of(topic1, topic2), copartitionGroups.iterator().next());
try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
final TestInputTopic<Integer, String> inputTopic1 =
driver.createInputTopic(topic1, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
final TestInputTopic<Integer, String> inputTopic2 =
driver.createInputTopic(topic2, new IntegerSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO);
final MockApiProcessor<Integer, String, Void, Void> processor = supplier.theCapturedProcessor();
long time = 1000L;
// push four items with increasing timestamps to the primary stream; the other window is empty; this should produce no items
// w1 = {}
// w2 = {}
// --> w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = {}
for (int i = 0; i < expectedKeys.length; i++) {
inputTopic1.pipeInput(expectedKeys[i], "A" + expectedKeys[i], time + i);
}
processor.checkAndClearProcessResult();
// push four items with smaller timestamps (before the window) to the other stream; this should produce no items
// w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = {}
// --> w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = { 0:a0 (ts: 899), 1:a1 (ts: 899), 2:a2 (ts: 899), 3:a3 (ts: 899) }
time = 1000L - 100L - 1L;
for (final int expectedKey : expectedKeys) {
inputTopic2.pipeInput(expectedKey, "a" + expectedKey, time);
}
processor.checkAndClearProcessResult();
// push four items with increased timestamp to the other stream; this should produce one item
// w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = { 0:a0 (ts: 899), 1:a1 (ts: 899), 2:a2 (ts: 899), 3:a3 (ts: 899) }
// --> w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = { 0:a0 (ts: 899), 1:a1 (ts: 899), 2:a2 (ts: 899), 3:a3 (ts: 899),
// 0:b0 (ts: 900), 1:b1 (ts: 900), 2:b2 (ts: 900), 3:b3 (ts: 900) }
time += 1L;
for (final int expectedKey : expectedKeys) {
inputTopic2.pipeInput(expectedKey, "b" + expectedKey, time);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(0, "A0+b0", 1000L)
);
// push four items with increased timestamp to the other stream; this should produce two items
// w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = { 0:a0 (ts: 899), 1:a1 (ts: 899), 2:a2 (ts: 899), 3:a3 (ts: 899),
// 0:b0 (ts: 900), 1:b1 (ts: 900), 2:b2 (ts: 900), 3:b3 (ts: 900) }
// --> w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = { 0:a0 (ts: 899), 1:a1 (ts: 899), 2:a2 (ts: 899), 3:a3 (ts: 899),
// 0:b0 (ts: 900), 1:b1 (ts: 900), 2:b2 (ts: 900), 3:b3 (ts: 900),
// 0:c0 (ts: 901), 1:c1 (ts: 901), 2:c2 (ts: 901), 3:c3 (ts: 901) }
time += 1L;
for (final int expectedKey : expectedKeys) {
inputTopic2.pipeInput(expectedKey, "c" + expectedKey, time);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(0, "A0+c0", 1000L),
new KeyValueTimestamp<>(1, "A1+c1", 1001L)
);
// push four items with increased timestamp to the other stream; this should produce three items
// w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = { 0:a0 (ts: 899), 1:a1 (ts: 899), 2:a2 (ts: 899), 3:a3 (ts: 899),
// 0:b0 (ts: 900), 1:b1 (ts: 900), 2:b2 (ts: 900), 3:b3 (ts: 900),
// 0:c0 (ts: 901), 1:c1 (ts: 901), 2:c2 (ts: 901), 3:c3 (ts: 901) }
// --> w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = { 0:a0 (ts: 899), 1:a1 (ts: 899), 2:a2 (ts: 899), 3:a3 (ts: 899),
// 0:b0 (ts: 900), 1:b1 (ts: 900), 2:b2 (ts: 900), 3:b3 (ts: 900),
// 0:c0 (ts: 901), 1:c1 (ts: 901), 2:c2 (ts: 901), 3:c3 (ts: 901),
// 0:d0 (ts: 902), 1:d1 (ts: 902), 2:d2 (ts: 902), 3:d3 (ts: 902) }
time += 1L;
for (final int expectedKey : expectedKeys) {
inputTopic2.pipeInput(expectedKey, "d" + expectedKey, time);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(0, "A0+d0", 1000L),
new KeyValueTimestamp<>(1, "A1+d1", 1001L),
new KeyValueTimestamp<>(2, "A2+d2", 1002L)
);
// push four items with increased timestamp to the other stream; this should produce four items
// w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = { 0:a0 (ts: 899), 1:a1 (ts: 899), 2:a2 (ts: 899), 3:a3 (ts: 899),
// 0:b0 (ts: 900), 1:b1 (ts: 900), 2:b2 (ts: 900), 3:b3 (ts: 900),
// 0:c0 (ts: 901), 1:c1 (ts: 901), 2:c2 (ts: 901), 3:c3 (ts: 901),
// 0:d0 (ts: 902), 1:d1 (ts: 902), 2:d2 (ts: 902), 3:d3 (ts: 902) }
// --> w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = { 0:a0 (ts: 899), 1:a1 (ts: 899), 2:a2 (ts: 899), 3:a3 (ts: 899),
// 0:b0 (ts: 900), 1:b1 (ts: 900), 2:b2 (ts: 900), 3:b3 (ts: 900),
// 0:c0 (ts: 901), 1:c1 (ts: 901), 2:c2 (ts: 901), 3:c3 (ts: 901),
// 0:d0 (ts: 902), 1:d1 (ts: 902), 2:d2 (ts: 902), 3:d3 (ts: 902),
// 0:e0 (ts: 903), 1:e1 (ts: 903), 2:e2 (ts: 903), 3:e3 (ts: 903) }
time += 1L;
for (final int expectedKey : expectedKeys) {
inputTopic2.pipeInput(expectedKey, "e" + expectedKey, time);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(0, "A0+e0", 1000L),
new KeyValueTimestamp<>(1, "A1+e1", 1001L),
new KeyValueTimestamp<>(2, "A2+e2", 1002L),
new KeyValueTimestamp<>(3, "A3+e3", 1003L)
);
// push four items with larger timestamp to the other stream; this should produce four items
// w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = { 0:a0 (ts: 899), 1:a1 (ts: 899), 2:a2 (ts: 899), 3:a3 (ts: 899),
// 0:b0 (ts: 900), 1:b1 (ts: 900), 2:b2 (ts: 900), 3:b3 (ts: 900),
// 0:c0 (ts: 901), 1:c1 (ts: 901), 2:c2 (ts: 901), 3:c3 (ts: 901),
// 0:d0 (ts: 902), 1:d1 (ts: 902), 2:d2 (ts: 902), 3:d3 (ts: 902),
// 0:e0 (ts: 903), 1:e1 (ts: 903), 2:e2 (ts: 903), 3:e3 (ts: 903) }
// --> w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = { 0:a0 (ts: 899), 1:a1 (ts: 899), 2:a2 (ts: 899), 3:a3 (ts: 899),
// 0:b0 (ts: 900), 1:b1 (ts: 900), 2:b2 (ts: 900), 3:b3 (ts: 900),
// 0:c0 (ts: 901), 1:c1 (ts: 901), 2:c2 (ts: 901), 3:c3 (ts: 901),
// 0:d0 (ts: 902), 1:d1 (ts: 902), 2:d2 (ts: 902), 3:d3 (ts: 902),
// 0:e0 (ts: 903), 1:e1 (ts: 903), 2:e2 (ts: 903), 3:e3 (ts: 903),
// 0:f0 (ts: 1000), 1:f1 (ts: 1000), 2:f2 (ts: 1000), 3:f3 (ts: 1000) }
time = 1000L;
for (final int expectedKey : expectedKeys) {
inputTopic2.pipeInput(expectedKey, "f" + expectedKey, time);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(0, "A0+f0", 1000L),
new KeyValueTimestamp<>(1, "A1+f1", 1001L),
new KeyValueTimestamp<>(2, "A2+f2", 1002L),
new KeyValueTimestamp<>(3, "A3+f3", 1003L)
);
// push four items with increase timestamp to the other stream; this should produce three items
// w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = { 0:a0 (ts: 899), 1:a1 (ts: 899), 2:a2 (ts: 899), 3:a3 (ts: 899),
// 0:b0 (ts: 900), 1:b1 (ts: 900), 2:b2 (ts: 900), 3:b3 (ts: 900),
// 0:c0 (ts: 901), 1:c1 (ts: 901), 2:c2 (ts: 901), 3:c3 (ts: 901),
// 0:d0 (ts: 902), 1:d1 (ts: 902), 2:d2 (ts: 902), 3:d3 (ts: 902),
// 0:e0 (ts: 903), 1:e1 (ts: 903), 2:e2 (ts: 903), 3:e3 (ts: 903),
// 0:f0 (ts: 1000), 1:f1 (ts: 1000), 2:f2 (ts: 1000), 3:f3 (ts: 1000) }
// --> w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = { 0:a0 (ts: 899), 1:a1 (ts: 899), 2:a2 (ts: 899), 3:a3 (ts: 899),
// 0:b0 (ts: 900), 1:b1 (ts: 900), 2:b2 (ts: 900), 3:b3 (ts: 900),
// 0:c0 (ts: 901), 1:c1 (ts: 901), 2:c2 (ts: 901), 3:c3 (ts: 901),
// 0:d0 (ts: 902), 1:d1 (ts: 902), 2:d2 (ts: 902), 3:d3 (ts: 902),
// 0:e0 (ts: 903), 1:e1 (ts: 903), 2:e2 (ts: 903), 3:e3 (ts: 903),
// 0:f0 (ts: 1000), 1:f1 (ts: 1000), 2:f2 (ts: 1000), 3:f3 (ts: 1000),
// 0:g0 (ts: 1001), 1:g1 (ts: 1001), 2:g2 (ts: 1001), 3:g3 (ts: 1001) }
time += 1L;
for (final int expectedKey : expectedKeys) {
inputTopic2.pipeInput(expectedKey, "g" + expectedKey, time);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(1, "A1+g1", 1001L),
new KeyValueTimestamp<>(2, "A2+g2", 1002L),
new KeyValueTimestamp<>(3, "A3+g3", 1003L)
);
// push four items with increase timestamp to the other stream; this should produce two items
// w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = { 0:a0 (ts: 899), 1:a1 (ts: 899), 2:a2 (ts: 899), 3:a3 (ts: 899),
// 0:b0 (ts: 900), 1:b1 (ts: 900), 2:b2 (ts: 900), 3:b3 (ts: 900),
// 0:c0 (ts: 901), 1:c1 (ts: 901), 2:c2 (ts: 901), 3:c3 (ts: 901),
// 0:d0 (ts: 902), 1:d1 (ts: 902), 2:d2 (ts: 902), 3:d3 (ts: 902),
// 0:e0 (ts: 903), 1:e1 (ts: 903), 2:e2 (ts: 903), 3:e3 (ts: 903),
// 0:f0 (ts: 1000), 1:f1 (ts: 1000), 2:f2 (ts: 1000), 3:f3 (ts: 1000),
// 0:g0 (ts: 1001), 1:g1 (ts: 1001), 2:g2 (ts: 1001), 3:g3 (ts: 1001) }
// --> w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = { 0:a0 (ts: 899), 1:a1 (ts: 899), 2:a2 (ts: 899), 3:a3 (ts: 899),
// 0:b0 (ts: 900), 1:b1 (ts: 900), 2:b2 (ts: 900), 3:b3 (ts: 900),
// 0:c0 (ts: 901), 1:c1 (ts: 901), 2:c2 (ts: 901), 3:c3 (ts: 901),
// 0:d0 (ts: 902), 1:d1 (ts: 902), 2:d2 (ts: 902), 3:d3 (ts: 902),
// 0:e0 (ts: 903), 1:e1 (ts: 903), 2:e2 (ts: 903), 3:e3 (ts: 903),
// 0:f0 (ts: 1000), 1:f1 (ts: 1000), 2:f2 (ts: 1000), 3:f3 (ts: 1000),
// 0:g0 (ts: 1001), 1:g1 (ts: 1001), 2:g2 (ts: 1001), 3:g3 (ts: 1001),
// 0:h0 (ts: 1002), 1:h1 (ts: 1002), 2:h2 (ts: 1002), 3:h3 (ts: 1002) }
time += 1L;
for (final int expectedKey : expectedKeys) {
inputTopic2.pipeInput(expectedKey, "h" + expectedKey, time);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(2, "A2+h2", 1002L),
new KeyValueTimestamp<>(3, "A3+h3", 1003L)
);
// push four items with increase timestamp to the other stream; this should produce one item
// w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = { 0:a0 (ts: 899), 1:a1 (ts: 899), 2:a2 (ts: 899), 3:a3 (ts: 899),
// 0:b0 (ts: 900), 1:b1 (ts: 900), 2:b2 (ts: 900), 3:b3 (ts: 900),
// 0:c0 (ts: 901), 1:c1 (ts: 901), 2:c2 (ts: 901), 3:c3 (ts: 901),
// 0:d0 (ts: 902), 1:d1 (ts: 902), 2:d2 (ts: 902), 3:d3 (ts: 902),
// 0:e0 (ts: 903), 1:e1 (ts: 903), 2:e2 (ts: 903), 3:e3 (ts: 903),
// 0:f0 (ts: 1000), 1:f1 (ts: 1000), 2:f2 (ts: 1000), 3:f3 (ts: 1000),
// 0:g0 (ts: 1001), 1:g1 (ts: 1001), 2:g2 (ts: 1001), 3:g3 (ts: 1001),
// 0:h0 (ts: 1002), 1:h1 (ts: 1002), 2:h2 (ts: 1002), 3:h3 (ts: 1002) }
// --> w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = { 0:a0 (ts: 899), 1:a1 (ts: 899), 2:a2 (ts: 899), 3:a3 (ts: 899),
// 0:b0 (ts: 900), 1:b1 (ts: 900), 2:b2 (ts: 900), 3:b3 (ts: 900),
// 0:c0 (ts: 901), 1:c1 (ts: 901), 2:c2 (ts: 901), 3:c3 (ts: 901),
// 0:d0 (ts: 902), 1:d1 (ts: 902), 2:d2 (ts: 902), 3:d3 (ts: 902),
// 0:e0 (ts: 903), 1:e1 (ts: 903), 2:e2 (ts: 903), 3:e3 (ts: 903),
// 0:f0 (ts: 1000), 1:f1 (ts: 1000), 2:f2 (ts: 1000), 3:f3 (ts: 1000),
// 0:g0 (ts: 1001), 1:g1 (ts: 1001), 2:g2 (ts: 1001), 3:g3 (ts: 1001),
// 0:h0 (ts: 1002), 1:h1 (ts: 1002), 2:h2 (ts: 1002), 3:h3 (ts: 1002),
// 0:i0 (ts: 1003), 1:i1 (ts: 1003), 2:i2 (ts: 1003), 3:i3 (ts: 1003) }
time += 1L;
for (final int expectedKey : expectedKeys) {
inputTopic2.pipeInput(expectedKey, "i" + expectedKey, time);
}
processor.checkAndClearProcessResult(
new KeyValueTimestamp<>(3, "A3+i3", 1003L)
);
// push four items with increase timestamp (no out of window) to the other stream; this should produce no items
// w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = { 0:a0 (ts: 899), 1:a1 (ts: 899), 2:a2 (ts: 899), 3:a3 (ts: 899),
// 0:b0 (ts: 900), 1:b1 (ts: 900), 2:b2 (ts: 900), 3:b3 (ts: 900),
// 0:c0 (ts: 901), 1:c1 (ts: 901), 2:c2 (ts: 901), 3:c3 (ts: 901),
// 0:d0 (ts: 902), 1:d1 (ts: 902), 2:d2 (ts: 902), 3:d3 (ts: 902),
// 0:e0 (ts: 903), 1:e1 (ts: 903), 2:e2 (ts: 903), 3:e3 (ts: 903),
// 0:f0 (ts: 1000), 1:f1 (ts: 1000), 2:f2 (ts: 1000), 3:f3 (ts: 1000),
// 0:g0 (ts: 1001), 1:g1 (ts: 1001), 2:g2 (ts: 1001), 3:g3 (ts: 1001),
// 0:h0 (ts: 1002), 1:h1 (ts: 1002), 2:h2 (ts: 1002), 3:h3 (ts: 1002),
// 0:i0 (ts: 1003), 1:i1 (ts: 1003), 2:i2 (ts: 1003), 3:i3 (ts: 1003) }
// --> w1 = { 0:A0 (ts: 1000), 1:A1 (ts: 1001), 2:A2 (ts: 1002), 3:A3 (ts: 1003) }
// w2 = { 0:a0 (ts: 899), 1:a1 (ts: 899), 2:a2 (ts: 899), 3:a3 (ts: 899),
// 0:b0 (ts: 900), 1:b1 (ts: 900), 2:b2 (ts: 900), 3:b3 (ts: 900),
// 0:c0 (ts: 901), 1:c1 (ts: 901), 2:c2 (ts: 901), 3:c3 (ts: 901),
// 0:d0 (ts: 902), 1:d1 (ts: 902), 2:d2 (ts: 902), 3:d3 (ts: 902),
// 0:e0 (ts: 903), 1:e1 (ts: 903), 2:e2 (ts: 903), 3:e3 (ts: 903),
// 0:f0 (ts: 1000), 1:f1 (ts: 1000), 2:f2 (ts: 1000), 3:f3 (ts: 1000),
// 0:g0 (ts: 1001), 1:g1 (ts: 1001), 2:g2 (ts: 1001), 3:g3 (ts: 1001),
// 0:h0 (ts: 1002), 1:h1 (ts: 1002), 2:h2 (ts: 1002), 3:h3 (ts: 1002),
// 0:i0 (ts: 1003), 1:i1 (ts: 1003), 2:i2 (ts: 1003), 3:i3 (ts: 1003),
// 0:j0 (ts: 1004), 1:j1 (ts: 1004), 2:j2 (ts: 1004), 3:j3 (ts: 1004) }
time += 1L;
for (final int expectedKey : expectedKeys) {
inputTopic2.pipeInput(expectedKey, "j" + expectedKey, time);
}
processor.checkAndClearProcessResult();
}
}
/**
 * Builds a windowed stream-stream join that is expected to fail at topology
 * construction time, and asserts that the thrown {@code StreamsException}
 * message starts with the expected prefix.
 *
 * @param streamJoined join configuration under test
 * @param joinWindows window definition for the join
 * @param expectedExceptionMessagePrefix required prefix of the exception message
 */
private void buildStreamsJoinThatShouldThrow(final StreamJoined<String, Integer, Integer> streamJoined,
                                             final JoinWindows joinWindows,
                                             final String expectedExceptionMessagePrefix) {
    final StreamsBuilder topologyBuilder = new StreamsBuilder();
    final KStream<String, Integer> leftStream =
        topologyBuilder.stream("left", Consumed.with(Serdes.String(), Serdes.Integer()));
    final KStream<String, Integer> rightStream =
        topologyBuilder.stream("right", Consumed.with(Serdes.String(), Serdes.Integer()));

    // The join call itself must throw; building never gets as far as running.
    final StreamsException thrown = assertThrows(
        StreamsException.class,
        () -> leftStream.join(rightStream, Integer::sum, joinWindows, streamJoined)
    );
    assertTrue(thrown.getMessage().startsWith(expectedExceptionMessagePrefix));
}
/**
 * Creates an in-memory window store supplier with the given retention period,
 * window size (both in milliseconds) and duplicate-retention policy.
 *
 * @param name store name
 * @param retentionPeriod retention period in milliseconds
 * @param windowSize window size in milliseconds
 * @param retainDuplicates whether duplicates are retained
 * @return the store supplier
 */
private WindowBytesStoreSupplier buildWindowBytesStoreSupplier(final String name,
                                                               final long retentionPeriod,
                                                               final long windowSize,
                                                               final boolean retainDuplicates) {
    final Duration retention = Duration.ofMillis(retentionPeriod);
    final Duration size = Duration.ofMillis(windowSize);
    return Stores.inMemoryWindowStore(name, retention, size, retainDuplicates);
}
/**
 * Expected printed topology for the case where the repartition topics are
 * explicitly named by the user: each join gets its own repartition topic
 * ("first-join-left-repartition", "second-join-left-repartition") and lands
 * in its own sub-topology (1 and 2), with sub-topology 0 fanning out to both.
 */
private final String expectedTopologyWithUserNamedRepartitionTopics = "Topologies:\n" +
    "   Sub-topology: 0\n" +
    "    Source: KSTREAM-SOURCE-0000000000 (topics: [topic])\n" +
    "      --> KSTREAM-MAP-0000000003\n" +
    "    Processor: KSTREAM-MAP-0000000003 (stores: [])\n" +
    "      --> second-join-left-repartition-filter, first-join-left-repartition-filter\n" +
    "      <-- KSTREAM-SOURCE-0000000000\n" +
    "    Processor: first-join-left-repartition-filter (stores: [])\n" +
    "      --> first-join-left-repartition-sink\n" +
    "      <-- KSTREAM-MAP-0000000003\n" +
    "    Processor: second-join-left-repartition-filter (stores: [])\n" +
    "      --> second-join-left-repartition-sink\n" +
    "      <-- KSTREAM-MAP-0000000003\n" +
    "    Sink: first-join-left-repartition-sink (topic: first-join-left-repartition)\n" +
    "      <-- first-join-left-repartition-filter\n" +
    "    Sink: second-join-left-repartition-sink (topic: second-join-left-repartition)\n" +
    "      <-- second-join-left-repartition-filter\n" +
    "\n" +
    "  Sub-topology: 1\n" +
    "    Source: KSTREAM-SOURCE-0000000001 (topics: [topic2])\n" +
    "      --> first-join-other-windowed\n" +
    "    Source: first-join-left-repartition-source (topics: [first-join-left-repartition])\n" +
    "      --> first-join-this-windowed\n" +
    "    Processor: first-join-other-windowed (stores: [KSTREAM-JOINOTHER-0000000010-store])\n" +
    "      --> first-join-other-join\n" +
    "      <-- KSTREAM-SOURCE-0000000001\n" +
    "    Processor: first-join-this-windowed (stores: [KSTREAM-JOINTHIS-0000000009-store])\n" +
    "      --> first-join-this-join\n" +
    "      <-- first-join-left-repartition-source\n" +
    "    Processor: first-join-other-join (stores: [KSTREAM-JOINTHIS-0000000009-store])\n" +
    "      --> first-join-merge\n" +
    "      <-- first-join-other-windowed\n" +
    "    Processor: first-join-this-join (stores: [KSTREAM-JOINOTHER-0000000010-store])\n" +
    "      --> first-join-merge\n" +
    "      <-- first-join-this-windowed\n" +
    "    Processor: first-join-merge (stores: [])\n" +
    "      --> KSTREAM-SINK-0000000012\n" +
    "      <-- first-join-this-join, first-join-other-join\n" +
    "    Sink: KSTREAM-SINK-0000000012 (topic: out-one)\n" +
    "      <-- first-join-merge\n" +
    "\n" +
    "  Sub-topology: 2\n" +
    "    Source: KSTREAM-SOURCE-0000000002 (topics: [topic3])\n" +
    "      --> second-join-other-windowed\n" +
    "    Source: second-join-left-repartition-source (topics: [second-join-left-repartition])\n" +
    "      --> second-join-this-windowed\n" +
    "    Processor: second-join-other-windowed (stores: [KSTREAM-JOINOTHER-0000000019-store])\n" +
    "      --> second-join-other-join\n" +
    "      <-- KSTREAM-SOURCE-0000000002\n" +
    "    Processor: second-join-this-windowed (stores: [KSTREAM-JOINTHIS-0000000018-store])\n" +
    "      --> second-join-this-join\n" +
    "      <-- second-join-left-repartition-source\n" +
    "    Processor: second-join-other-join (stores: [KSTREAM-JOINTHIS-0000000018-store])\n" +
    "      --> second-join-merge\n" +
    "      <-- second-join-other-windowed\n" +
    "    Processor: second-join-this-join (stores: [KSTREAM-JOINOTHER-0000000019-store])\n" +
    "      --> second-join-merge\n" +
    "      <-- second-join-this-windowed\n" +
    "    Processor: second-join-merge (stores: [])\n" +
    "      --> KSTREAM-SINK-0000000021\n" +
    "      <-- second-join-this-join, second-join-other-join\n" +
    "    Sink: KSTREAM-SINK-0000000021 (topic: out-two)\n" +
    "      <-- second-join-merge\n\n";
/**
 * Expected printed topology when Streams generates the repartition topic name
 * ("KSTREAM-MAP-0000000003-repartition"): the single generated repartition
 * topic is shared by both joins, which therefore collapse into one
 * sub-topology (1).
 * NOTE(review): the final sink topic here is "out-to" while the user-named
 * variant uses "out-two" — confirm this matches the topic name used by the
 * corresponding test before assuming it is a typo.
 */
private final String expectedTopologyWithGeneratedRepartitionTopic = "Topologies:\n" +
    "   Sub-topology: 0\n" +
    "    Source: KSTREAM-SOURCE-0000000000 (topics: [topic])\n" +
    "      --> KSTREAM-MAP-0000000003\n" +
    "    Processor: KSTREAM-MAP-0000000003 (stores: [])\n" +
    "      --> KSTREAM-FILTER-0000000005\n" +
    "      <-- KSTREAM-SOURCE-0000000000\n" +
    "    Processor: KSTREAM-FILTER-0000000005 (stores: [])\n" +
    "      --> KSTREAM-SINK-0000000004\n" +
    "      <-- KSTREAM-MAP-0000000003\n" +
    "    Sink: KSTREAM-SINK-0000000004 (topic: KSTREAM-MAP-0000000003-repartition)\n" +
    "      <-- KSTREAM-FILTER-0000000005\n" +
    "\n" +
    "  Sub-topology: 1\n" +
    "    Source: KSTREAM-SOURCE-0000000006 (topics: [KSTREAM-MAP-0000000003-repartition])\n" +
    "      --> KSTREAM-WINDOWED-0000000007, KSTREAM-WINDOWED-0000000016\n" +
    "    Source: KSTREAM-SOURCE-0000000001 (topics: [topic2])\n" +
    "      --> KSTREAM-WINDOWED-0000000008\n" +
    "    Source: KSTREAM-SOURCE-0000000002 (topics: [topic3])\n" +
    "      --> KSTREAM-WINDOWED-0000000017\n" +
    "    Processor: KSTREAM-WINDOWED-0000000007 (stores: [KSTREAM-JOINTHIS-0000000009-store])\n" +
    "      --> KSTREAM-JOINTHIS-0000000009\n" +
    "      <-- KSTREAM-SOURCE-0000000006\n" +
    "    Processor: KSTREAM-WINDOWED-0000000008 (stores: [KSTREAM-JOINOTHER-0000000010-store])\n" +
    "      --> KSTREAM-JOINOTHER-0000000010\n" +
    "      <-- KSTREAM-SOURCE-0000000001\n" +
    "    Processor: KSTREAM-WINDOWED-0000000016 (stores: [KSTREAM-JOINTHIS-0000000018-store])\n" +
    "      --> KSTREAM-JOINTHIS-0000000018\n" +
    "      <-- KSTREAM-SOURCE-0000000006\n" +
    "    Processor: KSTREAM-WINDOWED-0000000017 (stores: [KSTREAM-JOINOTHER-0000000019-store])\n" +
    "      --> KSTREAM-JOINOTHER-0000000019\n" +
    "      <-- KSTREAM-SOURCE-0000000002\n" +
    "    Processor: KSTREAM-JOINOTHER-0000000010 (stores: [KSTREAM-JOINTHIS-0000000009-store])\n" +
    "      --> KSTREAM-MERGE-0000000011\n" +
    "      <-- KSTREAM-WINDOWED-0000000008\n" +
    "    Processor: KSTREAM-JOINOTHER-0000000019 (stores: [KSTREAM-JOINTHIS-0000000018-store])\n" +
    "      --> KSTREAM-MERGE-0000000020\n" +
    "      <-- KSTREAM-WINDOWED-0000000017\n" +
    "    Processor: KSTREAM-JOINTHIS-0000000009 (stores: [KSTREAM-JOINOTHER-0000000010-store])\n" +
    "      --> KSTREAM-MERGE-0000000011\n" +
    "      <-- KSTREAM-WINDOWED-0000000007\n" +
    "    Processor: KSTREAM-JOINTHIS-0000000018 (stores: [KSTREAM-JOINOTHER-0000000019-store])\n" +
    "      --> KSTREAM-MERGE-0000000020\n" +
    "      <-- KSTREAM-WINDOWED-0000000016\n" +
    "    Processor: KSTREAM-MERGE-0000000011 (stores: [])\n" +
    "      --> KSTREAM-SINK-0000000012\n" +
    "      <-- KSTREAM-JOINTHIS-0000000009, KSTREAM-JOINOTHER-0000000010\n" +
    "    Processor: KSTREAM-MERGE-0000000020 (stores: [])\n" +
    "      --> KSTREAM-SINK-0000000021\n" +
    "      <-- KSTREAM-JOINTHIS-0000000018, KSTREAM-JOINOTHER-0000000019\n" +
    "    Sink: KSTREAM-SINK-0000000012 (topic: out-one)\n" +
    "      <-- KSTREAM-MERGE-0000000011\n" +
    "    Sink: KSTREAM-SINK-0000000021 (topic: out-to)\n" +
    "      <-- KSTREAM-MERGE-0000000020\n\n";
}
|
CapturingStoreSuppliers
|
java
|
google__guava
|
android/guava-tests/test/com/google/common/collect/SingletonImmutableMapWithUnhashableValueMapInterfaceTest.java
|
{
"start": 991,
"end": 1304
}
|
class ____
extends RegularImmutableMapWithUnhashableValuesMapInterfaceTest {
@Override
protected Map<Integer, UnhashableObject> makePopulatedMap() {
Unhashables unhashables = new Unhashables();
return ImmutableMap.of(0, unhashables.e0());
}
}
|
SingletonImmutableMapWithUnhashableValueMapInterfaceTest
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/performance/ITestDirectoryMarkerListing.java
|
{
"start": 3473,
"end": 22905
}
|
class ____ extends AbstractS3ATestBase {
private static final Logger LOG =
    LoggerFactory.getLogger(ITestDirectoryMarkerListing.class);

/** Name of the file created under the marker directory. */
private static final String FILENAME = "fileUnderMarker";

/** Body written to the file under the marker. */
private static final String HELLO = "hello";

/** Name of the marker directory. */
private static final String MARKER = "marker";

/** Name of the file which is a sibling ("peer") of the marker directory. */
private static final String MARKER_PEER = "markerpeer";

/**
 * Does rename copy markers?
 * Value: {@value}
 * <p></p>
 * Older releases: yes.
 * <p></p>
 * The full marker-optimized releases: no.
 */
private static final boolean RENAME_COPIES_MARKERS = false;

/**
 * Path to a directory which has a marker.
 */
private Path markerDir;

/**
 * Key to the object representing {@link #markerDir}.
 */
private String markerKey;

/**
 * Key to the object representing {@link #markerDir} with
 * a trailing / added. This references the actual object
 * which has been created.
 */
private String markerKeySlash;

/**
 * Name of the bucket under test.
 */
private String bucket;

/**
 * Raw S3 client extracted from the filesystem; used for
 * low-level PUT/HEAD/DELETE of the marker objects.
 */
private S3Client s3client;

/**
 * Path to a file under the marker.
 */
private Path filePathUnderMarker;

/**
 * Key to a file under the marker.
 */
private String fileKeyUnderMarker;

/**
 * base path for the test files; the marker dir goes under this.
 */
private Path basePath;

/**
 * Path to a file a peer of markerDir.
 */
private Path markerPeer;

/**
 * Key to a file a peer of markerDir.
 */
private String markerPeerKey;
/**
 * Configuration with the create-performance option forced off so that
 * file creation performs the usual existence checks.
 */
@Override
protected Configuration createConfiguration() {
    final Configuration conf = super.createConfiguration();
    final String bucketName = getTestBucketName(conf);
    // Strip any per-bucket or base overrides so the explicit setting below wins.
    removeBaseAndBucketOverrides(bucketName, conf,
        FS_S3A_CREATE_PERFORMANCE,
        FS_S3A_PERFORMANCE_FLAGS);
    conf.setBoolean(FS_S3A_CREATE_PERFORMANCE, false);
    return conf;
}
/**
 * Test setup: extracts the raw S3 client and bucket name from the
 * filesystem, then creates the shared test objects under a per-method
 * "base" path.
 */
@BeforeEach
@Override
public void setup() throws Exception {
    super.setup();
    final S3AFileSystem filesystem = getFileSystem();
    s3client = getS3AInternals().getAmazonS3Client("markers");
    bucket = filesystem.getBucket();
    createTestObjects(new Path(methodPath(), "base"));
}
/**
 * Teardown deletes the objects created in setup before the superclass
 * does the directory cleanup.
 */
@AfterEach
@Override
public void teardown() throws Exception {
    if (s3client != null) {
        // best-effort deletion of every object created in setup.
        for (String key : new String[]{
            markerKey, markerKeySlash, markerPeerKey, fileKeyUnderMarker}) {
            deleteObject(key);
        }
    }
    // do this ourselves to avoid audits teardown failing
    // when surplus markers are found
    deleteTestDirInTeardown();
    super.teardown();
}
/**
 * Create the test objects under the given path, setting
 * various fields in the process:
 * a marker directory, a peer file beside it, and a file placed
 * under the marker via a raw S3 PUT.
 * @param path parent path of everything
 */
private void createTestObjects(final Path path) throws Exception {
    S3AFileSystem fs = getFileSystem();
    basePath = path;
    markerDir = new Path(basePath, MARKER);
    // peer path has the same initial name to make sure there
    // is no confusion there.
    markerPeer = new Path(basePath, MARKER_PEER);
    markerPeerKey = fs.pathToKey(markerPeer);
    markerKey = fs.pathToKey(markerDir);
    markerKeySlash = markerKey + "/";
    fileKeyUnderMarker = markerKeySlash + FILENAME;
    filePathUnderMarker = new Path(markerDir, FILENAME);
    // put the empty dir
    fs.mkdirs(markerDir);
    touch(fs, markerPeer);
    // write the file with a raw S3 PUT, bypassing the filesystem —
    // presumably so the directory marker above it is left in place;
    // that surviving marker is the condition under test (confirm).
    put(fileKeyUnderMarker, HELLO);
}
/*
=================================================================
Basic probes
=================================================================
*/

/**
 * The marker must exist both as a raw S3 object (HEAD on the "/"-suffixed
 * key) and as a directory through the filesystem API.
 */
@Test
public void testMarkerExists() throws Throwable {
    describe("Verify the marker exists");
    head(markerKeySlash);
    assertIsDirectory(markerDir);
}

/**
 * The file created under the marker must be visible both through the
 * filesystem API and as a raw S3 object.
 */
@Test
public void testObjectUnderMarker() throws Throwable {
    describe("verify the file under the marker dir exists");
    assertIsFile(filePathUnderMarker);
    head(fileKeyUnderMarker);
}
/*
=================================================================
The listing operations
=================================================================
*/

/** listStatus of the marker dir must return only the file, not the marker. */
@Test
public void testListStatusMarkerDir() throws Throwable {
    describe("list the marker directory and expect to see the file");
    assertContainsFileUnderMarkerOnly(
        toList(getFileSystem().listStatus(markerDir)));
}

/** Non-recursive listFiles of the marker dir: only the file. */
@Test
public void testListFilesMarkerDirFlat() throws Throwable {
    assertContainsFileUnderMarkerOnly(toList(
        getFileSystem().listFiles(markerDir, false)));
}

/** Recursive listFiles of the marker dir: only the file. */
@Test
public void testListFilesMarkerDirRecursive() throws Throwable {
    List<FileStatus> statuses = toList(
        getFileSystem().listFiles(markerDir, true));
    assertContainsFileUnderMarkerOnly(statuses);
}

/**
 * Path listing above the base dir MUST only find the file
 * and not the marker.
 * NOTE(review): despite the "ListStatus" name this calls listFiles();
 * confirm whether the name or the call is intended.
 */
@Test
public void testListStatusBaseDirRecursive() throws Throwable {
    List<FileStatus> statuses = toList(
        getFileSystem().listFiles(basePath, true));
    assertContainsExactlyStatusOfPaths(statuses, filePathUnderMarker,
        markerPeer);
}

/**
 * Glob over the base dir finds the marker dir and the peer file.
 * NOTE(review): the get(1) assertion assumes the peer file is returned
 * second — confirm glob ordering is guaranteed here.
 */
@Test
public void testGlobStatusBaseDirRecursive() throws Throwable {
    Path escapedPath = new Path(escape(basePath.toUri().getPath()));
    List<FileStatus> statuses =
        exec("glob", () ->
            toList(getFileSystem().globStatus(new Path(escapedPath, "*"))));
    assertContainsExactlyStatusOfPaths(statuses, markerDir, markerPeer);
    assertIsFileAtPath(markerPeer, statuses.get(1));
}

/** Glob under the marker dir finds only the file. */
@Test
public void testGlobStatusMarkerDir() throws Throwable {
    Path escapedPath = new Path(escape(markerDir.toUri().getPath()));
    List<FileStatus> statuses =
        exec("glob", () ->
            toList(getFileSystem().globStatus(new Path(escapedPath, "*"))));
    assertContainsFileUnderMarkerOnly(statuses);
}
/**
 * Call {@code listLocatedStatus(basePath)}
 * <p></p>
 * The list here returns the marker peer before the
 * dir. Reason: the listing iterators return
 * the objects before the common prefixes, and the
 * marker dir is coming back as a prefix.
 */
@Test
public void testListLocatedStatusBaseDir() throws Throwable {
    List<FileStatus> statuses =
        exec("listLocatedStatus", () ->
            toList(getFileSystem().listLocatedStatus(basePath)));
    // order matters here: peer object first, marker-dir prefix second.
    assertContainsExactlyStatusOfPaths(statuses, markerPeer, markerDir);
}

/**
 * Call {@code listLocatedStatus(markerDir)}; expect
 * the file entry only.
 */
@Test
public void testListLocatedStatusMarkerDir() throws Throwable {
    List<FileStatus> statuses =
        exec("listLocatedStatus", () ->
            toList(getFileSystem().listLocatedStatus(markerDir)));
    assertContainsFileUnderMarkerOnly(statuses);
}
/*
=================================================================
Creation Rejection
=================================================================
*/

/**
 * create(overwrite=false) on the marker directory path must be rejected,
 * and the marker object must survive the attempt.
 */
@Test
public void testCreateNoOverwriteMarkerDir() throws Throwable {
    describe("create no-overwrite over the marker dir fails");
    head(markerKeySlash);
    intercept(FileAlreadyExistsException.class, () ->
        exec("create", () ->
            getFileSystem().create(markerDir, false)));
    // dir is still there.
    head(markerKeySlash);
}

/**
 * create(overwrite=false) on the existing file under the marker must fail;
 * both the file and the marker survive.
 */
@Test
public void testCreateNoOverwriteFile() throws Throwable {
    describe("create-no-overwrite on the file fails");
    head(fileKeyUnderMarker);
    intercept(FileAlreadyExistsException.class, () ->
        exec("create", () ->
            getFileSystem().create(filePathUnderMarker, false)));
    assertTestObjectsExist();
}

/**
 * Same rejection through the builder-style createFile() API.
 */
@Test
public void testCreateFileNoOverwrite() throws Throwable {
    describe("verify the createFile() API also fails");
    head(fileKeyUnderMarker);
    intercept(FileAlreadyExistsException.class, () ->
        exec("create", () ->
            getFileSystem().createFile(filePathUnderMarker)
                .overwrite(false)
                .build()));
    assertTestObjectsExist();
}
/*
=================================================================
Delete.
=================================================================
*/

/**
 * Non-recursive delete of the (non-empty) marker dir must fail;
 * recursive delete must succeed and remove both the file object and
 * the marker object.
 */
@Test
public void testDelete() throws Throwable {
    S3AFileSystem fs = getFileSystem();
    // a non recursive delete MUST fail because
    // it is not empty
    intercept(PathIsNotEmptyDirectoryException.class, () ->
        fs.delete(markerDir, false));
    // file is still there
    head(fileKeyUnderMarker);
    // recursive delete MUST succeed
    fs.delete(markerDir, true);
    // and the markers are gone
    head404(fileKeyUnderMarker);
    head404(markerKeySlash);
    // just for completeness
    fs.delete(basePath, true);
}
/*
=================================================================
Rename.
=================================================================
*/

/**
 * Rename the base directory, expect the source files to move.
 * The non-leaf marker must NOT be copied to the destination
 * (see {@code RENAME_COPIES_MARKERS}).
 */
@Test
public void testRenameBase() throws Throwable {
    describe("rename base directory");
    Path src = basePath;
    Path dest = new Path(methodPath(), "dest");
    getFileSystem().delete(dest, true);
    assertRenamed(src, dest);

    assertPathDoesNotExist("source", src);
    assertPathDoesNotExist("source", filePathUnderMarker);
    assertPathExists("dest not found", dest);

    // all the paths dest relative
    Path destMarkerDir = new Path(dest, MARKER);
    // peer path has the same initial name to make sure there
    // is no confusion there.
    Path destMarkerPeer = new Path(dest, MARKER_PEER);
    String destMarkerKey = toKey(destMarkerDir);
    String destMarkerKeySlash = destMarkerKey + "/";
    String destFileKeyUnderMarker = destMarkerKeySlash + FILENAME;
    Path destFilePathUnderMarker = new Path(destMarkerDir, FILENAME);
    assertIsFile(destFilePathUnderMarker);
    assertIsFile(destMarkerPeer);
    head(destFileKeyUnderMarker);

    // rename doesn't copy non-leaf markers
    head404(destMarkerKeySlash);
}
/**
 * Rename a file under a marker by passing in the marker
 * directory as the destination; the final path is derived
 * from the original filename.
 * <p></p>
 * After the rename:
 * <ol>
 *   <li>The data must be at the derived destination path.</li>
 *   <li>The source file must not exist.</li>
 *   <li>The parent dir of the source file must exist.</li>
 *   <li>The marker above the destination file must not exist.</li>
 * </ol>
 */
@Test
public void testRenameUnderMarkerDir() throws Throwable {
    describe("directory rename under an existing marker");
    String file = "sourceFile";
    Path srcDir = new Path(basePath, "srcdir");
    mkdirs(srcDir);
    Path src = new Path(srcDir, file);
    String srcKey = toKey(src);
    put(srcKey, file);
    head(srcKey);

    // set the destination to be the marker directory.
    Path dest = markerDir;
    // rename the source file under the dest dir.
    assertRenamed(src, dest);
    assertIsFile(new Path(dest, file));
    assertIsDirectory(srcDir);
    // NOTE(review): this HEAD asserts the marker object still exists,
    // although the javadoc above says it "must not exist" — the marker
    // was never the direct parent of the renamed file's key, so
    // survival appears to be the intent; confirm the javadoc.
    head(markerKeySlash);
}
/**
 * Rename file under a marker, giving the full path to the destination
 * file.
 * <p></p>
 * After the rename:
 * <ol>
 *   <li>The data must be at the explicit destination path.</li>
 *   <li>The source file must not exist.</li>
 *   <li>The parent dir of the source file must exist.</li>
 *   <li>The marker object above the destination file must still exist.</li>
 * </ol>
 */
@Test
public void testRenameUnderMarkerWithPath() throws Throwable {
    // fixed message: this renames a file, not a directory,
    // to an explicit destination path.
    describe("file rename under an existing marker with explicit path");
    String file = "sourceFile";
    Path srcDir = new Path(basePath, "srcdir");
    mkdirs(srcDir);
    Path src = new Path(srcDir, file);
    String srcKey = toKey(src);
    put(srcKey, file);
    head(srcKey);

    // set the destination to be the final file
    Path dest = new Path(markerDir, "destFile");
    // rename the source file to the destination file
    assertRenamed(src, dest);
    assertIsFile(dest);
    assertIsDirectory(srcDir);
    head(markerKeySlash);
}
/**
 * This test creates an empty dir and renames it over the directory marker.
 * If the dest was considered to be empty, the rename would fail.
 * Instead the rename must be rejected (returns false), the source must be
 * untouched, and the test objects must all survive.
 */
@Test
public void testRenameEmptyDirOverMarker() throws Throwable {
    describe("rename an empty directory over the marker");
    S3AFileSystem fs = getFileSystem();
    String dir = "sourceDir";
    Path src = new Path(basePath, dir);
    fs.mkdirs(src);
    assertIsDirectory(src);
    String srcKey = toKey(src) + "/";
    head(srcKey);
    Path dest = markerDir;
    // renamed into the dest dir
    assertFalse(getFileSystem().rename(src, dest),
        "rename(" + src + ", " + dest + ") should have failed");
    // source is still there
    assertIsDirectory(src);
    head(srcKey);
    // and a non-recursive delete lets us verify it is considered
    // an empty dir
    assertDeleted(src, false);
    assertTestObjectsExist();
}
/*
=================================================================
Utility methods and assertions.
=================================================================
*/

/**
 * Assert the test objects exist: HEAD both the file under the marker
 * and the marker object itself.
 */
private void assertTestObjectsExist() throws Exception {
    head(fileKeyUnderMarker);
    head(markerKeySlash);
}
/**
 * PUT a string body to an object key via the raw S3 client,
 * wrapped in an audit span.
 * @param key object key
 * @param content string body to store
 */
private void put(final String key, final String content) throws Exception {
    final String operation = "PUT " + key;
    exec(operation, () -> {
        return s3client.putObject(
            b -> b.bucket(bucket).key(key),
            RequestBody.fromString(content));
    });
}
/**
 * Best-effort DELETE of an object; IOExceptions are deliberately
 * swallowed so cleanup can continue.
 * @param key object key to delete
 */
private void deleteObject(final String key) throws Exception {
    final String operation = "DELETE " + key;
    try {
        exec(operation, () -> {
            s3client.deleteObject(b -> b.bucket(bucket).key(key));
            return "deleted " + key;
        });
    } catch (IOException ignored) {
        // best-effort cleanup: a missing/failed delete is not a test failure.
    }
}
/**
 * Issue a HEAD request.
 * @param key object key to probe
 * @return a description of the object.
 * @throws Exception translated failure, including FileNotFoundException on 404
 */
private String head(final String key) throws Exception {
    HeadObjectResponse response = exec("HEAD " + key, () ->
        s3client.headObject(b -> b.bucket(bucket).key(key)));
    return String.format("Object %s of length %d",
        key, response.contentLength());
}
/**
 * Issue a HEAD request and expect a 404 back
 * (surfaced as a FileNotFoundException after translation).
 * @param key object key to probe
 */
private void head404(final String key) throws Exception {
    intercept(FileNotFoundException.class, "",
        "Expected 404 of " + key, () ->
            head(key));
}
/**
 * Execute an operation; translate AWS exceptions.
 * Wraps the operation in an audit span, so that low-level
 * calls can be safely made.
 * @param op operation
 * @param call call to make
 * @param <T> returned type
 * @return result of the call.
 * @throws Exception failure
 */
private <T> T exec(String op, Callable<T> call) throws Exception {
    ContractTestUtils.NanoTimer timer = new ContractTestUtils.NanoTimer();
    try (AuditSpan span = getSpanSource().createSpan(op, null, null)) {
        return call.call();
    } catch (SdkException ex) {
        // translate raw SDK failures into the filesystem's IOException tree.
        throw S3AUtils.translateException(op, "", ex);
    } finally {
        // presumably records/logs the elapsed time of the operation — see NanoTimer.
        timer.end(op);
    }
}
/**
 * Assert that the listing contains only the status
 * of the file under the marker.
 * @param statuses status objects
 */
private void assertContainsFileUnderMarkerOnly(
    final List<FileStatus> statuses) {
    assertContainsExactlyStatusOfPaths(statuses, filePathUnderMarker);
    assertIsFileUnderMarker(statuses.get(0));
}
/**
 * Expect the list of status objects to match that of the paths,
 * without enforcing ordering of the values.
 * @param statuses status object list
 * @param paths ordered varargs list of paths
 * @param <T> type of status objects
 */
private <T extends FileStatus> void assertContainsExactlyStatusOfPaths(
    List<T> statuses, Path... paths) {
    final List<Path> actualPaths = new ArrayList<>();
    for (T status : statuses) {
        actualPaths.add(status.getPath());
    }
    Assertions.assertThat(actualPaths)
        .containsExactlyInAnyOrder(paths);
}
/**
* Assert the status object refers to the file created
* under the marker.
* @param stat status object
*/
  private void assertIsFileUnderMarker(final FileStatus stat) {
    // Delegates to the path-specific check with the test's file-under-marker path.
    assertIsFileAtPath(filePathUnderMarker, stat);
  }
/**
* Assert the status object refers to a path at the given name.
* @param path path
* @param stat status object
*/
  private void assertIsFileAtPath(final Path path, final FileStatus stat) {
    // Check both the type (a file, not a directory) and the exact path.
    assertTrue(stat.isFile(), "Is not file " + stat);
    assertPathEquals(path, stat);
  }
/**
* Assert a status object's path matches expected.
* @param path path to expect
* @param stat status object
*/
  private void assertPathEquals(final Path path, final FileStatus stat) {
    // Failure message embeds the full status object to ease diagnosis.
    assertEquals(path, stat.getPath(),
        "filename is not the expected path :" + stat);
  }
/**
* Given a remote iterator of status objects,
* build a list of the values.
* @param status status list
* @param <T> actual type.
* @return source.
* @throws IOException
*/
private <T extends FileStatus> List<FileStatus> toList(
RemoteIterator<T> status) throws IOException {
List<FileStatus> l = new ArrayList<>();
foreach(status, st -> l.add(st));
return dump(l);
}
/**
* Given an array of status objects,
* build a list of the values.
* @param status status list
* @param <T> actual type.
* @return source.
* @throws IOException
*/
  private <T extends FileStatus> List<FileStatus> toList(
      T[] status) throws IOException {
    // Arrays.asList is a fixed-size view; dump() only reads/logs it.
    return dump(Arrays.asList(status));
  }
/**
* Dump the string values of a list to the log; return
* the list.
* @param l source.
* @param <T> source type
* @return the list
*/
private <T> List<T> dump(List<T> l) {
int c = 1;
for (T t : l) {
LOG.info("{}\t{}", c++, t);
}
return l;
}
/**
* Rename: assert the outcome is true.
* @param src source path
* @param dest dest path
*/
  private void assertRenamed(final Path src, final Path dest)
      throws IOException {
    // rename() signals failure through its boolean return, not an exception.
    assertTrue(getFileSystem().rename(src, dest),
        "rename(" + src + ", " + dest + ") failed");
  }
/**
* Convert a path to a key; does not add any trailing / .
* @param path path in
* @return key out
*/
private String toKey(final Path path) {
return getFileSystem().pathToKey(path);
}
/**
* Escape paths before handing to globStatus; this is needed as
* parameterized runs produce paths with [] in them.
* @param pathstr source path string
* @return an escaped path string
*/
private String escape(String pathstr) {
StringBuilder r = new StringBuilder();
for (char c : pathstr.toCharArray()) {
String ch = Character.toString(c);
if ("?*[{".contains(ch)) {
r.append("\\");
}
r.append(ch);
}
return r.toString();
}
}
|
ITestDirectoryMarkerListing
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-aliyun/src/main/java/org/apache/hadoop/fs/aliyun/oss/statistics/impl/OutputStreamStatistics.java
|
{
"start": 1116,
"end": 2687
}
|
/**
 * Thread-safe counters of block output-stream activity: blocks and
 * disk blocks allocated/released, plus running byte totals. All state
 * is held in atomics so updates may come from multiple writer threads.
 */
class ____ implements BlockOutputStreamStatistics {
  // Lifetime counts of (in-memory) blocks allocated and released.
  private final AtomicInteger blocksAllocated = new AtomicInteger(0);
  private final AtomicInteger blocksReleased = new AtomicInteger(0);
  // Lifetime counts of disk-backed blocks allocated and released.
  private final AtomicInteger diskBlocksAllocated = new AtomicInteger(0);
  private final AtomicInteger diskBlocksReleased = new AtomicInteger(0);
  // Running byte totals; updated via getAndAdd, read via get().
  private final AtomicLong bytesAllocated = new AtomicLong(0);
  private final AtomicLong bytesReleased = new AtomicLong(0);
  @Override
  public void blockAllocated() {
    blocksAllocated.incrementAndGet();
  }
  @Override
  public void blockReleased() {
    blocksReleased.incrementAndGet();
  }
  @Override
  public void diskBlockAllocated() {
    diskBlocksAllocated.incrementAndGet();
  }
  @Override
  public void diskBlockReleased() {
    diskBlocksReleased.incrementAndGet();
  }
  @Override
  public int getBlocksAllocated() {
    return blocksAllocated.get();
  }
  @Override
  public int getBlocksReleased() {
    return blocksReleased.get();
  }
  @Override
  public int getDiskBlocksAllocated() {
    return diskBlocksAllocated.get();
  }
  @Override
  public int getDiskBlocksReleased() {
    return diskBlocksReleased.get();
  }
  @Override
  public void bytesAllocated(long size) {
    bytesAllocated.getAndAdd(size);
  }
  @Override
  public void bytesReleased(long size) {
    bytesReleased.getAndAdd(size);
  }
  @Override
  public long getBytesAllocated() {
    return bytesAllocated.get();
  }
  @Override
  public long getBytesReleased() {
    return bytesReleased.get();
  }
}
|
OutputStreamStatistics
|
java
|
apache__camel
|
components/camel-consul/src/main/java/org/apache/camel/component/consul/cluster/ConsulClusterView.java
|
{
"start": 5464,
"end": 6557
}
|
class ____ implements CamelClusterMember {
  // Whether this local member currently holds cluster leadership.
  private final AtomicBoolean master = new AtomicBoolean();
  void setMaster(boolean master) {
    // compareAndSet guarantees the leadership event fires only on a genuine
    // false->true transition (idempotent against repeated notifications).
    if (master && this.master.compareAndSet(false, true)) {
      LOGGER.debug("Leadership taken for session id {}", sessionId.get());
      fireLeadershipChangedEvent(this);
      return;
    }
    // Genuine true->false transition: notify listeners with the current
    // leader, or null when no leader is known.
    if (!master && this.master.compareAndSet(true, false)) {
      LOGGER.debug("Leadership lost for session id {}", sessionId.get());
      fireLeadershipChangedEvent(getLeader().orElse(null));
    }
  }
  @Override
  public boolean isLeader() {
    return master.get();
  }
  @Override
  public boolean isLocal() {
    // This class always represents the local node's own membership.
    return true;
  }
  @Override
  public String getId() {
    // The Consul session id doubles as the member id.
    return sessionId.get();
  }
  @Override
  public String toString() {
    return "ConsulLocalMember{" + "master=" + master + '}';
  }
}
private final
|
ConsulLocalMember
|
java
|
apache__rocketmq
|
tools/src/main/java/org/apache/rocketmq/tools/monitor/MonitorListener.java
|
{
"start": 952,
"end": 1293
}
|
/**
 * Callback contract for one monitoring round: {@code beginRound()} opens
 * the round, the {@code report*} methods deliver individual findings, and
 * {@code endRound()} closes it.
 */
interface ____ {
  // Invoked once before any report* callbacks of a round.
  void beginRound();
  // Reports messages that have not yet been consumed.
  void reportUndoneMsgs(UndoneMsgs undoneMsgs);
  // Reports messages whose consumption failed.
  void reportFailedMsgs(FailedMsgs failedMsgs);
  // Reports message-deletion events observed during the round.
  void reportDeleteMsgsEvent(DeleteMsgsEvent deleteMsgsEvent);
  // Reports per-client runtime state, keyed by client id.
  void reportConsumerRunningInfo(TreeMap<String/* clientId */, ConsumerRunningInfo> criTable);
  // Invoked once after all report* callbacks of a round.
  void endRound();
}
|
MonitorListener
|
java
|
quarkusio__quarkus
|
extensions/arc/deployment/src/test/java/io/quarkus/arc/test/config/ConfigMappingTest.java
|
{
"start": 6913,
"end": 7377
}
|
interface ____ {
@WithName("strings")
List<String> listStrings();
@WithName("ints")
List<Integer> listInts();
}
@Inject
Collections collections;
@Test
void collections() {
assertEquals(Stream.of("foo", "bar").collect(toList()), collections.listStrings());
assertEquals(Stream.of(1, 2, 3).collect(toList()), collections.listInts());
}
@ConfigMapping(prefix = "maps")
public
|
Collections
|
java
|
quarkusio__quarkus
|
integration-tests/injectmock/src/test/java/io/quarkus/it/mockbean/NestedTest.java
|
{
"start": 321,
"end": 416
}
|
class ____ {
@InjectMock
MessageService messageService;
@Nested
public
|
NestedTest
|
java
|
dropwizard__dropwizard
|
dropwizard-jersey/src/test/java/io/dropwizard/jersey/jackson/JacksonMessageBodyProviderTest.java
|
{
"start": 2572,
"end": 9384
}
|
interface ____ extends Ignorable {
}
private final ObjectMapper mapper = spy(Jackson.newObjectMapper());
private final JacksonMessageBodyProvider provider =
new JacksonMessageBodyProvider(mapper);
@Test
void readsDeserializableTypes() {
assertThat(provider.isReadable(Example.class, null, new Annotation[0], null))
.isTrue();
}
@Test
void writesSerializableTypes() {
assertThat(provider.isWriteable(Example.class, null, new Annotation[0], null))
.isTrue();
}
@Test
void doesNotWriteIgnoredTypes() {
assertThat(provider.isWriteable(Ignorable.class, null, new Annotation[0], null))
.isFalse();
}
@Test
void writesUnIgnoredTypes() {
assertThat(provider.isWriteable(NonIgnorable.class, null, new Annotation[0], null))
.isTrue();
}
@Test
void doesNotReadIgnoredTypes() {
assertThat(provider.isReadable(Ignorable.class, null, new Annotation[0], null))
.isFalse();
}
@Test
void readsUnIgnoredTypes() {
assertThat(provider.isReadable(NonIgnorable.class, null, new Annotation[0], null))
.isTrue();
}
@Test
void isChunked() {
assertThat(provider.getSize(null, null, null, new Annotation[0], null))
.isEqualTo(-1);
}
@Test
void deserializesRequestEntities() throws Exception {
final ByteArrayInputStream entity = new ByteArrayInputStream("{\"id\":1}".getBytes(StandardCharsets.UTF_8));
final Class<?> klass = Example.class;
final Object obj = provider.readFrom((Class<Object>) klass,
Example.class,
NONE,
MediaType.APPLICATION_JSON_TYPE,
new MultivaluedHashMap<>(),
entity);
assertThat(obj)
.isInstanceOf(Example.class);
assertThat(((Example) obj).id)
.isEqualTo(1);
}
@Test
void returnsPartialValidatedRequestEntities() throws Exception {
final Validated valid = mock(Validated.class);
doReturn(Validated.class).when(valid).annotationType();
when(valid.value()).thenReturn(new Class<?>[]{Partial1.class, Partial2.class});
final ByteArrayInputStream entity = new ByteArrayInputStream("{\"id\":1,\"text\":\"hello Cemo\"}".getBytes(StandardCharsets.UTF_8));
final Class<?> klass = PartialExample.class;
final Object obj = provider.readFrom((Class<Object>) klass,
PartialExample.class,
new Annotation[]{valid},
MediaType.APPLICATION_JSON_TYPE,
new MultivaluedHashMap<>(),
entity);
assertThat(obj)
.isInstanceOfSatisfying(PartialExample.class, partialExample ->
assertThat(partialExample.id).isEqualTo(1));
}
@Test
void returnsPartialValidatedByGroupRequestEntities() throws Exception {
final Validated valid = mock(Validated.class);
doReturn(Validated.class).when(valid).annotationType();
when(valid.value()).thenReturn(new Class<?>[]{Partial1.class});
final ByteArrayInputStream entity = new ByteArrayInputStream("{\"id\":1}".getBytes(StandardCharsets.UTF_8));
final Class<?> klass = PartialExample.class;
final Object obj = provider.readFrom((Class<Object>) klass,
PartialExample.class,
new Annotation[]{valid},
MediaType.APPLICATION_JSON_TYPE,
new MultivaluedHashMap<>(),
entity);
assertThat(obj)
.isInstanceOfSatisfying(PartialExample.class, partialExample ->
assertThat(partialExample.id).isEqualTo(1));
}
@Test
void throwsAJsonProcessingExceptionForMalformedRequestEntities() {
final ByteArrayInputStream entity = new ByteArrayInputStream("{\"id\":-1d".getBytes(StandardCharsets.UTF_8));
final Class<?> klass = Example.class;
assertThatExceptionOfType(JsonProcessingException.class)
.isThrownBy(() -> provider.readFrom((Class<Object>) klass,
Example.class,
NONE,
MediaType.APPLICATION_JSON_TYPE,
new MultivaluedHashMap<>(),
entity))
.withMessageStartingWith("Unexpected character ('d' (code 100)): " +
"was expecting comma to separate Object entries\n");
}
@Test
void serializesResponseEntities() throws Exception {
final ByteArrayOutputStream output = new ByteArrayOutputStream();
final Example example = new Example();
example.id = 500;
provider.writeTo(example,
Example.class,
Example.class,
NONE,
MediaType.APPLICATION_JSON_TYPE,
new MultivaluedHashMap<>(),
output);
assertThat(output)
.hasToString("{\"id\":500}");
}
@Test
void returnsValidatedCollectionRequestEntities() throws Exception {
testValidatedCollectionType(Collection.class,
new TypeReference<Collection<Example>>() {
}.getType());
}
@Test
void returnsValidatedSetRequestEntities() throws Exception {
testValidatedCollectionType(Set.class,
new TypeReference<Set<Example>>() {
}.getType());
}
@Test
void returnsValidatedListRequestEntities() throws Exception {
testValidatedCollectionType(List.class,
new TypeReference<List<Example>>() {
}.getType());
}
private void testValidatedCollectionType(Class<?> klass, Type type) throws IOException {
final Annotation valid = mock(Annotation.class);
doReturn(Valid.class).when(valid).annotationType();
final ByteArrayInputStream entity = new ByteArrayInputStream("[{\"id\":1}, {\"id\":2}]".getBytes(StandardCharsets.UTF_8));
final Object obj = provider.readFrom((Class<Object>) klass,
type,
new Annotation[]{valid},
MediaType.APPLICATION_JSON_TYPE,
new MultivaluedHashMap<>(),
entity);
assertThat(obj).isInstanceOf(klass);
assertThat((Iterable<Example>) obj)
.extracting(item -> item.id)
.contains(1, 2);
}
private static boolean isDefaultLocaleEnglish() {
return "en".equals(Locale.getDefault().getLanguage());
}
}
|
NonIgnorable
|
java
|
spring-projects__spring-framework
|
spring-test/src/main/java/org/springframework/test/context/support/AbstractDelegatingSmartContextLoader.java
|
{
"start": 2036,
"end": 2611
}
|
class ____ that is annotated with
* {@link ContextConfiguration @ContextConfiguration}, and the candidate that
* supports the merged, processed configuration will be used to actually
* {@linkplain #loadContext(MergedContextConfiguration) load} the context.
*
* <p>Any reference to an <em>XML-based loader</em> can be interpreted to mean
* a context loader that supports only XML configuration files or one that
* supports both XML configuration files and Groovy scripts simultaneously.
*
* <p>Placing an empty {@code @ContextConfiguration} annotation on a test
|
hierarchy
|
java
|
reactor__reactor-core
|
reactor-core/src/test/java/reactor/core/publisher/SinkManyUnicastTest.java
|
{
"start": 1800,
"end": 11274
}
|
class ____ {
@Test
public void currentSubscriberCount() {
Sinks.Many<Integer> sink = SinkManyUnicast.create();
assertThat(sink.currentSubscriberCount()).isZero();
sink.asFlux().subscribe();
assertThat(sink.currentSubscriberCount()).isOne();
}
@Test
public void secondSubscriberRejectedProperly() {
SinkManyUnicast<Integer> up = SinkManyUnicast.create(new ConcurrentLinkedQueue<>());
up.subscribe();
AssertSubscriber<Integer> ts = AssertSubscriber.create();
up.subscribe(ts);
ts.assertNoValues()
.assertError(IllegalStateException.class)
.assertNotComplete();
}
@Test
public void multiThreadedProducer() {
Sinks.Many<Integer> sink = Sinks.many().unicast().onBackpressureBuffer();
int nThreads = 5;
int countPerThread = 10000;
ExecutorService executor = Executors.newFixedThreadPool(nThreads);
for (int i = 0; i < 5; i++) {
Runnable generator = () -> {
for (int j = 0; j < countPerThread; j++) {
while (sink.tryEmitNext(j).isFailure()) {
LockSupport.parkNanos(10);
}
}
};
executor.submit(generator);
}
StepVerifier.create(sink.asFlux())
.expectNextCount(nThreads * countPerThread)
.thenCancel()
.verify();
executor.shutdownNow();
}
@Test
public void createDefault() {
SinkManyUnicast<Integer> processor = SinkManyUnicast.create();
assertProcessor(processor, null, null);
}
@Test
public void createOverrideQueue() {
Queue<Integer> queue = Queues.<Integer>get(10).get();
SinkManyUnicast<Integer> processor = SinkManyUnicast.create(queue);
assertProcessor(processor, queue, null);
}
@Test
public void createOverrideQueueOnTerminate() {
Disposable onTerminate = () -> {};
Queue<Integer> queue = Queues.<Integer>get(10).get();
SinkManyUnicast<Integer> processor = SinkManyUnicast.create(queue, onTerminate);
assertProcessor(processor, queue, onTerminate);
}
void assertProcessor(SinkManyUnicast<Integer> processor,
@Nullable Queue<Integer> queue,
@Nullable Disposable onTerminate) {
Queue<Integer> expectedQueue = queue != null ? queue : Queues.<Integer>unbounded().get();
Disposable expectedOnTerminate = onTerminate;
assertThat(processor.queue.getClass()).isEqualTo(expectedQueue.getClass());
assertThat(processor.onTerminate).isEqualTo(expectedOnTerminate);
}
@Test
void scanCapacityReactorUnboundedQueue() {
SinkManyUnicast processor = SinkManyUnicast.create(
Queues.unbounded(2).get());
assertThat(processor.scan(Attr.CAPACITY)).isEqualTo(Integer.MAX_VALUE);
}
@Test
void scanCapacityReactorBoundedQueue() {
//the bounded queue floors at 8 and rounds to the next power of 2
assertThat(SinkManyUnicast.create(Queues.get(2).get()).scan(Attr.CAPACITY))
.isEqualTo(8);
assertThat(SinkManyUnicast.create(Queues.get(8).get()).scan(Attr.CAPACITY))
.isEqualTo(8);
assertThat(SinkManyUnicast.create(Queues.get(9).get()).scan(Attr.CAPACITY))
.isEqualTo(16);
}
@Test
void scanCapacityBoundedBlockingQueue() {
SinkManyUnicast processor = SinkManyUnicast.create(
new LinkedBlockingQueue<>(10));
assertThat(processor.scan(Attr.CAPACITY)).isEqualTo(10);
}
@Test
void scanCapacityUnboundedBlockingQueue() {
SinkManyUnicast processor = SinkManyUnicast.create(new LinkedBlockingQueue<>());
assertThat(processor.scan(Attr.CAPACITY)).isEqualTo(Integer.MAX_VALUE);
}
@Test
void scanCapacityOtherQueue() {
SinkManyUnicast processor = SinkManyUnicast.create(new PriorityQueue<>(10));
assertThat(processor.scan(Attr.CAPACITY))
.isEqualTo(Integer.MIN_VALUE)
.isEqualTo(Queues.CAPACITY_UNSURE);
}
@Test
public void contextTest() {
SinkManyUnicast<Integer> p = SinkManyUnicast.create();
p.contextWrite(ctx -> ctx.put("foo", "bar")).subscribe();
assertThat(p.currentContext().get("foo").toString()).isEqualTo("bar");
}
@Test
public void subscriptionCancelUpdatesDownstreamCount() {
SinkManyUnicast<String> processor = SinkManyUnicast.create();
assertThat(processor.currentSubscriberCount())
.as("before subscribe")
.isZero();
LambdaSubscriber<String> subscriber = new LambdaSubscriber<>(null, null, null, null);
Disposable subscription = processor.subscribeWith(subscriber);
assertThat(processor.currentSubscriberCount())
.as("after subscribe")
.isPositive();
assertThat(processor.actual)
.as("after subscribe has actual")
.isSameAs(subscriber);
subscription.dispose();
assertThat(processor.currentSubscriberCount())
.as("after subscription cancel")
.isZero();
}
@Test
public void shouldNotThrowFromTryEmitNext() {
SinkManyUnicast<Object> processor = new SinkManyUnicast<>(Queues.empty().get());
StepVerifier.create(processor, 0)
.expectSubscription()
.then(() -> {
assertThat(processor.tryEmitNext("boom"))
.as("emission")
.isEqualTo(Sinks.EmitResult.FAIL_OVERFLOW);
})
.then(() -> processor.tryEmitComplete().orThrow())
.verifyComplete();
}
@Test
public void shouldSignalErrorOnOverflow() {
SinkManyUnicast<Object> processor = new SinkManyUnicast<>(Queues.empty().get());
StepVerifier.create(processor, 0)
.expectSubscription()
.then(() -> processor.emitNext("boom", FAIL_FAST))
.verifyErrorMatches(Exceptions::isOverflow);
}
@Test
public void tryEmitNextWithNoSubscriberAndBoundedQueueFailsZeroSubscriber() {
SinkManyUnicast<Integer> sinkManyUnicast = SinkManyUnicast.create(Queues.<Integer>one().get());
assertThat(sinkManyUnicast.tryEmitNext(1)).isEqualTo(Sinks.EmitResult.OK);
assertThat(sinkManyUnicast.tryEmitNext(2)).isEqualTo(Sinks.EmitResult.FAIL_ZERO_SUBSCRIBER);
StepVerifier.create(sinkManyUnicast)
.expectNext(1)
.then(() -> sinkManyUnicast.tryEmitComplete().orThrow())
.verifyComplete();
}
@Test
public void tryEmitNextWithBoundedQueueAndNoRequestFailsWithOverflow() {
SinkManyUnicast<Integer> sinkManyUnicast = SinkManyUnicast.create(Queues.<Integer>one().get());
StepVerifier.create(sinkManyUnicast, 0) //important to make no initial request
.expectSubscription()
.then(() -> {
assertThat(sinkManyUnicast.tryEmitNext(1)).isEqualTo(Sinks.EmitResult.OK);
assertThat(sinkManyUnicast.tryEmitNext(2)).isEqualTo(Sinks.EmitResult.FAIL_OVERFLOW);
assertThat(sinkManyUnicast.tryEmitComplete()).isEqualTo(Sinks.EmitResult.OK);
})
.thenRequest(1)
.expectNext(1)
.verifyComplete();
}
@Test
public void emitNextWithNoSubscriberAndBoundedQueueIgnoresValueAndKeepsSinkOpen() {
SinkManyUnicast<Integer> sinkManyUnicast = SinkManyUnicast.create(Queues.<Integer>one().get());
//fill the buffer
sinkManyUnicast.tryEmitNext(1);
//this "overflows" but keeps the sink open. since there's no subscriber, there's no Context so no real discarding
sinkManyUnicast.emitNext(2, FAIL_FAST);
//let's verify we get the buffer's content
StepVerifier.create(sinkManyUnicast)
.expectNext(1) //from the buffer
.expectNoEvent(Duration.ofMillis(500))
.then(() -> sinkManyUnicast.tryEmitComplete().orThrow())
.verifyComplete();
}
@Test
public void emitWithoutSubscriberAndSubscribeCancellingSubscriptionDiscards() {
Sinks.Many<String> sink = Sinks.many()
.unicast()
.onBackpressureBuffer();
sink.tryEmitNext("Hello");
List<String> discarded = new CopyOnWriteArrayList<String>();
sink.asFlux()
.doOnSubscribe(Subscription::cancel)
.contextWrite(ctx -> Operators.enableOnDiscard(ctx,
item -> discarded.add((String) item)))
.subscribe();
assertThat(discarded).containsExactly("Hello");
}
@Test
public void scanTerminatedCancelled() {
Sinks.Many<Integer> sink = SinkManyUnicast.create();
assertThat(sink.scan(Attr.TERMINATED)).as("not yet terminated").isFalse();
sink.tryEmitError(new IllegalStateException("boom")).orThrow();
assertThat(sink.scan(Attr.TERMINATED)).as("terminated with error").isTrue();
assertThat(sink.scan(Attr.ERROR)).as("error").hasMessage("boom");
assertThat(sink.scan(Attr.CANCELLED)).as("pre-cancellation").isFalse();
((SinkManyUnicast<?>) sink).cancel();
assertThat(sink.scan(Attr.CANCELLED)).as("cancelled").isTrue();
}
@Test
public void inners() {
Sinks.Many<Integer> sink1 = SinkManyUnicast.create();
Sinks.Many<Integer> sink2 = SinkManyUnicast.create();
CoreSubscriber<Integer> notScannable = new BaseSubscriber<Integer>() {};
InnerConsumer<Integer> scannable = new LambdaSubscriber<>(null, null, null, null);
assertThat(sink1.inners()).as("before subscription notScannable").isEmpty();
assertThat(sink2.inners()).as("before subscription notScannable").isEmpty();
sink1.asFlux().subscribe(notScannable);
sink2.asFlux().subscribe(scannable);
assertThat(sink1.inners())
.asInstanceOf(InstanceOfAssertFactories.LIST)
.as("after notScannable subscription")
.containsExactly(Scannable.from("NOT SCANNABLE"));
assertThat(sink2.inners())
.asInstanceOf(InstanceOfAssertFactories.LIST)
.as("after scannable subscription")
.containsExactly(scannable);
}
}
|
SinkManyUnicastTest
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/lib/TestZKClient.java
|
{
"start": 1583,
"end": 6118
}
|
class ____ {
private static int CONNECTION_TIMEOUT = 30000;
private static int DEFAULT_PORT = 20384;
static final File BASETEST =
new File(System.getProperty("build.test.dir", "target/zookeeper-build"));
protected String hostPort = "127.0.0.1:" + getOpenPort();
protected int maxCnxns = 0;
protected NIOServerCnxnFactory factory = null;
protected ZooKeeperServer zks;
protected File tmpDir = null;
public static String send4LetterWord(String host, int port, String cmd)
throws IOException
{
Socket sock = new Socket(host, port);
BufferedReader reader = null;
try {
OutputStream outstream = sock.getOutputStream();
outstream.write(cmd.getBytes());
outstream.flush();
// this replicates NC - close the output stream before reading
sock.shutdownOutput();
reader =
new BufferedReader(
new InputStreamReader(sock.getInputStream()));
StringBuilder sb = new StringBuilder();
String line;
while((line = reader.readLine()) != null) {
sb.append(line + "\n");
}
return sb.toString();
} finally {
sock.close();
if (reader != null) {
reader.close();
}
}
}
public static boolean waitForServerDown(String hp, long timeout) {
long start = System.currentTimeMillis();
while (true) {
try {
String host = hp.split(":")[0];
int port = Integer.parseInt(hp.split(":")[1]);
send4LetterWord(host, port, "stat");
} catch (IOException e) {
return true;
}
if (System.currentTimeMillis() > start + timeout) {
break;
}
try {
Thread.sleep(250);
} catch (InterruptedException e) {
// ignore
}
}
return false;
}
public static boolean waitForServerUp(String hp, long timeout) {
long start = System.currentTimeMillis();
while (true) {
try {
String host = hp.split(":")[0];
int port = Integer.parseInt(hp.split(":")[1]);
// if there are multiple hostports, just take the first one
String result = send4LetterWord(host, port, "stat");
if (result.startsWith("Zookeeper version:")) {
return true;
}
} catch (IOException e) {
}
if (System.currentTimeMillis() > start + timeout) {
break;
}
try {
Thread.sleep(250);
} catch (InterruptedException e) {
// ignore
}
}
return false;
}
public static File createTmpDir(File parentDir) throws IOException {
File tmpFile = File.createTempFile("test", ".junit", parentDir);
// don't delete tmpFile - this ensures we don't attempt to create
// a tmpDir with a duplicate name
File tmpDir = new File(tmpFile + ".dir");
assertFalse(tmpDir.exists());
assertTrue(tmpDir.mkdirs());
return tmpDir;
}
@BeforeEach
public void setUp() throws IOException, InterruptedException {
System.setProperty("zookeeper.preAllocSize", "100");
System.setProperty("zookeeper.4lw.commands.whitelist", "*");
FileTxnLog.setPreallocSize(100 * 1024);
if (!BASETEST.exists()) {
BASETEST.mkdirs();
}
File dataDir = createTmpDir(BASETEST);
zks = new ZooKeeperServer(dataDir, dataDir, 3000);
final int PORT = Integer.parseInt(hostPort.split(":")[1]);
if (factory == null) {
factory = new NIOServerCnxnFactory();
factory.configure(new InetSocketAddress(PORT), maxCnxns);
}
factory.startup(zks);
assertTrue(waitForServerUp("127.0.0.1:" + PORT,
CONNECTION_TIMEOUT), "waiting for server up");
}
@AfterEach
public void tearDown() throws IOException, InterruptedException {
if (zks != null) {
ZKDatabase zkDb = zks.getZKDatabase();
factory.shutdown();
try {
zkDb.close();
} catch (IOException ie) {
}
final int PORT = Integer.parseInt(hostPort.split(":")[1]);
assertTrue(waitForServerDown("127.0.0.1:" + PORT,
CONNECTION_TIMEOUT), "waiting for server down");
}
}
@Test
public void testzkClient() throws Exception {
test("/some/test");
}
private void test(String testClient) throws Exception {
ZKClient client = new ZKClient(hostPort);
client.registerService("/nodemanager", "hostPort");
client.unregisterService("/nodemanager");
}
private int getOpenPort() {
try {
return ServerSocketUtil.getPorts(1)[0];
} catch (IOException e) {
return DEFAULT_PORT;
}
}
}
|
TestZKClient
|
java
|
apache__camel
|
components/camel-aws/camel-aws2-ses/src/main/java/org/apache/camel/component/aws2/ses/client/impl/Ses2ClientOptimizedImpl.java
|
{
"start": 1775,
"end": 4283
}
|
class ____ implements Ses2InternalClient {
    private static final Logger LOG = LoggerFactory.getLogger(Ses2ClientOptimizedImpl.class);
    // Endpoint/proxy/region settings driving client construction.
    private Ses2Configuration configuration;
    /**
     * Constructor that uses the config file.
     */
    public Ses2ClientOptimizedImpl(Ses2Configuration configuration) {
        LOG.trace("Creating an AWS SES client for an ec2 instance with IAM temporary credentials (normal for ec2s).");
        this.configuration = configuration;
    }
    /**
     * Getting the SES AWS client that is used.
     *
     * @return SES Client.
     */
    @Override
    public SesClient getSesClient() {
        SesClient client = null;
        SesClientBuilder clientBuilder = SesClient.builder();
        ProxyConfiguration.Builder proxyConfig = null;
        ApacheHttpClient.Builder httpClientBuilder = null;
        // Route through an HTTP proxy when both host and port are configured.
        if (ObjectHelper.isNotEmpty(configuration.getProxyHost()) && ObjectHelper.isNotEmpty(configuration.getProxyPort())) {
            proxyConfig = ProxyConfiguration.builder();
            URI proxyEndpoint = URI.create(configuration.getProxyProtocol() + "://" + configuration.getProxyHost() + ":"
                    + configuration.getProxyPort());
            proxyConfig.endpoint(proxyEndpoint);
            httpClientBuilder = ApacheHttpClient.builder().proxyConfiguration(proxyConfig.build());
            clientBuilder = clientBuilder.httpClientBuilder(httpClientBuilder);
        }
        if (ObjectHelper.isNotEmpty(configuration.getRegion())) {
            clientBuilder = clientBuilder.region(Region.of(configuration.getRegion()));
        }
        if (configuration.isOverrideEndpoint()) {
            clientBuilder.endpointOverride(URI.create(configuration.getUriEndpointOverride()));
        }
        if (configuration.isTrustAllCertificates()) {
            if (httpClientBuilder == null) {
                httpClientBuilder = ApacheHttpClient.builder();
            }
            // Build an HTTP client that accepts any TLS certificate.
            SdkHttpClient ahc = httpClientBuilder.buildWithDefaults(AttributeMap
                    .builder()
                    .put(
                            SdkHttpConfigurationOption.TRUST_ALL_CERTIFICATES,
                            Boolean.TRUE)
                    .build());
            // set created http client to use instead of builder
            clientBuilder.httpClient(ahc);
            clientBuilder.httpClientBuilder(null);
        }
        client = clientBuilder.build();
        return client;
    }
}
|
Ses2ClientOptimizedImpl
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirWriteFileOp.java
|
{
"start": 35250,
"end": 36101
}
|
class ____ {
  // Immutable bundle of validated parameters for an addBlock() call.
  private final long blockSize;
  private final int numTargets;
  private final byte storagePolicyID;
  private final String clientMachine;
  private final BlockType blockType;
  private final ErasureCodingPolicy ecPolicy;
  ValidateAddBlockResult(
      long blockSize, int numTargets, byte storagePolicyID,
      String clientMachine, BlockType blockType,
      ErasureCodingPolicy ecPolicy) {
    this.blockSize = blockSize;
    this.numTargets = numTargets;
    this.storagePolicyID = storagePolicyID;
    this.clientMachine = clientMachine;
    this.blockType = blockType;
    this.ecPolicy = ecPolicy;
    // A striped (erasure-coded) block cannot be described without its policy.
    if (blockType == BlockType.STRIPED) {
      Preconditions.checkArgument(ecPolicy != null,
          "ecPolicy is not specified for striped block");
    }
  }
}
}
|
ValidateAddBlockResult
|
java
|
elastic__elasticsearch
|
x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/content/ObjectParser.java
|
{
"start": 1023,
"end": 1427
}
|
class ____<Value, Context> extends AbstractObjectParser<Value, Context> {
private final Map<String, FieldParser> fieldParsers = new HashMap<>();
private final List<String[]> requiredFieldSets = new ArrayList<>();
private final String name;
private final Function<Context, Value> valueBuilder;
private final UnknownFieldParser<Value, Context> unknownFieldParser;
public
|
ObjectParser
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/sql/dialect/db2/parser/DB2ExprParser.java
|
{
"start": 1001,
"end": 11696
}
|
class ____ extends SQLExprParser {
public static final String[] AGGREGATE_FUNCTIONS;
public static final long[] AGGREGATE_FUNCTIONS_CODES;
static {
String[] strings = {"AVG", "COUNT", "MAX", "MIN", "STDDEV", "SUM", "ROW_NUMBER",
"ROWNUMBER"};
AGGREGATE_FUNCTIONS_CODES = FnvHash.fnv1a_64_lower(strings, true);
AGGREGATE_FUNCTIONS = new String[AGGREGATE_FUNCTIONS_CODES.length];
for (String str : strings) {
long hash = FnvHash.fnv1a_64_lower(str);
int index = Arrays.binarySearch(AGGREGATE_FUNCTIONS_CODES, hash);
AGGREGATE_FUNCTIONS[index] = str;
}
}
public DB2ExprParser(String sql) {
this(new DB2Lexer(sql));
this.lexer.nextToken();
this.dbType = DbType.db2;
}
public DB2ExprParser(String sql, SQLParserFeature... features) {
this(new DB2Lexer(sql, features));
this.lexer.nextToken();
}
public DB2ExprParser(Lexer lexer) {
super(lexer);
this.aggregateFunctions = AGGREGATE_FUNCTIONS;
this.aggregateFunctionHashCodes = AGGREGATE_FUNCTIONS_CODES;
this.dbType = DbType.db2;
}
public SQLExpr primaryRest(SQLExpr expr) {
if (lexer.identifierEquals(FnvHash.Constants.VALUE)) {
if (expr instanceof SQLIdentifierExpr) {
SQLIdentifierExpr identExpr = (SQLIdentifierExpr) expr;
if (identExpr.hashCode64() == FnvHash.Constants.NEXT) {
lexer.nextToken();
accept(Token.FOR);
SQLName seqName = this.name();
SQLSequenceExpr seqExpr = new SQLSequenceExpr(seqName, SQLSequenceExpr.Function.NextVal);
return seqExpr;
} else if (identExpr.hashCode64() == FnvHash.Constants.PREVIOUS) {
lexer.nextToken();
accept(Token.FOR);
SQLName seqName = this.name();
SQLSequenceExpr seqExpr = new SQLSequenceExpr(seqName, SQLSequenceExpr.Function.PrevVal);
return seqExpr;
}
}
} else if (lexer.identifierEquals(FnvHash.Constants.DATE)) {
if (expr instanceof SQLIdentifierExpr) {
SQLIdentifierExpr identExpr = (SQLIdentifierExpr) expr;
if (identExpr.hashCode64() == FnvHash.Constants.CURRENT) {
lexer.nextToken();
expr = new SQLIdentifierExpr("CURRENT DATE");
}
}
} else if (lexer.identifierEquals(FnvHash.Constants.DAY) && expr instanceof SQLIntegerExpr) {
lexer.nextToken();
expr = new SQLIntervalExpr(expr, SQLIntervalUnit.DAY);
} else if (lexer.identifierEquals(FnvHash.Constants.TIMESTAMP)) {
if (expr instanceof SQLIdentifierExpr) {
SQLIdentifierExpr identExpr = (SQLIdentifierExpr) expr;
if (identExpr.hashCode64() == FnvHash.Constants.CURRENT) {
lexer.nextToken();
expr = new SQLIdentifierExpr("CURRENT TIMESTAMP");
}
}
} else if (lexer.identifierEquals(FnvHash.Constants.TIME)) {
if (expr instanceof SQLIdentifierExpr) {
SQLIdentifierExpr identExpr = (SQLIdentifierExpr) expr;
if (identExpr.hashCode64() == FnvHash.Constants.CURRENT) {
lexer.nextToken();
expr = new SQLIdentifierExpr("CURRENT TIME");
}
}
} else if (lexer.token() == Token.SCHEMA) {
if (expr instanceof SQLIdentifierExpr) {
SQLIdentifierExpr identExpr = (SQLIdentifierExpr) expr;
if (identExpr.hashCode64() == FnvHash.Constants.CURRENT) {
lexer.nextToken();
expr = new SQLIdentifierExpr("CURRENT SCHEMA");
}
}
} else if (lexer.identifierEquals(FnvHash.Constants.PATH)) {
if (expr instanceof SQLIdentifierExpr) {
SQLIdentifierExpr identExpr = (SQLIdentifierExpr) expr;
if (identExpr.hashCode64() == FnvHash.Constants.CURRENT) {
lexer.nextToken();
expr = new SQLIdentifierExpr("CURRENT PATH");
}
}
} else if (lexer.identifierEquals(FnvHash.Constants.MONTHS)) {
SQLIntervalExpr intervalExpr = new SQLIntervalExpr(expr, SQLIntervalUnit.MONTH);
lexer.nextToken();
expr = intervalExpr;
} else if (lexer.identifierEquals(FnvHash.Constants.YEARS)) {
SQLIntervalExpr intervalExpr = new SQLIntervalExpr(expr, SQLIntervalUnit.YEAR);
lexer.nextToken();
expr = intervalExpr;
} else if (lexer.identifierEquals(FnvHash.Constants.DAYS)) {
SQLIntervalExpr intervalExpr = new SQLIntervalExpr(expr, SQLIntervalUnit.DAY);
lexer.nextToken();
expr = intervalExpr;
} else if (lexer.identifierEquals(FnvHash.Constants.HOUR) || lexer.identifierEquals(FnvHash.Constants.HOURS)) {
SQLIntervalExpr intervalExpr = new SQLIntervalExpr(expr, SQLIntervalUnit.HOUR);
lexer.nextToken();
expr = intervalExpr;
} else if (lexer.identifierEquals(FnvHash.Constants.MINUTES)) {
SQLIntervalExpr intervalExpr = new SQLIntervalExpr(expr, SQLIntervalUnit.MINUTE);
lexer.nextToken();
expr = intervalExpr;
} else if (lexer.identifierEquals(FnvHash.Constants.SECONDS)) {
SQLIntervalExpr intervalExpr = new SQLIntervalExpr(expr, SQLIntervalUnit.SECOND);
lexer.nextToken();
expr = intervalExpr;
}
return super.primaryRest(expr);
}
protected SQLExpr dotRest(SQLExpr expr) {
if (lexer.identifierEquals(FnvHash.Constants.NEXTVAL)) {
if (expr instanceof SQLIdentifierExpr) {
SQLIdentifierExpr identExpr = (SQLIdentifierExpr) expr;
SQLSequenceExpr seqExpr = new SQLSequenceExpr(identExpr, SQLSequenceExpr.Function.NextVal);
lexer.nextToken();
return seqExpr;
}
} else if (lexer.identifierEquals(FnvHash.Constants.PREVVAL)) {
if (expr instanceof SQLIdentifierExpr) {
SQLIdentifierExpr identExpr = (SQLIdentifierExpr) expr;
SQLSequenceExpr seqExpr = new SQLSequenceExpr(identExpr, SQLSequenceExpr.Function.PrevVal);
lexer.nextToken();
return seqExpr;
}
} else if (lexer.identifierEquals(FnvHash.Constants.CURRVAL)) {
if (expr instanceof SQLIdentifierExpr) {
SQLIdentifierExpr identExpr = (SQLIdentifierExpr) expr;
SQLSequenceExpr seqExpr = new SQLSequenceExpr(identExpr, SQLSequenceExpr.Function.CurrVal);
lexer.nextToken();
return seqExpr;
}
}
return super.dotRest(expr);
}
public SQLColumnDefinition parseColumnRest(SQLColumnDefinition column) {
column = super.parseColumnRest(column);
if (lexer.identifierEquals(FnvHash.Constants.GENERATED)) {
lexer.nextToken();
if (lexer.identifierEquals(FnvHash.Constants.ALWAYS)) {
lexer.nextToken();
} else {
throw new ParserException("TODO " + lexer.info());
}
accept(Token.AS);
if (lexer.token() == Token.IDENTITY) {
SQLColumnDefinition.Identity identity = parseIdentity();
column.setIdentity(identity);
} else {
SQLExpr expr = this.expr();
column.setGeneratedAlwaysAs(expr);
}
parseColumnRest(column);
}
return column;
}
    /**
     * Parses a DB2 {@code IDENTITY} clause of the form
     * {@code IDENTITY (START WITH n, INCREMENT BY n, CYCLE, MINVALUE n, MAXVALUE n)}.
     * The parenthesized option list is optional; each option inside it is also
     * optional, but they are only recognized in this fixed order. A trailing
     * comma after any option is consumed and ignored.
     *
     * @return the populated {@link SQLColumnDefinition.Identity}
     * @throws ParserException if an option's numeric argument is not an integer literal
     */
    protected SQLColumnDefinition.Identity parseIdentity() {
        SQLColumnDefinition.Identity identity = new SQLColumnDefinition.Identity();
        accept(Token.IDENTITY);

        if (lexer.token() == Token.LPAREN) {
            accept(Token.LPAREN);

            // START WITH <int>
            if (lexer.identifierEquals(FnvHash.Constants.START)) {
                lexer.nextToken();
                accept(Token.WITH);
                if (lexer.token() == Token.LITERAL_INT) {
                    identity.setSeed((Integer) lexer.integerValue());
                    lexer.nextToken();
                } else {
                    throw new ParserException("TODO " + lexer.info());
                }

                // Options are comma-separated; swallow the separator if present.
                if (lexer.token() == Token.COMMA) {
                    lexer.nextToken();
                }
            }

            // INCREMENT BY <int>
            if (lexer.identifierEquals(FnvHash.Constants.INCREMENT)) {
                lexer.nextToken();
                accept(Token.BY);
                if (lexer.token() == Token.LITERAL_INT) {
                    identity.setIncrement((Integer) lexer.integerValue());
                    lexer.nextToken();
                } else {
                    throw new ParserException("TODO " + lexer.info());
                }

                if (lexer.token() == Token.COMMA) {
                    lexer.nextToken();
                }
            }

            // CYCLE (flag, no argument)
            if (lexer.identifierEquals(FnvHash.Constants.CYCLE)) {
                lexer.nextToken();
                identity.setCycle(true);

                if (lexer.token() == Token.COMMA) {
                    lexer.nextToken();
                }
            }

            // MINVALUE <int>
            if (lexer.identifierEquals(FnvHash.Constants.MINVALUE)) {
                // NOTE(review): this branch uses nextTokenValue() while MAXVALUE
                // below uses nextToken() — presumably to lex a (possibly signed)
                // value directly; confirm the asymmetry is intentional.
                lexer.nextTokenValue();
                if (lexer.token() == Token.LITERAL_INT) {
                    identity.setMinValue((Integer) lexer.integerValue());
                    lexer.nextToken();
                } else {
                    throw new ParserException("TODO " + lexer.info());
                }

                if (lexer.token() == Token.COMMA) {
                    lexer.nextToken();
                }
            }

            // MAXVALUE <int>
            if (lexer.identifierEquals(FnvHash.Constants.MAXVALUE)) {
                lexer.nextToken();
                if (lexer.token() == Token.LITERAL_INT) {
                    identity.setMaxValue((Integer) lexer.integerValue());
                    lexer.nextToken();
                } else {
                    throw new ParserException("TODO " + lexer.info());
                }

                if (lexer.token() == Token.COMMA) {
                    lexer.nextToken();
                }
            }

            accept(Token.RPAREN);
        }

        return identity;
    }
}
|
DB2ExprParser
|
java
|
google__guava
|
android/guava-testlib/src/com/google/common/testing/TearDownAccepter.java
|
{
"start": 990,
"end": 1322
}
|
interface ____ {
/**
* Registers a TearDown implementor which will be run after the test proper.
*
* <p>In JUnit4 language, that means as an {@code @After}.
*
* <p>In JUnit3 language, that means during the {@link junit.framework.TestCase#tearDown()} step.
*/
void addTearDown(TearDown tearDown);
}
|
TearDownAccepter
|
java
|
spring-projects__spring-boot
|
module/spring-boot-actuator/src/main/java/org/springframework/boot/actuate/context/ShutdownEndpoint.java
|
{
"start": 2490,
"end": 2930
}
|
class ____ implements OperationResponseBody {
private static final ShutdownDescriptor DEFAULT = new ShutdownDescriptor("Shutting down, bye...");
private static final ShutdownDescriptor NO_CONTEXT = new ShutdownDescriptor("No context to shutdown.");
private final String message;
ShutdownDescriptor(String message) {
this.message = message;
}
public String getMessage() {
return this.message;
}
}
}
|
ShutdownDescriptor
|
java
|
quarkusio__quarkus
|
tcks/microprofile-rest-client-reactive/src/test/java/io/quarkus/tck/restclient/invalid/InterfaceHasMethodWithMismatchedPathParameterTest.java
|
{
"start": 502,
"end": 981
}
|
class ____ {
@Deployment
@ShouldThrowException(RestClientDefinitionException.class)
public static Archive<?> createDeployment() {
return ShrinkWrap
.create(WebArchive.class,
InterfaceHasMethodWithMismatchedPathParameterTest.class.getSimpleName() + ".war")
.addClasses(TemplateMismatch.class);
}
@Test
public void shouldNotBeInvoked() {
}
}
|
InterfaceHasMethodWithMismatchedPathParameterTest
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/LumberjackEndpointBuilderFactory.java
|
{
"start": 9120,
"end": 9943
}
|
interface ____ which to listen for Lumberjack
*
* Path parameter: port
* Network port on which to listen for Lumberjack
* Default value: 5044
*
* @param path host:port
* @return the dsl builder
*/
default LumberjackEndpointBuilder lumberjack(String path) {
return LumberjackEndpointBuilderFactory.endpointBuilder("lumberjack", path);
}
/**
* Lumberjack (camel-lumberjack)
* Receive logs messages using the Lumberjack protocol.
*
* Category: monitoring
* Since: 2.18
* Maven coordinates: org.apache.camel:camel-lumberjack
*
* Syntax: <code>lumberjack:host:port</code>
*
* Path parameter: host (required)
* Network
|
on
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/VarTypeNameTest.java
|
{
"start": 1638,
"end": 1797
}
|
class ____ {
int var;
}
""")
.setArgs(ImmutableList.of("-source", "8", "-target", "8"))
.doTest();
}
}
|
Test
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/inject/MisplacedScopeAnnotationsTest.java
|
{
"start": 6152,
"end": 6361
}
|
interface ____ {}
@Provides
int something(@RequestScoped Integer bar) {
return 42;
}
}
""")
.doTest();
}
}
|
RequestScoped
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/NettyEndpointBuilderFactory.java
|
{
"start": 188727,
"end": 189039
}
|
class ____ extends AbstractEndpointBuilder implements NettyEndpointBuilder, AdvancedNettyEndpointBuilder {
public NettyEndpointBuilderImpl(String path) {
super(componentName, path);
}
}
return new NettyEndpointBuilderImpl(path);
}
}
|
NettyEndpointBuilderImpl
|
java
|
apache__camel
|
components/camel-thrift/src/main/java/org/apache/camel/component/thrift/ThriftUtils.java
|
{
"start": 3621,
"end": 3766
}
|
class ____ found: " + clientClassName);
} catch (NoSuchMethodException e) {
throw new IllegalArgumentException("Thrift client
|
not
|
java
|
google__guice
|
extensions/grapher/src/com/google/inject/grapher/InjectorGrapher.java
|
{
"start": 1058,
"end": 1451
}
|
interface ____ {
/** Graphs the guice dependency graph for the given injector using default starting keys. */
void graph(Injector injector) throws IOException;
/**
* Graphs the guice dependency graph for the given injector using the given starting keys and
* their transitive dependencies.
*/
void graph(Injector injector, Set<Key<?>> root) throws IOException;
}
|
InjectorGrapher
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ResumeFollowAction.java
|
{
"start": 1357,
"end": 1486
}
|
class ____ extends MasterNodeRequest<Request> implements ToXContentObject {
// Note that Request should be the Value
|
Request
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/ThreadSafeCheckerTest.java
|
{
"start": 27855,
"end": 28194
}
|
class ____ {
final int[] xs = null;
}
""")
.doTest();
}
@Test
public void twoFieldsInSource() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import com.google.errorprone.annotations.ThreadSafe;
@ThreadSafe
|
Test
|
java
|
apache__avro
|
lang/java/avro/src/test/java/org/apache/avro/TestSchemaBuilder.java
|
{
"start": 1722,
"end": 39886
}
|
class ____ {
@TempDir
public File DIR;
@Test
void record() {
Schema schema = SchemaBuilder.record("myrecord").namespace("org.example").aliases("oldrecord").fields().name("f0")
.aliases("f0alias").type().stringType().noDefault().name("f1").doc("This is f1").type().longType().noDefault()
.name("f2").type().nullable().booleanType().booleanDefault(true).name("f3").type().unionOf().nullType().and()
.booleanType().endUnion().nullDefault().endRecord();
assertEquals("myrecord", schema.getName());
assertEquals("org.example", schema.getNamespace());
assertEquals("org.example.oldrecord", schema.getAliases().iterator().next());
assertFalse(schema.isError());
List<Schema.Field> fields = schema.getFields();
assertEquals(4, fields.size());
assertEquals(new Schema.Field("f0", Schema.create(Schema.Type.STRING)), fields.get(0));
assertTrue(fields.get(0).aliases().contains("f0alias"));
assertEquals(new Schema.Field("f1", Schema.create(Schema.Type.LONG), "This is f1"), fields.get(1));
List<Schema> types = new ArrayList<>();
types.add(Schema.create(Schema.Type.BOOLEAN));
types.add(Schema.create(Schema.Type.NULL));
Schema optional = Schema.createUnion(types);
assertEquals(new Schema.Field("f2", optional, null, true), fields.get(2));
List<Schema> types2 = new ArrayList<>();
types2.add(Schema.create(Schema.Type.NULL));
types2.add(Schema.create(Schema.Type.BOOLEAN));
Schema optional2 = Schema.createUnion(types2);
assertNotEquals(new Schema.Field("f3", optional2, null, (Object) null), fields.get(3));
assertEquals(new Schema.Field("f3", optional2, null, Schema.Field.NULL_DEFAULT_VALUE), fields.get(3));
}
@Test
void doc() {
Schema s = SchemaBuilder.fixed("myfixed").doc("mydoc").size(1);
assertEquals("mydoc", s.getDoc());
}
@Test
void props() {
Schema s = SchemaBuilder.builder().intBuilder().prop("p1", "v1").prop("p2", "v2").prop("p2", "v2real") // overwrite
.endInt();
int size = s.getObjectProps().size();
assertEquals(2, size);
assertEquals("v1", s.getProp("p1"));
assertEquals("v2real", s.getProp("p2"));
}
@Test
void objectProps() {
Schema s = SchemaBuilder.builder().intBuilder().prop("booleanProp", true).prop("intProp", Integer.MAX_VALUE)
.prop("longProp", Long.MAX_VALUE).prop("floatProp", 1.0f).prop("doubleProp", Double.MAX_VALUE)
.prop("byteProp", new byte[] { 0x41, 0x42, 0x43 }).prop("stringProp", "abc").endInt();
// object properties
assertEquals(7, s.getObjectProps().size());
assertTrue(s.getObjectProp("booleanProp") instanceof Boolean);
assertEquals(true, s.getObjectProp("booleanProp"));
assertTrue(s.getObjectProp("intProp") instanceof Integer);
assertEquals(Integer.MAX_VALUE, s.getObjectProp("intProp"));
assertTrue(s.getObjectProp("intProp") instanceof Integer);
assertTrue(s.getObjectProp("longProp") instanceof Long);
assertEquals(Long.MAX_VALUE, s.getObjectProp("longProp"));
assertTrue(s.getObjectProp("floatProp") instanceof Float);
// float converts to double
assertEquals(1.0f, s.getObjectProp("floatProp"));
assertTrue(s.getObjectProp("doubleProp") instanceof Double);
assertEquals(Double.MAX_VALUE, s.getObjectProp("doubleProp"));
// byte[] converts to string
assertTrue(s.getObjectProp("byteProp") instanceof byte[]);
assertArrayEquals(new byte[] { 0x41, 0x42, 0x43 }, (byte[]) s.getObjectProp("byteProp"));
assertTrue(s.getObjectProp("stringProp") instanceof String);
assertEquals("abc", s.getObjectProp("stringProp"));
}
@Test
void fieldObjectProps() {
Schema s = SchemaBuilder.builder().record("MyRecord").fields().name("myField").prop("booleanProp", true)
.prop("intProp", Integer.MAX_VALUE).prop("longProp", Long.MAX_VALUE).prop("floatProp", 1.0f)
.prop("doubleProp", Double.MAX_VALUE).prop("byteProp", new byte[] { 0x41, 0x42, 0x43 })
.prop("stringProp", "abc").type().intType().noDefault().endRecord();
Schema.Field f = s.getField("myField");
// object properties
assertEquals(7, f.getObjectProps().size());
assertTrue(f.getObjectProp("booleanProp") instanceof Boolean);
assertEquals(true, f.getObjectProp("booleanProp"));
assertTrue(f.getObjectProp("intProp") instanceof Integer);
assertEquals(Integer.MAX_VALUE, f.getObjectProp("intProp"));
assertTrue(f.getObjectProp("intProp") instanceof Integer);
assertTrue(f.getObjectProp("longProp") instanceof Long);
assertEquals(Long.MAX_VALUE, f.getObjectProp("longProp"));
assertTrue(f.getObjectProp("floatProp") instanceof Float);
// float converts to double
assertEquals(1.0f, f.getObjectProp("floatProp"));
assertTrue(f.getObjectProp("doubleProp") instanceof Double);
assertEquals(Double.MAX_VALUE, f.getObjectProp("doubleProp"));
// byte[] converts to string
assertTrue(f.getObjectProp("byteProp") instanceof byte[]);
assertArrayEquals(new byte[] { 0x41, 0x42, 0x43 }, (byte[]) f.getObjectProp("byteProp"));
assertTrue(f.getObjectProp("stringProp") instanceof String);
assertEquals("abc", f.getObjectProp("stringProp"));
assertEquals("abc", f.getObjectProp("stringProp", "default"));
assertEquals("default", f.getObjectProp("unknwon", "default"));
}
@Test
void arrayObjectProp() {
List<Object> values = new ArrayList<>();
values.add(true);
values.add(Integer.MAX_VALUE);
values.add(Long.MAX_VALUE);
values.add(1.0f);
values.add(Double.MAX_VALUE);
values.add(new byte[] { 0x41, 0x42, 0x43 });
values.add("abc");
Schema s = SchemaBuilder.builder().intBuilder().prop("arrayProp", values).endInt();
// object properties
assertEquals(1, s.getObjectProps().size());
assertTrue(s.getObjectProp("arrayProp") instanceof Collection);
@SuppressWarnings("unchecked")
Collection<Object> valueCollection = (Collection<Object>) s.getObjectProp("arrayProp");
Iterator<Object> iter = valueCollection.iterator();
assertEquals(7, valueCollection.size());
assertEquals(true, iter.next());
assertEquals(Integer.MAX_VALUE, iter.next());
assertEquals(Long.MAX_VALUE, iter.next());
assertEquals(1.0f, iter.next());
assertEquals(Double.MAX_VALUE, iter.next());
assertArrayEquals(new byte[] { 0x41, 0x42, 0x43 }, (byte[]) iter.next());
assertEquals("abc", iter.next());
}
@Test
void fieldArrayObjectProp() {
List<Object> values = new ArrayList<>();
values.add(true);
values.add(Integer.MAX_VALUE);
values.add(Long.MAX_VALUE);
values.add(1.0f);
values.add(Double.MAX_VALUE);
values.add(new byte[] { 0x41, 0x42, 0x43 });
values.add("abc");
Schema s = SchemaBuilder.builder().record("MyRecord").fields().name("myField").prop("arrayProp", values).type()
.intType().noDefault().endRecord();
Schema.Field f = s.getField("myField");
// object properties
assertEquals(1, f.getObjectProps().size());
assertTrue(f.getObjectProp("arrayProp") instanceof Collection);
@SuppressWarnings("unchecked")
Collection<Object> valueCollection = (Collection<Object>) f.getObjectProp("arrayProp");
Iterator<Object> iter = valueCollection.iterator();
assertEquals(7, valueCollection.size());
assertEquals(true, iter.next());
assertEquals(Integer.MAX_VALUE, iter.next());
assertEquals(Long.MAX_VALUE, iter.next());
assertEquals(1.0f, iter.next());
assertEquals(Double.MAX_VALUE, iter.next());
assertArrayEquals(new byte[] { 0x41, 0x42, 0x43 }, (byte[]) iter.next());
assertEquals("abc", iter.next());
}
@Test
void mapObjectProp() {
Map<String, Object> values = new HashMap<>();
values.put("booleanKey", true);
values.put("intKey", Integer.MAX_VALUE);
values.put("longKey", Long.MAX_VALUE);
values.put("floatKey", 1.0f);
values.put("doubleKey", Double.MAX_VALUE);
values.put("byteKey", new byte[] { 0x41, 0x42, 0x43 });
values.put("stringKey", "abc");
Schema s = SchemaBuilder.builder().intBuilder().prop("mapProp", values).endInt();
// object properties
assertTrue(s.getObjectProp("mapProp") instanceof Map);
@SuppressWarnings("unchecked")
Map<String, Object> valueMap = (Map<String, Object>) s.getObjectProp("mapProp");
assertEquals(values.size(), valueMap.size());
assertTrue(valueMap.get("booleanKey") instanceof Boolean);
assertEquals(true, valueMap.get("booleanKey"));
assertTrue(valueMap.get("intKey") instanceof Integer);
assertEquals(Integer.MAX_VALUE, valueMap.get("intKey"));
assertTrue(valueMap.get("longKey") instanceof Long);
assertEquals(Long.MAX_VALUE, valueMap.get("longKey"));
assertTrue(valueMap.get("floatKey") instanceof Float);
assertEquals(1.0f, valueMap.get("floatKey"));
assertTrue(valueMap.get("doubleKey") instanceof Double);
assertEquals(Double.MAX_VALUE, valueMap.get("doubleKey"));
assertTrue(valueMap.get("byteKey") instanceof byte[]);
assertArrayEquals("ABC".getBytes(StandardCharsets.UTF_8), (byte[]) valueMap.get("byteKey"));
assertTrue(valueMap.get("stringKey") instanceof String);
assertEquals("abc", valueMap.get("stringKey"));
}
@Test
void fieldMapObjectProp() {
Map<String, Object> values = new HashMap<>();
values.put("booleanKey", true);
values.put("intKey", Integer.MAX_VALUE);
values.put("longKey", Long.MAX_VALUE);
values.put("floatKey", 1.0f);
values.put("doubleKey", Double.MAX_VALUE);
values.put("byteKey", new byte[] { 0x41, 0x42, 0x43 });
values.put("stringKey", "abc");
Schema s = SchemaBuilder.builder().record("MyRecord").fields().name("myField").prop("mapProp", values).type()
.intType().noDefault().endRecord();
Schema.Field f = s.getField("myField");
// object properties
assertTrue(f.getObjectProp("mapProp") instanceof Map);
@SuppressWarnings("unchecked")
Map<String, Object> valueMap = (Map<String, Object>) f.getObjectProp("mapProp");
assertEquals(values.size(), valueMap.size());
assertTrue(valueMap.get("booleanKey") instanceof Boolean);
assertEquals(true, valueMap.get("booleanKey"));
assertTrue(valueMap.get("intKey") instanceof Integer);
assertEquals(Integer.MAX_VALUE, valueMap.get("intKey"));
assertTrue(valueMap.get("longKey") instanceof Long);
assertEquals(Long.MAX_VALUE, valueMap.get("longKey"));
assertTrue(valueMap.get("floatKey") instanceof Float);
assertEquals(1.0f, valueMap.get("floatKey"));
assertTrue(valueMap.get("doubleKey") instanceof Double);
assertEquals(Double.MAX_VALUE, valueMap.get("doubleKey"));
assertTrue(valueMap.get("byteKey") instanceof byte[]);
assertEquals("ABC", new String((byte[]) valueMap.get("byteKey")));
assertTrue(valueMap.get("stringKey") instanceof String);
assertEquals("abc", valueMap.get("stringKey"));
}
@Test
void nullObjectProp() {
assertThrows(AvroRuntimeException.class, () -> {
SchemaBuilder.builder().intBuilder().prop("nullProp", (Object) null).endInt();
});
}
@Test
void fieldNullObjectProp() {
assertThrows(AvroRuntimeException.class, () -> {
SchemaBuilder.builder().record("MyRecord").fields().name("myField").prop("nullProp", (Object) null).type()
.intType().noDefault().endRecord();
});
}
@Test
void namespaces() {
Schema s1 = SchemaBuilder.record("myrecord").namespace("org.example").fields().name("myint").type().intType()
.noDefault().endRecord();
Schema s2 = SchemaBuilder.record("org.example.myrecord").fields().name("myint").type().intType().noDefault()
.endRecord();
Schema s3 = SchemaBuilder.record("org.example.myrecord").namespace("org.example2").fields().name("myint").type()
.intType().noDefault().endRecord();
Schema s4 = SchemaBuilder.builder("org.example").record("myrecord").fields().name("myint").type().intType()
.noDefault().endRecord();
assertEquals("myrecord", s1.getName());
assertEquals("myrecord", s2.getName());
assertEquals("myrecord", s3.getName());
assertEquals("myrecord", s4.getName());
assertEquals("org.example", s1.getNamespace());
assertEquals("org.example", s2.getNamespace());
assertEquals("org.example", s3.getNamespace()); // namespace call is ignored
assertEquals("org.example", s4.getNamespace());
assertEquals("org.example.myrecord", s1.getFullName());
assertEquals("org.example.myrecord", s2.getFullName());
assertEquals("org.example.myrecord", s3.getFullName());
assertEquals("org.example.myrecord", s4.getFullName());
}
@Test
void missingRecordName() {
assertThrows(NullPointerException.class, () -> {
SchemaBuilder.record(null).fields() // null name
.name("f0").type().stringType().noDefault().endRecord();
});
}
@Test
void testBoolean() {
Schema.Type type = Schema.Type.BOOLEAN;
Schema simple = SchemaBuilder.builder().booleanType();
Schema expected = primitive(type, simple);
Schema built1 = SchemaBuilder.builder().booleanBuilder().prop("p", "v").endBoolean();
assertEquals(expected, built1);
}
@Test
void testInt() {
Schema.Type type = Schema.Type.INT;
Schema simple = SchemaBuilder.builder().intType();
Schema expected = primitive(type, simple);
Schema built1 = SchemaBuilder.builder().intBuilder().prop("p", "v").endInt();
assertEquals(expected, built1);
}
@Test
void testLong() {
Schema.Type type = Schema.Type.LONG;
Schema simple = SchemaBuilder.builder().longType();
Schema expected = primitive(type, simple);
Schema built1 = SchemaBuilder.builder().longBuilder().prop("p", "v").endLong();
assertEquals(expected, built1);
}
@Test
void testFloat() {
Schema.Type type = Schema.Type.FLOAT;
Schema simple = SchemaBuilder.builder().floatType();
Schema expected = primitive(type, simple);
Schema built1 = SchemaBuilder.builder().floatBuilder().prop("p", "v").endFloat();
assertEquals(expected, built1);
}
@Test
void duble() {
Schema.Type type = Schema.Type.DOUBLE;
Schema simple = SchemaBuilder.builder().doubleType();
Schema expected = primitive(type, simple);
Schema built1 = SchemaBuilder.builder().doubleBuilder().prop("p", "v").endDouble();
assertEquals(expected, built1);
}
@Test
void string() {
Schema.Type type = Schema.Type.STRING;
Schema simple = SchemaBuilder.builder().stringType();
Schema expected = primitive(type, simple);
Schema built1 = SchemaBuilder.builder().stringBuilder().prop("p", "v").endString();
assertEquals(expected, built1);
}
@Test
void bytes() {
Schema.Type type = Schema.Type.BYTES;
Schema simple = SchemaBuilder.builder().bytesType();
Schema expected = primitive(type, simple);
Schema built1 = SchemaBuilder.builder().bytesBuilder().prop("p", "v").endBytes();
assertEquals(expected, built1);
}
@Test
void testNull() {
Schema.Type type = Schema.Type.NULL;
Schema simple = SchemaBuilder.builder().nullType();
Schema expected = primitive(type, simple);
Schema built1 = SchemaBuilder.builder().nullBuilder().prop("p", "v").endNull();
assertEquals(expected, built1);
}
private Schema primitive(Schema.Type type, Schema bare) {
// test creation of bare schema by name
Schema bareByName = SchemaBuilder.builder().type(type.getName());
assertEquals(Schema.create(type), bareByName);
assertEquals(bareByName, bare);
// return a schema with custom prop set
Schema p = Schema.create(type);
p.addProp("p", "v");
return p;
}
// @Test
// public void testError() {
// Schema schema = SchemaBuilder
// .errorType("myerror")
// .requiredString("message")
// .build();
//
// Assert.assertEquals("myerror", schema.getName());
// Assert.assertTrue(schema.isError());
// }
@Test
void recursiveRecord() {
Schema schema = SchemaBuilder.record("LongList").fields().name("value").type().longType().noDefault().name("next")
.type().optional().type("LongList").endRecord();
assertEquals("LongList", schema.getName());
List<Schema.Field> fields = schema.getFields();
assertEquals(2, fields.size());
assertEquals(new Schema.Field("value", Schema.create(Schema.Type.LONG), null), fields.get(0));
assertEquals(Schema.Type.UNION, fields.get(1).schema().getType());
assertEquals(Schema.Type.NULL, fields.get(1).schema().getTypes().get(0).getType());
Schema recordSchema = fields.get(1).schema().getTypes().get(1);
assertEquals(Schema.Type.RECORD, recordSchema.getType());
assertEquals("LongList", recordSchema.getName());
assertEquals(NullNode.getInstance(), fields.get(1).defaultValue());
}
@Test
void testEnum() {
List<String> symbols = Arrays.asList("a", "b");
Schema expected = Schema.createEnum("myenum", null, null, symbols);
expected.addProp("p", "v");
Schema schema = SchemaBuilder.enumeration("myenum").prop("p", "v").symbols("a", "b");
assertEquals(expected, schema);
}
@Test
void enumWithDefault() {
List<String> symbols = Arrays.asList("a", "b");
String enumDefault = "a";
Schema expected = Schema.createEnum("myenum", null, null, symbols, enumDefault);
expected.addProp("p", "v");
Schema schema = SchemaBuilder.enumeration("myenum").prop("p", "v").defaultSymbol(enumDefault).symbols("a", "b");
assertEquals(expected, schema);
}
@Test
void fixed() {
Schema expected = Schema.createFixed("myfixed", null, null, 16);
expected.addAlias("myOldFixed");
Schema schema = SchemaBuilder.fixed("myfixed").aliases("myOldFixed").size(16);
assertEquals(expected, schema);
}
@Test
void array() {
Schema longSchema = Schema.create(Schema.Type.LONG);
Schema expected = Schema.createArray(longSchema);
Schema schema1 = SchemaBuilder.array().items().longType();
assertEquals(expected, schema1);
Schema schema2 = SchemaBuilder.array().items(longSchema);
assertEquals(expected, schema2);
Schema schema3 = SchemaBuilder.array().prop("p", "v").items().type("long");
expected.addProp("p", "v");
assertEquals(expected, schema3);
}
@Test
void map() {
Schema intSchema = Schema.create(Schema.Type.INT);
Schema expected = Schema.createMap(intSchema);
Schema schema1 = SchemaBuilder.map().values().intType();
assertEquals(expected, schema1);
Schema schema2 = SchemaBuilder.map().values(intSchema);
assertEquals(expected, schema2);
Schema schema3 = SchemaBuilder.map().prop("p", "v").values().type("int");
expected.addProp("p", "v");
assertEquals(expected, schema3);
}
@Test
void unionAndNullable() {
List<Schema> types = new ArrayList<>();
types.add(Schema.create(Schema.Type.LONG));
types.add(Schema.create(Schema.Type.NULL));
Schema expected = Schema.createUnion(types);
Schema schema = SchemaBuilder.unionOf().longType().and().nullType().endUnion();
assertEquals(expected, schema);
schema = SchemaBuilder.nullable().longType();
assertEquals(expected, schema);
}
@Test
void fields() {
Schema rec = SchemaBuilder.record("Rec").fields().name("documented").doc("documented").type().nullType().noDefault()
.name("ascending").orderAscending().type().booleanType().noDefault().name("descending").orderDescending().type()
.floatType().noDefault().name("ignored").orderIgnore().type().doubleType().noDefault().name("aliased")
.aliases("anAlias").type().stringType().noDefault().endRecord();
assertEquals("documented", rec.getField("documented").doc());
assertEquals(Order.ASCENDING, rec.getField("ascending").order());
assertEquals(Order.DESCENDING, rec.getField("descending").order());
assertEquals(Order.IGNORE, rec.getField("ignored").order());
assertTrue(rec.getField("aliased").aliases().contains("anAlias"));
}
@Test
void fieldShortcuts() {
Schema full = SchemaBuilder.record("Blah").fields().name("rbool").type().booleanType().noDefault().name("obool")
.type().optional().booleanType().name("nbool").type().nullable().booleanType().booleanDefault(true).name("rint")
.type().intType().noDefault().name("oint").type().optional().intType().name("nint").type().nullable().intType()
.intDefault(1).name("rlong").type().longType().noDefault().name("olong").type().optional().longType()
.name("nlong").type().nullable().longType().longDefault(2L).name("rfloat").type().floatType().noDefault()
.name("ofloat").type().optional().floatType().name("nfloat").type().nullable().floatType().floatDefault(-1.1f)
.name("rdouble").type().doubleType().noDefault().name("odouble").type().optional().doubleType().name("ndouble")
.type().nullable().doubleType().doubleDefault(99.9d).name("rstring").type().stringType().noDefault()
.name("ostring").type().optional().stringType().name("nstring").type().nullable().stringType()
.stringDefault("def").name("rbytes").type().bytesType().noDefault().name("obytes").type().optional().bytesType()
.name("nbytes").type().nullable().bytesType().bytesDefault(new byte[] { 1, 2, 3 }).endRecord();
Schema shortcut = SchemaBuilder.record("Blah").fields().requiredBoolean("rbool").optionalBoolean("obool")
.nullableBoolean("nbool", true).requiredInt("rint").optionalInt("oint").nullableInt("nint", 1)
.requiredLong("rlong").optionalLong("olong").nullableLong("nlong", 2L).requiredFloat("rfloat")
.optionalFloat("ofloat").nullableFloat("nfloat", -1.1f).requiredDouble("rdouble").optionalDouble("odouble")
.nullableDouble("ndouble", 99.9d).requiredString("rstring").optionalString("ostring")
.nullableString("nstring", "def").requiredBytes("rbytes").optionalBytes("obytes")
.nullableBytes("nbytes", new byte[] { 1, 2, 3 }).endRecord();
assertEquals(full, shortcut);
}
@Test
void names() {
// no contextual namespace
Schema r = SchemaBuilder.record("Rec").fields().name("f0").type().fixed("org.foo.MyFixed").size(1).noDefault()
.name("f1").type("org.foo.MyFixed").noDefault().name("f2").type("org.foo.MyFixed", "").noDefault().name("f3")
.type("org.foo.MyFixed", null).noDefault().name("f4").type("org.foo.MyFixed", "ignorethis").noDefault()
.name("f5").type("MyFixed", "org.foo").noDefault().endRecord();
Schema expected = Schema.createFixed("org.foo.MyFixed", null, null, 1);
checkField(r, expected, "f0");
checkField(r, expected, "f1");
checkField(r, expected, "f2");
checkField(r, expected, "f3");
checkField(r, expected, "f4");
checkField(r, expected, "f5");
// context namespace
Schema f = SchemaBuilder.builder("").fixed("Foo").size(1);
assertEquals(Schema.createFixed("Foo", null, null, 1), f);
// context namespace from record matches
r = SchemaBuilder.record("Rec").namespace("org.foo").fields().name("f0").type().fixed("MyFixed").size(1).noDefault()
.name("f1").type("org.foo.MyFixed").noDefault().name("f2").type("org.foo.MyFixed", "").noDefault().name("f3")
.type("org.foo.MyFixed", null).noDefault().name("f4").type("org.foo.MyFixed", "ignorethis").noDefault()
.name("f5").type("MyFixed", "org.foo").noDefault().name("f6").type("MyFixed", null).noDefault().name("f7")
.type("MyFixed").noDefault().endRecord();
checkField(r, expected, "f0");
checkField(r, expected, "f1");
checkField(r, expected, "f2");
checkField(r, expected, "f3");
checkField(r, expected, "f4");
checkField(r, expected, "f5");
checkField(r, expected, "f6");
checkField(r, expected, "f7");
// context namespace from record does not match
r = SchemaBuilder.record("Rec").namespace("org.rec").fields().name("f0").type().fixed("MyFixed")
.namespace("org.foo").size(1).noDefault().name("f1").type("org.foo.MyFixed").noDefault().name("f2")
.type("org.foo.MyFixed", "").noDefault().name("f3").type("org.foo.MyFixed", null).noDefault().name("f4")
.type("org.foo.MyFixed", "ignorethis").noDefault().name("f5").type("MyFixed", "org.foo").noDefault()
.endRecord();
checkField(r, expected, "f0");
checkField(r, expected, "f1");
checkField(r, expected, "f2");
checkField(r, expected, "f3");
checkField(r, expected, "f4");
checkField(r, expected, "f5");
// context namespace from record, nested has no namespace
expected = Schema.createFixed("MyFixed", null, null, 1);
r = SchemaBuilder.record("Rec").namespace("org.rec").fields().name("f0").type().fixed("MyFixed").namespace("")
.size(1).noDefault().name("f1").type("MyFixed", "").noDefault().endRecord();
checkField(r, expected, "f0");
checkField(r, expected, "f1");
// mimic names of primitives, but with a namesapce. This is OK
SchemaBuilder.fixed("org.test.long").size(1);
SchemaBuilder.fixed("long").namespace("org.test").size(1);
SchemaBuilder.builder("org.test").fixed("long").size(1);
}
private void checkField(Schema r, Schema expected, String name) {
assertEquals(expected, r.getField(name).schema());
}
@Test
void namesFailRedefined() {
assertThrows(SchemaParseException.class, () -> {
SchemaBuilder.record("Rec").fields().name("f0").type().enumeration("MyEnum").symbols("A", "B").enumDefault("A")
.name("f1").type().enumeration("MyEnum").symbols("X", "Y").noDefault().endRecord();
});
}
@Test
void namesFailAbsent() {
assertThrows(SchemaParseException.class, () -> {
SchemaBuilder.builder().type("notdefined");
});
}
@Test
void nameReserved() {
assertThrows(AvroTypeException.class, () -> {
SchemaBuilder.fixed("long").namespace("").size(1);
});
}
  // Builds one record containing a field of every Avro type (plus a union
  // variant of each) with a default value, then verifies that a record built
  // with no explicit values materializes exactly those defaults.
  @Test
  void fieldTypesAndDefaultValues() {
    // Default payloads; bytes defaults may be given as byte[], ByteBuffer, or a String of char codes.
    byte[] bytedef = new byte[] { 3 };
    ByteBuffer bufdef = ByteBuffer.wrap(bytedef);
    String strdef = "\u0003";
    HashMap<String, String> mapdef = new HashMap<>();
    mapdef.put("a", "A");
    List<String> arrdef = Collections.singletonList("arr");
    Schema rec = SchemaBuilder.record("inner").fields().name("f").type().intType().noDefault().endRecord();
    Schema rec2 = SchemaBuilder.record("inner2").fields().name("f2").type().intType().noDefault().endRecord();
    GenericData.Record recdef = new GenericRecordBuilder(rec).set("f", 1).build();
    GenericData.Record recdef2 = new GenericRecordBuilder(rec2).set("f2", 2).build();
    // One fluent chain defining every field; "byName" references the earlier "E1" enum by name.
    Schema r = SchemaBuilder.record("r").fields().name("boolF").type().booleanType().booleanDefault(false).name("intF")
        .type().intType().intDefault(1).name("longF").type().longType().longDefault(2L).name("floatF").type()
        .floatType().floatDefault(3.0f).name("doubleF").type().doubleType().doubleDefault(4.0d).name("stringF").type()
        .stringType().stringDefault("def").name("bytesF1").type().bytesType().bytesDefault(bytedef).name("bytesF2")
        .type().bytesType().bytesDefault(bufdef).name("bytesF3").type().bytesType().bytesDefault(strdef).name("nullF")
        .type().nullType().nullDefault().name("fixedF1").type().fixed("F1").size(1).fixedDefault(bytedef)
        .name("fixedF2").type().fixed("F2").size(1).fixedDefault(bufdef).name("fixedF3").type().fixed("F3").size(1)
        .fixedDefault(strdef).name("enumF").type().enumeration("E1").symbols("S").enumDefault("S").name("mapF").type()
        .map().values().stringType().mapDefault(mapdef).name("arrayF").type().array().items().stringType()
        .arrayDefault(arrdef).name("recordF").type().record("inner").fields().name("f").type().intType().noDefault()
        .endRecord().recordDefault(recdef).name("byName").type("E1").withDefault("S")
        // union builders, one for each 'first type' in a union:
        .name("boolU").type().unionOf().booleanType().and().intType().endUnion().booleanDefault(false).name("intU")
        .type().unionOf().intType().and().longType().endUnion().intDefault(1).name("longU").type().unionOf().longType()
        .and().intType().endUnion().longDefault(2L).name("floatU").type().unionOf().floatType().and().intType()
        .endUnion().floatDefault(3.0f).name("doubleU").type().unionOf().doubleType().and().intType().endUnion()
        .doubleDefault(4.0d).name("stringU").type().unionOf().stringType().and().intType().endUnion()
        .stringDefault("def").name("bytesU").type().unionOf().bytesType().and().intType().endUnion()
        .bytesDefault(bytedef).name("nullU").type().unionOf().nullType().and().intType().endUnion().nullDefault()
        .name("fixedU").type().unionOf().fixed("F4").size(1).and().intType().endUnion().fixedDefault(bytedef)
        .name("enumU").type().unionOf().enumeration("E2").symbols("SS").and().intType().endUnion().enumDefault("SS")
        .name("mapU").type().unionOf().map().values().stringType().and().intType().endUnion().mapDefault(mapdef)
        .name("arrayU").type().unionOf().array().items().stringType().and().intType().endUnion().arrayDefault(arrdef)
        .name("recordU").type().unionOf().record("inner2").fields().name("f2").type().intType().noDefault().endRecord()
        .and().intType().endUnion().recordDefault(recdef2).endRecord();
    // Building with no explicit values must populate every field from its default.
    GenericData.Record newRec = new GenericRecordBuilder(r).build();
    assertEquals(false, newRec.get("boolF"));
    assertEquals(false, newRec.get("boolU"));
    assertEquals(1, newRec.get("intF"));
    assertEquals(1, newRec.get("intU"));
    assertEquals(2L, newRec.get("longF"));
    assertEquals(2L, newRec.get("longU"));
    assertEquals(3f, newRec.get("floatF"));
    assertEquals(3f, newRec.get("floatU"));
    assertEquals(4d, newRec.get("doubleF"));
    assertEquals(4d, newRec.get("doubleU"));
    assertEquals("def", newRec.get("stringF").toString());
    assertEquals("def", newRec.get("stringU").toString());
    // All three bytes-default spellings resolve to the same ByteBuffer content.
    assertEquals(bufdef, newRec.get("bytesF1"));
    assertEquals(bufdef, newRec.get("bytesF2"));
    assertEquals(bufdef, newRec.get("bytesF3"));
    assertEquals(bufdef, newRec.get("bytesU"));
    assertNull(newRec.get("nullF"));
    assertNull(newRec.get("nullU"));
    assertArrayEquals(bytedef, ((GenericData.Fixed) newRec.get("fixedF1")).bytes());
    assertArrayEquals(bytedef, ((GenericData.Fixed) newRec.get("fixedF2")).bytes());
    assertArrayEquals(bytedef, ((GenericData.Fixed) newRec.get("fixedF3")).bytes());
    assertArrayEquals(bytedef, ((GenericData.Fixed) newRec.get("fixedU")).bytes());
    assertEquals("S", newRec.get("enumF").toString());
    assertEquals("SS", newRec.get("enumU").toString());
    // Map/array defaults come back as generic CharSequence-based containers,
    // so compare entry-by-entry via toString().
    @SuppressWarnings("unchecked")
    Map<CharSequence, CharSequence> map = (Map<CharSequence, CharSequence>) newRec.get("mapF");
    assertEquals(mapdef.size(), map.size());
    for (Map.Entry<CharSequence, CharSequence> e : map.entrySet()) {
      assertEquals(mapdef.get(e.getKey().toString()), e.getValue().toString());
    }
    assertEquals(newRec.get("mapF"), newRec.get("mapU"));
    @SuppressWarnings("unchecked")
    GenericData.Array<CharSequence> arr = (GenericData.Array<CharSequence>) newRec.get("arrayF");
    assertEquals(arrdef.size(), arr.size());
    for (CharSequence c : arr) {
      assertTrue(arrdef.contains(c.toString()));
    }
    assertEquals(newRec.get("arrayF"), newRec.get("arrayU"));
    assertEquals(recdef, newRec.get("recordF"));
    assertEquals(recdef2, newRec.get("recordU"));
    assertEquals("S", newRec.get("byName").toString());
  }
  // A default value whose Java type does not correspond to the field's Avro
  // type (a bare Object for an int field) must be rejected at build time.
  @Test
  void badDefault() {
    assertThrows(SchemaBuilderException.class, () -> {
      SchemaBuilder.record("r").fields().name("f").type(Schema.create(Schema.Type.INT)).withDefault(new Object())
          .endRecord();
    });
  }
  // Builds a single union field containing every schema kind (primitives,
  // fixed, enum, array, map, record). Only successful construction is checked.
  @Test
  void unionFieldBuild() {
    SchemaBuilder.record("r").fields().name("allUnion").type().unionOf().booleanType().and().intType().and().longType()
        .and().floatType().and().doubleType().and().stringType().and().bytesType().and().nullType().and().fixed("Fix")
        .size(1).and().enumeration("Enu").symbols("Q").and().array().items().intType().and().map().values().longType()
        .and().record("Rec").fields().name("one").type("Fix").noDefault().endRecord().endUnion().booleanDefault(false)
        .endRecord();
  }
  // Exercises the optional()/nullable() field shortcuts together with schema
  // evolution: data written with the original schema is read back with a
  // schema that adds new optional/nullable fields.
  @Test
  void defaults() throws IOException {
    Schema writeSchema = SchemaBuilder.record("r").fields().name("requiredInt").type().intType().noDefault()
        .name("optionalInt").type().optional().intType().name("nullableIntWithDefault").type().nullable().intType()
        .intDefault(3).endRecord();
    // Unset optional fields default to null; nullable fields honor their declared default.
    GenericData.Record rec1 = new GenericRecordBuilder(writeSchema).set("requiredInt", 1).build();
    assertEquals(1, rec1.get("requiredInt"));
    assertNull(rec1.get("optionalInt"));
    assertEquals(3, rec1.get("nullableIntWithDefault"));
    // Explicitly set values override the defaults.
    GenericData.Record rec2 = new GenericRecordBuilder(writeSchema).set("requiredInt", 1).set("optionalInt", 2)
        .set("nullableIntWithDefault", 13).build();
    assertEquals(1, rec2.get("requiredInt"));
    assertEquals(2, rec2.get("optionalInt"));
    assertEquals(13, rec2.get("nullableIntWithDefault"));
    // write to file
    File file = new File(DIR.getPath(), "testDefaults.avro");
    try (DataFileWriter<Object> writer = new DataFileWriter<>(new GenericDatumWriter<>())) {
      writer.create(writeSchema, file);
      writer.append(rec1);
      writer.append(rec2);
    }
    // The read schema adds two fields; old records must resolve against it,
    // with the new fields taking their declared defaults.
    Schema readSchema = SchemaBuilder.record("r").fields().name("requiredInt").type().intType().noDefault()
        .name("optionalInt").type().optional().intType().name("nullableIntWithDefault").type().nullable().intType()
        .intDefault(3).name("newOptionalInt").type().optional().intType().name("newNullableIntWithDefault").type()
        .nullable().intType().intDefault(5).endRecord();
    try (DataFileReader<GenericData.Record> reader = new DataFileReader<>(file,
        new GenericDatumReader<>(writeSchema, readSchema))) {
      GenericData.Record rec1read = reader.iterator().next();
      assertEquals(1, rec1read.get("requiredInt"));
      assertNull(rec1read.get("optionalInt"));
      assertEquals(3, rec1read.get("nullableIntWithDefault"));
      assertNull(rec1read.get("newOptionalInt"));
      assertEquals(5, rec1read.get("newNullableIntWithDefault"));
      GenericData.Record rec2read = reader.iterator().next();
      assertEquals(1, rec2read.get("requiredInt"));
      assertEquals(2, rec2read.get("optionalInt"));
      assertEquals(13, rec2read.get("nullableIntWithDefault"));
      assertNull(rec2read.get("newOptionalInt"));
      assertEquals(5, rec2read.get("newNullableIntWithDefault"));
    }
  }
  // Field.defaultVal() must return the default with the correct boxed numeric
  // type (Integer/Long/Float/Double), not a widened or re-boxed value.
  @Test
  void defaultTypes() {
    Integer intDef = 1;
    Long longDef = 2L;
    Float floatDef = 3F;
    Double doubleDef = 4D;
    Schema schema = SchemaBuilder.record("r").fields().name("int").type().intType().intDefault(intDef).name("long")
        .type().longType().longDefault(longDef).name("float").type().floatType().floatDefault(floatDef).name("double")
        .type().doubleType().doubleDefault(doubleDef).endRecord();
    assertEquals(intDef, schema.getField("int").defaultVal(), "int field default type or value mismatch");
    assertEquals(longDef, schema.getField("long").defaultVal(), "long field default type or value mismatch");
    assertEquals(floatDef, schema.getField("float").defaultVal(), "float field default type or value mismatch");
    assertEquals(doubleDef, schema.getField("double").defaultVal(), "double field default type or value mismatch");
  }
  // Default validation is on by default: an int field with a String default
  // must raise, and the exception message is pinned exactly.
  @Test
  void validateDefaultsEnabled() {
    assertThrows(AvroRuntimeException.class, () -> {
      try {
        SchemaBuilder.record("ValidationRecord").fields().name("IntegerField").type("int").withDefault("Invalid")
            .endRecord();
      } catch (AvroRuntimeException e) {
        assertEquals("Invalid default for field IntegerField: \"Invalid\" not a \"int\"", e.getMessage(),
            "Default behavior is to raise an exception due to record having an invalid default");
        throw e;
      }
    });
  }
  // With notValidatingDefaults(), a type-mismatched default is accepted:
  // defaultVal() yields null but the raw JSON default is still retained.
  @Test
  void validateDefaultsDisabled() {
    final String fieldName = "IntegerField";
    final String defaultValue = "foo";
    Schema schema = SchemaBuilder.record("ValidationRecord").fields().name(fieldName).notValidatingDefaults()
        .type("int").withDefault(defaultValue) // Would throw an exception on endRecord() if validations enabled
        .endRecord();
    assertNull(schema.getField(fieldName).defaultVal(), "Differing types, so this returns null");
    assertEquals(defaultValue, schema.getField(fieldName).defaultValue().asText(),
        "Schema is able to be successfully created as is without validation");
  }
  /**
   * https://issues.apache.org/jira/browse/AVRO-1965
   */
  // A nested record tree whose root lives in the "default" namespace must
  // survive a toString()/parse round trip unchanged.
  @Test
  void namespaceDefaulting() {
    Schema d = SchemaBuilder.builder().intType();
    Schema c = SchemaBuilder.record("c").fields().name("d").type(d).noDefault().endRecord();
    Schema b = SchemaBuilder.record("b").fields().name("c").type(c).noDefault().endRecord();
    Schema a1 = SchemaBuilder.record("default.a").fields().name("b").type(b).noDefault().endRecord();
    Schema a2 = new Schema.Parser().parse(a1.toString());
    assertEquals(a2, a1);
  }
  // With NO_VALIDATION, otherwise-illegal names ("7name", "123") are accepted.
  @Test
  void namesAcceptAll() throws InterruptedException {
    // Ensure that Schema.setNameValidator won't interfere with other unit tests.
    // The test runs on its own thread so the relaxed validator does not leak;
    // this isolation presumably relies on the validator being thread-scoped —
    // TODO(review): confirm against Schema.setNameValidator's contract.
    Runnable r = () -> {
      Schema.setNameValidator(NameValidator.NO_VALIDATION);
      final Schema schema = SchemaBuilder.record("7name").fields().name("123").type(Schema.create(Schema.Type.INT))
          .noDefault().endRecord();
      Assertions.assertNotNull(schema);
      Assertions.assertEquals("7name", schema.getName());
      final Schema.Field field = schema.getField("123");
      Assertions.assertEquals("123", field.name());
    };
    // Capture any assertion failure raised on the worker thread and re-check it here.
    final Throwable[] exception = new Throwable[] { null };
    Thread t = new Thread(r);
    t.setUncaughtExceptionHandler((Thread th, Throwable e) -> exception[0] = e);
    t.start();
    t.join();
    Assertions.assertNull(exception[0], () -> exception[0].getMessage());
  }
}
|
TestSchemaBuilder
|
java
|
alibaba__nacos
|
client/src/test/java/com/alibaba/nacos/client/naming/selector/NamingSelectorFactoryTest.java
|
{
"start": 1318,
"end": 7107
}
|
class ____ {

    /** Cluster selector keeps only instances whose cluster name is listed; an empty list keeps everything. */
    @Test
    public void testNewClusterSelector1() {
        Instance ins1 = new Instance();
        ins1.setClusterName("a");
        Instance ins2 = new Instance();
        ins2.setClusterName("b");
        Instance ins3 = new Instance();
        ins3.setClusterName("c");
        NamingContext namingContext = mock(NamingContext.class);
        when(namingContext.getInstances()).thenReturn(Arrays.asList(ins1, ins2, ins3));
        NamingSelector namingSelector1 = NamingSelectorFactory.newClusterSelector(Collections.singletonList("a"));
        NamingResult result1 = namingSelector1.select(namingContext);
        assertEquals("a", result1.getResult().get(0).getClusterName());
        NamingSelector namingSelector2 = NamingSelectorFactory.newClusterSelector(Collections.emptyList());
        NamingResult result2 = namingSelector2.select(namingContext);
        assertEquals(3, result2.getResult().size());
    }

    /** Cluster selectors built from the same *set* of clusters compare equal (order- and duplicate-insensitive). */
    @Test
    public void testNewClusterSelector2() {
        NamingSelector namingSelector1 = NamingSelectorFactory.newClusterSelector(Arrays.asList("a", "b", "c"));
        NamingSelector namingSelector2 = NamingSelectorFactory.newClusterSelector(Arrays.asList("c", "b", "a"));
        NamingSelector namingSelector3 = NamingSelectorFactory.newClusterSelector(Arrays.asList("a", "b", "c", "c"));
        NamingSelector namingSelector4 = NamingSelectorFactory.newClusterSelector(Arrays.asList("d", "e"));
        assertEquals(namingSelector1, namingSelector2);
        assertEquals(namingSelector1, namingSelector3);
        assertNotEquals(namingSelector1, namingSelector4);
    }

    /** IP selector keeps instances whose IP matches the given regex, preserving input order. */
    @Test
    public void testNewIpSelector() {
        Instance ins1 = new Instance();
        ins1.setIp("172.18.137.120");
        Instance ins2 = new Instance();
        ins2.setIp("172.18.137.121");
        Instance ins3 = new Instance();
        ins3.setIp("172.18.136.111");
        NamingContext namingContext = mock(NamingContext.class);
        when(namingContext.getInstances()).thenReturn(Arrays.asList(ins1, ins2, ins3));
        NamingSelector ipSelector = NamingSelectorFactory.newIpSelector("^172\\.18\\.137.*");
        NamingResult result = ipSelector.select(namingContext);
        List<Instance> list = result.getResult();
        assertEquals(2, list.size());
        assertEquals(ins1.getIp(), list.get(0).getIp());
        assertEquals(ins2.getIp(), list.get(1).getIp());
    }

    /** Exact-match metadata selector: an instance must carry every required key/value pair. */
    @Test
    public void testNewMetadataSelector() {
        Instance ins1 = new Instance();
        ins1.setMetadata(new LinkedHashMap<>());
        ins1.addMetadata("a", "1");
        ins1.addMetadata("b", "2");
        Instance ins2 = new Instance();
        ins2.addMetadata("a", "1");
        Instance ins3 = new Instance();
        ins3.addMetadata("b", "2");
        NamingContext namingContext = mock(NamingContext.class);
        when(namingContext.getInstances()).thenReturn(Arrays.asList(ins1, ins2, ins3));
        // Typed map instead of the raw double-brace initialization, which created
        // an anonymous LinkedHashMap subclass and a raw-type warning.
        LinkedHashMap<String, String> requiredMetadata = new LinkedHashMap<>();
        requiredMetadata.put("a", "1");
        requiredMetadata.put("b", "2");
        NamingSelector metadataSelector = NamingSelectorFactory.newMetadataSelector(requiredMetadata);
        List<Instance> result = metadataSelector.select(namingContext).getResult();
        assertEquals(1, result.size());
        assertEquals(ins1, result.get(0));
    }

    /** Any-match metadata selector (flag=true): one matching key/value pair is enough. */
    @Test
    public void testNewMetadataSelector2() {
        Instance ins1 = new Instance();
        ins1.setMetadata(new LinkedHashMap<>());
        ins1.addMetadata("a", "1");
        ins1.addMetadata("c", "3");
        Instance ins2 = new Instance();
        ins2.addMetadata("b", "2");
        Instance ins3 = new Instance();
        ins3.addMetadata("c", "3");
        NamingContext namingContext = mock(NamingContext.class);
        when(namingContext.getInstances()).thenReturn(Arrays.asList(ins1, ins2, ins3));
        // Typed map instead of the raw double-brace initialization (see above).
        LinkedHashMap<String, String> anyOfMetadata = new LinkedHashMap<>();
        anyOfMetadata.put("a", "1");
        anyOfMetadata.put("b", "2");
        NamingSelector metadataSelector = NamingSelectorFactory.newMetadataSelector(anyOfMetadata, true);
        List<Instance> result = metadataSelector.select(namingContext).getResult();
        assertEquals(2, result.size());
        assertEquals(ins1, result.get(0));
        assertEquals(ins2, result.get(1));
    }

    /** HEALTHY_SELECTOR drops unhealthy instances. */
    @Test
    public void testHealthSelector() {
        Instance ins1 = new Instance();
        Instance ins2 = new Instance();
        Instance ins3 = new Instance();
        ins3.setHealthy(false);
        NamingContext namingContext = mock(NamingContext.class);
        when(namingContext.getInstances()).thenReturn(Arrays.asList(ins1, ins2, ins3));
        List<Instance> result = NamingSelectorFactory.HEALTHY_SELECTOR.select(namingContext).getResult();
        assertEquals(2, result.size());
        assertTrue(result.contains(ins1));
        assertTrue(result.contains(ins2));
        assertTrue(result.get(0).isHealthy());
        assertTrue(result.get(1).isHealthy());
    }

    /** EMPTY_SELECTOR filters nothing: all instances pass through. */
    @Test
    public void testEmptySelector() {
        Instance ins1 = new Instance();
        Instance ins2 = new Instance();
        Instance ins3 = new Instance();
        NamingContext namingContext = mock(NamingContext.class);
        when(namingContext.getInstances()).thenReturn(Arrays.asList(ins1, ins2, ins3));
        List<Instance> result = NamingSelectorFactory.EMPTY_SELECTOR.select(namingContext).getResult();
        assertEquals(3, result.size());
        assertTrue(result.contains(ins1));
        assertTrue(result.contains(ins2));
        assertTrue(result.contains(ins3));
    }
}
|
NamingSelectorFactoryTest
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/filecache/DistributedCache.java
|
{
"start": 5901,
"end": 12861
}
|
/**
 * M/R 1.x compatibility facade over the M/R 2.x distributed cache: every member
 * is deprecated and either mirrors a configuration key or manipulates the same
 * keys that the replacement API (named in each member's javadoc) now owns.
 */
class ____ extends
    org.apache.hadoop.mapreduce.filecache.DistributedCache {
  /**
   * Warning: {@link #CACHE_FILES_SIZES} is not a *public* constant.
   * The variable is kept for M/R 1.x applications, M/R 2.x applications should
   * use {@link MRJobConfig#CACHE_FILES_SIZES}
   */
  @Deprecated
  public static final String CACHE_FILES_SIZES =
      "mapred.cache.files.filesizes";
  /**
   * Warning: {@link #CACHE_ARCHIVES_SIZES} is not a *public* constant.
   * The variable is kept for M/R 1.x applications, M/R 2.x applications should
   * use {@link MRJobConfig#CACHE_ARCHIVES_SIZES}
   */
  @Deprecated
  public static final String CACHE_ARCHIVES_SIZES =
      "mapred.cache.archives.filesizes";
  /**
   * Warning: {@link #CACHE_ARCHIVES_TIMESTAMPS} is not a *public* constant.
   * The variable is kept for M/R 1.x applications, M/R 2.x applications should
   * use {@link MRJobConfig#CACHE_ARCHIVES_TIMESTAMPS}
   */
  @Deprecated
  public static final String CACHE_ARCHIVES_TIMESTAMPS =
      "mapred.cache.archives.timestamps";
  /**
   * Warning: {@link #CACHE_FILES_TIMESTAMPS} is not a *public* constant.
   * The variable is kept for M/R 1.x applications, M/R 2.x applications should
   * use {@link MRJobConfig#CACHE_FILE_TIMESTAMPS}
   */
  @Deprecated
  public static final String CACHE_FILES_TIMESTAMPS =
      "mapred.cache.files.timestamps";
  /**
   * Warning: {@link #CACHE_ARCHIVES} is not a *public* constant.
   * The variable is kept for M/R 1.x applications, M/R 2.x applications should
   * use {@link MRJobConfig#CACHE_ARCHIVES}
   */
  @Deprecated
  public static final String CACHE_ARCHIVES = "mapred.cache.archives";
  /**
   * Warning: {@link #CACHE_FILES} is not a *public* constant.
   * The variable is kept for M/R 1.x applications, M/R 2.x applications should
   * use {@link MRJobConfig#CACHE_FILES}
   */
  @Deprecated
  public static final String CACHE_FILES = "mapred.cache.files";
  /**
   * Warning: {@link #CACHE_LOCALARCHIVES} is not a *public* constant.
   * The variable is kept for M/R 1.x applications, M/R 2.x applications should
   * use {@link MRJobConfig#CACHE_LOCALARCHIVES}
   */
  @Deprecated
  public static final String CACHE_LOCALARCHIVES =
      "mapred.cache.localArchives";
  /**
   * Warning: {@link #CACHE_LOCALFILES} is not a *public* constant.
   * The variable is kept for M/R 1.x applications, M/R 2.x applications should
   * use {@link MRJobConfig#CACHE_LOCALFILES}
   */
  @Deprecated
  public static final String CACHE_LOCALFILES = "mapred.cache.localFiles";
  /**
   * Warning: {@link #CACHE_SYMLINK} is not a *public* constant.
   * The variable is kept for M/R 1.x applications, M/R 2.x applications should
   * use {@link MRJobConfig#CACHE_SYMLINK}
   */
  @Deprecated
  public static final String CACHE_SYMLINK = "mapred.create.symlink";
  /**
   * Add a archive that has been localized to the conf. Used
   * by internal DistributedCache code.
   * @param conf The conf to modify to contain the localized caches
   * @param str a comma separated list of local archives
   */
  @Deprecated
  public static void addLocalArchives(Configuration conf, String str) {
    String archives = conf.get(CACHE_LOCALARCHIVES);
    // Append to the existing comma-separated list, or start a new one.
    conf.set(CACHE_LOCALARCHIVES, archives == null ? str
        : archives + "," + str);
  }
  /**
   * Add a file that has been localized to the conf.. Used
   * by internal DistributedCache code.
   * @param conf The conf to modify to contain the localized caches
   * @param str a comma separated list of local files
   */
  @Deprecated
  public static void addLocalFiles(Configuration conf, String str) {
    String files = conf.get(CACHE_LOCALFILES);
    // Append to the existing comma-separated list, or start a new one.
    conf.set(CACHE_LOCALFILES, files == null ? str
        : files + "," + str);
  }
  /**
   * This method create symlinks for all files in a given dir in another
   * directory. Currently symlinks cannot be disabled. This is a NO-OP.
   *
   * @param conf the configuration
   * @param jobCacheDir the target directory for creating symlinks
   * @param workDir the directory in which the symlinks are created
   * @throws IOException
   * @deprecated Internal to MapReduce framework. Use DistributedCacheManager
   * instead.
   */
  @Deprecated
  public static void createAllSymlink(
      Configuration conf, File jobCacheDir, File workDir)
      throws IOException{
    // Do nothing
  }
  /**
   * Returns {@link FileStatus} of a given cache file on hdfs. Internal to
   * MapReduce.
   * @param conf configuration
   * @param cache cache file
   * @return <code>FileStatus</code> of a given cache file on hdfs
   * @throws IOException
   */
  @Deprecated
  public static FileStatus getFileStatus(Configuration conf, URI cache)
      throws IOException {
    FileSystem fileSystem = FileSystem.get(cache, conf);
    return fileSystem.getFileStatus(new Path(cache.getPath()));
  }
  /**
   * Returns mtime of a given cache file on hdfs. Internal to MapReduce.
   * @param conf configuration
   * @param cache cache file
   * @return mtime of a given cache file on hdfs
   * @throws IOException
   */
  @Deprecated
  public static long getTimestamp(Configuration conf, URI cache)
      throws IOException {
    return getFileStatus(conf, cache).getModificationTime();
  }
  /**
   * This is to check the timestamp of the archives to be localized.
   * Used by internal MapReduce code.
   * @param conf Configuration which stores the timestamp's
   * @param timestamps comma separated list of timestamps of archives.
   * The order should be the same as the order in which the archives are added.
   */
  @Deprecated
  public static void setArchiveTimestamps(Configuration conf, String timestamps) {
    conf.set(CACHE_ARCHIVES_TIMESTAMPS, timestamps);
  }
  /**
   * This is to check the timestamp of the files to be localized.
   * Used by internal MapReduce code.
   * @param conf Configuration which stores the timestamp's
   * @param timestamps comma separated list of timestamps of files.
   * The order should be the same as the order in which the files are added.
   */
  @Deprecated
  public static void setFileTimestamps(Configuration conf, String timestamps) {
    conf.set(CACHE_FILES_TIMESTAMPS, timestamps);
  }
  /**
   * Set the conf to contain the location for localized archives. Used
   * by internal DistributedCache code.
   * @param conf The conf to modify to contain the localized caches
   * @param str a comma separated list of local archives
   */
  @Deprecated
  public static void setLocalArchives(Configuration conf, String str) {
    conf.set(CACHE_LOCALARCHIVES, str);
  }
  /**
   * Set the conf to contain the location for localized files. Used
   * by internal DistributedCache code.
   * @param conf The conf to modify to contain the localized caches
   * @param str a comma separated list of local files
   */
  @Deprecated
  public static void setLocalFiles(Configuration conf, String str) {
    conf.set(CACHE_LOCALFILES, str);
  }
}
|
DistributedCache
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/sql/ast/statement/SQLLateralViewTableSource.java
|
{
"start": 959,
"end": 4325
}
|
/**
 * AST table source for a LATERAL VIEW clause: a base table source plus the
 * table-generating function ({@code method}), the generated column aliases,
 * and an optional ODPS-specific ON expression.
 */
class ____ extends SQLTableSourceImpl {
    private SQLTableSource tableSource;
    private boolean outer;
    private SQLMethodInvokeExpr method;
    private List<SQLName> columns = new ArrayList<SQLName>(2);
    private SQLExpr on; // odps
    public SQLLateralViewTableSource() {
    }
    @Override
    protected void accept0(SQLASTVisitor visitor) {
        if (visitor.visit(this)) {
            acceptChild(visitor, tableSource);
            acceptChild(visitor, method);
            acceptChild(visitor, columns);
            // NOTE(review): 'on' is not visited here — confirm whether visitors
            // are expected to traverse the ODPS ON expression.
            super.accept0(visitor);
        }
        visitor.endVisit(this);
    }
    public SQLTableSource getTableSource() {
        return tableSource;
    }
    public void setTableSource(SQLTableSource tableSource) {
        if (tableSource != null) {
            tableSource.setParent(this);
        }
        this.tableSource = tableSource;
    }
    public SQLMethodInvokeExpr getMethod() {
        return method;
    }
    public void setMethod(SQLMethodInvokeExpr method) {
        if (method != null) {
            method.setParent(this);
        }
        this.method = method;
    }
    public List<SQLName> getColumns() {
        return columns;
    }
    public void setColumns(List<SQLName> columns) {
        // NOTE(review): unlike setTableSource/setMethod, this does not re-parent
        // the supplied names; callers are expected to have done so.
        this.columns = columns;
    }
    /**
     * Resolves an alias hash against this lateral view: matches the view's own
     * alias, any generated column name, or falls through to the base source.
     */
    public SQLTableSource findTableSource(long alias_hash) {
        long hash = this.aliasHashCode64();
        if (hash != 0 && hash == alias_hash) {
            return this;
        }
        for (SQLName column : columns) {
            if (column.nameHashCode64() == alias_hash) {
                return this;
            }
        }
        if (tableSource != null) {
            return tableSource.findTableSource(alias_hash);
        }
        return null;
    }
    /**
     * Resolves a column reference: generated columns belong to this view,
     * anything else is delegated to the base table source.
     */
    public SQLTableSource findTableSourceWithColumn(long columnNameHash, String columnName, int option) {
        for (SQLName column : columns) {
            if (column.nameHashCode64() == columnNameHash) {
                return this;
            }
        }
        if (tableSource != null) {
            return tableSource.findTableSourceWithColumn(columnNameHash, columnName, option);
        }
        return null;
    }
    /**
     * Deep copy of this lateral view.
     * <p>
     * Fix: previously the ODPS {@code on} expression was silently dropped by
     * the clone; it is now deep-copied along with every other child.
     */
    @Override
    public SQLLateralViewTableSource clone() {
        SQLLateralViewTableSource x = new SQLLateralViewTableSource();
        x.setAlias(this.alias);
        x.outer = outer;
        if (this.tableSource != null) {
            x.setTableSource(this.tableSource.clone());
        }
        if (this.method != null) {
            x.setMethod(this.method.clone());
        }
        for (SQLName column : this.columns) {
            SQLName e2 = column.clone();
            e2.setParent(x);
            x.getColumns().add(e2);
        }
        if (this.on != null) {
            x.setOn(this.on.clone());
        }
        if (this.flashback != null) {
            x.setFlashback(this.flashback.clone());
        }
        if (this.hints != null) {
            for (SQLHint e : this.hints) {
                SQLHint e2 = e.clone();
                e2.setParent(x);
                x.getHints().add(e2);
            }
        }
        return x;
    }
    public boolean isOuter() {
        return outer;
    }
    public void setOuter(boolean outer) {
        this.outer = outer;
    }
    public SQLExpr getOn() {
        return on;
    }
    public void setOn(SQLExpr on) {
        this.on = on;
    }
}
|
SQLLateralViewTableSource
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/functions/aggfunctions/MinWithRetractAggFunctionTest.java
|
{
"start": 12603,
"end": 14072
}
|
// MinWithRetract aggregation over nanosecond-precision (TIMESTAMP(9))
// TimestampData values; nulls in the input do not contribute to the minimum.
class ____
        extends MinWithRetractAggFunctionTestBase<TimestampData> {
    @Override
    protected List<List<TimestampData>> getInputValueSets() {
        // Three input sets: mixed values with a null, all-null, and
        // null-leading values differing only in the nanosecond component.
        return Arrays.asList(
                Arrays.asList(
                        TimestampData.fromEpochMillis(0, 1),
                        TimestampData.fromEpochMillis(0, 2),
                        TimestampData.fromEpochMillis(1000, 0),
                        TimestampData.fromEpochMillis(100, 0),
                        null,
                        TimestampData.fromEpochMillis(10, 0)),
                Arrays.asList(null, null, null, null, null),
                Arrays.asList(
                        null,
                        TimestampData.fromEpochMillis(1, 1),
                        TimestampData.fromEpochMillis(1, 2)));
    }
    @Override
    protected List<TimestampData> getExpectedResults() {
        // Per-set minima; an all-null set yields null.
        return Arrays.asList(
                TimestampData.fromEpochMillis(0, 1), null, TimestampData.fromEpochMillis(1, 1));
    }
    @Override
    protected AggregateFunction<TimestampData, MinWithRetractAccumulator<TimestampData>>
            getAggregator() {
        return new MinWithRetractAggFunction<>(DataTypes.TIMESTAMP(9).getLogicalType());
    }
}
/** Test for {@link LocalZonedTimestampType}. */
@Nested
final
|
Timestamp9MinWithRetractAggFunctionTest
|
java
|
spring-projects__spring-framework
|
spring-test/src/main/java/org/springframework/test/web/servlet/htmlunit/MockMvcWebConnectionBuilderSupport.java
|
{
"start": 1238,
"end": 1676
}
|
class ____ simplifies the creation of a {@link WebConnection} that
* uses {@link MockMvc} and optionally delegates to a real {@link WebConnection}
* for specific requests.
*
* <p>The default is to use {@link MockMvc} for requests to {@code localhost}
* and otherwise use a real {@link WebConnection}.
*
* @author Rob Winch
* @author Sam Brannen
* @since 4.2
* @param <T> a self reference to the builder type
*/
public abstract
|
that
|
java
|
apache__flink
|
flink-core/src/test/java/org/apache/flink/api/java/typeutils/CompositeTypeTest.java
|
{
"start": 11509,
"end": 11579
}
|
// Minimal mutable POJO test fixture with two public fields
// ("a": String, "b": int).
class ____ {
    public String a;
    public int b;
}
}
|
MyPojo
|
java
|
spring-projects__spring-boot
|
configuration-metadata/spring-boot-configuration-processor/src/test/java/org/springframework/boot/configurationsample/specific/DeprecatedUnrelatedMethodPojo.java
|
{
"start": 964,
"end": 1491
}
|
// Fixture pojo: each property has a live accessor pair plus an unrelated
// deprecated setter overload.
class ____ {

    private Integer counter;

    private boolean flag;

    public Integer getCounter() {
        return counter;
    }

    public void setCounter(Integer counter) {
        this.counter = counter;
    }

    // Deprecated convenience overload accepting the counter as text.
    @Deprecated
    public void setCounter(String counterAsString) {
        this.counter = Integer.parseInt(counterAsString);
    }

    public boolean isFlag() {
        return flag;
    }

    public void setFlag(boolean flag) {
        this.flag = flag;
    }

    // Deprecated boxed overload (unboxes; null would throw, as before).
    @Deprecated
    public void setFlag(Boolean flag) {
        this.flag = flag;
    }
}
|
DeprecatedUnrelatedMethodPojo
|
java
|
alibaba__fastjson
|
src/main/java/com/alibaba/fastjson/util/ASMUtils.java
|
{
"start": 428,
"end": 6074
}
|
// Reflection/bytecode helpers: JVM type-descriptor construction and
// classfile-based parameter-name recovery.
class ____ {
    // Name of the running VM implementation, used for Android detection.
    public static final String JAVA_VM_NAME = System.getProperty("java.vm.name");
    // True on Android (Dalvik/Lemur) VMs, where classfile resources are not
    // available for parameter-name lookup.
    public static final boolean IS_ANDROID = isAndroid(JAVA_VM_NAME);
    /** Returns true when the VM name identifies an Android runtime (Dalvik, or Aliyun's Lemur). */
    public static boolean isAndroid(String vmName) {
        if (vmName == null) { // default is false
            return false;
        }
        String lowerVMName = vmName.toLowerCase();
        return lowerVMName.contains("dalvik") //
               || lowerVMName.contains("lemur") // aliyun-vm name
        ;
    }
    /** Builds the JVM method descriptor for {@code method}, e.g. "(ILjava/lang/String;)V". */
    public static String desc(Method method) {
        Class<?>[] types = method.getParameterTypes();
        StringBuilder buf = new StringBuilder((types.length + 1) << 4);
        buf.append('(');
        for (int i = 0; i < types.length; ++i) {
            buf.append(desc(types[i]));
        }
        buf.append(')');
        buf.append(desc(method.getReturnType()));
        return buf.toString();
    }
    /** Builds the JVM field descriptor for a type: primitive letter, "[..." for arrays, or "Lpkg/Name;". */
    public static String desc(Class<?> returnType) {
        if (returnType.isPrimitive()) {
            return getPrimitiveLetter(returnType);
        } else if (returnType.isArray()) {
            return "[" + desc(returnType.getComponentType());
        } else {
            return "L" + type(returnType) + ";";
        }
    }
    /** Internal-form type name: slashes for packages, descriptors for arrays, letters for primitives. */
    public static String type(Class<?> parameterType) {
        if (parameterType.isArray()) {
            return "[" + desc(parameterType.getComponentType());
        } else {
            if (!parameterType.isPrimitive()) {
                String clsName = parameterType.getName();
                return clsName.replace('.', '/'); // plain string replace; deliberately avoids regex-based replacement
            } else {
                return getPrimitiveLetter(parameterType);
            }
        }
    }
    /** Maps a primitive class to its single-letter JVM descriptor (I, V, Z, C, B, S, F, J, D). */
    public static String getPrimitiveLetter(Class<?> type) {
        if (Integer.TYPE == type) {
            return "I";
        } else if (Void.TYPE == type) {
            return "V";
        } else if (Boolean.TYPE == type) {
            return "Z";
        } else if (Character.TYPE == type) {
            return "C";
        } else if (Byte.TYPE == type) {
            return "B";
        } else if (Short.TYPE == type) {
            return "S";
        } else if (Float.TYPE == type) {
            return "F";
        } else if (Long.TYPE == type) {
            return "J";
        } else if (Double.TYPE == type) {
            return "D";
        }
        throw new IllegalStateException("Type: " + type.getCanonicalName() + " is not a primitive type");
    }
    /**
     * Generic return type of the named no-arg method, or null when the method
     * is absent or inaccessible (best-effort by design — failures are swallowed).
     */
    public static Type getMethodType(Class<?> clazz, String methodName) {
        try {
            Method method = clazz.getMethod(methodName);
            return method.getGenericReturnType();
        } catch (Exception ex) {
            return null;
        }
    }
    /** True when every character is ASCII (\u0001-\u007F) and none is '.'. */
    public static boolean checkName(String name) {
        for (int i = 0; i < name.length(); ++i) {
            char c = name.charAt(i);
            if (c < '\001' || c > '\177' || c == '.') {
                return false;
            }
        }
        return true;
    }
    /**
     * Recovers parameter names for a method or constructor by reading its
     * declaring class's .class resource and visiting it with TypeCollector,
     * then overriding individual names from @JSONField(name=...) annotations.
     * Returns an empty array on Android, when the classfile resource is
     * missing, or on read failure.
     */
    public static String[] lookupParameterNames(AccessibleObject methodOrCtor) {
        if (IS_ANDROID) {
            return new String[0];
        }
        final Class<?>[] types;
        final Class<?> declaringClass;
        final String name;
        Annotation[][] parameterAnnotations;
        if (methodOrCtor instanceof Method) {
            Method method = (Method) methodOrCtor;
            types = method.getParameterTypes();
            name = method.getName();
            declaringClass = method.getDeclaringClass();
            parameterAnnotations = TypeUtils.getParameterAnnotations(method);
        } else {
            Constructor<?> constructor = (Constructor<?>) methodOrCtor;
            types = constructor.getParameterTypes();
            declaringClass = constructor.getDeclaringClass();
            name = "<init>";
            parameterAnnotations = TypeUtils.getParameterAnnotations(constructor);
        }
        if (types.length == 0) {
            return new String[0];
        }
        // Bootstrap-loaded classes report a null loader; fall back to the system loader.
        ClassLoader classLoader = declaringClass.getClassLoader();
        if (classLoader == null) {
            classLoader = ClassLoader.getSystemClassLoader();
        }
        String className = declaringClass.getName();
        String resourceName = className.replace('.', '/') + ".class";
        InputStream is = classLoader.getResourceAsStream(resourceName);
        if (is == null) {
            return new String[0];
        }
        try {
            ClassReader reader = new ClassReader(is, false);
            TypeCollector visitor = new TypeCollector(name, types);
            reader.accept(visitor);
            String[] parameterNames = visitor.getParameterNamesForMethod();
            // A non-empty @JSONField name wins over the bytecode-derived name.
            for (int i = 0; i < parameterNames.length; i++) {
                Annotation[] annotations = parameterAnnotations[i];
                if (annotations != null) {
                    for (int j = 0; j < annotations.length; j++) {
                        if (annotations[j] instanceof JSONField) {
                            JSONField jsonField = (JSONField) annotations[j];
                            String fieldName = jsonField.name();
                            if (fieldName != null && fieldName.length() > 0) {
                                parameterNames[i] = fieldName;
                            }
                        }
                    }
                }
            }
            return parameterNames;
        } catch (IOException e) {
            return new String[0];
        } finally {
            IOUtils.close(is);
        }
    }
}
|
ASMUtils
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/processor/MulticastWithOnExceptionLastTest.java
|
{
"start": 894,
"end": 1596
}
|
class ____ extends MulticastWithOnExceptionTest {
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
onException(Exception.class).handled(true).to("mock:handled").transform(simple("Damn ${exception.message}"));
from("direct:start").multicast().to("direct:foo", "direct:baz", "direct:bar").end().to("mock:result");
from("direct:foo").to("mock:foo");
from("direct:bar").process(new MyProcessor()).to("mock:bar");
from("direct:baz").to("mock:baz");
}
};
}
}
|
MulticastWithOnExceptionLastTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollector.java
|
{
"start": 6239,
"end": 10690
}
|
class ____ implements FieldCollector {
private final String sourceFieldName;
private final String targetFieldName;
private final boolean missingBucket;
private final Set<String> changedTerms;
// although we could add null to the hash set, its easier to handle null separately
private boolean foundNullBucket;
TermsFieldCollector(final String sourceFieldName, final String targetFieldName, final boolean missingBucket) {
assert sourceFieldName != null;
this.sourceFieldName = sourceFieldName;
this.targetFieldName = targetFieldName;
this.missingBucket = missingBucket;
this.changedTerms = new HashSet<>();
this.foundNullBucket = false;
}
@Override
public int getMaxPageSize() {
// TODO: based on index.max_terms_count, however this is per index, which we don't have access to here,
// because the page size is limit to 64k anyhow, return 64k
return 65536;
}
@Override
public CompositeValuesSourceBuilder<?> getCompositeValueSourceBuilder() {
return new TermsValuesSourceBuilder(targetFieldName).field(sourceFieldName).missingBucket(missingBucket);
}
@Override
public boolean collectChangesFromCompositeBuckets(Collection<? extends Bucket> buckets) {
changedTerms.clear();
foundNullBucket = false;
for (Bucket b : buckets) {
Object term = b.getKey().get(targetFieldName);
if (term != null) {
changedTerms.add(term.toString());
} else {
// we should not find a null bucket if missing bucket is false
assert missingBucket;
foundNullBucket = true;
}
}
// if buckets have been found, we need another run
return buckets.isEmpty();
}
@Override
public QueryBuilder filterByChanges(long lastCheckpointTimestamp, long nextcheckpointTimestamp) {
if (missingBucket && foundNullBucket) {
QueryBuilder missingBucketQuery = new BoolQueryBuilder().mustNot(new ExistsQueryBuilder(sourceFieldName));
if (changedTerms.isEmpty()) {
return missingBucketQuery;
}
/**
* Combined query with terms and missing bucket:
*
* "bool": {
* "should": [
* {
* "terms": {
* "source_field": [
* "term1",
* "term2",
* ...
* ]
* }
* },
* {
* "bool": {
* "must_not": [
* {
* "exists": {
* "field": "source_field"
* }
* }
* ]
* }
* }
* ]
* }
*/
return new BoolQueryBuilder().should(new TermsQueryBuilder(sourceFieldName, changedTerms)).should(missingBucketQuery);
} else if (changedTerms.isEmpty() == false) {
return new TermsQueryBuilder(sourceFieldName, changedTerms);
}
return null;
}
@Override
public void clear() {
changedTerms.clear();
foundNullBucket = false;
}
@Override
public Collection<AggregationBuilder> aggregateChanges() {
return Collections.emptyList();
}
@Override
public boolean collectChangesFromAggregations(InternalAggregations aggregations) {
return true;
}
@Override
public boolean isOptimized() {
return true;
}
@Override
public boolean queryForChanges() {
return true;
}
}
/**
* Date histogram field collector for the case that it shares the same timestamp field as for sync
*
* Note: does not support missing_bucket
*/
static
|
TermsFieldCollector
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/ops/TimestampedEntity.java
|
{
"start": 234,
"end": 787
}
|
class ____ {
private String id;
private String name;
private Date timestamp;
public TimestampedEntity() {
}
public TimestampedEntity(String id, String name) {
this.id = id;
this.name = name;
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Date getTimestamp() {
return timestamp;
}
public void setTimestamp(Date timestamp) {
this.timestamp = timestamp;
}
}
|
TimestampedEntity
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/refaster/testdata/template/VoidExpressionPlaceholderTemplate.java
|
{
"start": 982,
"end": 1293
}
|
class ____<T> {
@Placeholder
abstract void consume(T t);
@BeforeTemplate
void before(Collection<T> collection) {
collection.stream().forEach(x -> consume(x));
}
@AfterTemplate
void after(Collection<T> collection) {
collection.forEach(x -> consume(x));
}
}
|
VoidExpressionPlaceholderTemplate
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/taskexecutor/TaskExecutorOperatorEventHandlingTest.java
|
{
"start": 10912,
"end": 11552
}
|
class ____ extends CancelableInvokable {
public CoordinationRequestSendingInvokable(Environment environment) {
super(environment);
}
@Override
protected void doInvoke() throws Exception {
getEnvironment()
.getOperatorCoordinatorEventGateway()
.sendRequestToCoordinator(
new OperatorID(),
new SerializedValue<>(
new TestingCoordinationRequestHandler.Request<>(0L)));
waitUntilCancelled();
}
}
}
|
CoordinationRequestSendingInvokable
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/SerializeEnumAsJavaBeanTest.java
|
{
"start": 1451,
"end": 1546
}
|
class ____ {
public OrderType orderType;
public OrderType orderType1;
}
}
|
Model
|
java
|
google__error-prone
|
check_api/src/test/java/com/google/errorprone/util/MoreAnnotationsTest.java
|
{
"start": 3942,
"end": 4047
}
|
interface ____ {}
@Target({TYPE, CONSTRUCTOR, FIELD, LOCAL_VARIABLE, METHOD, PARAMETER, TYPE_PARAMETER})
@
|
TA
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/apidiff/ApiDiffCheckerTest.java
|
{
"start": 18458,
"end": 18635
}
|
class ____ {
@RequiresNewApiVersion
public void foo() {}
}
""")
.addSourceLines(
"Test.java",
"""
import my.lib.Lib;
|
Lib
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/exec/common/CommonExecVectorSearchTableFunction.java
|
{
"start": 4277,
"end": 14819
}
|
class ____ extends ExecNodeBase<RowData> {
public static final String VECTOR_SEARCH_TRANSFORMATION = "vector-search-table-function";
protected static final String FIELD_NAME_TABLE_SOURCE_SPEC = "tableSourceSpec";
protected static final String FIELD_NAME_VECTOR_SEARCH_SPEC = "vectorSearchSpec";
protected static final String FIELD_NAME_ASYNC_OPTIONS = "asyncOptions";
@JsonProperty(FIELD_NAME_TABLE_SOURCE_SPEC)
protected final VectorSearchTableSourceSpec tableSourceSpec;
@JsonProperty(FIELD_NAME_VECTOR_SEARCH_SPEC)
protected final VectorSearchSpec vectorSearchSpec;
@JsonProperty(FIELD_NAME_ASYNC_OPTIONS)
protected final @Nullable FunctionCallUtil.AsyncOptions asyncOptions;
protected CommonExecVectorSearchTableFunction(
int id,
ExecNodeContext context,
ReadableConfig persistedConfig,
VectorSearchTableSourceSpec tableSourceSpec,
VectorSearchSpec vectorSearchSpec,
@Nullable FunctionCallUtil.AsyncOptions asyncOptions,
List<InputProperty> inputProperties,
RowType outputType,
String description) {
super(id, context, persistedConfig, inputProperties, outputType, description);
this.tableSourceSpec = tableSourceSpec;
this.vectorSearchSpec = vectorSearchSpec;
this.asyncOptions = asyncOptions;
}
@Override
protected Transformation<RowData> translateToPlanInternal(
PlannerBase planner, ExecNodeConfig config) {
// 1. translate input node
ExecEdge inputEdge = getInputEdges().get(0);
Transformation<RowData> inputTransformation =
(Transformation<RowData>) inputEdge.translateToPlan(planner);
// 2. extract search function
TableSourceTable searchTable =
tableSourceSpec.getSearchTable(planner.getFlinkContext(), planner.getTypeFactory());
boolean isAsyncEnabled = asyncOptions != null;
UserDefinedFunction vectorSearchFunction =
findVectorSearchFunction(
VectorSearchUtil.createVectorSearchRuntimeProvider(
searchTable,
vectorSearchSpec.getSearchColumns().keySet(),
Configuration.fromMap(
Optional.ofNullable(vectorSearchSpec.getRuntimeConfig())
.orElse(Collections.emptyMap()))),
isAsyncEnabled);
UserDefinedFunctionHelper.prepareInstance(config, vectorSearchFunction);
// 3. build the operator
RowType inputType = (RowType) inputEdge.getOutputType();
RowType outputType = (RowType) getOutputType();
DataTypeFactory dataTypeFactory =
ShortcutUtils.unwrapContext(planner.getFlinkContext())
.getCatalogManager()
.getDataTypeFactory();
StreamOperatorFactory<RowData> operatorFactory =
isAsyncEnabled
? createAsyncVectorSearchOperator(
searchTable,
config,
planner.getFlinkContext().getClassLoader(),
(AsyncVectorSearchFunction) vectorSearchFunction,
dataTypeFactory,
inputType,
vectorSearchSpec.getOutputType(),
outputType)
: createSyncVectorSearchOperator(
searchTable,
config,
planner.getFlinkContext().getClassLoader(),
(VectorSearchFunction) vectorSearchFunction,
dataTypeFactory,
inputType,
vectorSearchSpec.getOutputType(),
outputType);
return ExecNodeUtil.createOneInputTransformation(
inputTransformation,
createTransformationMeta(VECTOR_SEARCH_TRANSFORMATION, config),
operatorFactory,
InternalTypeInfo.of(outputType),
inputTransformation.getParallelism(),
false);
}
// ~ Utilities --------------------------------------------------------------
private UserDefinedFunction findVectorSearchFunction(
VectorSearchTableSource.VectorSearchRuntimeProvider provider, boolean async) {
if (async) {
if (provider instanceof AsyncVectorSearchFunctionProvider) {
return ((AsyncVectorSearchFunctionProvider) provider)
.createAsyncVectorSearchFunction();
}
} else {
if (provider instanceof VectorSearchFunctionProvider) {
return ((VectorSearchFunctionProvider) provider).createVectorSearchFunction();
}
}
throw new TableException(
"Required "
+ (async ? "async" : "sync")
+ " vector search function by planner, but VectorSearchRuntimeProvider "
+ "does not offer a valid vector search function.");
}
private StreamOperatorFactory<RowData> createSyncVectorSearchOperator(
RelOptTable searchTable,
ExecNodeConfig config,
ClassLoader jobClassLoader,
VectorSearchFunction vectorSearchFunction,
DataTypeFactory dataTypeFactory,
RowType inputType,
RowType searchOutputType,
RowType outputType) {
return SimpleOperatorFactory.of(
new ProcessOperator<>(
createSyncVectorSearchFunction(
searchTable,
config,
jobClassLoader,
vectorSearchFunction,
dataTypeFactory,
inputType,
searchOutputType,
outputType)));
}
private ProcessFunction<RowData, RowData> createSyncVectorSearchFunction(
RelOptTable searchTable,
ExecNodeConfig config,
ClassLoader jobClassLoader,
VectorSearchFunction vectorSearchFunction,
DataTypeFactory dataTypeFactory,
RowType inputType,
RowType searchOutputType,
RowType outputType) {
ArrayList<FunctionCallUtil.FunctionParam> parameters =
new ArrayList<>(1 + vectorSearchSpec.getSearchColumns().size());
parameters.add(vectorSearchSpec.getTopK());
parameters.addAll(vectorSearchSpec.getSearchColumns().values());
GeneratedFunction<FlatMapFunction<RowData, RowData>> generatedFetcher =
VectorSearchCodeGenerator.generateSyncVectorSearchFunction(
config,
jobClassLoader,
dataTypeFactory,
inputType,
searchOutputType,
outputType,
parameters,
vectorSearchFunction,
((TableSourceTable) searchTable)
.contextResolvedTable()
.getIdentifier()
.asSummaryString(),
config.get(PipelineOptions.OBJECT_REUSE));
GeneratedCollector<ListenableCollector<RowData>> generatedCollector =
VectorSearchCodeGenerator.generateCollector(
new CodeGeneratorContext(config, jobClassLoader),
inputType,
searchOutputType,
outputType);
boolean isLeftOuterJoin = vectorSearchSpec.getJoinType() == JoinRelType.LEFT;
return new VectorSearchRunner(
generatedFetcher,
generatedCollector,
isLeftOuterJoin,
searchOutputType.getFieldCount());
}
@SuppressWarnings("unchecked")
private StreamOperatorFactory<RowData> createAsyncVectorSearchOperator(
RelOptTable searchTable,
ExecNodeConfig config,
ClassLoader jobClassLoader,
AsyncVectorSearchFunction vectorSearchFunction,
DataTypeFactory dataTypeFactory,
RowType inputType,
RowType searchOutputType,
RowType outputType) {
ArrayList<FunctionCallUtil.FunctionParam> parameters =
new ArrayList<>(1 + vectorSearchSpec.getSearchColumns().size());
parameters.add(vectorSearchSpec.getTopK());
parameters.addAll(vectorSearchSpec.getSearchColumns().values());
FunctionCallCodeGenerator.GeneratedTableFunctionWithDataType<AsyncFunction<RowData, Object>>
generatedFetcher =
VectorSearchCodeGenerator.generateAsyncVectorSearchFunction(
config,
jobClassLoader,
dataTypeFactory,
inputType,
searchOutputType,
outputType,
parameters,
vectorSearchFunction,
((TableSourceTable) searchTable)
.contextResolvedTable()
.getIdentifier()
.asSummaryString());
boolean isLeftOuterJoin = vectorSearchSpec.getJoinType() == JoinRelType.LEFT;
Preconditions.checkNotNull(asyncOptions, "Async Options can not be null.");
return new AsyncWaitOperatorFactory<>(
new AsyncVectorSearchRunner(
(GeneratedFunction) generatedFetcher.tableFunc(),
isLeftOuterJoin,
asyncOptions.asyncBufferCapacity,
searchOutputType.getFieldCount()),
asyncOptions.asyncTimeout,
asyncOptions.asyncBufferCapacity,
asyncOptions.asyncOutputMode);
}
}
|
CommonExecVectorSearchTableFunction
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/search/fetch/ScrollQueryFetchSearchResult.java
|
{
"start": 823,
"end": 2500
}
|
class ____ extends SearchPhaseResult {
private final QueryFetchSearchResult result;
public ScrollQueryFetchSearchResult(StreamInput in) throws IOException {
SearchShardTarget searchShardTarget = new SearchShardTarget(in);
result = new QueryFetchSearchResult(in);
setSearchShardTarget(searchShardTarget);
}
public ScrollQueryFetchSearchResult(QueryFetchSearchResult result, SearchShardTarget shardTarget) {
this.result = result;
setSearchShardTarget(shardTarget);
}
public QueryFetchSearchResult result() {
return result;
}
@Override
public void setSearchShardTarget(SearchShardTarget shardTarget) {
super.setSearchShardTarget(shardTarget);
result.setSearchShardTarget(shardTarget);
}
@Override
public void setShardIndex(int shardIndex) {
super.setShardIndex(shardIndex);
result.setShardIndex(shardIndex);
}
@Override
public QuerySearchResult queryResult() {
return result.queryResult();
}
@Override
public FetchSearchResult fetchResult() {
return result.fetchResult();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
getSearchShardTarget().writeTo(out);
result.writeTo(out);
}
@Override
public void incRef() {
result.incRef();
}
@Override
public boolean tryIncRef() {
return result.tryIncRef();
}
@Override
public boolean decRef() {
return result.decRef();
}
@Override
public boolean hasReferences() {
return result.hasReferences();
}
}
|
ScrollQueryFetchSearchResult
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/SpringAiChatEndpointBuilderFactory.java
|
{
"start": 23124,
"end": 23699
}
|
class ____ use for BEAN output format conversion. Required
* when outputFormat is BEAN.
*
* The option is a: <code>java.lang.Class<java.lang.Object></code>
* type.
*
* Group: advanced
*
* @param outputClass the value to set
* @return the dsl builder
*/
default AdvancedSpringAiChatEndpointBuilder outputClass(Class<java.lang.Object> outputClass) {
doSetProperty("outputClass", outputClass);
return this;
}
/**
* The Java
|
to
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/StAXEndpointBuilderFactory.java
|
{
"start": 5986,
"end": 6293
}
|
class ____ extends AbstractEndpointBuilder implements StAXEndpointBuilder, AdvancedStAXEndpointBuilder {
public StAXEndpointBuilderImpl(String path) {
super(componentName, path);
}
}
return new StAXEndpointBuilderImpl(path);
}
}
|
StAXEndpointBuilderImpl
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/KerberosAuthException.java
|
{
"start": 1334,
"end": 3275
}
|
class ____ extends IOException {
static final long serialVersionUID = 31L;
private String user;
private String principal;
private String keytabFile;
private String ticketCacheFile;
private String initialMessage;
public KerberosAuthException(String msg) {
super(msg);
}
public KerberosAuthException(Throwable cause) {
super(cause);
}
public KerberosAuthException(String initialMsg, Throwable cause) {
this(cause);
initialMessage = initialMsg;
}
public void setUser(final String u) {
user = u;
}
public void setPrincipal(final String p) {
principal = p;
}
public void setKeytabFile(final String k) {
keytabFile = k;
}
public void setTicketCacheFile(final String t) {
ticketCacheFile = t;
}
/** @return The initial message, or null if not set. */
public String getInitialMessage() {
return initialMessage;
}
/** @return The keytab file path, or null if not set. */
public String getKeytabFile() {
return keytabFile;
}
/** @return The principal, or null if not set. */
public String getPrincipal() {
return principal;
}
/** @return The ticket cache file path, or null if not set. */
public String getTicketCacheFile() {
return ticketCacheFile;
}
/** @return The user, or null if not set. */
public String getUser() {
return user;
}
@Override
public String getMessage() {
final StringBuilder sb = new StringBuilder();
if (initialMessage != null) {
sb.append(initialMessage);
}
if (user != null) {
sb.append(FOR_USER + user);
}
if (principal != null) {
sb.append(FOR_PRINCIPAL + principal);
}
if (keytabFile != null) {
sb.append(FROM_KEYTAB + keytabFile);
}
if (ticketCacheFile != null) {
sb.append(USING_TICKET_CACHE_FILE+ ticketCacheFile);
}
sb.append(" " + super.getMessage());
return sb.toString();
}
}
|
KerberosAuthException
|
java
|
apache__avro
|
lang/java/avro/src/main/java/org/apache/avro/file/Syncable.java
|
{
"start": 874,
"end": 1369
}
|
interface ____ {
/**
* Sync the file to disk. On supported platforms, this method behaves like POSIX
* <code>fsync</code> and syncs all underlying OS buffers for this file
* descriptor to disk. On these platforms, if this method returns, the data
* written to this instance is guaranteed to be persisted on disk.
*
* @throws IOException - if an error occurred while attempting to sync the data
* to disk.
*/
void sync() throws IOException;
}
|
Syncable
|
java
|
micronaut-projects__micronaut-core
|
inject/src/main/java/io/micronaut/inject/beans/AbstractEnumBeanIntrospectionAndReference.java
|
{
"start": 1297,
"end": 2029
}
|
class ____<E extends Enum<E>> extends AbstractInitializableBeanIntrospectionAndReference<E> implements EnumBeanIntrospection<E> {
private final List<EnumConstant<E>> enumConstantRefs;
/**
* The default constructor.
*
* @param beanType The bean type
* @param annotationMetadata The annotation metadata
* @param constructorAnnotationMetadata The constructor annotation metadata
* @param constructorArguments The constructor arguments
* @param propertiesRefs The property references
* @param methodsRefs The method references
* @param enumValueRefs The
|
AbstractEnumBeanIntrospectionAndReference
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/struct/UnwrappedCaching2461Test.java
|
{
"start": 285,
"end": 377
}
|
class ____ extends DatabindTestUtil {
// [databind#2461]
static
|
UnwrappedCaching2461Test
|
java
|
spring-projects__spring-framework
|
spring-tx/src/main/java/org/springframework/transaction/support/ResourceHolderSynchronization.java
|
{
"start": 972,
"end": 5638
}
|
class ____<H extends ResourceHolder, K>
implements TransactionSynchronization {
private final H resourceHolder;
private final K resourceKey;
private volatile boolean holderActive = true;
/**
* Create a new ResourceHolderSynchronization for the given holder.
* @param resourceHolder the ResourceHolder to manage
* @param resourceKey the key to bind the ResourceHolder for
* @see TransactionSynchronizationManager#bindResource
*/
public ResourceHolderSynchronization(H resourceHolder, K resourceKey) {
this.resourceHolder = resourceHolder;
this.resourceKey = resourceKey;
}
@Override
public void suspend() {
if (this.holderActive) {
TransactionSynchronizationManager.unbindResource(this.resourceKey);
}
}
@Override
public void resume() {
if (this.holderActive) {
TransactionSynchronizationManager.bindResource(this.resourceKey, this.resourceHolder);
}
}
@Override
public void flush() {
flushResource(this.resourceHolder);
}
@Override
public void beforeCommit(boolean readOnly) {
}
@Override
public void beforeCompletion() {
if (shouldUnbindAtCompletion()) {
TransactionSynchronizationManager.unbindResource(this.resourceKey);
this.holderActive = false;
if (shouldReleaseBeforeCompletion()) {
releaseResource(this.resourceHolder, this.resourceKey);
}
}
}
@Override
public void afterCommit() {
if (!shouldReleaseBeforeCompletion()) {
processResourceAfterCommit(this.resourceHolder);
}
}
@Override
public void afterCompletion(int status) {
if (shouldUnbindAtCompletion()) {
boolean releaseNecessary = false;
if (this.holderActive) {
// The thread-bound resource holder might not be available anymore,
// since afterCompletion might get called from a different thread.
this.holderActive = false;
TransactionSynchronizationManager.unbindResourceIfPossible(this.resourceKey);
this.resourceHolder.unbound();
releaseNecessary = true;
}
else {
releaseNecessary = shouldReleaseAfterCompletion(this.resourceHolder);
}
if (releaseNecessary) {
releaseResource(this.resourceHolder, this.resourceKey);
}
}
else {
// Probably a pre-bound resource...
cleanupResource(this.resourceHolder, this.resourceKey, (status == STATUS_COMMITTED));
}
this.resourceHolder.reset();
}
/**
* Return whether this holder should be unbound at completion
* (or should rather be left bound to the thread after the transaction).
* <p>The default implementation returns {@code true}.
*/
protected boolean shouldUnbindAtCompletion() {
return true;
}
/**
* Return whether this holder's resource should be released before
* transaction completion ({@code true}) or rather after
* transaction completion ({@code false}).
* <p>Note that resources will only be released when they are
* unbound from the thread ({@link #shouldUnbindAtCompletion()}).
* <p>The default implementation returns {@code true}.
* @see #releaseResource
*/
protected boolean shouldReleaseBeforeCompletion() {
return true;
}
/**
* Return whether this holder's resource should be released after
* transaction completion ({@code true}).
* <p>The default implementation returns {@code !shouldReleaseBeforeCompletion()},
* releasing after completion if no attempt was made before completion.
* @see #releaseResource
*/
protected boolean shouldReleaseAfterCompletion(H resourceHolder) {
return !shouldReleaseBeforeCompletion();
}
/**
* Flush callback for the given resource holder.
* @param resourceHolder the resource holder to flush
*/
protected void flushResource(H resourceHolder) {
}
/**
* After-commit callback for the given resource holder.
* Only called when the resource hasn't been released yet
* ({@link #shouldReleaseBeforeCompletion()}).
* @param resourceHolder the resource holder to process
*/
protected void processResourceAfterCommit(H resourceHolder) {
}
/**
* Release the given resource (after it has been unbound from the thread).
* @param resourceHolder the resource holder to process
* @param resourceKey the key that the ResourceHolder was bound for
*/
protected void releaseResource(H resourceHolder, K resourceKey) {
}
/**
* Perform a cleanup on the given resource (which is left bound to the thread).
* @param resourceHolder the resource holder to process
* @param resourceKey the key that the ResourceHolder was bound for
* @param committed whether the transaction has committed ({@code true})
* or rolled back ({@code false})
*/
protected void cleanupResource(H resourceHolder, K resourceKey, boolean committed) {
}
}
|
ResourceHolderSynchronization
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/jobgraph/JobTaskVertexTest.java
|
{
"start": 15058,
"end": 15237
}
|
class ____ extends URLClassLoader {
public TestClassLoader() {
super(new URL[0], Thread.currentThread().getContextClassLoader());
}
}
}
|
TestClassLoader
|
java
|
spring-projects__spring-boot
|
core/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/condition/ConditionEvaluationReportTests.java
|
{
"start": 14293,
"end": 14394
}
|
class ____ {
@Bean
String example() {
return "example";
}
}
}
|
UnconditionalAutoConfiguration
|
java
|
apache__camel
|
components/camel-aws/camel-aws2-translate/src/test/java/org/apache/camel/component/aws2/translate/Translate2ProducerTest.java
|
{
"start": 1339,
"end": 4456
}
|
class ____ extends CamelTestSupport {
@BindToRegistry("amazonTranslateClient")
AmazonAWSTranslateMock clientMock = new AmazonAWSTranslateMock();
@EndpointInject("mock:result")
private MockEndpoint mock;
@Test
public void translateTextTest() throws Exception {
mock.expectedMessageCount(1);
Exchange exchange = template.request("direct:translateText", new Processor() {
@Override
public void process(Exchange exchange) {
exchange.getIn().setHeader(Translate2Constants.SOURCE_LANGUAGE, Translate2LanguageEnum.ITALIAN);
exchange.getIn().setHeader(Translate2Constants.TARGET_LANGUAGE, Translate2LanguageEnum.ENGLISH);
exchange.getIn().setBody("ciao");
}
});
MockEndpoint.assertIsSatisfied(context);
String resultGet = exchange.getIn().getBody(String.class);
assertEquals("Hello", resultGet);
}
@Test
public void translateTextPojoTest() throws Exception {
mock.expectedMessageCount(1);
Exchange exchange = template.request("direct:translatePojoText", new Processor() {
@Override
public void process(Exchange exchange) {
exchange.getIn()
.setBody(TranslateTextRequest.builder().sourceLanguageCode(Translate2LanguageEnum.ITALIAN.toString())
.targetLanguageCode(Translate2LanguageEnum.ENGLISH.toString()).text("ciao").build());
}
});
MockEndpoint.assertIsSatisfied(context);
String resultGet = exchange.getIn().getBody(String.class);
assertEquals("Hello", resultGet);
}
@Test
public void translateTextTestOptions() throws Exception {
mock.expectedMessageCount(1);
Exchange exchange = template.request("direct:translateTextOptions", new Processor() {
@Override
public void process(Exchange exchange) {
exchange.getIn().setBody("ciao");
}
});
MockEndpoint.assertIsSatisfied(context);
String resultGet = exchange.getIn().getBody(String.class);
assertEquals("Hello", resultGet);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:translateText")
.to("aws2-translate://test?translateClient=#amazonTranslateClient&operation=translateText")
.to("mock:result");
from("direct:translatePojoText").to(
"aws2-translate://test?translateClient=#amazonTranslateClient&operation=translateText&pojoRequest=true")
.to("mock:result");
from("direct:translateTextOptions").to(
"aws2-translate://test?translateClient=#amazonTranslateClient&operation=translateText&sourceLanguage=it&targetLanguage=en")
.to("mock:result");
}
};
}
}
|
Translate2ProducerTest
|
java
|
spring-projects__spring-framework
|
framework-docs/src/main/java/org/springframework/docs/core/beans/java/beansjavaprogrammaticregistration/MyBeanRegistrar.java
|
{
"start": 1088,
"end": 1912
}
|
class ____ implements BeanRegistrar {
@Override
public void register(BeanRegistry registry, Environment env) {
registry.registerBean("foo", Foo.class);
registry.registerBean("bar", Bar.class, spec -> spec
.prototype()
.lazyInit()
.description("Custom description")
.supplier(context -> new Bar(context.bean(Foo.class))));
if (env.matchesProfiles("baz")) {
registry.registerBean(Baz.class, spec -> spec
.supplier(context -> new Baz("Hello World!")));
}
registry.registerBean(MyRepository.class);
registry.registerBean(RouterFunction.class, spec ->
spec.supplier(context -> router(context.bean(MyRepository.class))));
}
RouterFunction<ServerResponse> router(MyRepository myRepository) {
return RouterFunctions.route()
// ...
.build();
}
}
// end::snippet[]
|
MyBeanRegistrar
|
java
|
spring-projects__spring-framework
|
spring-jdbc/src/main/java/org/springframework/jdbc/core/simple/SimpleJdbcCall.java
|
{
"start": 2561,
"end": 2744
}
|
interface ____.
*
* @author Thomas Risberg
* @author Stephane Nicoll
* @since 2.5
* @see java.sql.DatabaseMetaData
* @see org.springframework.jdbc.core.JdbcTemplate
*/
public
|
style
|
java
|
apache__kafka
|
streams/src/test/java/org/apache/kafka/streams/processor/internals/TasksTest.java
|
{
"start": 1849,
"end": 11130
}
|
class ____ {
private static final TopicPartition TOPIC_PARTITION_A_0 = new TopicPartition("topicA", 0);
private static final TopicPartition TOPIC_PARTITION_A_1 = new TopicPartition("topicA", 1);
private static final TopicPartition TOPIC_PARTITION_B_0 = new TopicPartition("topicB", 0);
private static final TopicPartition TOPIC_PARTITION_B_1 = new TopicPartition("topicB", 1);
private static final TaskId TASK_0_0 = new TaskId(0, 0);
private static final TaskId TASK_0_1 = new TaskId(0, 1);
private static final TaskId TASK_1_0 = new TaskId(1, 0);
private static final TaskId TASK_1_1 = new TaskId(1, 1);
private final Tasks tasks = new Tasks(new LogContext());
@Test
public void shouldCheckStateWhenRemoveTask() {
final StreamTask closedTask = statefulTask(TASK_0_0, Set.of(TOPIC_PARTITION_A_0)).inState(State.CLOSED).build();
final StandbyTask suspendedTask = standbyTask(TASK_0_1, Set.of(TOPIC_PARTITION_A_1)).inState(State.SUSPENDED).build();
final StreamTask runningTask = statelessTask(TASK_1_0).inState(State.RUNNING).build();
tasks.addActiveTasks(Set.of(closedTask, runningTask));
tasks.addStandbyTasks(Collections.singletonList(suspendedTask));
assertDoesNotThrow(() -> tasks.removeTask(closedTask));
assertDoesNotThrow(() -> tasks.removeTask(suspendedTask));
assertThrows(IllegalStateException.class, () -> tasks.removeTask(runningTask));
}
@Test
public void shouldKeepAddedTasks() {
final StreamTask statefulTask = statefulTask(TASK_0_0, Set.of(TOPIC_PARTITION_A_0)).build();
final StandbyTask standbyTask = standbyTask(TASK_0_1, Set.of(TOPIC_PARTITION_A_1)).build();
final StreamTask statelessTask = statelessTask(TASK_1_0).build();
tasks.addActiveTasks(Set.of(statefulTask, statelessTask));
tasks.addStandbyTasks(Collections.singletonList(standbyTask));
assertEquals(statefulTask, tasks.task(statefulTask.id()));
assertEquals(statelessTask, tasks.task(statelessTask.id()));
assertEquals(standbyTask, tasks.task(standbyTask.id()));
assertEquals(Set.of(statefulTask, statelessTask), new HashSet<>(tasks.activeTasks()));
assertEquals(Set.of(statefulTask, statelessTask, standbyTask), tasks.allTasks());
assertEquals(Set.of(statefulTask, standbyTask), tasks.tasks(Set.of(statefulTask.id(), standbyTask.id())));
assertEquals(Set.of(statefulTask.id(), statelessTask.id(), standbyTask.id()), tasks.allTaskIds());
assertEquals(
mkMap(
mkEntry(statefulTask.id(), statefulTask),
mkEntry(statelessTask.id(), statelessTask),
mkEntry(standbyTask.id(), standbyTask)
),
tasks.allTasksPerId());
assertTrue(tasks.contains(statefulTask.id()));
assertTrue(tasks.contains(statelessTask.id()));
assertTrue(tasks.contains(statefulTask.id()));
}
@Test
public void shouldDrainPendingTasksToCreate() {
tasks.addPendingActiveTasksToCreate(mkMap(
mkEntry(new TaskId(0, 0, "A"), Set.of(TOPIC_PARTITION_A_0)),
mkEntry(new TaskId(0, 1, "A"), Set.of(TOPIC_PARTITION_A_1)),
mkEntry(new TaskId(0, 0, "B"), Set.of(TOPIC_PARTITION_B_0)),
mkEntry(new TaskId(0, 1, "B"), Set.of(TOPIC_PARTITION_B_1))
));
tasks.addPendingStandbyTasksToCreate(mkMap(
mkEntry(new TaskId(0, 0, "A"), Set.of(TOPIC_PARTITION_A_0)),
mkEntry(new TaskId(0, 1, "A"), Set.of(TOPIC_PARTITION_A_1)),
mkEntry(new TaskId(0, 0, "B"), Set.of(TOPIC_PARTITION_B_0)),
mkEntry(new TaskId(0, 1, "B"), Set.of(TOPIC_PARTITION_B_1))
));
assertEquals(mkMap(
mkEntry(new TaskId(0, 0, "A"), Set.of(TOPIC_PARTITION_A_0)),
mkEntry(new TaskId(0, 1, "A"), Set.of(TOPIC_PARTITION_A_1))
), tasks.drainPendingActiveTasksForTopologies(Set.of("A")));
assertEquals(mkMap(
mkEntry(new TaskId(0, 0, "A"), Set.of(TOPIC_PARTITION_A_0)),
mkEntry(new TaskId(0, 1, "A"), Set.of(TOPIC_PARTITION_A_1))
), tasks.drainPendingStandbyTasksForTopologies(Set.of("A")));
tasks.clearPendingTasksToCreate();
assertEquals(Collections.emptyMap(), tasks.drainPendingActiveTasksForTopologies(Set.of("B")));
assertEquals(Collections.emptyMap(), tasks.drainPendingStandbyTasksForTopologies(Set.of("B")));
}
@Test
public void shouldVerifyIfPendingTaskToInitExist() {
assertFalse(tasks.hasPendingTasksToInit());
final StreamTask activeTask = statefulTask(TASK_0_0, Set.of(TOPIC_PARTITION_B_0)).build();
tasks.addPendingTasksToInit(Collections.singleton(activeTask));
assertTrue(tasks.hasPendingTasksToInit());
final StandbyTask standbyTask = standbyTask(TASK_1_0, Set.of(TOPIC_PARTITION_A_1)).build();
tasks.addPendingTasksToInit(Collections.singleton(standbyTask));
assertTrue(tasks.hasPendingTasksToInit());
assertTrue(tasks.hasPendingTasksToInit());
final Set<Task> tasksToInit = tasks.drainPendingTasksToInit();
assertEquals(2, tasksToInit.size());
assertTrue(tasksToInit.contains(activeTask));
assertTrue(tasksToInit.contains(standbyTask));
assertFalse(tasks.hasPendingTasksToInit());
}
    @Test
    public void shouldVerifyIfPendingActiveTaskToInitAreDrained() {
        // Mix two active and two standby tasks in the pending-to-init set.
        final StreamTask activeTask1 = statefulTask(TASK_0_0, Set.of(TOPIC_PARTITION_B_0)).build();
        final StreamTask activeTask2 = statefulTask(TASK_0_1, Set.of(TOPIC_PARTITION_B_1)).build();
        final StandbyTask standbyTask1 = standbyTask(TASK_1_0, Set.of(TOPIC_PARTITION_A_0)).build();
        final StandbyTask standbyTask2 = standbyTask(TASK_1_1, Set.of(TOPIC_PARTITION_A_1)).build();
        tasks.addPendingTasksToInit(Set.of(activeTask1, activeTask2, standbyTask1, standbyTask2));
        // Draining only active tasks must return exactly the two active tasks ...
        final Set<Task> activeTasksToInit = tasks.drainPendingActiveTasksToInit();
        assertEquals(2, activeTasksToInit.size());
        assertTrue(activeTasksToInit.containsAll(Set.of(activeTask1, activeTask2)));
        assertFalse(activeTasksToInit.containsAll(Set.of(standbyTask1, standbyTask2)));
        // ... while leaving the two standby tasks in the pending set.
        assertEquals(2, tasks.pendingTasksToInit().size());
        assertTrue(tasks.hasPendingTasksToInit());
        assertTrue(tasks.pendingTasksToInit().containsAll(Set.of(standbyTask1, standbyTask2)));
    }
    @Test
    public void shouldAddFailedTask() {
        final StreamTask activeTask1 = statefulTask(TASK_0_0, Set.of(TOPIC_PARTITION_B_0)).build();
        final StreamTask activeTask2 = statefulTask(TASK_0_1, Set.of(TOPIC_PARTITION_B_1)).build();
        // One task registered normally, one registered as failed.
        tasks.addTask(activeTask2);
        tasks.addFailedTask(activeTask1);
        // Failed tasks are still looked up and listed like any other task ...
        assertEquals(activeTask1, tasks.task(TASK_0_0));
        assertEquals(activeTask2, tasks.task(TASK_0_1));
        assertTrue(tasks.allTasks().contains(activeTask1));
        assertTrue(tasks.allTasks().contains(activeTask2));
        // ... but are excluded from the non-failed view.
        assertFalse(tasks.allNonFailedTasks().contains(activeTask1));
        assertTrue(tasks.allNonFailedTasks().contains(activeTask2));
    }
    @Test
    public void shouldRemoveFailedTask() {
        final StreamTask activeTask1 = statefulTask(TASK_0_0, Set.of(TOPIC_PARTITION_B_0))
            .inState(State.SUSPENDED).build();
        // Removing a failed task must drop it from both the failed and the general registry.
        tasks.addFailedTask(activeTask1);
        tasks.removeTask(activeTask1);
        assertFalse(tasks.allNonFailedTasks().contains(activeTask1));
        assertFalse(tasks.allTasks().contains(activeTask1));
        // Re-adding the same task afterwards must not carry over the old failed flag.
        tasks.addTask(activeTask1);
        assertTrue(tasks.allNonFailedTasks().contains(activeTask1));
    }
    @Test
    public void shouldClearFailedTask() {
        final StreamTask activeTask1 = statefulTask(TASK_0_0, Set.of(TOPIC_PARTITION_B_0))
            .inState(State.SUSPENDED).build();
        // clear() must wipe the failed-task bookkeeping along with the task registry.
        tasks.addFailedTask(activeTask1);
        tasks.clear();
        assertFalse(tasks.allNonFailedTasks().contains(activeTask1));
        assertFalse(tasks.allTasks().contains(activeTask1));
        // A task added after clear() must start out as non-failed.
        tasks.addTask(activeTask1);
        assertTrue(tasks.allNonFailedTasks().contains(activeTask1));
    }
    @Test
    public void shouldClearAllPendingTasks() {
        // Populate all three pending collections: tasks-to-init, active-to-create, standby-to-create.
        final StandbyTask task = standbyTask(TASK_0_0, Set.of(TOPIC_PARTITION_B_0))
            .inState(State.CREATED).build();
        tasks.addPendingTasksToInit(Collections.singleton(task));
        final TaskId taskId1 = new TaskId(0, 0, "A");
        tasks.addPendingActiveTasksToCreate(mkMap(
            mkEntry(taskId1, Set.of(TOPIC_PARTITION_A_0))
        ));
        final TaskId taskId2 = new TaskId(0, 1, "A");
        tasks.addPendingStandbyTasksToCreate(mkMap(
            mkEntry(taskId2, Set.of(TOPIC_PARTITION_A_0))
        ));
        assertTrue(tasks.pendingTasksToInit().contains(task));
        assertTrue(tasks.pendingActiveTasksToCreate().containsKey(taskId1));
        assertTrue(tasks.pendingStandbyTasksToCreate().containsKey(taskId2));
        // clear() must empty every pending collection at once.
        tasks.clear();
        assertTrue(tasks.pendingTasksToInit().isEmpty());
        assertTrue(tasks.pendingActiveTasksToCreate().isEmpty());
        assertTrue(tasks.pendingStandbyTasksToCreate().isEmpty());
    }
}
|
TasksTest
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/test/codegen/DepartmentCodec.java
|
{
"start": 576,
"end": 8576
}
|
// Hand-written example of the deserializer that fastjson's ASM codegen would emit for
// Department. The code is deliberately state-machine shaped: each field is matched against a
// pre-encoded "<name>": prefix, and lexer.matchStat drives whether to continue, fall back to
// the generic parser (restFlag), or finish (endFlag).
class ____ extends JavaBeanDeserializer implements ObjectDeserializer {
    // Pre-encoded JSON key prefixes, used for fast in-place matching by the lexer.
    private char[] name_gen_prefix__ = "\"name\":".toCharArray();
    private char[] root_gen_prefix__ = "\"root\":".toCharArray();
    private char[] type_gen_prefix__ = "\"type\":".toCharArray();
    private char[] id_gen_prefix__ = "\"id\":".toCharArray();
    private char[] leader_gen_prefix__ = "\"leader\":".toCharArray();
    private char[] members_gen_prefix__ = "\"members\":".toCharArray();
    // Per-field deserializers; only type_gen_deser__ is resolved eagerly (in the constructor),
    // members_gen_list_item_deser__ is resolved lazily on first use. name_gen_deser__ and
    // leader_gen_deser__ are declared but never used in this class (codegen artifact).
    private ObjectDeserializer name_gen_deser__;
    private ObjectDeserializer leader_gen_deser__;
    private ObjectDeserializer members_gen_list_item_deser__;
    private Type members_gen_list_item_type__ = com.alibaba.json.test.codegen.Employee.class;
    private ObjectDeserializer type_gen_deser__;
    public DepartmentCodec (ParserConfig config, Class clazz) {
        super(config, clazz);
        // Enum deserializer is cheap and stable, so it is resolved once up front.
        type_gen_deser__ = config.getDeserializer(com.alibaba.json.test.codegen.DepartmentType.class);
    }
    // Factory hook used by the generic JavaBeanDeserializer machinery.
    public Object createInstance(DefaultJSONParser parser, Type type) {
        return new Department();
    }
    public Object deserialze(DefaultJSONParser parser, Type type, Object fieldName) {
        JSONLexerBase lexer = (JSONLexerBase) parser.getLexer();
        // Fast path only works for the default field order; with SortFeidFastMatch enabled,
        // delegate to the generic (reflection-based) deserializer.
        if (lexer.isEnabled(Feature.SortFeidFastMatch)) {
            return super.deserialze(parser, type, fieldName);
        }
        if (lexer.isEnabled(Feature.SupportArrayToBean)) {
            // deserialzeArrayMapping
        }
        // Bail out to the generic path when the input does not start with this bean's type.
        if (lexer.scanType("Department") == JSONLexerBase.NOT_MATCH) {
            return super.deserialze(parser, type, fieldName);
        }
        ParseContext mark_context = parser.getContext();
        int matchedCount = 0;
        Department instance = new Department();
        ParseContext context = parser.getContext();
        ParseContext childContext = parser.setContext(context, instance, fieldName);
        // Empty object: scanType already consumed the closing brace.
        if (lexer.matchStat == JSONLexerBase.END) {
            return instance;
        }
        int matchStat = 0;
        int _asm_flag_0 = 0;
        // Local scratch variables, one per bean property.
        // NOTE(review): these scanned values are never copied onto `instance` before the final
        // `return instance` below — verify against the real codegen template whether setters
        // were intended here.
        int id_gen = 0;
        com.alibaba.json.test.codegen.Employee leader_gen = null;
        java.util.List members_gen = null;
        String name_gen;
        if (lexer.isEnabled(Feature.InitStringFieldAsEmpty)) {
            name_gen = lexer.stringDefaultValue();
            _asm_flag_0 |= 8;
        } else {
            name_gen = null;
        }
        boolean root_gen = false;
        com.alibaba.json.test.codegen.DepartmentType type_gen = null;
        // endFlag: the object terminator was consumed; restFlag: field order diverged from the
        // expected layout, so the remainder must be handled by the generic parseRest path.
        boolean endFlag = false, restFlag = false;
        // --- field "id" (int) ---
        if ((!endFlag) && (!restFlag)) {
            id_gen = lexer.scanFieldInt(id_gen_prefix__);
            if(lexer.matchStat > 0) {
                _asm_flag_0 |= 1;
                matchedCount++;
            }
            if(lexer.matchStat == JSONLexerBase.NOT_MATCH) {
                restFlag = true;
            }
            if(lexer.matchStat == JSONLexerBase.END) {
                endFlag = true;
            }
        }
        // --- field "leader" (Employee, may need reference resolution) ---
        if ((!endFlag) && (!restFlag)) {
            if (lexer.matchField(leader_gen_prefix__)) {
                _asm_flag_0 |= 2;
                matchedCount++;
                // Forward "$ref" references are recorded as resolve tasks and patched later.
                if(parser.getResolveStatus() == DefaultJSONParser.NeedToResolve) {
                    ResolveTask resolveTask = parser.getLastResolveTask();
                    resolveTask.ownerContext = parser.getContext();
                    resolveTask.fieldDeserializer = this.getFieldDeserializer("leader");
                    parser.setResolveStatus(DefaultJSONParser.NONE);
                }
            }
            // NOTE(review): _asm_flag_0 |= 2 and matchedCount++ were already applied inside the
            // matchField branch above; this repeats them when matchStat > 0 — confirm whether
            // the double count is intentional in the codegen template.
            if(lexer.matchStat > 0) {
                _asm_flag_0 |= 2;
                matchedCount++;
            }
            if(lexer.matchStat == JSONLexerBase.NOT_MATCH) {
                restFlag = true;
            }
            if(lexer.matchStat == JSONLexerBase.END) {
                endFlag = true;
            }
        }
        // --- field "members" (List<Employee>) ---
        if ((!endFlag) && (!restFlag)) {
            if (lexer.matchField(members_gen_prefix__)) {
                _asm_flag_0 |= 4;
                if (lexer.token() == JSONToken.NULL) {
                    lexer.nextToken(JSONToken.COMMA);
                } else {
                    if (lexer.token() == JSONToken.LBRACKET) {
                        // Lazily resolve the element deserializer on first use.
                        if(members_gen_list_item_deser__ == null) {
                            members_gen_list_item_deser__ = parser.getConfig().getDeserializer(com.alibaba.json.test.codegen.Employee.class);
                        }
                        final int fastMatchToken = members_gen_list_item_deser__.getFastMatchToken();
                        lexer.nextToken(fastMatchToken);
                        members_gen = new java.util.ArrayList();
                        // The list gets its own parse context so element "$ref"s resolve correctly.
                        ParseContext listContext = parser.getContext();
                        parser.setContext(members_gen, "members");
                        for(int i = 0; ;++i) {
                            if (lexer.token() == JSONToken.RBRACKET) {
                                break;
                            }
                            com.alibaba.json.test.codegen.Employee itemValue = members_gen_list_item_deser__.deserialze(parser, members_gen_list_item_type__, i);
                            members_gen.add(itemValue);
                            parser.checkListResolve(members_gen);
                            if (lexer.token() == JSONToken.COMMA) {
                                lexer.nextToken(fastMatchToken);
                            }
                        }
                        parser.setContext(listContext);
                        if (lexer.token() != JSONToken.RBRACKET) {
                            restFlag = true;
                        }
                        lexer.nextToken(JSONToken.COMMA);
                    } else {
                        // Not an array literal: fall back to the generic parser.
                        restFlag = true;
                    }
                }
            }
            if(lexer.matchStat > 0) {
                _asm_flag_0 |= 4;
                matchedCount++;
            }
            if(lexer.matchStat == JSONLexerBase.NOT_MATCH) {
                restFlag = true;
            }
            if(lexer.matchStat == JSONLexerBase.END) {
                endFlag = true;
            }
        }
        // --- field "name" (String) ---
        if ((!endFlag) && (!restFlag)) {
            name_gen = lexer.scanFieldString(name_gen_prefix__);
            if(lexer.matchStat > 0) {
                _asm_flag_0 |= 8;
                matchedCount++;
            }
            if(lexer.matchStat == JSONLexerBase.NOT_MATCH) {
                restFlag = true;
            }
            if(lexer.matchStat == JSONLexerBase.END) {
                endFlag = true;
            }
        }
        // --- field "root" (boolean) ---
        if ((!endFlag) && (!restFlag)) {
            root_gen = lexer.scanFieldBoolean(root_gen_prefix__);
            if(lexer.matchStat > 0) {
                _asm_flag_0 |= 16;
                matchedCount++;
            }
            if(lexer.matchStat == JSONLexerBase.NOT_MATCH) {
                restFlag = true;
            }
            if(lexer.matchStat == JSONLexerBase.END) {
                endFlag = true;
            }
        }
        // --- field "type" (enum, last expected field) ---
        if ((!endFlag) && (!restFlag)) {
            type_gen = (com.alibaba.json.test.codegen.DepartmentType) this.scanEnum(lexer, type_gen_prefix__, type_gen_deser__);
            if(lexer.matchStat > 0) {
                _asm_flag_0 |= 32;
                matchedCount++;
            }
            if(lexer.matchStat == JSONLexerBase.NOT_MATCH) {
                restFlag = true;
            }
            // Last field: anything other than END means trailing content remains, so hand the
            // rest over to the generic parser (note the inverted check compared to the blocks
            // above, which set endFlag on == END).
            if(lexer.matchStat != JSONLexerBase.END) {
                restFlag = true;
            }
        }
        if (restFlag) {
            return super.parseRest(parser, type, fieldName, instance, 0, new int[0]);
        }
        return instance;
    }
}
|
DepartmentCodec
|
java
|
apache__dubbo
|
dubbo-registry/dubbo-registry-nacos/src/test/java/org/apache/dubbo/registry/nacos/NacosServiceDiscoveryTest.java
|
{
"start": 3199,
"end": 3579
}
|
// Re-runs the whole NacosServiceDiscoveryTest suite against a non-default Nacos group, to
// verify that group isolation works end to end.
class ____ extends NacosServiceDiscoveryTest {
    public NacosServiceDiscoveryGroupTest1() {
        super();
        // `group` and `registryUrl` are inherited from NacosServiceDiscoveryTest — presumably
        // protected fields read by the parent's setup; verify against the parent class.
        group = "test-group1";
        // nacos.check=false skips the server reachability check so the test can run offline.
        registryUrl = URL.valueOf("nacos://127.0.0.1:" + NetUtils.getAvailablePort() + "?nacos.check=false")
            .addParameter("group", group);
    }
}
public static
|
NacosServiceDiscoveryGroupTest1
|
java
|
apache__flink
|
flink-table/flink-table-common/src/main/java/org/apache/flink/table/module/CoreModule.java
|
{
"start": 1358,
"end": 3161
}
|
/**
 * Module of default core function names and definitions.
 *
 * <p>All lookup structures are computed once in the private constructor; the class is a
 * stateless singleton exposed through {@link #INSTANCE}.
 */
class ____ implements Module {

    public static final CoreModule INSTANCE = new CoreModule();

    /** Case-insensitive lookup: UPPER-CASED function name -> definition. */
    private final Map<String, BuiltInFunctionDefinition> normalizedFunctions;

    /** All function names, including internal (hidden) ones. */
    private final Set<String> functionNamesWithInternal;

    /** Only the publicly listed (non-internal) function names. */
    private final Set<String> functionNamesWithoutInternal;

    private CoreModule() {
        final List<BuiltInFunctionDefinition> definitions =
                BuiltInFunctionDefinitions.getDefinitions();
        this.normalizedFunctions =
                definitions.stream()
                        .collect(
                                Collectors.toMap(
                                        f -> f.getName().toUpperCase(Locale.ROOT),
                                        Function.identity()));
        // Collect into unmodifiable sets so listFunctions() cannot leak mutable internal state
        // to callers (the previous toSet() collector returned a mutable set that was exposed
        // directly).
        this.functionNamesWithInternal =
                definitions.stream()
                        .map(BuiltInFunctionDefinition::getName)
                        .collect(Collectors.toUnmodifiableSet());
        this.functionNamesWithoutInternal =
                definitions.stream()
                        .filter(f -> !f.isInternal())
                        .map(BuiltInFunctionDefinition::getName)
                        .collect(Collectors.toUnmodifiableSet());
    }

    @Override
    public Set<String> listFunctions() {
        return listFunctions(false);
    }

    @Override
    public Set<String> listFunctions(boolean includeHiddenFunctions) {
        if (includeHiddenFunctions) {
            return functionNamesWithInternal;
        } else {
            return functionNamesWithoutInternal;
        }
    }

    /**
     * Looks up a built-in function definition by name, case-insensitively.
     *
     * @param name function name in any case
     * @return the definition, or empty if no built-in function has that name
     */
    @Override
    public Optional<FunctionDefinition> getFunctionDefinition(String name) {
        final String normalizedName = name.toUpperCase(Locale.ROOT);
        return Optional.ofNullable(normalizedFunctions.get(normalizedName));
    }
}
|
CoreModule
|
java
|
apache__logging-log4j2
|
log4j-mongodb4/src/test/java/org/apache/logging/log4j/mongodb4/MongoDb4ProviderTest.java
|
{
"start": 1144,
"end": 4833
}
|
// Unit tests for MongoDb4Provider's builder: database/collection names may come from the
// builder config, from the connection string, or both — config wins over the connection string.
class ____ {
    // Connection string without any database segment.
    private static final String CON_STR_WO_DB = "mongodb://localhost:27017";
    // Connection string carrying a database ("logging") but no collection.
    private static final String CON_STR_W_DB = "mongodb://localhost:27017/logging";
    // Connection string carrying both database ("logging") and collection ("logs").
    private static final String CON_STR_DB_COLL = "mongodb://localhost:27017/logging.logs";
    private static final String COLLECTION_NAME = "logsTest";
    private static final String DATABASE_NAME = "loggingTest";
    @Test
    void createProviderWithDatabaseAndCollectionProvidedViaConfig() {
        MongoDb4Provider provider = MongoDb4Provider.newBuilder()
                .setConnectionStringSource(CON_STR_WO_DB)
                .setDatabaseName(DATABASE_NAME)
                .setCollectionName(COLLECTION_NAME)
                .build();
        assertThat(provider).isNotNull();
        assertProviderNamespace(provider, DATABASE_NAME, COLLECTION_NAME);
    }
    @Test
    void createProviderWithoutDatabaseName() {
        // Neither the config nor the connection string supplies a database -> build() must fail.
        assertThatThrownBy(() -> MongoDb4Provider.newBuilder()
                .setConnectionStringSource(CON_STR_WO_DB)
                .build())
            .hasMessage("Invalid MongoDB database name: `null`");
    }
    @Test
    void createProviderWithoutDatabaseNameWithCollectionName() {
        // A collection name alone is not enough; the database is still missing.
        assertThatThrownBy(() -> MongoDb4Provider.newBuilder()
                .setConnectionStringSource(CON_STR_WO_DB)
                .setCollectionName(COLLECTION_NAME)
                .build())
            .hasMessage("Invalid MongoDB database name: `null`");
    }
    @Test
    void createProviderWithoutCollectionName() {
        // Database present but no collection from either source -> build() must fail.
        assertThatThrownBy(() -> MongoDb4Provider.newBuilder()
                .setConnectionStringSource(CON_STR_WO_DB)
                .setDatabaseName(DATABASE_NAME)
                .build())
            .hasMessage("Invalid MongoDB collection name: `null`");
    }
    @Test
    void createProviderWithDatabaseOnConnectionString() {
        // Database comes from the connection string, collection from the config.
        MongoDb4Provider provider = MongoDb4Provider.newBuilder()
                .setConnectionStringSource(CON_STR_W_DB)
                .setCollectionName(COLLECTION_NAME)
                .build();
        assertThat(provider).isNotNull();
        assertProviderNamespace(provider, "logging", COLLECTION_NAME);
    }
    @Test
    void createProviderConfigOverridesConnectionString() {
        // Explicit config values take precedence over those embedded in the connection string.
        MongoDb4Provider provider = MongoDb4Provider.newBuilder()
                .setConnectionStringSource(CON_STR_DB_COLL)
                .setCollectionName(COLLECTION_NAME)
                .setDatabaseName(DATABASE_NAME)
                .build();
        assertThat(provider).isNotNull();
        assertProviderNamespace(provider, DATABASE_NAME, COLLECTION_NAME);
    }
    // Asserts that the provider's effective namespace matches the expected database/collection.
    private static void assertProviderNamespace(MongoDb4Provider provider, String databaseName, String collectionName) {
        MongoNamespace namespace = providerNamespace(provider);
        assertThat(namespace.getDatabaseName()).isEqualTo(databaseName);
        assertThat(namespace.getCollectionName()).isEqualTo(collectionName);
    }
    // Extracts the MongoNamespace from the provider's connection. The `collection` field of
    // MongoDb4Connection is private with no accessor, so reflection is used here; this will
    // break if that field is renamed.
    private static MongoNamespace providerNamespace(MongoDb4Provider provider) {
        try {
            MongoDb4Connection connection = provider.getConnection();
            Field collectionField = MongoDb4Connection.class.getDeclaredField("collection");
            collectionField.setAccessible(true);
            @SuppressWarnings("unchecked")
            MongoCollection<Document> collection = (MongoCollection<Document>) collectionField.get(connection);
            return collection.getNamespace();
        } catch (Exception exception) {
            throw new RuntimeException(exception);
        }
    }
}
|
MongoDb4ProviderTest
|
java
|
quarkusio__quarkus
|
extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/customized/QuarkusStrategySelectorBuilder.java
|
{
"start": 3347,
"end": 3467
}
|
class ____ {
/**
* Builds the selector.
*
* @param classLoaderService The
|
QuarkusStrategySelectorBuilder
|
java
|
apache__dubbo
|
dubbo-registry/dubbo-registry-zookeeper/src/main/java/org/apache/dubbo/registry/zookeeper/aot/ZookeeperReflectionTypeDescriberRegistrar.java
|
{
"start": 1183,
"end": 2024
}
|
// Registers the reflection metadata needed for native-image (AOT) builds: ZookeeperInstance is
// accessed reflectively at runtime, so its declared members must be kept.
class ____ implements ReflectionTypeDescriberRegistrar {

    @Override
    public List<TypeDescriber> getTypeDescribers() {
        // Only one type needs reflective access from this registrar.
        final List<TypeDescriber> describers = new ArrayList<>();
        describers.add(buildTypeDescriberWithDeclared(ZookeeperInstance.class));
        return describers;
    }

    /** Builds a describer exposing the declared methods, constructors and fields of {@code type}. */
    private TypeDescriber buildTypeDescriberWithDeclared(Class<?> type) {
        final Set<MemberCategory> categories = new HashSet<>();
        categories.add(MemberCategory.DECLARED_FIELDS);
        categories.add(MemberCategory.INVOKE_DECLARED_CONSTRUCTORS);
        categories.add(MemberCategory.INVOKE_DECLARED_METHODS);
        return new TypeDescriber(
                type.getName(), null, new HashSet<>(), new HashSet<>(), new HashSet<>(), categories);
    }
}
|
ZookeeperReflectionTypeDescriberRegistrar
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/time/JodaConstructorsTest.java
|
{
"start": 2937,
"end": 3287
}
|
class ____ {
private static final Duration INTERVAL = new Duration(42L, 48);
}
""")
.doTest();
}
@Test
public void durationConstructorLongLong() {
helper
.addSourceLines(
"TestClass.java",
"""
import org.joda.time.Duration;
public
|
TestClass
|
java
|
google__dagger
|
hilt-android/main/java/dagger/hilt/android/AndroidEntryPoint.java
|
{
"start": 1475,
"end": 1923
}
|
class ____ extends FragmentActivity {
* {@literal @}Inject Foo foo;
*
* {@literal @}Override
* public void onCreate(Bundle savedInstanceState) {
* super.onCreate(savedInstanceState); // The foo field is injected in super.onCreate()
* }
* }
* </code></pre>
*
* <p>Example usage (without the Hilt Gradle Plugin):
*
* <pre><code>
* {@literal @}AndroidEntryPoint(FragmentActivity.class)
* public final
|
FooActivity
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/GetRoleMappingsResponse.java
|
{
"start": 713,
"end": 1319
}
|
/**
 * Response carrying zero or more role mappings.
 *
 * <p>Serialized as a vInt count followed by each mapping in array order.
 */
class ____ extends ActionResponse {

    /** The mappings in this response; may be empty but never {@code null}. */
    private final ExpressionRoleMapping[] mappings;

    public GetRoleMappingsResponse(ExpressionRoleMapping... mappings) {
        this.mappings = mappings;
    }

    /** Returns the role mappings contained in this response. */
    public ExpressionRoleMapping[] mappings() {
        return mappings;
    }

    /** Returns {@code true} if this response carries at least one mapping. */
    public boolean hasMappings() {
        return mappings.length != 0;
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        // Length-prefixed encoding: write the count, then each mapping in order.
        final int count = mappings.length;
        out.writeVInt(count);
        for (int i = 0; i < count; i++) {
            mappings[i].writeTo(out);
        }
    }
}
|
GetRoleMappingsResponse
|
java
|
apache__camel
|
tooling/maven/camel-package-maven-plugin/src/main/java/org/apache/camel/maven/packaging/PackageArchetypeCatalogMojo.java
|
{
"start": 2461,
"end": 8643
}
|
class ____ one of the threads it
* generated failed.
* @throws org.apache.maven.plugin.MojoFailureException something bad happened...
*/
    @Override
    public void execute() throws MojoExecutionException, MojoFailureException {
        // only generate this for the root pom
        if ("pom".equals(project.getModel().getPackaging())) {
            try {
                generateArchetypeCatalog(getLog(), project, projectHelper, outDir);
            } catch (IOException e) {
                // Wrap I/O failures as a Maven build failure with the original cause attached.
                throw new MojoFailureException("Error generating archetype catalog due " + e.getMessage(), e);
            }
        }
    }
public static void generateArchetypeCatalog(Log log, MavenProject project, MavenProjectHelper projectHelper, File outDir)
throws MojoExecutionException, IOException {
File archetypes = PackageHelper.findCamelDirectory(project.getBasedir(), "archetypes");
if (archetypes == null || !archetypes.exists()) {
throw new MojoExecutionException("Cannot find directory: archetypes");
}
log.info("Scanning for Camel Maven Archetypes from directory: " + archetypes);
// find all archetypes which are in the parent dir of the build dir
File[] dirs
= archetypes.listFiles(pathname -> pathname.getName().startsWith("camel-archetype") && pathname.isDirectory());
List<ArchetypeModel> models = new ArrayList<>();
for (File dir : dirs) {
File pom = new File(dir, "pom.xml");
if (!pom.exists() && !pom.isFile()) {
continue;
}
boolean parent = false;
ArchetypeModel model = new ArchetypeModel();
// just use a simple line by line text parser (no need for DOM) just
// to grab 4 lines of data
for (Object o : FileUtils.readLines(pom, StandardCharsets.UTF_8)) {
String line = o.toString();
// we only want to read version from parent
if (line.contains("<parent>")) {
parent = true;
continue;
}
if (line.contains("</parent>")) {
parent = false;
continue;
}
if (parent) {
// grab version from parent
String version = Strings.between(line, "<version>", "</version>");
if (version != null) {
model.setVersion(version);
}
continue;
}
String groupId = Strings.between(line, "<groupId>", "</groupId>");
String artifactId = Strings.between(line, "<artifactId>", "</artifactId>");
String description = Strings.between(line, "<description>", "</description>");
if (groupId != null && model.getGroupId() == null) {
model.setGroupId(groupId);
}
if (artifactId != null && model.getArtifactId() == null) {
model.setArtifactId(artifactId);
}
if (description != null && model.getDescription() == null) {
model.setDescription(description);
}
}
if (model.getGroupId() != null && model.getArtifactId() != null && model.getVersion() != null) {
models.add(model);
}
}
// sort the models by artifact id so its generated in same order
models.sort((o1, o2) -> o1.getArtifactId().compareToIgnoreCase(o2.getArtifactId()));
log.info("Found " + models.size() + " archetypes");
if (!models.isEmpty()) {
// make sure there is a dir
outDir.mkdirs();
File out = new File(outDir, "archetype-catalog.xml");
try (FileOutputStream fos = new FileOutputStream(out, false)) {
// write top
writeTop(fos);
// write each archetype
writeArchetypes(models, fos);
// write bottom
writeBottom(fos);
}
log.info("Saved archetype catalog to file " + out);
try {
if (projectHelper != null) {
log.info("Attaching archetype catalog to Maven project: " + project.getArtifactId());
List<String> includes = new ArrayList<>();
includes.add("archetype-catalog.xml");
projectHelper.addResource(project, outDir.getPath(), includes, new ArrayList<>());
projectHelper.attachArtifact(project, "xml", "archetype-catalog", out);
}
} catch (Exception e) {
throw new MojoExecutionException("Failed to attach artifact to Maven project. Reason: " + e, e);
}
}
}
private static void writeBottom(FileOutputStream fos) throws IOException {
String bottom = "\n </archetypes>\n</archetype-catalog>\n";
fos.write(bottom.getBytes());
}
private static void writeArchetypes(List<ArchetypeModel> models, FileOutputStream fos) throws IOException {
for (ArchetypeModel model : models) {
fos.write("\n <archetype>".getBytes());
fos.write(("\n <groupId>" + model.getGroupId() + "</groupId>").getBytes());
fos.write(("\n <artifactId>" + model.getArtifactId() + "</artifactId>").getBytes());
fos.write(("\n <version>" + model.getVersion() + "</version>").getBytes());
if (model.getDescription() != null) {
fos.write(("\n <description>" + model.getDescription() + "</description>").getBytes());
}
fos.write("\n </archetype>".getBytes());
}
}
private static void writeTop(FileOutputStream fos) throws IOException {
String top = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<archetype-catalog>\n <archetypes>";
fos.write(top.getBytes());
}
private static
|
or
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.