language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__avro | lang/java/avro/src/main/java/org/apache/avro/file/SnappyCodec.java | {
"start": 1133,
"end": 2807
} | class ____ extends CodecFactory {
static {
// if snappy isn't available, this will throw an exception which we
// can catch so we can avoid registering this codec
Snappy.getNativeLibraryVersion();
}
@Override
protected Codec createInstance() {
return new SnappyCodec();
}
}
private SnappyCodec() {
}
@Override
public String getName() {
return DataFileConstants.SNAPPY_CODEC;
}
@Override
public ByteBuffer compress(ByteBuffer in) throws IOException {
int offset = computeOffset(in);
ByteBuffer out = ByteBuffer.allocate(Snappy.maxCompressedLength(in.remaining()) + 4);
int size = Snappy.compress(in.array(), offset, in.remaining(), out.array(), 0);
crc32.reset();
crc32.update(in.array(), offset, in.remaining());
out.putInt(size, (int) crc32.getValue());
((Buffer) out).limit(size + 4);
return out;
}
@Override
public ByteBuffer decompress(ByteBuffer in) throws IOException {
int offset = computeOffset(in);
ByteBuffer out = ByteBuffer.allocate(Snappy.uncompressedLength(in.array(), offset, in.remaining() - 4));
int size = Snappy.uncompress(in.array(), offset, in.remaining() - 4, out.array(), 0);
((Buffer) out).limit(size);
crc32.reset();
crc32.update(out.array(), 0, size);
if (in.getInt(in.limit() - 4) != (int) crc32.getValue())
throw new IOException("Checksum failure");
return out;
}
@Override
public int hashCode() {
return getName().hashCode();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
return obj != null && obj.getClass() == getClass();
}
}
| Option |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/atomic/referencearray/AtomicReferenceArrayAssert_haveAtLeastOne_Test.java | {
"start": 916,
"end": 1366
} | class ____ extends AtomicReferenceArrayAssertBaseTest {
private static final Condition<Object> condition = new TestCondition<>();
@Override
protected AtomicReferenceArrayAssert<Object> invoke_api_method() {
return assertions.haveAtLeastOne(condition);
}
@Override
protected void verify_internal_effects() {
verify(arrays).assertHaveAtLeast(info(), internalArray(), 1, condition);
}
}
| AtomicReferenceArrayAssert_haveAtLeastOne_Test |
java | apache__spark | sql/catalyst/src/main/java/org/apache/spark/sql/connector/write/WriteBuilder.java | {
"start": 1058,
"end": 1390
} | interface ____ building the {@link Write}. Implementations can mix in some interfaces to
* support different ways to write data to data sources.
* <p>
* Unless modified by a mixin interface, the {@link Write} configured by this builder is to
* append data without affecting existing data.
*
* @since 3.0.0
*/
@Evolving
public | for |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/Dhis2EndpointBuilderFactory.java | {
"start": 34002,
"end": 37916
} | interface ____ extends EndpointProducerBuilder {
default Dhis2EndpointProducerBuilder basic() {
return (Dhis2EndpointProducerBuilder) this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedDhis2EndpointProducerBuilder lazyStartProducer(boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedDhis2EndpointProducerBuilder lazyStartProducer(String lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* References a user-defined
* org.hisp.dhis.integration.sdk.api.Dhis2Client. This option is
* mutually exclusive to the baseApiUrl, username, password, and
* personalAccessToken options.
*
* The option is a:
* <code>org.hisp.dhis.integration.sdk.api.Dhis2Client</code> type.
*
* Group: advanced
*
* @param client the value to set
* @return the dsl builder
*/
default AdvancedDhis2EndpointProducerBuilder client(org.hisp.dhis.integration.sdk.api.Dhis2Client client) {
doSetProperty("client", client);
return this;
}
/**
* References a user-defined
* org.hisp.dhis.integration.sdk.api.Dhis2Client. This option is
* mutually exclusive to the baseApiUrl, username, password, and
* personalAccessToken options.
*
* The option will be converted to a
* <code>org.hisp.dhis.integration.sdk.api.Dhis2Client</code> type.
*
* Group: advanced
*
* @param client the value to set
* @return the dsl builder
*/
default AdvancedDhis2EndpointProducerBuilder client(String client) {
doSetProperty("client", client);
return this;
}
}
/**
* Builder for endpoint for the DHIS2 component.
*/
public | AdvancedDhis2EndpointProducerBuilder |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/MixedDescriptorsTest.java | {
"start": 934,
"end": 1483
} | class ____ {
private final CompilationTestHelper compilationHelper =
CompilationTestHelper.newInstance(MixedDescriptors.class, getClass());
@Test
public void negative() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import com.google.errorprone.bugpatterns.proto.ProtoTest.TestFieldProtoMessage;
import com.google.errorprone.bugpatterns.proto.ProtoTest.TestProtoMessage;
import com.google.protobuf.Descriptors.Descriptor;
final | MixedDescriptorsTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/loading/multiLoad/MultiLoadSubSelectCollectionTest.java | {
"start": 1543,
"end": 3029
} | class ____ {
@BeforeEach
public void before(SessionFactoryScope scope) {
scope.inTransaction( session -> {
session.setCacheMode( CacheMode.IGNORE );
for ( int i = 1; i <= 60; i++ ) {
final Parent p = new Parent( i, "Entity #" + i );
for ( int j = 0; j < i; j++ ) {
Child child = new Child();
child.setParent( p );
p.getChildren().add( child );
}
session.persist( p );
}
} );
}
@AfterEach
public void after(SessionFactoryScope scope) {
scope.dropData();
}
@Test
@JiraKey(value = "HHH-12740")
public void testSubselect(SessionFactoryScope scope) {
scope.inTransaction(session -> {
List<Parent> list = session.findMultiple( Parent.class, ids( 56 ) );
assertEquals( 56, list.size() );
// None of the collections should be loaded yet
for ( Parent p : list ) {
assertFalse( Hibernate.isInitialized( p.children ) );
}
// When the first collection is loaded, the full collection
// should be loaded.
Hibernate.initialize( list.get( 0 ).children );
for ( int i = 0; i < 56; i++ ) {
assertTrue( Hibernate.isInitialized( list.get( i ).children ) );
assertEquals( i + 1, list.get( i ).children.size() );
}
} );
}
private List<Integer> ids(int count) {
var ids = new ArrayList<Integer>(count);
for ( int i = 1; i <= count; i++ ) {
ids.add(i);
}
return ids;
}
@Entity(name = "Parent")
@Table(name = "Parent")
@BatchSize(size = 15)
public static | MultiLoadSubSelectCollectionTest |
java | elastic__elasticsearch | libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorerSupplier.java | {
"start": 4683,
"end": 5532
} | class ____ extends Int7SQVectorScorerSupplier {
public EuclideanSupplier(MemorySegmentAccessInput input, QuantizedByteVectorValues values, float scoreCorrectionConstant) {
super(input, values, scoreCorrectionConstant, fromVectorSimilarity(EUCLIDEAN, scoreCorrectionConstant, BITS));
}
@Override
float scoreFromSegments(MemorySegment a, float aOffset, MemorySegment b, float bOffset) {
int squareDistance = Similarities.squareDistance7u(a, b, dims);
float adjustedDistance = squareDistance * scoreCorrectionConstant;
return 1 / (1f + adjustedDistance);
}
@Override
public EuclideanSupplier copy() {
return new EuclideanSupplier(input.clone(), values, scoreCorrectionConstant);
}
}
public static final | EuclideanSupplier |
java | quarkusio__quarkus | extensions/smallrye-graphql/deployment/src/test/java/io/quarkus/smallrye/graphql/deployment/BeanValidationGraphQLDirectivesTest.java | {
"start": 2042,
"end": 2283
} | class ____ {
@Size(min = 5, max = 20)
private String name;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
}
| Person |
java | FasterXML__jackson-core | src/main/java/tools/jackson/core/util/JsonRecyclerPools.java | {
"start": 526,
"end": 3386
} | class ____
{
/**
* Method to call to get the default recycler pool instance:
* as of Jackson 3.0 and later this is same as calling
* {@link #newConcurrentDequePool()}.
*
* @return the default {@link RecyclerPool} implementation to use
* if no specific implementation desired.
*/
public static RecyclerPool<BufferRecycler> defaultPool() {
return newConcurrentDequePool();
}
/**
* Accessor for getting the shared/global {@link ThreadLocalPool} instance
* (due to design only one instance ever needed)
*
* @return Globally shared instance of {@link ThreadLocalPool}
*/
public static RecyclerPool<BufferRecycler> threadLocalPool() {
return ThreadLocalPool.GLOBAL;
}
/**
* Accessor for getting the shared/global {@link NonRecyclingPool} instance
* (due to design only one instance ever needed)
*
* @return Globally shared instance of {@link NonRecyclingPool}.
*/
public static RecyclerPool<BufferRecycler> nonRecyclingPool() {
return NonRecyclingPool.GLOBAL;
}
/**
* Accessor for getting the shared/global {@link ConcurrentDequePool} instance.
*
* @return Globally shared instance of {@link NonRecyclingPool}.
*/
public static RecyclerPool<BufferRecycler> sharedConcurrentDequePool() {
return ConcurrentDequePool.GLOBAL;
}
/**
* Accessor for constructing a new, non-shared {@link ConcurrentDequePool} instance.
*
* @return Globally shared instance of {@link NonRecyclingPool}.
*/
public static RecyclerPool<BufferRecycler> newConcurrentDequePool() {
return ConcurrentDequePool.construct();
}
/**
* Accessor for getting the shared/global {@link BoundedPool} instance.
*
* @return Globally shared instance of {@link BoundedPool}.
*/
public static RecyclerPool<BufferRecycler> sharedBoundedPool() {
return BoundedPool.GLOBAL;
}
/**
* Accessor for constructing a new, non-shared {@link BoundedPool} instance.
*
* @param size Maximum number of values to pool
*
* @return Globally shared instance of {@link BoundedPool}.
*/
public static RecyclerPool<BufferRecycler> newBoundedPool(int size) {
return BoundedPool.construct(size);
}
/*
/**********************************************************************
/* Concrete RecyclerPool implementations for recycling BufferRecyclers
/**********************************************************************
*/
/**
* {@link ThreadLocal}-based {@link RecyclerPool} implementation used for
* recycling {@link BufferRecycler} instances:
* see {@link RecyclerPool.ThreadLocalPoolBase} for full explanation
* of functioning.
*/
public static | JsonRecyclerPools |
java | spring-projects__spring-boot | module/spring-boot-webflux/src/main/java/org/springframework/boot/webflux/actuate/endpoint/web/AbstractWebFluxEndpointHandlerMapping.java | {
"start": 18844,
"end": 19276
} | class ____ {
private final ReactiveWebOperation operation;
ReadOperationHandler(ReactiveWebOperation operation) {
this.operation = operation;
}
@ResponseBody
@Reflective
Publisher<ResponseEntity<Object>> handle(ServerWebExchange exchange) {
return this.operation.handle(exchange, null);
}
@Override
public String toString() {
return this.operation.toString();
}
}
private static | ReadOperationHandler |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/multipart/MultipartDataInputTest.java | {
"start": 4041,
"end": 5054
} | class ____ {
@POST
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Produces(MediaType.APPLICATION_JSON)
@Path("{count}")
public String hello(MultipartFormDataInput input, int count) throws IOException {
Map<String, Collection<FormValue>> map = input.getValues();
List<Item> items = new ArrayList<>();
for (var entry : map.entrySet()) {
for (FormValue value : entry.getValue()) {
items.add(new Item(
entry.getKey(),
value.isFileItem() ? value.getFileItem().getFileSize() : value.getValue().length(),
value.getCharset(),
value.getFileName(),
value.isFileItem(),
value.getHeaders()));
}
}
return new ObjectMapper().writeValueAsString(new Result(items, count));
}
}
public static | Resource |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/InitializeInlineTest.java | {
"start": 2158,
"end": 2618
} | class ____ {
int test() {
int a;
if (true) {
a = 1;
return a;
}
a = 2;
return a;
}
}
""")
.expectUnchanged()
.doTest();
}
@Test
public void assignedWithinTry_noMatch() {
compilationHelper
.addInputLines(
"Test.java",
"""
| Test |
java | google__gson | gson/src/main/java/com/google/gson/JsonSyntaxException.java | {
"start": 787,
"end": 1379
} | class ____ extends JsonParseException {
private static final long serialVersionUID = 1L;
public JsonSyntaxException(String msg) {
super(msg);
}
public JsonSyntaxException(String msg, Throwable cause) {
super(msg, cause);
}
/**
* Creates exception with the specified cause. Consider using {@link #JsonSyntaxException(String,
* Throwable)} instead if you can describe what actually happened.
*
* @param cause root exception that caused this exception to be thrown.
*/
public JsonSyntaxException(Throwable cause) {
super(cause);
}
}
| JsonSyntaxException |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/core/startup/RuntimeMappingDeployment.java | {
"start": 684,
"end": 5254
} | class ____ {
private final Map<String, TreeMap<URITemplate, List<RequestMapper.RequestPath<RuntimeResource>>>> classTemplates;
private final SortedMap<URITemplate, List<RequestMapper.RequestPath<RuntimeResource>>> nullMethod;
private String currentHttpMethod;
private ArrayList<RequestMapper.RequestPath<RuntimeResource>> currentMapperPerMethodTemplates;
private Map<String, RequestMapper<RuntimeResource>> classMapper;
private int maxMethodTemplateNameCount = -1;
RuntimeMappingDeployment(
Map<String, TreeMap<URITemplate, List<RequestMapper.RequestPath<RuntimeResource>>>> classTemplates) {
this.classTemplates = classTemplates;
this.nullMethod = classTemplates.get(null);
}
int getMaxMethodTemplateNameCount() {
if (maxMethodTemplateNameCount == -1) {
throw new IllegalStateException("Method can only be called after 'buildClassMapper'");
}
return maxMethodTemplateNameCount;
}
Map<String, RequestMapper<RuntimeResource>> buildClassMapper() {
classMapper = new HashMap<>();
maxMethodTemplateNameCount = 0;
classTemplates.forEach(this::forEachClassTemplate);
return classMapper;
}
private void forEachClassTemplate(String httpMethod,
TreeMap<URITemplate, List<RequestMapper.RequestPath<RuntimeResource>>> perMethodTemplateMap) {
currentHttpMethod = httpMethod;
if (nullMethod != null) {
for (var nm : nullMethod.entrySet()) {
if (!perMethodTemplateMap.containsKey(nm.getKey())) {
//resource methods take precedence
//just skip sub resource locators for now
//may need to be revisited if we want to pass the TCK 100%
perMethodTemplateMap.put(nm.getKey(), nm.getValue());
}
}
}
//now we have all our possible resources
currentMapperPerMethodTemplates = new ArrayList<>();
perMethodTemplateMap.forEach(this::forEachMethodTemplateMap);
classMapper.put(httpMethod, new RequestMapper<>(currentMapperPerMethodTemplates));
}
private void forEachMethodTemplateMap(URITemplate path, List<RequestMapper.RequestPath<RuntimeResource>> requestPaths) {
int methodTemplateNameCount = path.countPathParamNames();
if (methodTemplateNameCount > maxMethodTemplateNameCount) {
maxMethodTemplateNameCount = methodTemplateNameCount;
}
if (requestPaths.size() == 1) {
//simple case, only one match
currentMapperPerMethodTemplates.addAll(requestPaths);
} else {
List<RuntimeResource> resources = new ArrayList<>(requestPaths.size());
for (int j = 0; j < requestPaths.size(); j++) {
resources.add(requestPaths.get(j).value);
}
MediaTypeMapper mapper = new MediaTypeMapper(resources);
//now we just create a fake RuntimeResource
//we could add another layer of indirection, however this is not a common case
//so we don't want to add any extra latency into the common case
RuntimeResource fake = new RuntimeResource(currentHttpMethod, path, null, null, Collections.emptyList(),
null, null,
new ServerRestHandler[] { mapper }, null, new Class[0], null, false,
false, null, null, null, null, null,
Collections.emptyMap());
currentMapperPerMethodTemplates.add(new RequestMapper.RequestPath<>(false, fake.getPath(), fake));
}
}
static void buildMethodMapper(
Map<String, TreeMap<URITemplate, List<RequestMapper.RequestPath<RuntimeResource>>>> perClassMappers,
ResourceMethod method, RuntimeResource runtimeResource) {
TreeMap<URITemplate, List<RequestMapper.RequestPath<RuntimeResource>>> templateMap = perClassMappers
.get(method.getHttpMethod());
if (templateMap == null) {
perClassMappers.put(method.getHttpMethod(), templateMap = new TreeMap<>());
}
List<RequestMapper.RequestPath<RuntimeResource>> list = templateMap.get(runtimeResource.getPath());
if (list == null) {
templateMap.put(runtimeResource.getPath(), list = new ArrayList<>());
}
list.add(new RequestMapper.RequestPath<>(method.getHttpMethod() == null, runtimeResource.getPath(),
runtimeResource));
}
}
| RuntimeMappingDeployment |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/server/vertx/src/test/java/org/jboss/resteasy/reactive/server/vertx/test/resource/basic/ReponseInfoTest.java | {
"start": 1024,
"end": 2509
} | class ____ {
static Client client;
@BeforeAll
public static void before() throws Exception {
client = ClientBuilder.newClient();
}
@AfterAll
public static void close() {
client.close();
}
@RegisterExtension
static ResteasyReactiveUnitTest testExtension = new ResteasyReactiveUnitTest()
.setArchiveProducer(new Supplier<>() {
@Override
public JavaArchive get() {
JavaArchive war = ShrinkWrap.create(JavaArchive.class);
war.addClasses(PortProviderUtil.class, ReponseInfoTest.class);
// Use of PortProviderUtil in the deployment
war.addClasses(ResponseInfoResource.class);
return war;
}
});
private void basicTest(String path) {
WebTarget base = client.target(PortProviderUtil.generateURL(path, ReponseInfoTest.class.getSimpleName()));
Response response = base.request().get();
assertEquals(Response.Status.OK.getStatusCode(), response.getStatus());
assertEquals(true, response.readEntity(boolean.class));
response.close();
}
/**
* @tpTestDetails Check URI location from HTTP headers from response prepared in resource
* @tpSince RESTEasy 3.0.16
*/
@Test
@DisplayName("Test Uri Info")
public void testUriInfo() throws Exception {
basicTest("/simple");
}
}
| ReponseInfoTest |
java | google__jimfs | jimfs/src/test/java/com/google/common/jimfs/JimfsFileSystemCloseTest.java | {
"start": 2381,
"end": 9489
} | class ____ {
private JimfsFileSystem fs = (JimfsFileSystem) Jimfs.newFileSystem(Configuration.unix());
@Test
public void testIsNotOpen() throws IOException {
assertTrue(fs.isOpen());
fs.close();
assertFalse(fs.isOpen());
}
@Test
public void testIsNotAvailableFromProvider() throws IOException {
URI uri = fs.getUri();
assertEquals(fs, FileSystems.getFileSystem(uri));
fs.close();
assertThrows(FileSystemNotFoundException.class, () -> FileSystems.getFileSystem(uri));
}
@Test
public void testOpenStreamsClosed() throws IOException {
Path p = fs.getPath("/foo");
OutputStream out = Files.newOutputStream(p);
InputStream in = Files.newInputStream(p);
out.write(1);
assertEquals(1, in.read());
fs.close();
IOException expected = assertThrows(IOException.class, () -> out.write(1));
assertThat(expected).hasMessageThat().isEqualTo("stream is closed");
expected = assertThrows(IOException.class, () -> in.read());
assertThat(expected).hasMessageThat().isEqualTo("stream is closed");
}
@Test
public void testOpenChannelsClosed() throws IOException {
Path p = fs.getPath("/foo");
FileChannel fc = FileChannel.open(p, READ, WRITE, CREATE);
SeekableByteChannel sbc = Files.newByteChannel(p, READ);
AsynchronousFileChannel afc = AsynchronousFileChannel.open(p, READ, WRITE);
assertTrue(fc.isOpen());
assertTrue(sbc.isOpen());
assertTrue(afc.isOpen());
fs.close();
assertFalse(fc.isOpen());
assertFalse(sbc.isOpen());
assertFalse(afc.isOpen());
assertThrows(ClosedChannelException.class, () -> fc.size());
assertThrows(ClosedChannelException.class, () -> sbc.size());
assertThrows(ClosedChannelException.class, () -> afc.size());
}
@Test
public void testOpenDirectoryStreamsClosed() throws IOException {
Path p = fs.getPath("/foo");
Files.createDirectory(p);
try (DirectoryStream<Path> stream = Files.newDirectoryStream(p)) {
fs.close();
assertThrows(ClosedDirectoryStreamException.class, () -> stream.iterator());
}
}
@Test
public void testOpenWatchServicesClosed() throws IOException {
WatchService ws1 = fs.newWatchService();
WatchService ws2 = fs.newWatchService();
assertNull(ws1.poll());
assertNull(ws2.poll());
fs.close();
assertThrows(ClosedWatchServiceException.class, () -> ws1.poll());
assertThrows(ClosedWatchServiceException.class, () -> ws2.poll());
}
@Test
public void testPathMethodsThrow() throws IOException {
Path p = fs.getPath("/foo");
Files.createDirectory(p);
WatchService ws = fs.newWatchService();
fs.close();
assertThrows(
ClosedWatchServiceException.class,
() -> p.register(ws, ENTRY_CREATE, ENTRY_DELETE, ENTRY_MODIFY));
assertThrows(ClosedFileSystemException.class, () -> p.toRealPath());
// While technically (according to the FileSystem.close() spec) all methods on Path should
// probably throw, we only throw for methods that access the file system itself in some way...
// path manipulation methods seem totally harmless to keep working, and I don't see any need to
// add the overhead of checking that the file system is open for each of those method calls.
}
@Test
public void testOpenFileAttributeViewsThrow() throws IOException {
Path p = fs.getPath("/foo");
Files.createFile(p);
BasicFileAttributeView view = Files.getFileAttributeView(p, BasicFileAttributeView.class);
fs.close();
assertThrows(ClosedFileSystemException.class, () -> view.readAttributes());
assertThrows(ClosedFileSystemException.class, () -> view.setTimes(null, null, null));
}
@Test
public void testFileSystemMethodsThrow() throws IOException {
fs.close();
assertThrows(ClosedFileSystemException.class, () -> fs.getPath("/foo"));
assertThrows(ClosedFileSystemException.class, () -> fs.getRootDirectories());
assertThrows(ClosedFileSystemException.class, () -> fs.getFileStores());
assertThrows(ClosedFileSystemException.class, () -> fs.getPathMatcher("glob:*.java"));
assertThrows(ClosedFileSystemException.class, () -> fs.getUserPrincipalLookupService());
assertThrows(ClosedFileSystemException.class, () -> fs.newWatchService());
assertThrows(ClosedFileSystemException.class, () -> fs.supportedFileAttributeViews());
}
@Test
public void testFilesMethodsThrow() throws IOException {
Path file = fs.getPath("/file");
Path dir = fs.getPath("/dir");
Path nothing = fs.getPath("/nothing");
Files.createDirectory(dir);
Files.createFile(file);
fs.close();
// not exhaustive, but should cover every major type of functionality accessible through Files
// TODO(cgdecker): reflectively invoke all methods with default arguments?
assertThrows(ClosedFileSystemException.class, () -> Files.delete(file));
assertThrows(ClosedFileSystemException.class, () -> Files.createDirectory(nothing));
assertThrows(ClosedFileSystemException.class, () -> Files.createFile(nothing));
assertThrows(
ClosedFileSystemException.class,
() -> Files.write(nothing, ImmutableList.of("hello world"), UTF_8));
assertThrows(ClosedFileSystemException.class, () -> Files.newInputStream(file));
assertThrows(ClosedFileSystemException.class, () -> Files.newOutputStream(file));
assertThrows(ClosedFileSystemException.class, () -> Files.newByteChannel(file));
assertThrows(ClosedFileSystemException.class, () -> Files.newDirectoryStream(dir));
assertThrows(ClosedFileSystemException.class, () -> Files.copy(file, nothing));
assertThrows(ClosedFileSystemException.class, () -> Files.move(file, nothing));
assertThrows(ClosedFileSystemException.class, () -> Files.copy(dir, nothing));
assertThrows(ClosedFileSystemException.class, () -> Files.move(dir, nothing));
assertThrows(ClosedFileSystemException.class, () -> Files.createSymbolicLink(nothing, file));
assertThrows(ClosedFileSystemException.class, () -> Files.createLink(nothing, file));
assertThrows(ClosedFileSystemException.class, () -> Files.exists(file));
assertThrows(ClosedFileSystemException.class, () -> Files.getAttribute(file, "size"));
assertThrows(
ClosedFileSystemException.class,
() -> Files.setAttribute(file, "lastModifiedTime", FileTime.fromMillis(0)));
assertThrows(
ClosedFileSystemException.class,
() -> Files.getFileAttributeView(file, BasicFileAttributeView.class));
assertThrows(
ClosedFileSystemException.class,
() -> Files.readAttributes(file, "basic:size,lastModifiedTime"));
assertThrows(
ClosedFileSystemException.class,
() -> Files.readAttributes(file, BasicFileAttributes.class));
assertThrows(ClosedFileSystemException.class, () -> Files.isDirectory(dir));
assertThrows(ClosedFileSystemException.class, () -> Files.readAllBytes(file));
assertThrows(ClosedFileSystemException.class, () -> Files.isReadable(file));
}
}
| JimfsFileSystemCloseTest |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/logging/log4j2/StructuredLogLayoutTests.java | {
"start": 5696,
"end": 5966
} | class ____ or one of the common formats: [ecs, gelf, logstash]");
}
private Builder newBuilder() {
Builder builder = StructuredLogLayout.newBuilder();
ReflectionTestUtils.setField(builder, "loggerContext", this.loggerContext);
return builder;
}
static final | name |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/CLITestHelperDFS.java | {
"start": 1102,
"end": 1716
} | class ____ extends CLITestHelper.TestConfigFileParser {
@Override
public void endElement(String uri, String localName, String qName)
throws SAXException {
if (qName.equals("dfs-admin-command")) {
if (testCommands != null) {
testCommands.add(new CLITestCmdDFS(charString,
new CLICommandDFSAdmin()));
} else if (cleanupCommands != null) {
cleanupCommands.add(new CLITestCmdDFS(charString,
new CLICommandDFSAdmin()));
}
} else {
super.endElement(uri, localName, qName);
}
}
}
}
| TestConfigFileParserDFS |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/inject/MoreThanOneInjectableConstructorTest.java | {
"start": 1447,
"end": 1630
} | class ____ {
/** Class has 2 constructors, both are injectable */
// BUG: Diagnostic contains: MoreThanOneInjectableConstructor
public | MoreThanOneInjectableConstructorPositiveCases |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/stubbing/Answer.java | {
"start": 989,
"end": 1248
} | interface ____<T> {
/**
* @param invocation the invocation on the mock.
*
* @return the value to be returned
*
* @throws Throwable the throwable to be thrown
*/
T answer(InvocationOnMock invocation) throws Throwable;
}
| Answer |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/api/StatefulRedisConnection.java | {
"start": 697,
"end": 2035
} | interface ____<K, V> extends StatefulConnection<K, V> {
/**
*
* @return true, if the connection is within a transaction.
*/
boolean isMulti();
/**
* Returns the {@link RedisCommands} API for the current connection. Does not create a new connection.
*
* @return the synchronous API for the underlying connection.
*/
RedisCommands<K, V> sync();
/**
* Returns the {@link RedisAsyncCommands} API for the current connection. Does not create a new connection.
*
* @return the asynchronous API for the underlying connection.
*/
RedisAsyncCommands<K, V> async();
/**
* Returns the {@link RedisReactiveCommands} API for the current connection. Does not create a new connection.
*
* @return the reactive API for the underlying connection.
*/
RedisReactiveCommands<K, V> reactive();
/**
* Add a new {@link PushListener listener} to consume push messages.
*
* @param listener the listener, must not be {@code null}.
* @since 6.0
*/
void addListener(PushListener listener);
/**
* Remove an existing {@link PushListener listener}.
*
* @param listener the listener, must not be {@code null}.
* @since 6.0
*/
void removeListener(PushListener listener);
}
| StatefulRedisConnection |
java | netty__netty | transport-udt/src/test/java/io/netty/test/udt/nio/NioUdtProviderTest.java | {
"start": 1301,
"end": 3464
} | class ____ extends AbstractUdtTest {
/**
* verify factory
*/
@Test
public void provideFactory() {
NioUdtByteAcceptorChannel nioUdtByteAcceptorChannel
= (NioUdtByteAcceptorChannel) NioUdtProvider.BYTE_ACCEPTOR.newChannel();
NioUdtByteConnectorChannel nioUdtByteConnectorChannel
= (NioUdtByteConnectorChannel) NioUdtProvider.BYTE_CONNECTOR.newChannel();
NioUdtByteRendezvousChannel nioUdtByteRendezvousChannel
= (NioUdtByteRendezvousChannel) NioUdtProvider.BYTE_RENDEZVOUS.newChannel();
NioUdtMessageAcceptorChannel nioUdtMessageAcceptorChannel
= (NioUdtMessageAcceptorChannel) NioUdtProvider.MESSAGE_ACCEPTOR.newChannel();
NioUdtMessageConnectorChannel nioUdtMessageConnectorChannel
= (NioUdtMessageConnectorChannel) NioUdtProvider.MESSAGE_CONNECTOR.newChannel();
NioUdtMessageRendezvousChannel nioUdtMessageRendezvousChannel
= (NioUdtMessageRendezvousChannel) NioUdtProvider.MESSAGE_RENDEZVOUS.newChannel();
// bytes
assertNotNull(nioUdtByteAcceptorChannel);
assertNotNull(nioUdtByteConnectorChannel);
assertNotNull(nioUdtByteRendezvousChannel);
// message
assertNotNull(nioUdtMessageAcceptorChannel);
assertNotNull(nioUdtMessageConnectorChannel);
assertNotNull(nioUdtMessageRendezvousChannel);
// channel
assertNotNull(NioUdtProvider.channelUDT(nioUdtByteAcceptorChannel));
assertNotNull(NioUdtProvider.channelUDT(nioUdtByteConnectorChannel));
assertNotNull(NioUdtProvider.channelUDT(nioUdtByteRendezvousChannel));
assertNotNull(NioUdtProvider.channelUDT(nioUdtMessageAcceptorChannel));
assertNotNull(NioUdtProvider.channelUDT(nioUdtMessageConnectorChannel));
assertNotNull(NioUdtProvider.channelUDT(nioUdtMessageRendezvousChannel));
// acceptor types
assertInstanceOf(UdtServerChannel.class, NioUdtProvider.BYTE_ACCEPTOR.newChannel());
assertInstanceOf(UdtServerChannel.class, NioUdtProvider.MESSAGE_ACCEPTOR.newChannel());
}
}
| NioUdtProviderTest |
java | apache__rocketmq | common/src/main/java/org/apache/rocketmq/common/filter/ExpressionType.java | {
"start": 854,
"end": 2319
} | class ____ {
/**
* <ul>
* Keywords:
* <li>{@code AND, OR, NOT, BETWEEN, IN, TRUE, FALSE, IS, NULL}</li>
* </ul>
* <p/>
* <ul>
* Data type:
* <li>Boolean, like: TRUE, FALSE</li>
* <li>String, like: 'abc'</li>
* <li>Decimal, like: 123</li>
* <li>Float number, like: 3.1415</li>
* </ul>
* <p/>
* <ul>
* Grammar:
* <li>{@code AND, OR}</li>
* <li>{@code >, >=, <, <=, =}</li>
* <li>{@code BETWEEN A AND B}, equals to {@code >=A AND <=B}</li>
* <li>{@code NOT BETWEEN A AND B}, equals to {@code >B OR <A}</li>
* <li>{@code IN ('a', 'b')}, equals to {@code ='a' OR ='b'}, this operation only support String type.</li>
* <li>{@code IS NULL}, {@code IS NOT NULL}, check parameter whether is null, or not.</li>
* <li>{@code =TRUE}, {@code =FALSE}, check parameter whether is true, or false.</li>
* </ul>
* <p/>
* <p>
* Example:
* (a > 10 AND a < 100) OR (b IS NOT NULL AND b=TRUE)
* </p>
*/
public static final String SQL92 = "SQL92";
/**
* Only support or operation such as
* "tag1 || tag2 || tag3", <br>
* If null or * expression,meaning subscribe all.
*/
public static final String TAG = "TAG";
public static boolean isTagType(String type) {
if (type == null || "".equals(type) || TAG.equals(type)) {
return true;
}
return false;
}
}
| ExpressionType |
java | apache__camel | components/camel-jgroups/src/main/java/org/apache/camel/component/jgroups/JGroupsProducer.java | {
"start": 1147,
"end": 2925
} | class ____ extends DefaultProducer {
private static final Logger LOG = LoggerFactory.getLogger(JGroupsProducer.class);
// Producer settings
private final JGroupsEndpoint endpoint;
private final String clusterName;
// Constructor
public JGroupsProducer(JGroupsEndpoint endpoint, String clusterName) {
super(endpoint);
this.endpoint = endpoint;
this.clusterName = clusterName;
}
// Life cycle callbacks
@Override
protected void doStart() throws Exception {
super.doStart();
endpoint.connect();
}
@Override
protected void doStop() throws Exception {
endpoint.disconnect();
super.doStop();
}
// Processing logic
@Override
public void process(Exchange exchange) throws Exception {
Object body = exchange.getIn().getBody();
if (body != null) {
Address destinationAddress = exchange.getIn().getHeader(JGroupsConstants.HEADER_JGROUPS_DEST, Address.class);
Address sourceAddress = exchange.getIn().getHeader(JGroupsConstants.HEADER_JGROUPS_SRC, Address.class);
LOG.debug("Posting: {} to cluster: {}", body, clusterName);
if (destinationAddress != null) {
LOG.debug("Posting to custom destination address: {}", destinationAddress);
}
if (sourceAddress != null) {
LOG.debug("Posting from custom source address: {}", sourceAddress);
}
Message message = new ObjectMessage(destinationAddress, body);
message.setSrc(sourceAddress);
endpoint.getResolvedChannel().send(message);
} else {
LOG.debug("Body is null, cannot post to channel.");
}
}
}
| JGroupsProducer |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviation.java | {
"start": 4633,
"end": 4843
} | class ____ {
public long[] values = new long[2];
public int count;
}
/**
* Evaluator for integers.
* <p>
* To avoid integer overflows, we're using the {@link Longs} | Longs |
java | google__guice | core/src/com/google/inject/util/Enhanced.java | {
"start": 913,
"end": 1129
} | class ____ Guice enhanced with AOP functionality. */
public static boolean isEnhanced(Class<?> clazz) {
return clazz.getSimpleName().contains(BytecodeGen.ENHANCER_BY_GUICE_MARKER);
}
/**
* If the input | that |
java | elastic__elasticsearch | x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateMetricDoubleFieldMapper.java | {
"start": 5107,
"end": 5205
} | enum ____ {
min,
max,
sum,
value_count
}
public static | Metric |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/mapper/LeafRuntimeField.java | {
"start": 880,
"end": 2127
} | class ____ implements RuntimeField {
private final String name;
private final MappedFieldType mappedFieldType;
private final List<FieldMapper.Parameter<?>> parameters;
public LeafRuntimeField(String name, MappedFieldType mappedFieldType, List<FieldMapper.Parameter<?>> parameters) {
this.name = name;
this.mappedFieldType = mappedFieldType;
this.parameters = parameters;
assert mappedFieldType.name().endsWith(name) : "full name: " + mappedFieldType.name() + " - leaf name: " + name;
}
@Override
public String name() {
return mappedFieldType.name();
}
@Override
public Stream<MappedFieldType> asMappedFieldTypes() {
return Stream.of(mappedFieldType);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name);
builder.field("type", mappedFieldType.typeName());
boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
for (FieldMapper.Parameter<?> parameter : parameters) {
parameter.toXContent(builder, includeDefaults);
}
builder.endObject();
return builder;
}
}
| LeafRuntimeField |
java | spring-projects__spring-boot | module/spring-boot-hazelcast/src/test/java/org/springframework/boot/hazelcast/autoconfigure/HazelcastAutoConfigurationServerTests.java | {
"start": 11133,
"end": 11381
} | class ____ {
@Bean
Config anotherHazelcastConfig() {
Config config = new Config();
config.setProperty(HazelcastServerConfiguration.HAZELCAST_LOGGING_TYPE, "jdk");
return config;
}
}
@SpringAware
static | HazelcastConfigWithJDKLogging |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/internal/operators/single/SingleObserveOnTest.java | {
"start": 974,
"end": 1721
} | class ____ extends RxJavaTest {
@Test
public void dispose() {
TestHelper.checkDisposed(Single.just(1).observeOn(Schedulers.single()));
}
@Test
public void doubleOnSubscribe() {
TestHelper.checkDoubleOnSubscribeSingle(new Function<Single<Object>, SingleSource<Object>>() {
@Override
public SingleSource<Object> apply(Single<Object> s) throws Exception {
return s.observeOn(Schedulers.single());
}
});
}
@Test
public void error() {
Single.error(new TestException())
.observeOn(Schedulers.single())
.test()
.awaitDone(5, TimeUnit.SECONDS)
.assertFailure(TestException.class);
}
}
| SingleObserveOnTest |
java | apache__camel | components/camel-jooq/src/main/java/org/apache/camel/component/jooq/JooqComponent.java | {
"start": 1110,
"end": 2985
} | class ____ extends HealthCheckComponent {
@Metadata(description = "Component configuration (database connection, database entity type, etc.)")
private JooqConfiguration configuration;
@Override
protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
JooqConfiguration conf = configuration != null ? configuration.copy() : new JooqConfiguration();
JooqEndpoint endpoint = new JooqEndpoint(uri, this, conf);
setProperties(endpoint, parameters);
initConfiguration(getCamelContext(), conf, remaining);
return endpoint;
}
public JooqConfiguration getConfiguration() {
return configuration;
}
public void setConfiguration(JooqConfiguration jooqConfiguration) {
this.configuration = jooqConfiguration;
}
private static void initConfiguration(CamelContext camelContext, JooqConfiguration configuration, String remaining) {
if (remaining == null) {
return;
}
String[] parts = remaining.split("/");
if (parts.length == 0 || parts.length > 2) {
throw new IllegalArgumentException("Unexpected URI format. Expected ... , found '" + remaining + "'");
}
String className = parts[0];
Class<?> type = camelContext.getClassResolver().resolveClass(className);
if (type != null) {
configuration.setEntityType(type);
}
if (parts.length > 1) {
String op = parts[1];
JooqOperation operation = camelContext.getTypeConverter().convertTo(JooqOperation.class, op);
if (operation != null) {
configuration.setOperation(operation);
} else {
throw new IllegalArgumentException("Wrong operation: " + op);
}
}
}
}
| JooqComponent |
java | micronaut-projects__micronaut-core | core-processor/src/main/java/io/micronaut/inject/ast/ImportedClass.java | {
"start": 1339,
"end": 1412
} | class ____ the originating class.
*/
String originatingClass();
}
| of |
java | elastic__elasticsearch | x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/GroupByOptimizer.java | {
"start": 666,
"end": 2832
} | class ____ {
private GroupByOptimizer() {}
/**
* Returns an ordered collection of group by fields in order to get better performance.
*
* The decision is based on the type and whether the input field is a indexed/runtime/script field
*
* TODO: take index sorting into account
*
* @param groups group by as defined by the user
* @param runtimeFields set of runtime fields
* @return collection in order of priority
*/
static Collection<Entry<String, SingleGroupSource>> reorderGroups(Map<String, SingleGroupSource> groups, Set<String> runtimeFields) {
if (groups.size() == 1) {
return groups.entrySet();
}
List<Tuple<Entry<String, SingleGroupSource>, Integer>> prioritizedGroups = new ArrayList<>(groups.size());
// respect the order in the configuration by giving every entry a base priority
int basePriority = groups.size();
for (Entry<String, SingleGroupSource> groupBy : groups.entrySet()) {
// prefer indexed fields over runtime fields over scripts
int priority = basePriority-- + (groupBy.getValue().getScriptConfig() == null
? runtimeFields.contains(groupBy.getValue().getField()) ? 250 : 500
: 0);
switch (groupBy.getValue().getType()) {
case DATE_HISTOGRAM:
priority += 4000;
break;
case HISTOGRAM:
priority += 3000;
break;
case TERMS:
priority += 2000;
break;
case GEOTILE_GRID:
priority += 1000;
break;
default:
assert false : "new group source type misses priority definition";
}
prioritizedGroups.add(new Tuple<>(groupBy, priority));
}
prioritizedGroups.sort(Comparator.comparing(Tuple<Entry<String, SingleGroupSource>, Integer>::v2).reversed());
return prioritizedGroups.stream().map(x -> x.v1()).collect(Collectors.toList());
}
}
| GroupByOptimizer |
java | playframework__playframework | dev-mode/play-run-support/src/main/java/play/runsupport/classloader/DelegatingClassLoader.java | {
"start": 303,
"end": 2434
} | class ____ extends ClassLoader {
private List<String> sharedClasses;
private ClassLoader buildLoader;
private ApplicationClassLoaderProvider applicationClassLoaderProvider;
public DelegatingClassLoader(
ClassLoader commonLoader,
List<String> sharedClasses,
ClassLoader buildLoader,
ApplicationClassLoaderProvider applicationClassLoaderProvider) {
super(commonLoader);
this.sharedClasses = sharedClasses;
this.buildLoader = buildLoader;
this.applicationClassLoaderProvider = applicationClassLoaderProvider;
}
@Override
public Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException {
if (sharedClasses.contains(name)) {
return buildLoader.loadClass(name);
} else {
return super.loadClass(name, resolve);
}
}
@Override
public URL getResource(String name) {
URLClassLoader appClassLoader = applicationClassLoaderProvider.get();
URL resource = null;
if (appClassLoader != null) {
resource = appClassLoader.findResource(name);
}
return resource != null ? resource : super.getResource(name);
}
@SuppressWarnings("unchecked")
@Override
public Enumeration<URL> getResources(String name) throws IOException {
URLClassLoader appClassLoader = applicationClassLoaderProvider.get();
Enumeration<URL> resources1;
if (appClassLoader != null) {
resources1 = appClassLoader.findResources(name);
} else {
resources1 = new Vector<URL>().elements();
}
Enumeration<URL> resources2 = super.getResources(name);
return combineResources(resources1, resources2);
}
private Enumeration<URL> combineResources(
Enumeration<URL> resources1, Enumeration<URL> resources2) {
Set<URL> set = new HashSet<>();
while (resources1.hasMoreElements()) {
set.add(resources1.nextElement());
}
while (resources2.hasMoreElements()) {
set.add(resources2.nextElement());
}
return new Vector<>(set).elements();
}
@Override
public String toString() {
return "DelegatingClassLoader, using parent: " + getParent();
}
}
| DelegatingClassLoader |
java | apache__camel | components/camel-thymeleaf/src/main/java/org/apache/camel/component/thymeleaf/ThymeleafEndpoint.java | {
"start": 2357,
"end": 19807
} | class ____ extends ResourceEndpoint {
private TemplateEngine templateEngine;
private String template;
private JakartaServletWebApplication jakartaServletWebApplication;
@UriParam(label = "advanced", defaultValue = "CLASS_LOADER",
description = "The type of resolver to be used by the template engine.",
javaType = "org.apache.camel.component.thymeleaf.ThymeleafResolverType")
private ThymeleafResolverType resolver = ThymeleafResolverType.CLASS_LOADER;
@UriParam(description = "The template mode to be applied to templates.", defaultValue = "HTML",
enums = "HTML,XML,TEXT,JAVASCRIPT,CSS,RAW")
private String templateMode;
@UriParam(label = "advanced",
description = "An optional prefix added to template names to convert them into resource names.")
private String prefix;
@UriParam(label = "advanced",
description = "An optional suffix added to template names to convert them into resource names.")
private String suffix;
@UriParam(label = "advanced", description = "The character encoding to be used for reading template resources.")
private String encoding;
@UriParam(label = "advanced",
description = "The order in which this template will be resolved as part of the resolver chain.")
private Integer order;
@UriParam(description = "Whether a template resources will be checked for existence before being returned.")
private Boolean checkExistence;
@UriParam(description = "The cache Time To Live for templates, expressed in milliseconds.")
private Long cacheTimeToLive;
@UriParam(description = "Whether templates have to be considered cacheable or not.")
private Boolean cacheable;
@UriParam
private boolean allowTemplateFromHeader;
public ThymeleafEndpoint() {
}
public ThymeleafEndpoint(String endpointURI, Component component, String resourceURI) {
super(endpointURI, component, resourceURI);
}
@Override
public boolean isRemote() {
return false;
}
@Override
public ExchangePattern getExchangePattern() {
return ExchangePattern.InOut;
}
@Override
protected String createEndpointUri() {
return "thymeleaf:" + getResourceUri();
}
public String getTemplateMode() {
return templateMode;
}
/**
* <p>
* Sets the template mode to be applied to templates resolved by this endpoint.
* </p>
* <p>
* One of {@code HTML}, {@code XML}, {@code TEXT}, {@code JAVASCRIPT}, {@code CSS}, {@code RAW}.
* </p>
* <p>
* Note that this template mode will be ignored if the template resource name ends in a known file name suffix:
* {@code .html}, {@code .htm}, {@code .xhtml}, {@code .xml}, {@code .js}, {@code .json}, {@code .css},
* {@code .rss}, {@code .atom}, {@code .txt}.
* </p>
*
* @param templateMode the template mode.
*/
public void setTemplateMode(String templateMode) {
this.templateMode = templateMode;
}
public ThymeleafResolverType getResolver() {
return resolver;
}
/**
* Sets the type of resolver to be used by the template engine.
* <p>
* One of {@code CLASS_LOADER}, {@code DEFAULT}, {@code FILE}, {@code STRING}, {@code URL}, {@code WEB_APP}.
* </p>
*/
public void setResolver(ThymeleafResolverType resolver) {
this.resolver = resolver;
}
public String getPrefix() {
return prefix;
}
/**
* <p>
* Sets a new (optional) prefix to be added to all template names in order to convert <i>template names</i> into
* <i>resource names</i>.
* </p>
*
* @param prefix the prefix to be set.
*/
public void setPrefix(String prefix) {
this.prefix = prefix;
}
public String getSuffix() {
return suffix;
}
/**
* <p>
* Sets a new (optional) suffix to be added to all template names in order to convert <i>template names</i> into
* <i>resource names</i>.
* </p>
* <p>
* Note that this suffix may not be applied to the template name if the template name already ends in a known file
* name suffix: {@code .html}, {@code .htm}, {@code .xhtml}, {@code .xml}, {@code .js}, {@code .json}, {@code .css},
* {@code .rss}, {@code .atom}, {@code .txt}.
* </p>
*
* @param suffix the suffix to be set.
*/
public void setSuffix(String suffix) {
this.suffix = suffix;
}
public String getEncoding() {
return encoding;
}
/**
* Sets a new character encoding for reading template resources.
*
* @param encoding the character encoding to be used.
*/
public void setEncoding(String encoding) {
this.encoding = encoding;
}
public Integer getOrder() {
return order;
}
/**
* Sets a new order for the template engine in the chain. Order should start with 1.
*
* @param order the new order.
*/
public void setOrder(Integer order) {
this.order = order;
}
public Boolean getCheckExistence() {
return checkExistence;
}
/**
* Sets whether template resources will be checked for existence before being returned or not. Default value is
* {@code FALSE}.
*
* @param checkExistence {@code true} if resource existence should be checked, {@code false} if not
*/
public void setCheckExistence(Boolean checkExistence) {
this.checkExistence = checkExistence;
}
public Long getCacheTimeToLive() {
return cacheTimeToLive;
}
/**
* Sets a new value for the cache TTL for resolved templates.
*
* If a template is resolved as <i>cacheable</i> but cache TTL is null, this means the template will live in cache
* until evicted by LRU (Least Recently Used) algorithm for being the oldest entry in cache.
*
* @param cacheTimeToLive the new cache TTL in milliseconds, or null for using natural LRU eviction.
*/
public void setCacheTimeToLive(Long cacheTimeToLive) {
this.cacheTimeToLive = cacheTimeToLive;
}
public Boolean getCacheable() {
return cacheable;
}
/**
* Sets a new value for the <i>cacheable</i> flag.
*
* @param cacheable whether resolved patterns should be considered cacheable or not.
*/
public void setCacheable(Boolean cacheable) {
this.cacheable = cacheable;
}
public boolean isAllowTemplateFromHeader() {
return allowTemplateFromHeader;
}
/**
* Whether to allow to use resource template from header or not (default false).
*
* Enabling this allows to specify dynamic templates via message header. However this can be seen as a potential
* security vulnerability if the header is coming from a malicious user, so use this with care.
*/
public void setAllowTemplateFromHeader(boolean allowTemplateFromHeader) {
this.allowTemplateFromHeader = allowTemplateFromHeader;
}
protected TemplateEngine getTemplateEngine() {
getInternalLock().lock();
try {
if (templateEngine == null) {
ITemplateResolver templateResolver;
switch (resolver) {
case CLASS_LOADER -> {
templateResolver = classLoaderTemplateResolver();
}
case DEFAULT -> {
templateResolver = defaultTemplateResolver();
}
case FILE -> {
templateResolver = fileTemplateResolver();
}
case STRING -> {
templateResolver = stringTemplateResolver();
}
case URL -> {
templateResolver = urlTemplateResolver();
}
case WEB_APP -> {
templateResolver = webApplicationTemplateResolver();
}
default -> {
throw new RuntimeCamelException("cannot determine TemplateResolver for type " + resolver);
}
}
templateEngine = new TemplateEngine();
templateEngine.setTemplateResolver(templateResolver);
}
return templateEngine;
} finally {
getInternalLock().unlock();
}
}
/**
* To use the {@link TemplateEngine} otherwise a new engine is created
*/
public void setTemplateEngine(TemplateEngine templateEngine) {
this.templateEngine = templateEngine;
}
public void setJakartaServletWebApplication(JakartaServletWebApplication jakartaServletWebApplication) {
this.jakartaServletWebApplication = jakartaServletWebApplication;
}
public void setTemplate(String template) {
this.template = template;
}
@Override
public void clearContentCache() {
if (templateEngine != null) {
templateEngine.clearTemplateCache();
}
}
public ThymeleafEndpoint findOrCreateEndpoint(String uri, String newResourceUri) {
String newUri = uri.replace(getResourceUri(), newResourceUri);
log.debug("Getting endpoint with URI: {}", newUri);
return getCamelContext().getEndpoint(newUri, ThymeleafEndpoint.class);
}
@Override
protected void onExchange(Exchange exchange) throws Exception {
String path = getResourceUri();
ObjectHelper.notNull(path, "resourceUri");
if (allowTemplateFromHeader) {
String newResourceUri = exchange.getIn().getHeader(ThymeleafConstants.THYMELEAF_RESOURCE_URI, String.class);
if (newResourceUri != null) {
// remove the header so that it is not propagated in the exchange
exchange.getIn().removeHeader(ThymeleafConstants.THYMELEAF_RESOURCE_URI);
log.debug("{} set to {}, creating new endpoint to handle exchange",
ThymeleafConstants.THYMELEAF_RESOURCE_URI, newResourceUri);
try (ThymeleafEndpoint newEndpoint = findOrCreateEndpoint(getEndpointUri(), newResourceUri)) {
newEndpoint.onExchange(exchange);
}
return;
}
}
String template = null;
if (allowTemplateFromHeader) {
template = exchange.getIn().getHeader(ThymeleafConstants.THYMELEAF_TEMPLATE, String.class);
// remove the header so that it is not propagated in the exchange
exchange.getIn().removeHeader(ThymeleafConstants.THYMELEAF_TEMPLATE);
}
if (template == null) {
template = path;
}
@SuppressWarnings("unchecked")
Map<String, Object> dataModel = exchange.getIn().getHeader(ThymeleafConstants.THYMELEAF_VARIABLE_MAP, Map.class);
if (dataModel == null) {
dataModel = ExchangeHelper.createVariableMap(exchange, isAllowContextMapAll());
} else {
ExchangeHelper.populateVariableMap(exchange, dataModel, isAllowContextMapAll());
// remove the header so that it is not propagated in the exchange
exchange.getIn().removeHeader(ThymeleafConstants.THYMELEAF_VARIABLE_MAP);
}
if (!resolver.equals(ThymeleafResolverType.URL) && ResourceHelper.hasScheme(template)) {
// favour to use Camel to load via resource loader
template = IOHelper.loadText(getResourceAsInputStream());
}
this.template = template;
// let thymeleaf parse and generate the result
TemplateEngine templateEngine = getTemplateEngine();
Context context = new Context();
context.setVariables(dataModel);
String buffer = templateEngine.process(template, context);
// store the result in the exchange body
ExchangeHelper.setInOutBodyPatternAware(exchange, buffer);
}
private ITemplateResolver classLoaderTemplateResolver() {
ClassLoaderTemplateResolver resolver = new ClassLoaderTemplateResolver();
if (cacheable != null) {
resolver.setCacheable(cacheable);
}
if (cacheTimeToLive != null) {
resolver.setCacheTTLMs(cacheTimeToLive);
}
if (encoding != null) {
resolver.setCharacterEncoding(encoding);
}
if (checkExistence != null) {
resolver.setCheckExistence(checkExistence);
}
if (order != null) {
resolver.setOrder(order);
}
if (prefix != null) {
resolver.setPrefix(prefix);
}
if (suffix != null) {
resolver.setSuffix(suffix);
}
if (templateMode != null) {
resolver.setTemplateMode(templateMode);
}
if (template == null) {
throw new RuntimeCamelException("template must be provided");
}
return resolver;
}
private ITemplateResolver defaultTemplateResolver() {
DefaultTemplateResolver resolver = new DefaultTemplateResolver();
if (checkExistence != null) {
resolver.setCheckExistence(checkExistence);
}
if (order != null) {
resolver.setOrder(order);
}
if (templateMode != null) {
resolver.setTemplateMode(templateMode);
}
if (template == null) {
throw new RuntimeCamelException("template must be provided");
} else {
resolver.setTemplate(template);
}
return resolver;
}
private ITemplateResolver fileTemplateResolver() {
FileTemplateResolver resolver = new FileTemplateResolver();
if (cacheable != null) {
resolver.setCacheable(cacheable);
}
if (cacheTimeToLive != null) {
resolver.setCacheTTLMs(cacheTimeToLive);
}
if (encoding != null) {
resolver.setCharacterEncoding(encoding);
}
if (checkExistence != null) {
resolver.setCheckExistence(checkExistence);
}
if (order != null) {
resolver.setOrder(order);
}
if (prefix != null) {
resolver.setPrefix(prefix);
}
if (suffix != null) {
resolver.setSuffix(suffix);
}
if (templateMode != null) {
resolver.setTemplateMode(templateMode);
}
if (template == null) {
throw new RuntimeCamelException("template must be provided");
}
return resolver;
}
private ITemplateResolver stringTemplateResolver() {
StringTemplateResolver resolver = new StringTemplateResolver();
if (cacheable != null) {
resolver.setCacheable(cacheable);
}
if (cacheTimeToLive != null) {
resolver.setCacheTTLMs(cacheTimeToLive);
}
if (checkExistence != null) {
resolver.setCheckExistence(checkExistence);
}
if (order != null) {
resolver.setOrder(order);
}
if (templateMode != null) {
resolver.setTemplateMode(templateMode);
}
if (template == null) {
throw new RuntimeCamelException("template must be provided");
}
return resolver;
}
private ITemplateResolver urlTemplateResolver() {
UrlTemplateResolver resolver = new UrlTemplateResolver();
if (cacheable != null) {
resolver.setCacheable(cacheable);
}
if (cacheTimeToLive != null) {
resolver.setCacheTTLMs(cacheTimeToLive);
}
if (encoding != null) {
resolver.setCharacterEncoding(encoding);
}
if (order != null) {
resolver.setOrder(order);
}
if (prefix != null) {
resolver.setPrefix(prefix);
}
if (suffix != null) {
resolver.setSuffix(suffix);
}
if (templateMode != null) {
resolver.setTemplateMode(templateMode);
}
if (template == null) {
throw new RuntimeCamelException("template must be provided");
}
return resolver;
}
private ITemplateResolver webApplicationTemplateResolver() {
WebApplicationTemplateResolver resolver = new WebApplicationTemplateResolver(jakartaServletWebApplication);
if (cacheable != null) {
resolver.setCacheable(cacheable);
}
if (cacheTimeToLive != null) {
resolver.setCacheTTLMs(cacheTimeToLive);
}
if (encoding != null) {
resolver.setCharacterEncoding(encoding);
}
if (checkExistence != null) {
resolver.setCheckExistence(checkExistence);
}
if (order != null) {
resolver.setOrder(order);
}
if (prefix != null) {
resolver.setPrefix(prefix);
}
if (suffix != null) {
resolver.setSuffix(suffix);
}
if (templateMode != null) {
resolver.setTemplateMode(templateMode);
}
if (template == null) {
throw new RuntimeCamelException("template must be provided");
}
return resolver;
}
}
| ThymeleafEndpoint |
java | elastic__elasticsearch | modules/reindex/src/test/java/org/elasticsearch/reindex/ReindexRestClientSslTests.java | {
"start": 2581,
"end": 10435
} | class ____ extends ESTestCase {
private static HttpsServer server;
private static Consumer<HttpsExchange> handler = ignore -> {};
@BeforeClass
public static void setupHttpServer() throws Exception {
InetSocketAddress address = new InetSocketAddress(InetAddress.getLoopbackAddress().getHostAddress(), 0);
SSLContext sslContext = buildServerSslContext();
server = MockHttpServer.createHttps(address, 0);
server.setHttpsConfigurator(new ClientAuthHttpsConfigurator(sslContext));
server.start();
server.createContext("/", http -> {
assert http instanceof HttpsExchange;
HttpsExchange https = (HttpsExchange) http;
handler.accept(https);
// Always respond with 200
// * If the reindex sees the 200, it means the SSL connection was established correctly.
// * We can check client certs in the handler.
https.sendResponseHeaders(200, 0);
https.close();
});
}
@AfterClass
public static void shutdownHttpServer() {
server.stop(0);
server = null;
handler = null;
}
private static SSLContext buildServerSslContext() throws Exception {
final SSLContext sslContext = SSLContext.getInstance("TLS");
final char[] password = "http-password".toCharArray();
final Path cert = PathUtils.get(ReindexRestClientSslTests.class.getResource("http/http.crt").toURI());
final Path key = PathUtils.get(ReindexRestClientSslTests.class.getResource("http/http.key").toURI());
final Path configPath = cert.getParent().getParent();
final PemKeyConfig keyConfig = new PemKeyConfig(cert.toString(), key.toString(), password, configPath);
final X509ExtendedKeyManager keyManager = keyConfig.createKeyManager();
final Path ca = PathUtils.get(ReindexRestClientSslTests.class.getResource("ca.pem").toURI());
final List<String> caList = Collections.singletonList(ca.toString());
final X509ExtendedTrustManager trustManager = new PemTrustConfig(caList, configPath).createTrustManager();
sslContext.init(new KeyManager[] { keyManager }, new TrustManager[] { trustManager }, null);
return sslContext;
}
public void testClientFailsWithUntrustedCertificate() throws IOException {
final List<Thread> threads = new ArrayList<>();
final Settings.Builder builder = Settings.builder().put("path.home", createTempDir());
final Settings settings = builder.build();
final Environment environment = TestEnvironment.newEnvironment(settings);
final ReindexSslConfig ssl = new ReindexSslConfig(settings, environment, mock(ResourceWatcherService.class));
try (RestClient client = Reindexer.buildRestClient(getRemoteInfo(), ssl, 1L, threads)) {
if (inFipsJvm()) {
// Bouncy Castle throws a different exception
IOException exception = expectThrows(IOException.class, () -> client.performRequest(new Request("GET", "/")));
assertThat(exception.getCause(), Matchers.instanceOf(javax.net.ssl.SSLException.class));
} else {
expectThrows(SSLHandshakeException.class, () -> client.performRequest(new Request("GET", "/")));
}
}
}
public void testClientSucceedsWithCertificateAuthorities() throws IOException {
final List<Thread> threads = new ArrayList<>();
final Path ca = getDataPath("ca.pem");
final Settings.Builder builder = Settings.builder()
.put("path.home", createTempDir())
.putList("reindex.ssl.certificate_authorities", ca.toString());
final Settings settings = builder.build();
final Environment environment = TestEnvironment.newEnvironment(settings);
final ReindexSslConfig ssl = new ReindexSslConfig(settings, environment, mock(ResourceWatcherService.class));
try (RestClient client = Reindexer.buildRestClient(getRemoteInfo(), ssl, 1L, threads)) {
final Response response = client.performRequest(new Request("GET", "/"));
assertThat(response.getStatusLine().getStatusCode(), Matchers.is(200));
}
}
public void testClientSucceedsWithVerificationDisabled() throws IOException {
assumeFalse("Cannot disable verification in FIPS JVM", inFipsJvm());
final List<Thread> threads = new ArrayList<>();
final Settings.Builder builder = Settings.builder().put("path.home", createTempDir()).put("reindex.ssl.verification_mode", "NONE");
final Settings settings = builder.build();
final Environment environment = TestEnvironment.newEnvironment(settings);
final ReindexSslConfig ssl = new ReindexSslConfig(settings, environment, mock(ResourceWatcherService.class));
try (RestClient client = Reindexer.buildRestClient(getRemoteInfo(), ssl, 1L, threads)) {
final Response response = client.performRequest(new Request("GET", "/"));
assertThat(response.getStatusLine().getStatusCode(), Matchers.is(200));
}
}
public void testClientPassesClientCertificate() throws IOException {
final List<Thread> threads = new ArrayList<>();
final Path ca = getDataPath("ca.pem");
final Path cert = getDataPath("client/client.crt");
final Path key = getDataPath("client/client.key");
final Settings.Builder builder = Settings.builder()
.put("path.home", createTempDir())
.putList("reindex.ssl.certificate_authorities", ca.toString())
.put("reindex.ssl.certificate", cert)
.put("reindex.ssl.key", key)
.put("reindex.ssl.key_passphrase", "client-password");
final Settings settings = builder.build();
AtomicReference<Certificate[]> clientCertificates = new AtomicReference<>();
handler = https -> {
try {
clientCertificates.set(https.getSSLSession().getPeerCertificates());
} catch (SSLPeerUnverifiedException e) {
logger.warn("Client did not provide certificates", e);
clientCertificates.set(null);
}
};
final Environment environment = TestEnvironment.newEnvironment(settings);
final ReindexSslConfig ssl = new ReindexSslConfig(settings, environment, mock(ResourceWatcherService.class));
try (RestClient client = Reindexer.buildRestClient(getRemoteInfo(), ssl, 1L, threads)) {
final Response response = client.performRequest(new Request("GET", "/"));
assertThat(response.getStatusLine().getStatusCode(), Matchers.is(200));
final Certificate[] certs = clientCertificates.get();
assertThat(certs, Matchers.notNullValue());
assertThat(certs, Matchers.arrayWithSize(1));
assertThat(certs[0], Matchers.instanceOf(X509Certificate.class));
final X509Certificate clientCert = (X509Certificate) certs[0];
assertThat(clientCert.getSubjectX500Principal().getName(), Matchers.is("CN=client"));
assertThat(clientCert.getIssuerX500Principal().getName(), Matchers.is("CN=Elastic Certificate Tool Autogenerated CA"));
}
}
private RemoteInfo getRemoteInfo() {
return new RemoteInfo(
"https",
server.getAddress().getHostName(),
server.getAddress().getPort(),
"/",
new BytesArray("{\"match_all\":{}}"),
"user",
new SecureString("password".toCharArray()),
Collections.emptyMap(),
RemoteInfo.DEFAULT_SOCKET_TIMEOUT,
RemoteInfo.DEFAULT_CONNECT_TIMEOUT
);
}
@SuppressForbidden(reason = "use http server")
private static | ReindexRestClientSslTests |
java | google__dagger | javatests/dagger/hilt/android/processor/internal/aggregateddeps/TestInstallInTest.java | {
"start": 6632,
"end": 7194
} | interface ____ {}");
Source testInstallInEntryPoint =
HiltCompilerTests.javaSource(
"test.TestInstallInEntryPoint",
"package test;",
"",
"import dagger.hilt.EntryPoint;",
"import dagger.hilt.components.SingletonComponent;",
"import dagger.hilt.testing.TestInstallIn;",
"",
"@EntryPoint",
"@TestInstallIn(",
" components = SingletonComponent.class,",
" replaces = InstallInModule.class)",
" | InstallInModule |
java | apache__rocketmq | store/src/main/java/org/apache/rocketmq/store/CommitLog.java | {
"start": 68929,
"end": 69050
} | class ____ extends ServiceThread {
protected static final int RETRY_TIMES_OVER = 10;
}
| FlushCommitLogService |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/test/streaming/runtime/IntervalJoinITCase.java | {
"start": 2376,
"end": 8307
} | class ____ {
private static List<String> testResults;
@BeforeEach
public void setup() {
testResults = new ArrayList<>();
}
@ParameterizedTest(name = "Enable async state = {0}")
@ValueSource(booleans = {false, true})
public void testCanJoinOverSameKey(boolean enableAsyncState) throws Exception {
final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(1);
KeyedStream<Tuple2<String, Integer>, String> streamOne =
env.fromData(
Tuple2.of("key", 0),
Tuple2.of("key", 1),
Tuple2.of("key", 2),
Tuple2.of("key", 3),
Tuple2.of("key", 4),
Tuple2.of("key", 5))
.assignTimestampsAndWatermarks(new AscendingTuple2TimestampExtractor())
.keyBy(new Tuple2KeyExtractor());
KeyedStream<Tuple2<String, Integer>, String> streamTwo =
env.fromData(
Tuple2.of("key", 0),
Tuple2.of("key", 1),
Tuple2.of("key", 2),
Tuple2.of("key", 3),
Tuple2.of("key", 4),
Tuple2.of("key", 5))
.assignTimestampsAndWatermarks(new AscendingTuple2TimestampExtractor())
.keyBy(new Tuple2KeyExtractor());
if (enableAsyncState) {
streamOne.enableAsyncState();
streamTwo.enableAsyncState();
configAsyncState(env);
}
streamOne
.intervalJoin(streamTwo)
.between(Duration.ofMillis(0), Duration.ofMillis(0))
.process(
new ProcessJoinFunction<
Tuple2<String, Integer>, Tuple2<String, Integer>, String>() {
@Override
public void processElement(
Tuple2<String, Integer> left,
Tuple2<String, Integer> right,
Context ctx,
Collector<String> out)
throws Exception {
out.collect(left + ":" + right);
}
})
.addSink(new ResultSink());
env.execute();
expectInAnyOrder(
"(key,0):(key,0)",
"(key,1):(key,1)",
"(key,2):(key,2)",
"(key,3):(key,3)",
"(key,4):(key,4)",
"(key,5):(key,5)");
}
@ParameterizedTest(name = "Enable async state = {0}")
@ValueSource(booleans = {false, true})
public void testJoinsCorrectlyWithMultipleKeys(boolean enableAsyncState) throws Exception {
final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(1);
KeyedStream<Tuple2<String, Integer>, String> streamOne =
env.fromData(
Tuple2.of("key1", 0),
Tuple2.of("key2", 1),
Tuple2.of("key1", 2),
Tuple2.of("key2", 3),
Tuple2.of("key1", 4),
Tuple2.of("key2", 5))
.assignTimestampsAndWatermarks(new AscendingTuple2TimestampExtractor())
.keyBy(new Tuple2KeyExtractor());
KeyedStream<Tuple2<String, Integer>, String> streamTwo =
env.fromData(
Tuple2.of("key1", 0),
Tuple2.of("key2", 1),
Tuple2.of("key1", 2),
Tuple2.of("key2", 3),
Tuple2.of("key1", 4),
Tuple2.of("key2", 5))
.assignTimestampsAndWatermarks(new AscendingTuple2TimestampExtractor())
.keyBy(new Tuple2KeyExtractor());
if (enableAsyncState) {
streamOne.enableAsyncState();
streamTwo.enableAsyncState();
configAsyncState(env);
}
streamOne
.intervalJoin(streamTwo)
// if it were not keyed then the boundaries [0; 1] would lead to the pairs (1, 1),
// (1, 2), (2, 2), (2, 3)..., so that this is not happening is what we are testing
// here
.between(Duration.ofMillis(0), Duration.ofMillis(1))
.process(new CombineToStringJoinFunction())
.addSink(new ResultSink());
env.execute();
expectInAnyOrder(
"(key1,0):(key1,0)",
"(key2,1):(key2,1)",
"(key1,2):(key1,2)",
"(key2,3):(key2,3)",
"(key1,4):(key1,4)",
"(key2,5):(key2,5)");
}
private DataStream<Tuple2<String, Integer>> buildSourceStream(
final StreamExecutionEnvironment env, final SourceConsumer sourceConsumer) {
return env.addSource(
new SourceFunction<Tuple2<String, Integer>>() {
@Override
public void run(SourceContext<Tuple2<String, Integer>> ctx) {
sourceConsumer.accept(ctx);
}
@Override
public void cancel() {
// do nothing
}
});
}
// Ensure consumer func is serializable
private | IntervalJoinITCase |
java | apache__kafka | clients/src/main/java/org/apache/kafka/clients/admin/StreamsGroupSubtopologyDescription.java | {
"start": 1118,
"end": 4719
} | class ____ {
private final String subtopologyId;
private final List<String> sourceTopics;
private final List<String> repartitionSinkTopics;
private final Map<String, TopicInfo> stateChangelogTopics;
private final Map<String, TopicInfo> repartitionSourceTopics;
public StreamsGroupSubtopologyDescription(
final String subtopologyId,
final List<String> sourceTopics,
final List<String> repartitionSinkTopics,
final Map<String, TopicInfo> stateChangelogTopics,
final Map<String, TopicInfo> repartitionSourceTopics
) {
this.subtopologyId = Objects.requireNonNull(subtopologyId, "subtopologyId must be non-null");
this.sourceTopics = Objects.requireNonNull(sourceTopics, "sourceTopics must be non-null");
this.repartitionSinkTopics = Objects.requireNonNull(repartitionSinkTopics, "repartitionSinkTopics must be non-null");
this.stateChangelogTopics = Objects.requireNonNull(stateChangelogTopics, "stateChangelogTopics must be non-null");
this.repartitionSourceTopics = Objects.requireNonNull(repartitionSourceTopics, "repartitionSourceTopics must be non-null");
}
/**
* String to uniquely identify the subtopology.
*/
public String subtopologyId() {
return subtopologyId;
}
/**
* The topics the topology reads from.
*/
public List<String> sourceTopics() {
return List.copyOf(sourceTopics);
}
/**
* The repartition topics the topology writes to.
*/
public List<String> repartitionSinkTopics() {
return List.copyOf(repartitionSinkTopics);
}
/**
* The set of state changelog topics associated with this subtopology.
*/
public Map<String, TopicInfo> stateChangelogTopics() {
return Map.copyOf(stateChangelogTopics);
}
/**
* The set of source topics that are internally created repartition topics.
*/
public Map<String, TopicInfo> repartitionSourceTopics() {
return Map.copyOf(repartitionSourceTopics);
}
@Override
public boolean equals(final Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
final StreamsGroupSubtopologyDescription that = (StreamsGroupSubtopologyDescription) o;
return Objects.equals(subtopologyId, that.subtopologyId)
&& Objects.equals(sourceTopics, that.sourceTopics)
&& Objects.equals(repartitionSinkTopics, that.repartitionSinkTopics)
&& Objects.equals(stateChangelogTopics, that.stateChangelogTopics)
&& Objects.equals(repartitionSourceTopics, that.repartitionSourceTopics);
}
@Override
public int hashCode() {
return Objects.hash(
subtopologyId,
sourceTopics,
repartitionSinkTopics,
stateChangelogTopics,
repartitionSourceTopics
);
}
@Override
public String toString() {
return "(" +
"subtopologyId='" + subtopologyId + '\'' +
", sourceTopics=" + sourceTopics +
", repartitionSinkTopics=" + repartitionSinkTopics +
", stateChangelogTopics=" + stateChangelogTopics +
", repartitionSourceTopics=" + repartitionSourceTopics +
')';
}
/**
* Information about a topic. These configs reflect what is required by the topology, but may differ from the current state on the
* broker.
*/
public static | StreamsGroupSubtopologyDescription |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestCombinedHostsFileReader.java | {
"start": 1735,
"end": 4883
} | class ____ {
// Using /test/build/data/tmp directory to store temporary files
static final String HOSTSTESTDIR = GenericTestUtils.getTestDir()
.getAbsolutePath();
private final File newFile = new File(HOSTSTESTDIR, "dfs.hosts.new.json");
static final String TESTCACHEDATADIR =
System.getProperty("test.cache.data", "build/test/cache");
private final File jsonFile = new File(TESTCACHEDATADIR, "dfs.hosts.json");
private final File legacyFile =
new File(TESTCACHEDATADIR, "legacy.dfs.hosts.json");
@Mock
private Callable<DatanodeAdminProperties[]> callable;
@BeforeEach
public void setUp() throws Exception {
MockitoAnnotations.initMocks(this);
}
@AfterEach
public void tearDown() throws Exception {
// Delete test file after running tests
newFile.delete();
}
/*
* Load the legacy test json file
*/
@Test
public void testLoadLegacyJsonFile() throws Exception {
DatanodeAdminProperties[] all =
CombinedHostsFileReader.readFile(legacyFile.getAbsolutePath());
assertEquals(7, all.length);
}
/*
* Load the test json file
*/
@Test
public void testLoadExistingJsonFile() throws Exception {
DatanodeAdminProperties[] all =
CombinedHostsFileReader.readFile(jsonFile.getAbsolutePath());
assertEquals(7, all.length);
}
/*
* Test empty json config file
*/
@Test
public void testEmptyCombinedHostsFileReader() throws Exception {
FileWriter hosts = new FileWriter(newFile);
hosts.write("");
hosts.close();
DatanodeAdminProperties[] all =
CombinedHostsFileReader.readFile(newFile.getAbsolutePath());
assertEquals(0, all.length);
}
/*
* When timeout is enabled, test for success when reading file within timeout
* limits
*/
@Test
public void testReadFileWithTimeoutSuccess() throws Exception {
DatanodeAdminProperties[] all = CombinedHostsFileReader.readFileWithTimeout(
jsonFile.getAbsolutePath(), 1000);
assertEquals(7, all.length);
}
/*
* When timeout is enabled, test for IOException when reading file exceeds
* timeout limits
*/
@Test
public void testReadFileWithTimeoutTimeoutException() throws Exception {
when(callable.call()).thenAnswer(new Answer<Void>() {
@Override
public Void answer(InvocationOnMock invocation) throws Throwable {
Thread.sleep(2000);
return null;
}
});
assertThrows(IOException.class, () -> {
CombinedHostsFileReader.readFileWithTimeout(
jsonFile.getAbsolutePath(), 1);
});
}
/*
* When timeout is enabled, test for IOException when execution is interrupted
*/
@Order(1)
@Test
public void testReadFileWithTimeoutInterruptedException() throws Exception {
when(callable.call()).thenAnswer(new Answer<Void>() {
@Override
public Void answer(InvocationOnMock invocation) throws Throwable {
throw new InterruptedException();
}
});
assertThrows(IOException.class, () -> {
CombinedHostsFileReader.readFileWithTimeout(
jsonFile.getAbsolutePath(), 1);
});
}
}
| TestCombinedHostsFileReader |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/SubqueryTests.java | {
"start": 2548,
"end": 61787
} | class ____ extends AbstractStatementParserTests {
/**
* UnionAll[[]]
* |_UnresolvedRelation[]
* \_Subquery[]
* \_UnresolvedRelation[]
*/
public void testIndexPatternWithSubquery() {
assumeTrue("Requires subquery in FROM command support", EsqlCapabilities.Cap.SUBQUERY_IN_FROM_COMMAND.isEnabled());
var mainQueryIndexPattern = randomIndexPatterns();
var subqueryIndexPattern = randomIndexPatterns();
String query = LoggerMessageFormat.format(null, """
FROM {}, (FROM {})
""", mainQueryIndexPattern, subqueryIndexPattern);
LogicalPlan plan = statement(query);
UnionAll unionAll = as(plan, UnionAll.class);
List<LogicalPlan> children = unionAll.children();
assertEquals(2, children.size());
UnresolvedRelation unresolvedRelation = as(children.get(0), UnresolvedRelation.class);
assertEquals(unquoteIndexPattern(mainQueryIndexPattern), unresolvedRelation.indexPattern().indexPattern());
Subquery subquery = as(children.get(1), Subquery.class);
unresolvedRelation = as(subquery.plan(), UnresolvedRelation.class);
assertEquals(unquoteIndexPattern(subqueryIndexPattern), unresolvedRelation.indexPattern().indexPattern());
}
/**
* Subqueries in the FROM command with all the processing commands in the main query.
* All processing commands are supported in the main query when subqueries exist in the
* FROM command. With an exception on FORK, the grammar or parser doesn't block FORK,
* however nested FORK will error out in the analysis or logical planning phase. We are hoping
* to lift this restriction in the future, so it is not blocked in the grammar.
*
* Rerank[test_reranker[KEYWORD],war and peace[KEYWORD],[?title AS title#45],?_score]
* \_Sample[0.5[DOUBLE]]
* \_Completion[test_completion[KEYWORD],?prompt,?completion_output]
* \_ChangePoint[?count,?@timestamp,type{r}#39,pvalue{r}#40]
* \_Enrich[ANY,clientip_policy[KEYWORD],?client_ip,null,{},[?env]]
* \_LookupJoin[LEFT,[?n],[?n],false,null]
* |_MvExpand[?m,?m]
* | \_Rename[[?k AS l#29]]
* | \_Keep[[?j]]
* | \_Drop[[?i]]
* | \_Limit[10[INTEGER],false]
* | \_OrderBy[[Order[?cnt,DESC,FIRST]]]
* | \_Grok[?h,Parser[pattern=%{WORD:word} %{NUMBER:number},
* grok=org.elasticsearch.grok.Grok@710201ab],[number{r}#22, word{r}#23]]
* | \_Dissect[?g,Parser[pattern=%{b} %{c}, appendSeparator=,
* parser=org.elasticsearch.dissect.DissectParser@6bd8533a],[b{r}#16, c{r}#17]]
* | \_InlineStats[]
* | \_Aggregate[[?f],[?MAX[?e] AS max_e#14, ?f]]
* | \_Aggregate[[?e],[?COUNT[*] AS cnt#11, ?e]]
* | \_Fork[[]]
* | |_Eval[[fork1[KEYWORD] AS _fork#7]]
* | | \_Filter[?c > 100[INTEGER]]
* | | \_Eval[[?a * 2[INTEGER] AS b#5]]
* | | \_Filter[?a > 10[INTEGER]]
* | | \_UnionAll[[]]
* | | |_UnresolvedRelation[]
* | | \_Subquery[]
* | | \_Filter[?a < 100[INTEGER]]
* | | \_UnresolvedRelation[]
* | \_Eval[[fork2[KEYWORD] AS _fork#7]]
* | \_Filter[?d > 200[INTEGER]]
* | \_Eval[[?a * 2[INTEGER] AS b#5]]
* | \_Filter[?a < 10[INTEGER]]
* | \_UnionAll[[]]
* | |_UnresolvedRelation[]
* | \_Subquery[]
* | \_Filter[?a < 100[INTEGER]]
* | \_UnresolvedRelation[]
* \_UnresolvedRelation[lookup_index]
*/
public void testSubqueryWithAllProcessingCommandsInMainquery() {
assumeTrue("Requires subquery in FROM command support", EsqlCapabilities.Cap.SUBQUERY_IN_FROM_COMMAND.isEnabled());
// remote cluster does not support COMPLETION or RERANK
var mainQueryIndexPattern = randomIndexPatterns(without(CROSS_CLUSTER));
var subqueryIndexPattern = randomIndexPatterns(without(CROSS_CLUSTER));
var joinIndexPattern = "lookup_index"; // randomIndexPatterns may generate on as index pattern, it collides with the ON token
String query = LoggerMessageFormat.format(null, """
FROM {}, (FROM {} | WHERE a < 100)
| WHERE a > 10
| EVAL b = a * 2
| FORK (WHERE c < 100) (WHERE d > 200)
| STATS cnt = COUNT(*) BY e
| INLINE STATS max_e = MAX(e) BY f
| DISSECT g "%{b} %{c}"
| GROK h "%{WORD:word} %{NUMBER:number}"
| SORT cnt desc
| LIMIT 10
| DROP i
| KEEP j
| RENAME k AS l
| MV_EXPAND m
| LOOKUP JOIN {} ON n
| ENRICH clientip_policy ON client_ip WITH env
| CHANGE_POINT count ON @timestamp AS type, pvalue
| COMPLETION completion_output = prompt WITH { "inference_id" : "test_completion" }
| SAMPLE 0.5
| RERANK "war and peace" ON title WITH { "inference_id" : "test_reranker" }
""", mainQueryIndexPattern, subqueryIndexPattern, joinIndexPattern);
LogicalPlan plan = statement(query);
Rerank rerank = as(plan, Rerank.class);
Sample sample = as(rerank.child(), Sample.class);
Completion completion = as(sample.child(), Completion.class);
ChangePoint changePoint = as(completion.child(), ChangePoint.class);
Enrich enrich = as(changePoint.child(), Enrich.class);
LookupJoin lookupJoin = as(enrich.child(), LookupJoin.class);
UnresolvedRelation joinRelation = as(lookupJoin.right(), UnresolvedRelation.class);
assertEquals(unquoteIndexPattern(joinIndexPattern), joinRelation.indexPattern().indexPattern());
MvExpand mvExpand = as(lookupJoin.left(), MvExpand.class);
Rename rename = as(mvExpand.child(), Rename.class);
Keep keep = as(rename.child(), Keep.class);
Drop drop = as(keep.child(), Drop.class);
Limit limit = as(drop.child(), Limit.class);
OrderBy orderBy = as(limit.child(), OrderBy.class);
Grok grok = as(orderBy.child(), Grok.class);
Dissect dissect = as(grok.child(), Dissect.class);
InlineStats inlineStats = as(dissect.child(), InlineStats.class);
Aggregate aggregate = as(inlineStats.child(), Aggregate.class);
aggregate = as(aggregate.child(), Aggregate.class);
Fork fork = as(aggregate.child(), Fork.class);
List<LogicalPlan> forkChildren = fork.children();
assertEquals(2, forkChildren.size());
for (Eval forkEval : List.of(as(forkChildren.get(0), Eval.class), as(forkChildren.get(1), Eval.class))) {
Filter forkFilter = as(forkEval.child(), Filter.class);
Eval eval = as(forkFilter.child(), Eval.class);
Filter filter = as(eval.child(), Filter.class);
UnionAll unionAll = as(filter.child(), UnionAll.class);
List<LogicalPlan> children = unionAll.children();
assertEquals(2, children.size());
// main query
UnresolvedRelation unresolvedRelation = as(children.get(0), UnresolvedRelation.class);
assertEquals(unquoteIndexPattern(mainQueryIndexPattern), unresolvedRelation.indexPattern().indexPattern());
// subquery
Subquery subquery = as(children.get(1), Subquery.class);
Filter subqueryFilter = as(subquery.plan(), Filter.class);
LessThan lessThan = as(subqueryFilter.condition(), LessThan.class);
Attribute left = as(lessThan.left(), Attribute.class);
assertEquals("a", left.name());
Literal right = as(lessThan.right(), Literal.class);
assertEquals(100, right.value());
UnresolvedRelation subqueryRelation = as(subqueryFilter.child(), UnresolvedRelation.class);
assertEquals(unquoteIndexPattern(subqueryIndexPattern), subqueryRelation.indexPattern().indexPattern());
}
}
/**
* Subqueries in the FROM command with all the processing commands in the subquery query.
* The grammar allows all processing commands inside the subquery. With an exception on FORK,
* the grammar or parser doesn't block FORK, however nested FORK will error out in the analysis
* or logical planning phase. We are hoping to lift this restriction in the future, so it is not blocked
* in the grammar.
*
* UnionAll[[]]
* |_UnresolvedRelation[]
* \_Subquery[]
* \_Rerank[test_reranker[KEYWORD],war and peace[KEYWORD],[?title AS title#30],?_score]
* \_Sample[0.5[DOUBLE]]
* \_Completion[test_completion[KEYWORD],?prompt,?completion_output]
* \_ChangePoint[?count,?@timestamp,type{r}#24,pvalue{r}#25]
* \_Enrich[ANY,clientip_policy[KEYWORD],?client_ip,null,{},[?env]]
* \_LookupJoin[LEFT,[?n],[?n],false,null]
* |_MvExpand[?m,?m]
* | \_Rename[[?k AS l#17]]
* | \_Keep[[?j]]
* | \_Drop[[?i]]
* | \_Limit[10[INTEGER],false]
* | \_OrderBy[[Order[?cnt,DESC,FIRST]]]
* | \_Grok[?h,Parser[pattern=%{WORD:word} %{NUMBER:number},
* grok=org.elasticsearch.grok.Grok@2d54cab4],[number{r}#41, word{r}#42]]
* | \_Dissect[?g,Parser[pattern=%{b} %{c}, appendSeparator=,
* parser=org.elasticsearch.dissect.DissectParser@5ca49d89],[b{r}#35, c{r}#36]]
* | \_InlineStats[]
* | \_Aggregate[[?f],[?MAX[?e] AS max_e#10, ?f]]
* | \_Aggregate[[?e],[?COUNT[*] AS cnt#7, ?e]]
* | \_Fork[[]]
* | |_Eval[[fork1[KEYWORD] AS _fork#3]]
* | | \_Filter[?c < 100[INTEGER]]
* | | \_Eval[[?a * 2[INTEGER] AS b#34]]
* | | \_Filter[?a > 10[INTEGER]]
* | | \_UnresolvedRelation[]
* | \_Eval[[fork2[KEYWORD] AS _fork#3]]
* | \_Filter[?d > 200[INTEGER]]
* | \_Eval[[?a * 2[INTEGER] AS b#34]]
* | \_Filter[?a > 10[INTEGER]]
* | \_UnresolvedRelation[]
* \_UnresolvedRelation[lookup_index]
*/
public void testWithSubqueryWithProcessingCommandsInSubquery() {
assumeTrue("Requires subquery in FROM command support", EsqlCapabilities.Cap.SUBQUERY_IN_FROM_COMMAND.isEnabled());
var mainQueryIndexPattern = randomIndexPatterns(without(CROSS_CLUSTER));
var subqueryIndexPattern = randomIndexPatterns(without(CROSS_CLUSTER));
var joinIndexPattern = "lookup_index";
String query = LoggerMessageFormat.format(null, """
FROM {}, (FROM {}
| WHERE a > 10
| EVAL b = a * 2
| FORK (WHERE c < 100) (WHERE d > 200)
| STATS cnt = COUNT(*) BY e
| INLINE STATS max_e = MAX(e) BY f
| DISSECT g "%{b} %{c}"
| GROK h "%{WORD:word} %{NUMBER:number}"
| SORT cnt desc
| LIMIT 10
| DROP i
| KEEP j
| RENAME k AS l
| MV_EXPAND m
| LOOKUP JOIN {} ON n
| ENRICH clientip_policy ON client_ip WITH env
| CHANGE_POINT count ON @timestamp AS type, pvalue
| COMPLETION completion_output = prompt WITH { "inference_id" : "test_completion" }
| SAMPLE 0.5
| RERANK "war and peace" ON title WITH { "inference_id" : "test_reranker" })
""", mainQueryIndexPattern, subqueryIndexPattern, joinIndexPattern);
LogicalPlan plan = statement(query);
UnionAll unionAll = as(plan, UnionAll.class);
List<LogicalPlan> children = unionAll.children();
assertEquals(2, children.size());
// main query
UnresolvedRelation unresolvedRelation = as(children.get(0), UnresolvedRelation.class);
assertEquals(unquoteIndexPattern(mainQueryIndexPattern), unresolvedRelation.indexPattern().indexPattern());
// subquery
Subquery subquery = as(children.get(1), Subquery.class);
Rerank rerank = as(subquery.plan(), Rerank.class);
Sample sample = as(rerank.child(), Sample.class);
Completion completion = as(sample.child(), Completion.class);
ChangePoint changePoint = as(completion.child(), ChangePoint.class);
Enrich enrich = as(changePoint.child(), Enrich.class);
LookupJoin lookupJoin = as(enrich.child(), LookupJoin.class);
UnresolvedRelation joinRelation = as(lookupJoin.right(), UnresolvedRelation.class);
assertEquals(unquoteIndexPattern(joinIndexPattern), joinRelation.indexPattern().indexPattern());
MvExpand mvExpand = as(lookupJoin.left(), MvExpand.class);
Rename rename = as(mvExpand.child(), Rename.class);
Keep keep = as(rename.child(), Keep.class);
Drop drop = as(keep.child(), Drop.class);
Limit limit = as(drop.child(), Limit.class);
OrderBy orderBy = as(limit.child(), OrderBy.class);
Grok grok = as(orderBy.child(), Grok.class);
Dissect dissect = as(grok.child(), Dissect.class);
InlineStats inlineStats = as(dissect.child(), InlineStats.class);
Aggregate aggregate = as(inlineStats.child(), Aggregate.class);
aggregate = as(aggregate.child(), Aggregate.class);
Fork fork = as(aggregate.child(), Fork.class);
List<LogicalPlan> forkChildren = fork.children();
assertEquals(2, forkChildren.size());
for (Eval forkEval : List.of(as(forkChildren.get(0), Eval.class), as(forkChildren.get(1), Eval.class))) {
Filter forkFilter = as(forkEval.child(), Filter.class);
Eval eval = as(forkFilter.child(), Eval.class);
Filter filter = as(eval.child(), Filter.class);
UnresolvedRelation subqueryRelation = as(filter.child(), UnresolvedRelation.class);
assertEquals(unquoteIndexPattern(subqueryIndexPattern), subqueryRelation.indexPattern().indexPattern());
}
}
/**
* A combination of the two previous tests with processing commands in both the subquery and main query.
* Plan string is skipped as it is too long, and it should be the combination of the above two tests..
*/
public void testSubqueryWithProcessingCommandsInSubqueryAndMainquery() {
assumeTrue("Requires subquery in FROM command support", EsqlCapabilities.Cap.SUBQUERY_IN_FROM_COMMAND.isEnabled());
var mainQueryIndexPattern = randomIndexPatterns(without(CROSS_CLUSTER));
var subqueryIndexPattern = randomIndexPatterns(without(CROSS_CLUSTER));
var joinIndexPattern = "lookup_index";
String query = LoggerMessageFormat.format(null, """
FROM {}, (FROM {}
| WHERE a > 10
| EVAL b = a * 2
| FORK (WHERE c < 100) (WHERE d > 200)
| STATS cnt = COUNT(*) BY e
| INLINE STATS max_e = MAX(e) BY f
| DISSECT g "%{b} %{c}"
| GROK h "%{WORD:word} %{NUMBER:number}"
| SORT cnt desc
| LIMIT 10
| DROP i
| KEEP j
| RENAME k AS l
| MV_EXPAND m
| LOOKUP JOIN {} ON n
| ENRICH clientip_policy ON client_ip WITH env
| CHANGE_POINT count ON @timestamp AS type, pvalue
| COMPLETION completion_output = prompt WITH { "inference_id" : "test_completion" }
| SAMPLE 0.5
| RERANK "war and peace" ON title WITH { "inference_id" : "test_reranker" })
| WHERE a > 10
| EVAL b = a * 2
| FORK (WHERE c < 100) (WHERE d > 200)
| STATS cnt = COUNT(*) BY e
| INLINE STATS max_e = MAX(e) BY f
| DISSECT g "%{b} %{c}"
| GROK h "%{WORD:word} %{NUMBER:number}"
| SORT cnt desc
| LIMIT 10
| DROP i
| KEEP j
| RENAME k AS l
| MV_EXPAND m
| LOOKUP JOIN {} ON n
| ENRICH clientip_policy ON client_ip WITH env
| CHANGE_POINT count ON @timestamp AS type, pvalue
| COMPLETION completion_output = prompt WITH { "inference_id" : "test_completion" }
| SAMPLE 0.5
| RERANK "war and peace" ON title WITH { "inference_id" : "test_reranker" }
""", mainQueryIndexPattern, subqueryIndexPattern, joinIndexPattern, joinIndexPattern);
LogicalPlan plan = statement(query);
Rerank rerank = as(plan, Rerank.class);
Sample sample = as(rerank.child(), Sample.class);
Completion completion = as(sample.child(), Completion.class);
ChangePoint changePoint = as(completion.child(), ChangePoint.class);
Enrich enrich = as(changePoint.child(), Enrich.class);
LookupJoin lookupJoin = as(enrich.child(), LookupJoin.class);
UnresolvedRelation joinRelation = as(lookupJoin.right(), UnresolvedRelation.class);
assertEquals(unquoteIndexPattern(joinIndexPattern), joinRelation.indexPattern().indexPattern());
MvExpand mvExpand = as(lookupJoin.left(), MvExpand.class);
Rename rename = as(mvExpand.child(), Rename.class);
Keep keep = as(rename.child(), Keep.class);
Drop drop = as(keep.child(), Drop.class);
Limit limit = as(drop.child(), Limit.class);
OrderBy orderBy = as(limit.child(), OrderBy.class);
Grok grok = as(orderBy.child(), Grok.class);
Dissect dissect = as(grok.child(), Dissect.class);
InlineStats inlineStats = as(dissect.child(), InlineStats.class);
Aggregate aggregate = as(inlineStats.child(), Aggregate.class);
aggregate = as(aggregate.child(), Aggregate.class);
Fork fork = as(aggregate.child(), Fork.class);
List<LogicalPlan> forkChildren = fork.children();
assertEquals(2, forkChildren.size());
for (Eval forkEval : List.of(as(forkChildren.get(0), Eval.class), as(forkChildren.get(1), Eval.class))) {
Filter forkFilter = as(forkEval.child(), Filter.class);
Eval eval = as(forkFilter.child(), Eval.class);
Filter filter = as(eval.child(), Filter.class);
UnionAll unionAll = as(filter.child(), UnionAll.class);
List<LogicalPlan> children = unionAll.children();
assertEquals(2, children.size());
// main query
UnresolvedRelation unresolvedRelation = as(children.get(0), UnresolvedRelation.class);
assertEquals(unquoteIndexPattern(mainQueryIndexPattern), unresolvedRelation.indexPattern().indexPattern());
// subquery
Subquery subquery = as(children.get(1), Subquery.class);
rerank = as(subquery.plan(), Rerank.class);
sample = as(rerank.child(), Sample.class);
completion = as(sample.child(), Completion.class);
changePoint = as(completion.child(), ChangePoint.class);
enrich = as(changePoint.child(), Enrich.class);
lookupJoin = as(enrich.child(), LookupJoin.class);
joinRelation = as(lookupJoin.right(), UnresolvedRelation.class);
assertEquals(unquoteIndexPattern(joinIndexPattern), joinRelation.indexPattern().indexPattern());
mvExpand = as(lookupJoin.left(), MvExpand.class);
rename = as(mvExpand.child(), Rename.class);
keep = as(rename.child(), Keep.class);
drop = as(keep.child(), Drop.class);
limit = as(drop.child(), Limit.class);
orderBy = as(limit.child(), OrderBy.class);
grok = as(orderBy.child(), Grok.class);
dissect = as(grok.child(), Dissect.class);
inlineStats = as(dissect.child(), InlineStats.class);
aggregate = as(inlineStats.child(), Aggregate.class);
aggregate = as(aggregate.child(), Aggregate.class);
fork = as(aggregate.child(), Fork.class);
forkChildren = fork.children();
assertEquals(2, forkChildren.size());
for (Eval forkEvalSubquery : List.of(as(forkChildren.get(0), Eval.class), as(forkChildren.get(1), Eval.class))) {
forkFilter = as(forkEvalSubquery.child(), Filter.class);
eval = as(forkFilter.child(), Eval.class);
filter = as(eval.child(), Filter.class);
UnresolvedRelation subqueryRelation = as(filter.child(), UnresolvedRelation.class);
assertEquals(unquoteIndexPattern(subqueryIndexPattern), subqueryRelation.indexPattern().indexPattern());
}
}
}
/**
* Verify there is no parsing error if the subquery ends with different modes.
*/
public void testSubqueryEndsWithProcessingCommandsInDifferentMode() {
assumeTrue("Requires subquery in FROM command support", EsqlCapabilities.Cap.SUBQUERY_IN_FROM_COMMAND.isEnabled());
List<String> processingCommandInDifferentMode = List.of(
"INLINE STATS max_e = MAX(e) BY f", // inline mode, expression mode
"DISSECT g \"%{b} %{c}\"", // expression mode
"LOOKUP JOIN index1 ON n", // join mode
"ENRICH clientip_policy ON client_ip WITH env", // enrich mode
"CHANGE_POINT count ON @timestamp AS type, pvalue", // change_point mode
"FORK (WHERE c < 100) (WHERE d > 200)", // fork mode
"MV_EXPAND m", // mv_expand mode
"RENAME k AS l", // rename mode
"DROP i" // project mode
);
var mainQueryIndexPattern = randomIndexPatterns();
var subqueryIndexPattern = randomIndexPatterns();
for (String processingCommand : processingCommandInDifferentMode) {
String query = LoggerMessageFormat.format(null, """
FROM {}, (FROM {}
| {})
| WHERE a > 10
""", mainQueryIndexPattern, subqueryIndexPattern, processingCommand);
LogicalPlan plan = statement(query);
Filter filter = as(plan, Filter.class);
UnionAll unionAll = as(filter.child(), UnionAll.class);
List<LogicalPlan> children = unionAll.children();
assertEquals(2, children.size());
// main query
UnresolvedRelation unresolvedRelation = as(children.get(0), UnresolvedRelation.class);
assertEquals(unquoteIndexPattern(mainQueryIndexPattern), unresolvedRelation.indexPattern().indexPattern());
// subquery
Subquery subquery = as(children.get(1), Subquery.class);
}
}
/**
* UnionAll[[]]
* |_Subquery[]
* | \_UnresolvedRelation[]
* |_Subquery[]
* | \_UnresolvedRelation[]
* \_Subquery[]
* \_UnresolvedRelation[]
*/
public void testSubqueryOnly() {
assumeTrue("Requires subquery in FROM command support", EsqlCapabilities.Cap.SUBQUERY_IN_FROM_COMMAND.isEnabled());
var subqueryIndexPattern1 = randomIndexPatterns();
var subqueryIndexPattern2 = randomIndexPatterns();
var subqueryIndexPattern3 = randomIndexPatterns();
String query = LoggerMessageFormat.format(null, """
FROM (FROM {}), (FROM {}), (FROM {})
""", subqueryIndexPattern1, subqueryIndexPattern2, subqueryIndexPattern3);
LogicalPlan plan = statement(query);
UnionAll unionAll = as(plan, UnionAll.class);
List<LogicalPlan> children = unionAll.children();
assertEquals(3, children.size());
Subquery subquery = as(children.get(0), Subquery.class);
UnresolvedRelation unresolvedRelation = as(subquery.child(), UnresolvedRelation.class);
assertEquals(unquoteIndexPattern(subqueryIndexPattern1), unresolvedRelation.indexPattern().indexPattern());
subquery = as(children.get(1), Subquery.class);
unresolvedRelation = as(subquery.child(), UnresolvedRelation.class);
assertEquals(unquoteIndexPattern(subqueryIndexPattern2), unresolvedRelation.indexPattern().indexPattern());
subquery = as(children.get(2), Subquery.class);
unresolvedRelation = as(subquery.child(), UnresolvedRelation.class);
assertEquals(unquoteIndexPattern(subqueryIndexPattern3), unresolvedRelation.indexPattern().indexPattern());
}
/**
* If the FROM command contains only one subquery, the subquery is merged into an index pattern.
*
* Keep[[?g]]
* \_Drop[[?f]]
* \_Limit[10[INTEGER],false]
* \_OrderBy[[Order[?cnt,DESC,FIRST]]]
* \_Aggregate[[?e],[?COUNT[*] AS cnt#10, ?e]]
* \_Fork[[]]
* |_Eval[[fork1[KEYWORD] AS _fork#6]]
* | \_Filter[?c < 100[INTEGER]]
* | \_Eval[[?a * 2[INTEGER] AS b#4]]
* | \_Filter[?a > 10[INTEGER]]
* | \_UnresolvedRelation[]
* \_Eval[[fork2[KEYWORD] AS _fork#6]]
* \_Filter[?d > 200[INTEGER]]
* \_Eval[[?a * 2[INTEGER] AS b#4]]
* \_Filter[?a > 10[INTEGER]]
* \_UnresolvedRelation[]
*/
public void testSubqueryOnlyWithProcessingCommandInMainquery() {
    assumeTrue("Requires subquery in FROM command support", EsqlCapabilities.Cap.SUBQUERY_IN_FROM_COMMAND.isEnabled());
    var sourcePattern = randomIndexPatterns();
    String query = LoggerMessageFormat.format(null, """
        FROM (FROM {})
        | WHERE a > 10
        | EVAL b = a * 2
        | FORK (WHERE c < 100) (WHERE d > 200)
        | STATS cnt = COUNT(*) BY e
        | SORT cnt desc
        | LIMIT 10
        | DROP f
        | KEEP g
        """, sourcePattern);
    LogicalPlan plan = statement(query);
    // Walk the main-query pipeline from the outside in: KEEP <- DROP <- LIMIT <- SORT <- STATS <- FORK.
    Keep keepCmd = as(plan, Keep.class);
    Drop dropCmd = as(keepCmd.child(), Drop.class);
    Limit limitCmd = as(dropCmd.child(), Limit.class);
    OrderBy sortCmd = as(limitCmd.child(), OrderBy.class);
    Aggregate statsCmd = as(sortCmd.child(), Aggregate.class);
    Fork forkCmd = as(statsCmd.child(), Fork.class);
    List<LogicalPlan> branches = forkCmd.children();
    assertEquals(2, branches.size());
    // Both fork branches share the same shape and bottom out in the merged index pattern
    // (the lone subquery was folded into a plain relation, no Subquery node).
    for (LogicalPlan branch : branches) {
        Eval forkMarker = as(branch, Eval.class);
        Filter forkCondition = as(forkMarker.child(), Filter.class);
        Eval evalCmd = as(forkCondition.child(), Eval.class);
        Filter whereCmd = as(evalCmd.child(), Filter.class);
        UnresolvedRelation relation = as(whereCmd.child(), UnresolvedRelation.class);
        assertEquals(unquoteIndexPattern(sourcePattern), relation.indexPattern().indexPattern());
    }
}
/**
* Keep[[?g]]
* \_Drop[[?f]]
* \_Limit[10[INTEGER],false]
* \_OrderBy[[Order[?cnt,DESC,FIRST]]]
* \_Aggregate[[?e],[?COUNT[*] AS cnt#7, ?e]]
* \_Fork[[]]
* |_Eval[[fork1[KEYWORD] AS _fork#3]]
* | \_Filter[?c < 100[INTEGER]]
* | \_Eval[[?a * 2[INTEGER] AS b#13]]
* | \_Filter[?a > 10[INTEGER]]
* | \_UnresolvedRelation[]
* \_Eval[[fork2[KEYWORD] AS _fork#3]]
* \_Filter[?d > 200[INTEGER]]
* \_Eval[[?a * 2[INTEGER] AS b#13]]
* \_Filter[?a > 10[INTEGER]]
* \_UnresolvedRelation[]
*/
public void testSubqueryOnlyWithProcessingCommandsInSubquery() {
    assumeTrue("Requires subquery in FROM command support", EsqlCapabilities.Cap.SUBQUERY_IN_FROM_COMMAND.isEnabled());
    var sourcePattern = randomIndexPatterns();
    String query = LoggerMessageFormat.format(null, """
        FROM (FROM {}
        | WHERE a > 10
        | EVAL b = a * 2
        | FORK (WHERE c < 100) (WHERE d > 200)
        | STATS cnt = COUNT(*) BY e
        | SORT cnt desc
        | LIMIT 10
        | DROP f
        | KEEP g)
        """, sourcePattern);
    LogicalPlan plan = statement(query);
    // The subquery is the sole FROM source, so its pipeline becomes the top of the parsed plan:
    // KEEP <- DROP <- LIMIT <- SORT <- STATS <- FORK.
    Keep keepCmd = as(plan, Keep.class);
    Drop dropCmd = as(keepCmd.child(), Drop.class);
    Limit limitCmd = as(dropCmd.child(), Limit.class);
    OrderBy sortCmd = as(limitCmd.child(), OrderBy.class);
    Aggregate statsCmd = as(sortCmd.child(), Aggregate.class);
    Fork forkCmd = as(statsCmd.child(), Fork.class);
    List<LogicalPlan> branches = forkCmd.children();
    assertEquals(2, branches.size());
    // Each fork branch ends in the subquery's index pattern.
    for (LogicalPlan branch : branches) {
        Eval forkMarker = as(branch, Eval.class);
        Filter forkCondition = as(forkMarker.child(), Filter.class);
        Eval evalCmd = as(forkCondition.child(), Eval.class);
        Filter whereCmd = as(evalCmd.child(), Filter.class);
        UnresolvedRelation relation = as(whereCmd.child(), UnresolvedRelation.class);
        assertEquals(unquoteIndexPattern(sourcePattern), relation.indexPattern().indexPattern());
    }
}
/**
* If the FROM command contains only a subquery, the subquery is merged into an index pattern.
*
* Keep[[?g]]
* \_Drop[[?f]]
* \_Limit[10[INTEGER],false]
* \_OrderBy[[Order[?cnt,DESC,FIRST]]]
* \_Aggregate[[?e],[?COUNT[*] AS cnt#23, ?e]]
* \_Fork[[]]
* |_Eval[[fork1[KEYWORD] AS _fork#19]]
* | \_Filter[?c < 100[INTEGER]]
* | \_Eval[[?a * 2[INTEGER] AS b#17]]
* | \_Filter[?a > 10[INTEGER]]
* | \_Keep[[?g]]
* | \_Drop[[?f]]
* | \_Limit[10[INTEGER],false]
* | \_OrderBy[[Order[?cnt,DESC,FIRST]]]
* | \_Aggregate[[?e],[?COUNT[*] AS cnt#7, ?e]]
* | \_Fork[[]]
* | |_Eval[[fork1[KEYWORD] AS _fork#3]]
* | | \_Filter[?c < 100[INTEGER]]
* | | \_Eval[[?a * 2[INTEGER] AS b#13]]
* | | \_Filter[?a > 10[INTEGER]]
* | | \_UnresolvedRelation[]
* | \_Eval[[fork2[KEYWORD] AS _fork#3]]
* | \_Filter[?d > 200[INTEGER]]
* | \_Eval[[?a * 2[INTEGER] AS b#13]]
* | \_Filter[?a > 10[INTEGER]]
* | \_UnresolvedRelation[]
* \_Eval[[fork2[KEYWORD] AS _fork#19]]
* \_Filter[?d > 200[INTEGER]]
* \_Eval[[?a * 2[INTEGER] AS b#17]]
* \_Filter[?a > 10[INTEGER]]
* \_Keep[[?g]]
* \_Drop[[?f]]
* \_Limit[10[INTEGER],false]
* \_OrderBy[[Order[?cnt,DESC,FIRST]]]
* \_Aggregate[[?e],[?COUNT[*] AS cnt#7, ?e]]
* \_Fork[[]]
* |_Eval[[fork1[KEYWORD] AS _fork#3]]
* | \_Filter[?c < 100[INTEGER]]
* | \_Eval[[?a * 2[INTEGER] AS b#13]]
* | \_Filter[?a > 10[INTEGER]]
* | \_UnresolvedRelation[]
* \_Eval[[fork2[KEYWORD] AS _fork#3]]
* \_Filter[?d > 200[INTEGER]]
* \_Eval[[?a * 2[INTEGER] AS b#13]]
* \_Filter[?a > 10[INTEGER]]
* \_UnresolvedRelation[]
*/
public void testSubqueryOnlyWithProcessingCommandsInSubqueryAndMainquery() {
    assumeTrue("Requires subquery in FROM command support", EsqlCapabilities.Cap.SUBQUERY_IN_FROM_COMMAND.isEnabled());
    var subqueryIndexPattern = randomIndexPatterns();
    // The same processing pipeline appears both inside the subquery and in the main query.
    String query = LoggerMessageFormat.format(null, """
        FROM (FROM {}
        | WHERE a > 10
        | EVAL b = a * 2
        | FORK (WHERE c < 100) (WHERE d > 200)
        | STATS cnt = COUNT(*) BY e
        | SORT cnt desc
        | LIMIT 10
        | DROP f
        | KEEP g)
        | WHERE a > 10
        | EVAL b = a * 2
        | FORK (WHERE c < 100) (WHERE d > 200)
        | STATS cnt = COUNT(*) BY e
        | SORT cnt desc
        | LIMIT 10
        | DROP f
        | KEEP g
        """, subqueryIndexPattern);
    LogicalPlan plan = statement(query);
    // Main-query pipeline, outside in: KEEP <- DROP <- LIMIT <- SORT <- STATS <- FORK.
    Keep keep = as(plan, Keep.class);
    Drop drop = as(keep.child(), Drop.class);
    Limit limit = as(drop.child(), Limit.class);
    OrderBy orderBy = as(limit.child(), OrderBy.class);
    Aggregate aggregate = as(orderBy.child(), Aggregate.class);
    Fork fork = as(aggregate.child(), Fork.class);
    List<LogicalPlan> forkChildren = fork.children();
    assertEquals(2, forkChildren.size());
    for (Eval forkEval : List.of(as(forkChildren.get(0), Eval.class), as(forkChildren.get(1), Eval.class))) {
        Filter forkFilter = as(forkEval.child(), Filter.class);
        Eval eval = as(forkFilter.child(), Eval.class);
        Filter filter = as(eval.child(), Filter.class);
        // The subquery's whole pipeline is preserved beneath each main-query fork branch.
        Keep subqueryKeep = as(filter.child(), Keep.class);
        Drop subqueryDrop = as(subqueryKeep.child(), Drop.class);
        Limit subqueryLimit = as(subqueryDrop.child(), Limit.class);
        OrderBy subqueryOrderby = as(subqueryLimit.child(), OrderBy.class);
        Aggregate subqueryAggregate = as(subqueryOrderby.child(), Aggregate.class);
        Fork subqueryFork = as(subqueryAggregate.child(), Fork.class);
        List<LogicalPlan> subqueryForkChildren = subqueryFork.children();
        // Fixed: this previously re-asserted the outer fork's size (copy-paste);
        // it must check the inner (subquery) fork's branch count.
        assertEquals(2, subqueryForkChildren.size());
        for (Eval subqueryForkEval : List.of(
            as(subqueryForkChildren.get(0), Eval.class),
            as(subqueryForkChildren.get(1), Eval.class)
        )) {
            Filter subqueryForkFilter = as(subqueryForkEval.child(), Filter.class);
            Eval subqueryEval = as(subqueryForkFilter.child(), Eval.class);
            Filter subqueryFilter = as(subqueryEval.child(), Filter.class);
            UnresolvedRelation subqueryRelation = as(subqueryFilter.child(), UnresolvedRelation.class);
            assertEquals(unquoteIndexPattern(subqueryIndexPattern), subqueryRelation.indexPattern().indexPattern());
        }
    }
}
/**
* UnionAll[[]]
* |_UnresolvedRelation[]
* |_Subquery[]
* | \_UnresolvedRelation[]
* \_Subquery[]
* \_UnresolvedRelation[]
*/
public void testMultipleMixedIndexPatternsAndSubqueries() {
    assumeTrue("Requires subquery in FROM command support", EsqlCapabilities.Cap.SUBQUERY_IN_FROM_COMMAND.isEnabled());
    var indexPattern1 = randomIndexPatterns();
    var indexPattern2 = randomIndexPatterns();
    var indexPattern3 = randomIndexPatterns();
    var indexPattern4 = randomIndexPatterns();
    String query = LoggerMessageFormat.format(null, """
        FROM {}, (FROM {}), {}, (FROM {})
        """, indexPattern1, indexPattern2, indexPattern3, indexPattern4);
    UnionAll unionAll = as(statement(query), UnionAll.class);
    List<LogicalPlan> children = unionAll.children();
    assertEquals(3, children.size());
    // The two bare patterns collapse into a single comma-joined relation...
    UnresolvedRelation merged = as(children.get(0), UnresolvedRelation.class);
    String expectedMerged = unquoteIndexPattern(indexPattern1) + "," + unquoteIndexPattern(indexPattern3);
    assertEquals(expectedMerged, merged.indexPattern().indexPattern());
    // ...while each subquery keeps its own relation under a Subquery node.
    UnresolvedRelation firstSubqueryRelation = as(as(children.get(1), Subquery.class).child(), UnresolvedRelation.class);
    assertEquals(unquoteIndexPattern(indexPattern2), firstSubqueryRelation.indexPattern().indexPattern());
    UnresolvedRelation secondSubqueryRelation = as(as(children.get(2), Subquery.class).child(), UnresolvedRelation.class);
    assertEquals(unquoteIndexPattern(indexPattern4), secondSubqueryRelation.indexPattern().indexPattern());
}
/**
* Keep[[?g]]
* \_Drop[[?f]]
* \_Limit[10[INTEGER],false]
* \_OrderBy[[Order[?cnt,DESC,FIRST]]]
* \_Aggregate[[?e],[?COUNT[*] AS cnt#25, ?e]]
* \_Fork[[]]
* |_Eval[[fork1[KEYWORD] AS _fork#21]]
* | \_Filter[?c < 100[INTEGER]]
* | \_LookupJoin[LEFT,[?c],[?c],true,null]
* | |_Eval[[?a * 2[INTEGER] AS b#18]]
* | | \_Filter[?a > 10[INTEGER]]
* | | \_UnionAll[[]]
* | | |_UnresolvedRelation[]
* | | |_Subquery[]
* | | | \_Keep[[?g]]
* | | | \_Drop[[?f]]
* | | | \_Limit[10[INTEGER],false]
* | | | \_OrderBy[[Order[?cnt,DESC,FIRST]]]
* | | | \_Aggregate[[?e],[?COUNT[*] AS cnt#8, ?e]]
* | | | \_Fork[[]]
* | | | |_Eval[[fork1[KEYWORD] AS _fork#4]]
* | | | | \_Filter[?c < 100[INTEGER]]
* | | | | \_LookupJoin[LEFT,[?c],[?c],true,null]
* | | | | |_Eval[[?a * 2[INTEGER] AS b#14]]
* | | | | | \_Filter[?a > 10[INTEGER]]
* | | | | | \_UnresolvedRelation[]
* | | | | \_UnresolvedRelation[lookup_index]
* | | | \_Eval[[fork2[KEYWORD] AS _fork#4]]
* | | | \_Filter[?d > 200[INTEGER]]
* | | | \_LookupJoin[LEFT,[?c],[?c],true,null]
* | | | |_Eval[[?a * 2[INTEGER] AS b#14]]
* | | | | \_Filter[?a > 10[INTEGER]]
* | | | | \_UnresolvedRelation[]
* | | | \_UnresolvedRelation[lookup_index]
* | | \_Subquery[]
* | | \_UnresolvedRelation[]
* | \_UnresolvedRelation[lookup_index]
* \_Eval[[fork2[KEYWORD] AS _fork#21]]
* \_Filter[?d > 200[INTEGER]]
* \_LookupJoin[LEFT,[?c],[?c],true,null]
* |_Eval[[?a * 2[INTEGER] AS b#18]]
* | \_Filter[?a > 10[INTEGER]]
* | \_UnionAll[[]]
* | |_UnresolvedRelation[]
* | |_Subquery[]
* | | \_Keep[[?g]]
* | | \_Drop[[?f]]
* | | \_Limit[10[INTEGER],false]
* | | \_OrderBy[[Order[?cnt,DESC,FIRST]]]
* | | \_Aggregate[[?e],[?COUNT[*] AS cnt#8, ?e]]
* | | \_Fork[[]]
* | | |_Eval[[fork1[KEYWORD] AS _fork#4]]
* | | | \_Filter[?c < 100[INTEGER]]
* | | | \_LookupJoin[LEFT,[?c],[?c],true,null]
* | | | |_Eval[[?a * 2[INTEGER] AS b#14]]
* | | | | \_Filter[?a > 10[INTEGER]]
* | | | | \_UnresolvedRelation[]
* | | | \_UnresolvedRelation[lookup_index]
* | | \_Eval[[fork2[KEYWORD] AS _fork#4]]
* | | \_Filter[?d > 200[INTEGER]]
* | | \_LookupJoin[LEFT,[?c],[?c],true,null]
* | | |_Eval[[?a * 2[INTEGER] AS b#14]]
* | | | \_Filter[?a > 10[INTEGER]]
* | | | \_UnresolvedRelation[]
* | | \_UnresolvedRelation[lookup_index]
* | \_Subquery[]
* | \_UnresolvedRelation[]
* \_UnresolvedRelation[lookup_index]
*/
public void testMultipleSubqueriesWithProcessingCommands() {
    assumeTrue("Requires subquery in FROM command support", EsqlCapabilities.Cap.SUBQUERY_IN_FROM_COMMAND.isEnabled());
    var mainIndexPattern1 = randomIndexPatterns();
    var mainIndexPattern2 = randomIndexPatterns();
    var subqueryIndexPattern1 = randomIndexPatterns();
    var subqueryIndexPattern2 = randomIndexPatterns();
    var joinIndexPattern = "lookup_index";
    // The two bare main-query patterns are merged into one comma-joined relation.
    var combinedIndexPattern = unquoteIndexPattern(mainIndexPattern1) + "," + unquoteIndexPattern(mainIndexPattern2);
    String query = LoggerMessageFormat.format(null, """
        FROM {}, (FROM {}
        | WHERE a > 10
        | EVAL b = a * 2
        | LOOKUP JOIN {} ON c
        | FORK (WHERE c < 100) (WHERE d > 200)
        | STATS cnt = COUNT(*) BY e
        | SORT cnt desc
        | LIMIT 10
        | DROP f
        | KEEP g)
        , {}, (FROM {})
        | WHERE a > 10
        | EVAL b = a * 2
        | LOOKUP JOIN {} ON c
        | FORK (WHERE c < 100) (WHERE d > 200)
        | STATS cnt = COUNT(*) BY e
        | SORT cnt desc
        | LIMIT 10
        | DROP f
        | KEEP g
        """, mainIndexPattern1, subqueryIndexPattern1, joinIndexPattern, mainIndexPattern2, subqueryIndexPattern2, joinIndexPattern);
    LogicalPlan plan = statement(query);
    // Main-query pipeline, outside in: KEEP <- DROP <- LIMIT <- SORT <- STATS <- FORK.
    Keep keep = as(plan, Keep.class);
    Drop drop = as(keep.child(), Drop.class);
    Limit limit = as(drop.child(), Limit.class);
    OrderBy orderBy = as(limit.child(), OrderBy.class);
    Aggregate aggregate = as(orderBy.child(), Aggregate.class);
    Fork fork = as(aggregate.child(), Fork.class);
    List<LogicalPlan> forkChildren = fork.children();
    assertEquals(2, forkChildren.size());
    for (Eval forkEval : List.of(as(forkChildren.get(0), Eval.class), as(forkChildren.get(1), Eval.class))) {
        Filter forkFilter = as(forkEval.child(), Filter.class);
        LookupJoin lookupJoin = as(forkFilter.child(), LookupJoin.class);
        Eval eval = as(lookupJoin.left(), Eval.class);
        UnresolvedRelation joinRelation = as(lookupJoin.right(), UnresolvedRelation.class);
        assertEquals(unquoteIndexPattern(joinIndexPattern), joinRelation.indexPattern().indexPattern());
        Filter filter = as(eval.child(), Filter.class);
        // The FROM sources: merged relation plus the two subqueries.
        UnionAll unionAll = as(filter.child(), UnionAll.class);
        List<LogicalPlan> children = unionAll.children();
        assertEquals(3, children.size());
        UnresolvedRelation unresolvedRelation = as(children.get(0), UnresolvedRelation.class);
        assertEquals(unquoteIndexPattern(combinedIndexPattern), unresolvedRelation.indexPattern().indexPattern());
        // First subquery keeps its whole pipeline under the Subquery node.
        Subquery subquery1 = as(children.get(1), Subquery.class);
        Keep subqueryKeep = as(subquery1.plan(), Keep.class);
        Drop subqueryDrop = as(subqueryKeep.child(), Drop.class);
        Limit subqueryLimit = as(subqueryDrop.child(), Limit.class);
        OrderBy subqueryOrderby = as(subqueryLimit.child(), OrderBy.class);
        Aggregate subqueryAggregate = as(subqueryOrderby.child(), Aggregate.class);
        Fork subqueryFork = as(subqueryAggregate.child(), Fork.class);
        List<LogicalPlan> subqueryForkChildren = subqueryFork.children();
        // Fixed: this previously re-asserted the outer fork's size (copy-paste);
        // it must check the inner (subquery) fork's branch count.
        assertEquals(2, subqueryForkChildren.size());
        for (Eval subqueryForkEval : List.of(
            as(subqueryForkChildren.get(0), Eval.class),
            as(subqueryForkChildren.get(1), Eval.class)
        )) {
            Filter subqueryForkFilter = as(subqueryForkEval.child(), Filter.class);
            LookupJoin subqueryLookupJoin = as(subqueryForkFilter.child(), LookupJoin.class);
            Eval subqueryEval = as(subqueryLookupJoin.left(), Eval.class);
            joinRelation = as(subqueryLookupJoin.right(), UnresolvedRelation.class);
            assertEquals(unquoteIndexPattern(joinIndexPattern), joinRelation.indexPattern().indexPattern());
            Filter subqueryFilter = as(subqueryEval.child(), Filter.class);
            unresolvedRelation = as(subqueryFilter.child(), UnresolvedRelation.class);
            assertEquals(unquoteIndexPattern(subqueryIndexPattern1), unresolvedRelation.indexPattern().indexPattern());
        }
        // Second subquery is a bare FROM and stays a plain relation.
        Subquery subquery2 = as(children.get(2), Subquery.class);
        unresolvedRelation = as(subquery2.plan(), UnresolvedRelation.class);
        assertEquals(unquoteIndexPattern(subqueryIndexPattern2), unresolvedRelation.indexPattern().indexPattern());
    }
}
/**
* UnionAll[[]]
* |_UnresolvedRelation[]
* \_Subquery[]
* \_UnionAll[[]]
* |_UnresolvedRelation[]
* \_Subquery[]
* \_UnionAll[[]]
* |_UnresolvedRelation[]
* \_Subquery[]
* \_UnresolvedRelation[]
*/
public void testSimpleNestedSubquery() {
    assumeTrue("Requires subquery in FROM command support", EsqlCapabilities.Cap.SUBQUERY_IN_FROM_COMMAND.isEnabled());
    var indexPattern1 = randomIndexPatterns();
    var indexPattern2 = randomIndexPatterns();
    var indexPattern3 = randomIndexPatterns();
    var indexPattern4 = randomIndexPatterns();
    String query = LoggerMessageFormat.format(null, """
        FROM {}, (FROM {}, (FROM {}, (FROM {})))
        """, indexPattern1, indexPattern2, indexPattern3, indexPattern4);
    // Level 1: outer UnionAll over the bare pattern and the first subquery.
    UnionAll level1 = as(statement(query), UnionAll.class);
    List<LogicalPlan> level1Children = level1.children();
    assertEquals(2, level1Children.size());
    assertEquals(
        unquoteIndexPattern(indexPattern1),
        as(level1Children.get(0), UnresolvedRelation.class).indexPattern().indexPattern()
    );
    // Level 2: first subquery wraps another UnionAll.
    UnionAll level2 = as(as(level1Children.get(1), Subquery.class).plan(), UnionAll.class);
    List<LogicalPlan> level2Children = level2.children();
    assertEquals(2, level2Children.size());
    assertEquals(
        unquoteIndexPattern(indexPattern2),
        as(level2Children.get(0), UnresolvedRelation.class).indexPattern().indexPattern()
    );
    // Level 3: second subquery wraps yet another UnionAll.
    UnionAll level3 = as(as(level2Children.get(1), Subquery.class).plan(), UnionAll.class);
    List<LogicalPlan> level3Children = level3.children();
    assertEquals(2, level3Children.size());
    assertEquals(
        unquoteIndexPattern(indexPattern3),
        as(level3Children.get(0), UnresolvedRelation.class).indexPattern().indexPattern()
    );
    // Innermost subquery bottoms out directly in a plain relation.
    assertEquals(
        unquoteIndexPattern(indexPattern4),
        as(as(level3Children.get(1), Subquery.class).child(), UnresolvedRelation.class).indexPattern().indexPattern()
    );
}
/**
* LogicalPlanBuilder does not flatten nested subqueries with processing commands,
 * the structure of the nested subqueries is preserved in the parsed plan.
*
* Limit[10[INTEGER],false]
* \_UnionAll[[]]
* |_UnresolvedRelation[]
* \_Subquery[]
* \_Aggregate[[?e],[?COUNT[*] AS cnt#7, ?e]]
* \_UnionAll[[]]
* |_UnresolvedRelation[]
* \_Subquery[]
* \_Eval[[?a * 2[INTEGER] AS b#4]]
* \_UnionAll[[]]
* |_UnresolvedRelation[]
* \_Subquery[]
* \_Filter[?a > 10[INTEGER]]
* \_UnresolvedRelation[]
*/
public void testNestedSubqueryWithProcessingCommands() {
    assumeTrue("Requires subquery in FROM command support", EsqlCapabilities.Cap.SUBQUERY_IN_FROM_COMMAND.isEnabled());
    // One pattern per nesting level, outermost (1) to innermost (4).
    var indexPattern1 = randomIndexPatterns();
    var indexPattern2 = randomIndexPatterns();
    var indexPattern3 = randomIndexPatterns();
    var indexPattern4 = randomIndexPatterns();
    // Each nesting level applies one processing command: WHERE at the innermost,
    // then EVAL, then STATS; LIMIT belongs to the outermost query.
    String query = LoggerMessageFormat.format(null, """
        FROM {}, (FROM {}, (FROM {}, (FROM {}
        | WHERE a > 10)
        | EVAL b = a * 2)
        |STATS cnt = COUNT(*) BY e)
        | LIMIT 10
        """, indexPattern1, indexPattern2, indexPattern3, indexPattern4);
    LogicalPlan plan = statement(query);
    // Outermost level: LIMIT over a UnionAll of the bare pattern and subquery 1.
    Limit limit = as(plan, Limit.class);
    UnionAll unionAll = as(limit.child(), UnionAll.class);
    List<LogicalPlan> children = unionAll.children();
    assertEquals(2, children.size());
    UnresolvedRelation unresolvedRelation = as(children.get(0), UnresolvedRelation.class);
    assertEquals(unquoteIndexPattern(indexPattern1), unresolvedRelation.indexPattern().indexPattern());
    // Level 2: subquery 1 wraps STATS over its own UnionAll.
    // Note: unionAll/children/unresolvedRelation are intentionally reused and
    // reassigned as we descend one nesting level at a time.
    Subquery subquery1 = as(children.get(1), Subquery.class);
    Aggregate aggregate = as(subquery1.plan(), Aggregate.class);
    unionAll = as(aggregate.child(), UnionAll.class);
    children = unionAll.children();
    assertEquals(2, children.size());
    unresolvedRelation = as(children.get(0), UnresolvedRelation.class);
    assertEquals(unquoteIndexPattern(indexPattern2), unresolvedRelation.indexPattern().indexPattern());
    // Level 3: subquery 2 wraps EVAL over its own UnionAll.
    Subquery subquery2 = as(children.get(1), Subquery.class);
    Eval eval = as(subquery2.plan(), Eval.class);
    unionAll = as(eval.child(), UnionAll.class);
    children = unionAll.children();
    assertEquals(2, children.size());
    unresolvedRelation = as(children.get(0), UnresolvedRelation.class);
    assertEquals(unquoteIndexPattern(indexPattern3), unresolvedRelation.indexPattern().indexPattern());
    // Innermost level: subquery 3 wraps the WHERE filter over a plain relation.
    Subquery subquery3 = as(children.get(1), Subquery.class);
    Filter filter = as(subquery3.plan(), Filter.class);
    unresolvedRelation = as(filter.child(), UnresolvedRelation.class);
    assertEquals(unquoteIndexPattern(indexPattern4), unresolvedRelation.indexPattern().indexPattern());
}
/**
 * The metadata options from the main query are not propagated into subqueries.
*
* Aggregate[[?a],[?COUNT[*] AS cnt#6, ?a]]
* \_UnionAll[[]]
* |_UnresolvedRelation[]
* \_Subquery[]
* \_Filter[?a > 10[INTEGER]]
* \_UnionAll[[]]
* |_UnresolvedRelation[]
* \_Subquery[]
* \_UnresolvedRelation[]
*/
// NOTE(review): "Metadada" in the method name looks like a typo for "Metadata";
// left unchanged here because this edit only touches comments.
public void testSubqueriesWithMetadada() {
    assumeTrue("Requires subquery in FROM command support", EsqlCapabilities.Cap.SUBQUERY_IN_FROM_COMMAND.isEnabled());
    var indexPattern1 = randomIndexPatterns();
    var indexPattern2 = randomIndexPatterns();
    var indexPattern3 = randomIndexPatterns();
    // Outer query requests "_index" metadata, the first subquery requests "_score",
    // and the innermost subquery requests none.
    String query = LoggerMessageFormat.format(null, """
        FROM {}, (FROM {}, (FROM {}) metadata _score | WHERE a > 10) metadata _index
        | STATS cnt = COUNT(*) BY a
        """, indexPattern1, indexPattern2, indexPattern3);
    LogicalPlan plan = statement(query);
    Aggregate aggregate = as(plan, Aggregate.class);
    UnionAll unionAll = as(aggregate.child(), UnionAll.class);
    List<LogicalPlan> children = unionAll.children();
    assertEquals(2, children.size());
    // main query: only the outer "_index" option applies to its relation.
    UnresolvedRelation mainRelation = as(children.get(0), UnresolvedRelation.class);
    assertEquals(unquoteIndexPattern(indexPattern1), mainRelation.indexPattern().indexPattern());
    List<Attribute> metadata = mainRelation.metadataFields();
    assertEquals(1, metadata.size());
    MetadataAttribute metadataAttribute = as(metadata.get(0), MetadataAttribute.class);
    assertEquals("_index", metadataAttribute.name());
    // subquery1: keeps its own "_score" option; the outer "_index" is not propagated in.
    Subquery subquery = as(children.get(1), Subquery.class);
    Filter filter = as(subquery.plan(), Filter.class);
    unionAll = as(filter.child(), UnionAll.class);
    children = unionAll.children();
    assertEquals(2, children.size());
    UnresolvedRelation subqueryRelation = as(children.get(0), UnresolvedRelation.class);
    assertEquals(unquoteIndexPattern(indexPattern2), subqueryRelation.indexPattern().indexPattern());
    metadata = subqueryRelation.metadataFields();
    assertEquals(1, metadata.size());
    metadataAttribute = as(metadata.get(0), MetadataAttribute.class);
    assertEquals("_score", metadataAttribute.name());
    // subquery2: declared no metadata options and inherits none from enclosing queries.
    subquery = as(children.get(1), Subquery.class);
    subqueryRelation = as(subquery.plan(), UnresolvedRelation.class);
    assertEquals(unquoteIndexPattern(indexPattern3), subqueryRelation.indexPattern().indexPattern());
    metadata = subqueryRelation.metadataFields();
    assertEquals(0, metadata.size());
}
/**
* Aggregate[[?a],[?COUNT[*] AS cnt#4, ?a]]
* \_UnionAll[[]]
* |_UnresolvedRelation[]
* \_Subquery[]
* \_Filter[?a > 10[INTEGER]]
* \_UnresolvedRelation[]
*/
public void testSubqueryWithRemoteCluster() {
    assumeTrue("Requires subquery in FROM command support", EsqlCapabilities.Cap.SUBQUERY_IN_FROM_COMMAND.isEnabled());
    // Mix a cross-cluster pattern with a local one in both the main query and the subquery.
    var mainRemoteIndexPattern = randomIndexPatterns(CROSS_CLUSTER);
    var mainIndexPattern = randomIndexPatterns(without(CROSS_CLUSTER));
    var expectedMainPattern = unquoteIndexPattern(mainRemoteIndexPattern) + "," + unquoteIndexPattern(mainIndexPattern);
    var subqueryRemoteIndexPattern = randomIndexPatterns(CROSS_CLUSTER);
    var subqueryIndexPattern = randomIndexPatterns(without(CROSS_CLUSTER));
    var expectedSubqueryPattern = unquoteIndexPattern(subqueryRemoteIndexPattern)
        + ","
        + unquoteIndexPattern(subqueryIndexPattern);
    String query = LoggerMessageFormat.format(null, """
        FROM {}, {}, (FROM {}, {} | WHERE a > 10)
        | STATS cnt = COUNT(*) BY a
        """, mainRemoteIndexPattern, mainIndexPattern, subqueryRemoteIndexPattern, subqueryIndexPattern);
    Aggregate stats = as(statement(query), Aggregate.class);
    UnionAll unionAll = as(stats.child(), UnionAll.class);
    List<LogicalPlan> children = unionAll.children();
    assertEquals(2, children.size());
    // Remote and local patterns are comma-merged per branch...
    assertEquals(expectedMainPattern, as(children.get(0), UnresolvedRelation.class).indexPattern().indexPattern());
    // ...and the subquery keeps its own merged relation under its WHERE filter.
    Subquery subquery = as(children.get(1), Subquery.class);
    Filter subqueryFilter = as(subquery.plan(), Filter.class);
    assertEquals(expectedSubqueryPattern, as(subqueryFilter.child(), UnresolvedRelation.class).indexPattern().indexPattern());
}
/**
 * A subquery in the TS command must be rejected by the parser.
 */
public void testTimeSeriesWithSubquery() {
    assumeTrue("Requires subquery in FROM command support", EsqlCapabilities.Cap.SUBQUERY_IN_FROM_COMMAND.isEnabled());
    // The template has no {} placeholders, so the two randomIndexPatterns() values
    // previously passed to LoggerMessageFormat.format were never substituted; they
    // were dead code and have been removed. The fixed index names keep the expected
    // error offset ("line 1:2") stable.
    // TODO(review): if randomized patterns were intended, the template should read
    // "TS {}, (FROM {})" — confirm the expected error position before changing it.
    String query = """
        TS index1, (FROM index2)
        """;
    expectThrows(
        ParsingException.class,
        containsString("line 1:2: Subqueries are not supported in TS command"),
        () -> statement(query)
    );
}
}
| SubqueryTests |
java | redisson__redisson | redisson/src/main/java/org/redisson/RedissonPriorityQueue.java | {
"start": 1347,
"end": 1456
} | class ____<V> extends BaseRedissonList<V> implements RPriorityQueue<V> {
public static | RedissonPriorityQueue |
java | spring-projects__spring-boot | module/spring-boot-data-jpa/src/test/java/org/springframework/boot/data/jpa/autoconfigure/domain/country/CountryRepository.java | {
"start": 840,
"end": 954
} | interface ____ extends JpaRepository<Country, Long>, RevisionRepository<Country, Long, Integer> {
}
| CountryRepository |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/api/operators/StreamTaskStateInitializerImpl.java | {
"start": 4324,
"end": 4719
} | class ____
* the state to create {@link StreamOperatorStateContext} objects for stream operators from the
* {@link TaskStateManager} of the task that runs the stream task and hence the operator.
*
* <p>This implementation operates on top a {@link TaskStateManager}, from which it receives
* everything required to restore state in the backends from checkpoints or savepoints.
*/
public | obtains |
java | spring-cloud__spring-cloud-gateway | spring-cloud-gateway-server-webflux/src/main/java/org/springframework/cloud/gateway/support/config/KeyValueConverter.java | {
"start": 785,
"end": 1435
} | class ____ implements Converter<String, KeyValue> {
private static final String INVALID_CONFIGURATION_MESSAGE = "Invalid configuration, expected format is: 'key:value', received: ";
@Override
public KeyValue convert(String source) throws IllegalArgumentException {
try {
String[] split = source.split(":");
if (split.length == 2) {
return new KeyValue(split[0], split.length == 1 ? "" : split[1]);
}
throw new IllegalArgumentException(INVALID_CONFIGURATION_MESSAGE + source);
}
catch (ArrayIndexOutOfBoundsException e) {
throw new IllegalArgumentException(INVALID_CONFIGURATION_MESSAGE + source);
}
}
}
| KeyValueConverter |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/parser/number/NumberValueTest_error_7.java | {
"start": 190,
"end": 512
} | class ____ extends TestCase {
public void test_0() throws Exception {
Exception error = null;
try {
String text = "{\"value\":-";
JSON.parse(text);
} catch (Exception e) {
error = e;
}
Assert.assertNotNull(error);
}
}
| NumberValueTest_error_7 |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/issues/Issue5774.java | {
"start": 648,
"end": 3009
} | class ____ {
@Test
public void test_createuser_sql() {
for (String sql : new String[]{
"create user IF NOT EXISTS \"ptscr-2kaq\"@\"%\" identified by \"asdasdasdasd\";",
"create user IF NOT EXISTS \"ptscr-2kaq\" identified by \"asdasdasdasd\";",
"create user \"ptscr-2kaq\"@\"%\" identified by \"asdasdasdasd\";",
"create user \"ptscr-2kaq\"@\"%\" identified by RANDOM PASSWORD;",
"CREATE USER 'jeffrey'@'localhost' IDENTIFIED BY 'password';",
"CREATE USER 'jeffrey'@'localhost'\n"
+ " IDENTIFIED BY 'password';",
"CREATE USER 'jeffrey'@localhost IDENTIFIED BY 'password';",
// "CREATE USER 'jeffrey'@'localhost'\n"
// + " IDENTIFIED BY 'new_password' PASSWORD EXPIRE;",
"CREATE USER 'jeffrey'@'localhost'\n"
+ " IDENTIFIED WITH mysql_native_password BY 'password';",
// "CREATE USER 'u1'@'localhost'\n"
// + " IDENTIFIED WITH caching_sha2_password\n"
// + " BY 'sha2_password'\n"
// + " AND IDENTIFIED WITH authentication_ldap_sasl\n"
// + " AS 'uid=u1_ldap,ou=People,dc=example,dc=com';",
// "CREATE USER 'jeffrey'@'localhost' PASSWORD EXPIRE;",
// "CREATE USER 'jeffrey'@'localhost' PASSWORD EXPIRE DEFAULT;",
// "CREATE USER 'jeffrey'@'localhost' PASSWORD EXPIRE NEVER;",
}) {
DbType dbType = DbType.mysql;
System.out.println("原始的sql===" + sql);
SQLStatementParser parser = SQLParserUtils.createSQLStatementParser(sql, dbType);
List<SQLStatement> statementList = parser.parseStatementList();
System.out.println("生成的sql===" + statementList);
StringBuilder sb = new StringBuilder();
for (SQLStatement statement : statementList) {
sb.append(statement.toString()).append(";");
}
sb.deleteCharAt(sb.length() - 1);
parser = SQLParserUtils.createSQLStatementParser(sb.toString(), dbType);
List<SQLStatement> statementListNew = parser.parseStatementList();
System.out.println("再生成sql===" + statementListNew);
assertEquals(statementList.toString(), statementListNew.toString());
}
}
}
| Issue5774 |
java | apache__kafka | server-common/src/main/java/org/apache/kafka/server/share/persister/Persister.java | {
"start": 910,
"end": 1201
} | interface ____ methods which can be used by callers to interact with the
* persistence implementation responsible for storing share group/partition states.
* For KIP-932, the default {@link Persister} uses a share coordinator to store information in
* an internal topic.
*/
public | introduces |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/sps/StoragePolicySatisfyManager.java | {
"start": 2059,
"end": 2321
} | class ____ understand more about the
* external sps service functionality.
*
* <p>
* If the configured mode is {@link StoragePolicySatisfierMode#NONE}, then it
* will disable the sps feature completely by clearing all queued up sps path's
* hint.
*
* This | to |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/DoubleBraceInitializationTest.java | {
"start": 4095,
"end": 4639
} | class ____ {
static final ImmutableList<Integer> a = ImmutableList.of(1, 2);
static final ImmutableList<Integer> b = ImmutableList.of(1, 2);
List<Integer> c = new ArrayList<Integer>(ImmutableList.of(1, 2));
}
""")
.doTest();
}
@Test
public void set() {
testHelper
.addInputLines(
"in/Test.java",
"""
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
| Test |
java | apache__hadoop | hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingStderr.java | {
"start": 1302,
"end": 4084
} | class ____
{
public TestStreamingStderr() throws IOException {
UtilTest utilTest = new UtilTest(getClass().getName());
utilTest.checkUserDir();
utilTest.redirectIfAntJunit();
}
protected String[] genArgs(File input, File output, int preLines, int duringLines, int postLines) {
return new String[] {
"-input", input.getAbsolutePath(),
"-output", output.getAbsolutePath(),
"-mapper", UtilTest.makeJavaCommand(StderrApp.class,
new String[]{Integer.toString(preLines),
Integer.toString(duringLines),
Integer.toString(postLines)}),
"-reducer", StreamJob.REDUCE_NONE,
"-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
"-jobconf", "mapreduce.task.timeout=5000",
"-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp")
};
}
protected File setupInput(String base, boolean hasInput) throws IOException {
File input = new File(base + "-input.txt");
UtilTest.recursiveDelete(input);
FileOutputStream in = new FileOutputStream(input.getAbsoluteFile());
if (hasInput) {
in.write("hello\n".getBytes());
}
in.close();
return input;
}
protected File setupOutput(String base) throws IOException {
File output = new File(base + "-out");
UtilTest.recursiveDelete(output);
return output;
}
public void runStreamJob(String baseName, boolean hasInput,
int preLines, int duringLines, int postLines)
throws Exception {
File input = setupInput(baseName, hasInput);
File output = setupOutput(baseName);
boolean mayExit = false;
int returnStatus = 0;
StreamJob job = new StreamJob(genArgs(input, output, preLines, duringLines, postLines), mayExit);
returnStatus = job.go();
assertEquals(0, returnStatus, "StreamJob success");
}
// This test will fail by blocking forever if the stderr isn't
// consumed by Hadoop for tasks that don't have any input.
@Test
public void testStderrNoInput() throws Exception {
runStreamJob("target/stderr-pre", false, 10000, 0, 0);
}
// Streaming should continue to read stderr even after all input has
// been consumed.
@Test
public void testStderrAfterOutput() throws Exception {
runStreamJob("target/stderr-post", false, 0, 0, 10000);
}
// This test should produce a task timeout if stderr lines aren't
// counted as progress. This won't actually work until
// LocalJobRunner supports timeouts.
@Test
public void testStderrCountsAsProgress() throws Exception {
runStreamJob("target/stderr-progress", true, 10, 1000, 0);
}
}
| TestStreamingStderr |
java | apache__commons-lang | src/test/java/org/apache/commons/lang3/concurrent/LazyInitializerSupplierTest.java | {
"start": 1011,
"end": 1786
} | class ____ extends AbstractConcurrentInitializerCloseAndExceptionsTest<Object> {
/**
* Creates the initializer to be tested.
*
* @return the initializer to be tested
*/
@Override
protected ConcurrentInitializer<Object> createInitializer() {
return LazyInitializer.<Object>builder().setInitializer(Object::new).get();
}
@Override
protected ConcurrentInitializer<CloseableObject> createInitializerThatThrowsException(
final FailableSupplier<CloseableObject, ? extends Exception> supplier,
final FailableConsumer<CloseableObject, ? extends Exception> closer) {
return LazyInitializer.<CloseableObject>builder().setInitializer(supplier).setCloser(closer).get();
}
}
| LazyInitializerSupplierTest |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CheckShrinkReadyStepTests.java | {
"start": 1861,
"end": 33549
} | class ____ extends AbstractStepTestCase<CheckShrinkReadyStep> {
private static final List<SingleNodeShutdownMetadata.Type> REMOVE_SHUTDOWN_TYPES = List.of(
SingleNodeShutdownMetadata.Type.REPLACE,
SingleNodeShutdownMetadata.Type.REMOVE,
SingleNodeShutdownMetadata.Type.SIGTERM
);
@Override
public CheckShrinkReadyStep createRandomInstance() {
Step.StepKey stepKey = randomStepKey();
Step.StepKey nextStepKey = randomStepKey();
return new CheckShrinkReadyStep(stepKey, nextStepKey);
}
@Override
public CheckShrinkReadyStep mutateInstance(CheckShrinkReadyStep instance) {
Step.StepKey key = instance.getKey();
Step.StepKey nextKey = instance.getNextStepKey();
switch (between(0, 1)) {
case 0 -> key = new Step.StepKey(key.phase(), key.action(), key.name() + randomAlphaOfLength(5));
case 1 -> nextKey = new Step.StepKey(nextKey.phase(), nextKey.action(), nextKey.name() + randomAlphaOfLength(5));
default -> throw new AssertionError("Illegal randomisation branch");
}
return new CheckShrinkReadyStep(key, nextKey);
}
@Override
public CheckShrinkReadyStep copyInstance(CheckShrinkReadyStep instance) {
return new CheckShrinkReadyStep(instance.getKey(), instance.getNextStepKey());
}
public void testNoSetting() {
Index index = new Index(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20));
Map<String, String> requires = AllocateActionTests.randomAllocationRoutingMap(1, 5);
Settings.Builder existingSettings = Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current())
.put(IndexMetadata.SETTING_INDEX_UUID, index.getUUID());
Settings.Builder expectedSettings = Settings.builder();
Settings.Builder node1Settings = Settings.builder();
Settings.Builder node2Settings = Settings.builder();
requires.forEach((k, v) -> {
existingSettings.put(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_SETTING.getKey() + k, v);
expectedSettings.put(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_SETTING.getKey() + k, v);
node1Settings.put(Node.NODE_ATTRIBUTES.getKey() + k, v);
});
IndexRoutingTable.Builder indexRoutingTable = IndexRoutingTable.builder(index)
.addShard(TestShardRouting.newShardRouting(new ShardId(index, 0), "node1", true, ShardRoutingState.STARTED));
CheckShrinkReadyStep step = createRandomInstance();
IllegalStateException e = expectThrows(IllegalStateException.class, () -> {
assertAllocateStatus(
index,
1,
0,
step,
existingSettings,
node1Settings,
node2Settings,
indexRoutingTable,
new ClusterStateWaitStep.Result(true, null)
);
});
assertThat(e.getMessage(), containsString("Cannot check shrink allocation as there are no allocation rules by _id"));
}
public void testConditionMet() {
Index index = new Index(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20));
Map<String, String> requires = AllocateActionTests.randomAllocationRoutingMap(1, 5);
Settings.Builder existingSettings = Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current())
.put(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_PREFIX + "._id", "node1")
.put(IndexMetadata.SETTING_INDEX_UUID, index.getUUID());
Settings.Builder expectedSettings = Settings.builder();
Settings.Builder node1Settings = Settings.builder();
Settings.Builder node2Settings = Settings.builder();
requires.forEach((k, v) -> {
existingSettings.put(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_SETTING.getKey() + k, v);
expectedSettings.put(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_SETTING.getKey() + k, v);
node1Settings.put(Node.NODE_ATTRIBUTES.getKey() + k, v);
});
IndexRoutingTable.Builder indexRoutingTable = IndexRoutingTable.builder(index)
.addShard(TestShardRouting.newShardRouting(new ShardId(index, 0), "node1", true, ShardRoutingState.STARTED));
CheckShrinkReadyStep step = createRandomInstance();
assertAllocateStatus(
index,
1,
0,
step,
existingSettings,
node1Settings,
node2Settings,
indexRoutingTable,
new ClusterStateWaitStep.Result(true, null)
);
}
public void testConditionMetOnlyOneCopyAllocated() {
Index index = new Index(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20));
Map<String, String> requires = AllocateActionTests.randomAllocationRoutingMap(1, 5);
Settings.Builder existingSettings = Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current())
.put(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_PREFIX + "._id", "node1")
.put(IndexMetadata.SETTING_INDEX_UUID, index.getUUID());
Settings.Builder expectedSettings = Settings.builder();
Settings.Builder node1Settings = Settings.builder();
Settings.Builder node2Settings = Settings.builder();
requires.forEach((k, v) -> {
existingSettings.put(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_SETTING.getKey() + k, v);
expectedSettings.put(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_SETTING.getKey() + k, v);
node1Settings.put(Node.NODE_ATTRIBUTES.getKey() + k, v);
});
boolean primaryOnNode1 = randomBoolean();
IndexRoutingTable.Builder indexRoutingTable = IndexRoutingTable.builder(index)
.addShard(TestShardRouting.newShardRouting(new ShardId(index, 0), "node1", primaryOnNode1, ShardRoutingState.STARTED))
.addShard(TestShardRouting.newShardRouting(new ShardId(index, 0), "node2", primaryOnNode1 == false, ShardRoutingState.STARTED));
CheckShrinkReadyStep step = new CheckShrinkReadyStep(randomStepKey(), randomStepKey());
assertAllocateStatus(
index,
1,
0,
step,
existingSettings,
node1Settings,
node2Settings,
indexRoutingTable,
new ClusterStateWaitStep.Result(true, null)
);
}
public void testConditionNotMetDueToRelocation() {
Index index = new Index(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20));
Map<String, String> requires = AllocateActionTests.randomAllocationRoutingMap(1, 5);
Settings.Builder existingSettings = Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current())
.put(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_PREFIX + "._id", "node1")
.put(IndexMetadata.SETTING_INDEX_UUID, index.getUUID());
Settings.Builder expectedSettings = Settings.builder();
Settings.Builder node1Settings = Settings.builder();
Settings.Builder node2Settings = Settings.builder();
requires.forEach((k, v) -> {
existingSettings.put(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_SETTING.getKey() + k, v);
expectedSettings.put(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_SETTING.getKey() + k, v);
node1Settings.put(Node.NODE_ATTRIBUTES.getKey() + k, v);
});
boolean primaryOnNode1 = randomBoolean();
ShardRouting shardOnNode1 = TestShardRouting.newShardRouting(
new ShardId(index, 0),
"node1",
primaryOnNode1,
ShardRoutingState.STARTED
);
shardOnNode1 = shardOnNode1.relocate("node3", 230);
IndexRoutingTable.Builder indexRoutingTable = IndexRoutingTable.builder(index)
.addShard(shardOnNode1)
.addShard(TestShardRouting.newShardRouting(new ShardId(index, 0), "node2", primaryOnNode1 == false, ShardRoutingState.STARTED));
CheckShrinkReadyStep step = new CheckShrinkReadyStep(randomStepKey(), randomStepKey());
assertAllocateStatus(
index,
1,
0,
step,
existingSettings,
node1Settings,
node2Settings,
indexRoutingTable,
new ClusterStateWaitStep.Result(false, new CheckShrinkReadyStep.Info("node1", 1, 1))
);
}
public void testExecuteAllocateNotComplete() throws Exception {
Index index = new Index(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20));
Map<String, String> requires = AllocateActionTests.randomAllocationRoutingMap(1, 5);
Settings.Builder existingSettings = Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current())
.put(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_PREFIX + "._id", "node1")
.put(IndexMetadata.SETTING_INDEX_UUID, index.getUUID());
Settings.Builder expectedSettings = Settings.builder();
Settings.Builder node1Settings = Settings.builder();
Settings.Builder node2Settings = Settings.builder();
requires.forEach((k, v) -> {
existingSettings.put(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_SETTING.getKey() + k, v);
expectedSettings.put(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_SETTING.getKey() + k, v);
node1Settings.put(Node.NODE_ATTRIBUTES.getKey() + k, v);
});
IndexRoutingTable.Builder indexRoutingTable = IndexRoutingTable.builder(index)
.addShard(TestShardRouting.newShardRouting(new ShardId(index, 0), "node1", true, ShardRoutingState.STARTED))
.addShard(TestShardRouting.newShardRouting(new ShardId(index, 1), "node2", true, ShardRoutingState.STARTED));
CheckShrinkReadyStep step = createRandomInstance();
assertAllocateStatus(
index,
2,
0,
step,
existingSettings,
node1Settings,
node2Settings,
indexRoutingTable,
new ClusterStateWaitStep.Result(false, new CheckShrinkReadyStep.Info("node1", 2, 1))
);
}
public void testExecuteAllocateNotCompleteOnlyOneCopyAllocated() throws Exception {
Index index = new Index(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20));
Map<String, String> requires = AllocateActionTests.randomAllocationRoutingMap(1, 5);
Settings.Builder existingSettings = Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current())
.put(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_PREFIX + "._id", "node1")
.put(IndexMetadata.SETTING_INDEX_UUID, index.getUUID());
Settings.Builder expectedSettings = Settings.builder();
Settings.Builder node1Settings = Settings.builder();
Settings.Builder node2Settings = Settings.builder();
requires.forEach((k, v) -> {
existingSettings.put(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_SETTING.getKey() + k, v);
expectedSettings.put(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_SETTING.getKey() + k, v);
node1Settings.put(Node.NODE_ATTRIBUTES.getKey() + k, v);
});
boolean primaryOnNode1 = randomBoolean();
IndexRoutingTable.Builder indexRoutingTable = IndexRoutingTable.builder(index)
.addShard(TestShardRouting.newShardRouting(new ShardId(index, 0), "node1", primaryOnNode1, ShardRoutingState.STARTED))
.addShard(TestShardRouting.newShardRouting(new ShardId(index, 0), "node2", primaryOnNode1 == false, ShardRoutingState.STARTED));
CheckShrinkReadyStep step = new CheckShrinkReadyStep(randomStepKey(), randomStepKey());
assertAllocateStatus(
index,
2,
0,
step,
existingSettings,
node1Settings,
node2Settings,
indexRoutingTable,
new ClusterStateWaitStep.Result(false, new CheckShrinkReadyStep.Info("node1", 2, 1))
);
}
public void testExecuteAllocateReplicaUnassigned() {
Index index = new Index(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20));
Map<String, String> requires = AllocateActionTests.randomAllocationRoutingMap(1, 5);
Settings.Builder existingSettings = Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current())
.put(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_PREFIX + "._id", "node1")
.put(IndexMetadata.SETTING_INDEX_UUID, index.getUUID());
Settings.Builder expectedSettings = Settings.builder();
Settings.Builder node1Settings = Settings.builder();
Settings.Builder node2Settings = Settings.builder();
requires.forEach((k, v) -> {
existingSettings.put(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_SETTING.getKey() + k, v);
expectedSettings.put(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_SETTING.getKey() + k, v);
node1Settings.put(Node.NODE_ATTRIBUTES.getKey() + k, v);
});
IndexRoutingTable.Builder indexRoutingTable = IndexRoutingTable.builder(index)
.addShard(TestShardRouting.newShardRouting(new ShardId(index, 0), "node1", true, ShardRoutingState.STARTED))
.addShard(
shardRoutingBuilder(new ShardId(index, 0), null, false, ShardRoutingState.UNASSIGNED).withUnassignedInfo(
randomUnassignedInfo("the shard is intentionally unassigned")
).build()
);
CheckShrinkReadyStep step = createRandomInstance();
assertAllocateStatus(
index,
1,
1,
step,
existingSettings,
node1Settings,
node2Settings,
indexRoutingTable,
new ClusterStateWaitStep.Result(true, null)
);
}
/**
* this tests the scenario where
*
* PUT index
* {
* "settings": {
* "number_of_replicas": 0,
* "number_of_shards": 1
* }
* }
*
* PUT index/_settings
* {
* "number_of_replicas": 1,
* "index.routing.allocation.include._id": "{node-name}"
* }
*/
public void testExecuteReplicasNotAllocatedOnSingleNode() {
Index index = new Index(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20));
Map<String, String> requires = Map.of("_id", "node1");
Settings.Builder existingSettings = Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current())
.put(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_PREFIX + "._id", "node1")
.put(IndexMetadata.SETTING_INDEX_UUID, index.getUUID());
Settings.Builder expectedSettings = Settings.builder();
Settings.Builder node1Settings = Settings.builder();
Settings.Builder node2Settings = Settings.builder();
requires.forEach((k, v) -> { expectedSettings.put(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_SETTING.getKey() + k, v); });
IndexRoutingTable.Builder indexRoutingTable = IndexRoutingTable.builder(index)
.addShard(TestShardRouting.newShardRouting(new ShardId(index, 0), "node1", true, ShardRoutingState.STARTED))
.addShard(TestShardRouting.newShardRouting(new ShardId(index, 1), "node1", false, ShardRoutingState.STARTED))
.addShard(TestShardRouting.newShardRouting(new ShardId(index, 1), "node2", true, ShardRoutingState.STARTED))
.addShard(
shardRoutingBuilder(new ShardId(index, 0), null, false, ShardRoutingState.UNASSIGNED).withUnassignedInfo(
new UnassignedInfo(UnassignedInfo.Reason.REPLICA_ADDED, "no attempt")
).build()
);
CheckShrinkReadyStep step = createRandomInstance();
assertAllocateStatus(
index,
2,
1,
step,
existingSettings,
node1Settings,
node2Settings,
indexRoutingTable,
new ClusterStateWaitStep.Result(true, null)
);
}
public void testExecuteReplicasButCopiesNotPresent() {
Index index = new Index(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20));
Map<String, String> requires = Map.of("_id", "node1");
Settings.Builder existingSettings = Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current())
.put(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_PREFIX + "._id", "node1")
.put(IndexMetadata.SETTING_INDEX_UUID, index.getUUID());
Settings.Builder expectedSettings = Settings.builder();
Settings.Builder node1Settings = Settings.builder();
Settings.Builder node2Settings = Settings.builder();
requires.forEach((k, v) -> { expectedSettings.put(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_SETTING.getKey() + k, v); });
IndexRoutingTable.Builder indexRoutingTable = IndexRoutingTable.builder(index)
.addShard(TestShardRouting.newShardRouting(new ShardId(index, 0), "node1", true, ShardRoutingState.STARTED))
.addShard(TestShardRouting.newShardRouting(new ShardId(index, 1), "node2", false, ShardRoutingState.STARTED))
.addShard(TestShardRouting.newShardRouting(new ShardId(index, 1), "node3", true, ShardRoutingState.STARTED))
.addShard(
shardRoutingBuilder(new ShardId(index, 0), null, false, ShardRoutingState.UNASSIGNED).withUnassignedInfo(
new UnassignedInfo(UnassignedInfo.Reason.REPLICA_ADDED, "no attempt")
).build()
);
CheckShrinkReadyStep step = createRandomInstance();
assertAllocateStatus(
index,
2,
1,
step,
existingSettings,
node1Settings,
node2Settings,
indexRoutingTable,
new ClusterStateWaitStep.Result(false, new CheckShrinkReadyStep.Info("node1", 2, 1))
);
}
public void testExecuteIndexMissing() throws Exception {
Index index = new Index(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20));
ProjectState state = projectStateWithEmptyProject();
CheckShrinkReadyStep step = createRandomInstance();
ClusterStateWaitStep.Result actualResult = step.isConditionMet(index, state);
assertFalse(actualResult.complete());
assertNull(actualResult.informationContext());
}
public void testStepCompletableIfAllShardsActive() {
for (SingleNodeShutdownMetadata.Type type : REMOVE_SHUTDOWN_TYPES) {
Index index = new Index(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20));
Map<String, String> requires = AllocateActionTests.randomAllocationRoutingMap(1, 5);
Settings.Builder existingSettings = Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current())
.put(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_PREFIX + "._id", "node1")
.put(IndexMetadata.SETTING_INDEX_UUID, index.getUUID());
Settings.Builder expectedSettings = Settings.builder();
Settings.Builder node1Settings = Settings.builder();
Settings.Builder node2Settings = Settings.builder();
requires.forEach((k, v) -> {
existingSettings.put(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_SETTING.getKey() + k, v);
expectedSettings.put(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_SETTING.getKey() + k, v);
node1Settings.put(Node.NODE_ATTRIBUTES.getKey() + k, v);
});
IndexRoutingTable.Builder indexRoutingTable = IndexRoutingTable.builder(index)
.addShard(TestShardRouting.newShardRouting(new ShardId(index, 0), "node1", true, ShardRoutingState.STARTED));
CheckShrinkReadyStep step = createRandomInstance();
IndexMetadata indexMetadata = IndexMetadata.builder(index.getName())
.settings(existingSettings)
.numberOfShards(1)
.numberOfReplicas(1)
.build();
Map<String, IndexMetadata> indices = Map.of(index.getName(), indexMetadata);
var project = ProjectMetadata.builder(randomProjectIdOrDefault()).indices(indices).build();
final String targetNodeName = type == SingleNodeShutdownMetadata.Type.REPLACE ? randomAlphaOfLengthBetween(10, 20) : null;
final TimeValue grace = type == SIGTERM ? randomTimeValue() : null;
ProjectState state = ClusterState.builder(ClusterName.DEFAULT)
.metadata(
Metadata.builder()
.put(project)
.putCustom(
NodesShutdownMetadata.TYPE,
new NodesShutdownMetadata(
Map.of(
"node1",
SingleNodeShutdownMetadata.builder()
.setType(type)
.setStartedAtMillis(randomNonNegativeLong())
.setReason("test")
.setNodeId("node1")
.setNodeEphemeralId("node1")
.setTargetNodeName(targetNodeName)
.setGracePeriod(grace)
.build()
)
)
)
)
.nodes(
DiscoveryNodes.builder()
.add(
DiscoveryNodeUtils.builder("node1")
.applySettings(
Settings.builder().put(node1Settings.build()).put(Node.NODE_NAME_SETTING.getKey(), "node1").build()
)
.address(new TransportAddress(TransportAddress.META_ADDRESS, 9200))
.build()
)
.add(
DiscoveryNodeUtils.builder("node2")
.applySettings(
Settings.builder().put(node2Settings.build()).put(Node.NODE_NAME_SETTING.getKey(), "node2").build()
)
.address(new TransportAddress(TransportAddress.META_ADDRESS, 9201))
.build()
)
)
.putRoutingTable(project.id(), RoutingTable.builder().add(indexRoutingTable).build())
.build()
.projectState(project.id());
assertTrue(step.isCompletable());
ClusterStateWaitStep.Result actualResult = step.isConditionMet(index, state);
assertTrue(actualResult.complete());
assertTrue(step.isCompletable());
}
}
public void testStepBecomesUncompletable() {
for (SingleNodeShutdownMetadata.Type type : REMOVE_SHUTDOWN_TYPES) {
Index index = new Index(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20));
Map<String, String> requires = AllocateActionTests.randomAllocationRoutingMap(1, 5);
Settings.Builder existingSettings = Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current())
.put(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_PREFIX + "._id", "node1")
.put(IndexMetadata.SETTING_INDEX_UUID, index.getUUID());
Settings.Builder expectedSettings = Settings.builder();
Settings.Builder node1Settings = Settings.builder();
Settings.Builder node2Settings = Settings.builder();
requires.forEach((k, v) -> {
existingSettings.put(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_SETTING.getKey() + k, v);
expectedSettings.put(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_SETTING.getKey() + k, v);
node1Settings.put(Node.NODE_ATTRIBUTES.getKey() + k, v);
});
IndexRoutingTable.Builder indexRoutingTable = IndexRoutingTable.builder(index)
.addShard(TestShardRouting.newShardRouting(new ShardId(index, 0), "node1", true, ShardRoutingState.INITIALIZING));
CheckShrinkReadyStep step = createRandomInstance();
IndexMetadata indexMetadata = IndexMetadata.builder(index.getName())
.settings(existingSettings)
.numberOfShards(1)
.numberOfReplicas(1)
.build();
Map<String, IndexMetadata> indices = Map.of(index.getName(), indexMetadata);
var project = ProjectMetadata.builder(randomProjectIdOrDefault()).indices(indices).build();
final String targetNodeName = type == SingleNodeShutdownMetadata.Type.REPLACE ? randomAlphaOfLengthBetween(10, 20) : null;
final TimeValue grace = type == SIGTERM ? randomTimeValue() : null;
ProjectState state = ClusterState.builder(ClusterName.DEFAULT)
.metadata(
Metadata.builder()
.put(project)
.putCustom(
NodesShutdownMetadata.TYPE,
new NodesShutdownMetadata(
Map.of(
"node1",
SingleNodeShutdownMetadata.builder()
.setType(type)
.setStartedAtMillis(randomNonNegativeLong())
.setReason("test")
.setNodeId("node1")
.setNodeEphemeralId("node1")
.setTargetNodeName(targetNodeName)
.setGracePeriod(grace)
.build()
)
)
)
)
.nodes(
DiscoveryNodes.builder()
.add(
DiscoveryNodeUtils.builder("node1")
.applySettings(
Settings.builder().put(node1Settings.build()).put(Node.NODE_NAME_SETTING.getKey(), "node1").build()
)
.address(new TransportAddress(TransportAddress.META_ADDRESS, 9200))
.build()
)
.add(
DiscoveryNodeUtils.builder("node2")
.applySettings(
Settings.builder().put(node2Settings.build()).put(Node.NODE_NAME_SETTING.getKey(), "node2").build()
)
.address(new TransportAddress(TransportAddress.META_ADDRESS, 9201))
.build()
)
)
.putRoutingTable(project.id(), RoutingTable.builder().add(indexRoutingTable).build())
.build()
.projectState(project.id());
assertTrue(step.isCompletable());
ClusterStateWaitStep.Result actualResult = step.isConditionMet(index, state);
assertFalse(actualResult.complete());
assertThat(
Strings.toString(actualResult.informationContext()),
containsString("node with id [node1] is currently marked as shutting down")
);
assertFalse(step.isCompletable());
}
}
private void assertAllocateStatus(
Index index,
int shards,
int replicas,
CheckShrinkReadyStep step,
Settings.Builder existingSettings,
Settings.Builder node1Settings,
Settings.Builder node2Settings,
IndexRoutingTable.Builder indexRoutingTable,
ClusterStateWaitStep.Result expectedResult
) {
IndexMetadata indexMetadata = IndexMetadata.builder(index.getName())
.settings(existingSettings)
.numberOfShards(shards)
.numberOfReplicas(replicas)
.build();
Map<String, IndexMetadata> indices = Map.of(index.getName(), indexMetadata);
var project = ProjectMetadata.builder(randomProjectIdOrDefault()).indices(indices).build();
ProjectState state = ClusterState.builder(ClusterState.EMPTY_STATE)
.putProjectMetadata(project)
.nodes(
DiscoveryNodes.builder()
.add(
DiscoveryNodeUtils.builder("node1")
.applySettings(
Settings.builder().put(node1Settings.build()).put(Node.NODE_NAME_SETTING.getKey(), "node1").build()
)
.address(new TransportAddress(TransportAddress.META_ADDRESS, 9200))
.build()
)
.add(
DiscoveryNodeUtils.builder("node2")
.applySettings(
Settings.builder().put(node2Settings.build()).put(Node.NODE_NAME_SETTING.getKey(), "node2").build()
)
.address(new TransportAddress(TransportAddress.META_ADDRESS, 9201))
.build()
)
)
.putRoutingTable(project.id(), RoutingTable.builder().add(indexRoutingTable).build())
.build()
.projectState(project.id());
ClusterStateWaitStep.Result actualResult = step.isConditionMet(index, state);
assertEquals(expectedResult.complete(), actualResult.complete());
assertEquals(expectedResult.informationContext(), actualResult.informationContext());
}
public static UnassignedInfo randomUnassignedInfo(String message) {
UnassignedInfo.Reason reason = randomFrom(UnassignedInfo.Reason.values());
String lastAllocatedNodeId = null;
boolean delayed = false;
if (reason == UnassignedInfo.Reason.NODE_LEFT || reason == UnassignedInfo.Reason.NODE_RESTARTING) {
if (randomBoolean()) {
delayed = true;
}
lastAllocatedNodeId = randomAlphaOfLength(10);
}
int failedAllocations = reason == UnassignedInfo.Reason.ALLOCATION_FAILED ? 1 : 0;
return new UnassignedInfo(
reason,
message,
null,
failedAllocations,
System.nanoTime(),
System.currentTimeMillis(),
delayed,
UnassignedInfo.AllocationStatus.NO_ATTEMPT,
Set.of(),
lastAllocatedNodeId
);
}
}
| CheckShrinkReadyStepTests |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/SpringAiChatEndpointBuilderFactory.java | {
"start": 47648,
"end": 47995
} | class ____ extends AbstractEndpointBuilder implements SpringAiChatEndpointBuilder, AdvancedSpringAiChatEndpointBuilder {
public SpringAiChatEndpointBuilderImpl(String path) {
super(componentName, path);
}
}
return new SpringAiChatEndpointBuilderImpl(path);
}
} | SpringAiChatEndpointBuilderImpl |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/api/AbstractFloatArrayAssert.java | {
"start": 998,
"end": 73254
} | class ____<SELF extends AbstractFloatArrayAssert<SELF>>
extends AbstractArrayAssert<SELF, float[], Float> {
// TODO reduce the visibility of the fields annotated with @VisibleForTesting
protected FloatArrays arrays = FloatArrays.instance();
protected AbstractFloatArrayAssert(float[] actual, Class<?> selfType) {
super(actual, selfType);
}
/** {@inheritDoc} */
@Override
public void isNullOrEmpty() {
arrays.assertNullOrEmpty(info, actual);
}
/** {@inheritDoc} */
@Override
public void isEmpty() {
arrays.assertEmpty(info, actual);
}
/** {@inheritDoc} */
@Override
public SELF isNotEmpty() {
arrays.assertNotEmpty(info, actual);
return myself;
}
/**
* {@inheritDoc}
* <p>
* Examples:
* <pre><code class='java'> // assertion will pass
* assertThat(new float[] { 1.0f, 2.0f, 3.0f }).hasSize(3);
*
* // assertions will fail
* assertThat(new float[] { 1.0f, 2.0f, 1.0f }).hasSize(2);</code></pre>
*/
@Override
public SELF hasSize(int expected) {
arrays.assertHasSize(info, actual, expected);
return myself;
}
/**
* Verifies that the number of values in the actual array is greater than the given boundary.
* <p>
* Example:
* <pre><code class='java'> // assertion will pass
* assertThat(new float[] { 1.0f, 2.0f }).hasSizeGreaterThan(1);
*
* // assertion will fail
* assertThat(new float[] { 1.0f }).hasSizeGreaterThan(1);</code></pre>
*
* @param boundary the given value to compare the actual size to.
* @return {@code this} assertion object.
* @throws AssertionError if the number of values of the actual array is not greater than the boundary.
* @since 3.12.0
*/
@Override
public SELF hasSizeGreaterThan(int boundary) {
arrays.assertHasSizeGreaterThan(info, actual, boundary);
return myself;
}
/**
* Verifies that the number of values in the actual array is greater than or equal to the given boundary.
* <p>
* Example:
* <pre><code class='java'> // assertion will pass
* assertThat(new float[] { 1.0f, 2.0f }).hasSizeGreaterThanOrEqualTo(1)
* .hasSizeGreaterThanOrEqualTo(2);
*
* // assertion will fail
* assertThat(new float[] { 1.0f }).hasSizeGreaterThanOrEqualTo(2);</code></pre>
*
* @param boundary the given value to compare the actual size to.
* @return {@code this} assertion object.
* @throws AssertionError if the number of values of the actual array is not greater than or equal to the boundary.
* @since 3.12.0
*/
@Override
public SELF hasSizeGreaterThanOrEqualTo(int boundary) {
arrays.assertHasSizeGreaterThanOrEqualTo(info, actual, boundary);
return myself;
}
/**
* Verifies that the number of values in the actual array is less than the given boundary.
* <p>
* Example:
* <pre><code class='java'> // assertion will pass
* assertThat(new float[] { 1.0f, 2.0f }).hasSizeLessThan(3);
*
* // assertion will fail
* assertThat(new float[] { 1.0f, 2.0f }).hasSizeLessThan(1);</code></pre>
*
* @param boundary the given value to compare the actual size to.
* @return {@code this} assertion object.
* @throws AssertionError if the number of values of the actual array is not less than the boundary.
* @since 3.12.0
*/
@Override
public SELF hasSizeLessThan(int boundary) {
arrays.assertHasSizeLessThan(info, actual, boundary);
return myself;
}
/**
* Verifies that the number of values in the actual array is less than or equal to the given boundary.
* <p>
* Example:
* <pre><code class='java'> // assertion will pass
* assertThat(new byte[] { 1.0f, 2.0f }).hasSizeLessThanOrEqualTo(3)
* .hasSizeLessThanOrEqualTo(2);
*
* // assertion will fail
* assertThat(new byte[] { 1.0f, 2.0f }).hasSizeLessThanOrEqualTo(1);</code></pre>
*
* @param boundary the given value to compare the actual size to.
* @return {@code this} assertion object.
* @throws AssertionError if the number of values of the actual array is not less than or equal to the boundary.
* @since 3.12.0
*/
@Override
public SELF hasSizeLessThanOrEqualTo(int boundary) {
arrays.assertHasSizeLessThanOrEqualTo(info, actual, boundary);
return myself;
}
/**
* Verifies that the number of values in the actual group is between the given boundaries (inclusive).
* <p>
* Example:
* <pre><code class='java'> // assertion will pass
* assertThat(new byte[] { 1.0f, 2.0f }).hasSizeBetween(1, 3)
* .hasSizeBetween(2, 2);
*
* // assertion will fail
* assertThat(new byte[] { 1.0f, 2.0f }).hasSizeBetween(4, 5);</code></pre>
*
* @param lowerBoundary the lower boundary compared to which actual size should be greater than or equal to.
* @param higherBoundary the higher boundary compared to which actual size should be less than or equal to.
* @return {@code this} assertion object.
* @throws AssertionError if the number of values of the actual array is not between the boundaries.
* @since 3.12.0
*/
@Override
public SELF hasSizeBetween(int lowerBoundary, int higherBoundary) {
arrays.assertHasSizeBetween(info, actual, lowerBoundary, higherBoundary);
return myself;
}
/**
* Verifies that the actual group has the same size as given {@link Iterable}.
* <p>
* Examples:
* <pre><code class='java'> // assertion will pass
* assertThat(new float[] { 1.0f, 2.0f, 3.0f }).hasSameSizeAs(Arrays.asList(1, 2, 3));
*
* // assertion will fail
* assertThat(new float[] { 1.0f, 2.0f, 1.0f }).hasSameSizeAs(Arrays.asList(1, 2));</code></pre>
*/
@Override
public SELF hasSameSizeAs(Iterable<?> other) {
arrays.assertHasSameSizeAs(info, actual, other);
return myself;
}
/**
* Verifies that the actual array contains the given values, in any order.
* <p>
* If you want to set a precision for the comparison either use {@link #contains(float[], Offset)}
* or {@link #usingComparatorWithPrecision(Float)} before calling the assertion.
* <p>
* Examples :
* <pre><code class='java'> float[] values = new float[] {1.0f, 2.0f, 3.0f};
*
* // assertions will pass
* assertThat(values).contains(1.0f, 3.0f, 2.0f)
* .contains(3.0f, 1.0f)
* .usingComparatorWithPrecision(0.5f)
* .contains(1.1f, 2.1f);
*
* // assertions will fail
* assertThat(values).contains(1.0f, 4.0f);
* assertThat(values).usingComparatorWithPrecision(0.01f)
* .contains(1.1f, 2.1f);</code></pre>
*
* @param values the given values.
* @return {@code this} assertion object.
* @throws NullPointerException if the given argument is {@code null}.
* @throws AssertionError if the given argument is an empty array and the actual array is not empty.
* @throws AssertionError if the actual array is {@code null}.
* @throws AssertionError if the actual array does not contain the given values.
*/
  public SELF contains(float... values) {
    // membership check, order ignored; honours any comparator set via usingComparatorWithPrecision
    arrays.assertContains(info, actual, values);
    return myself;
  }
/**
* Verifies that the actual array contains the values of the given array, in any order.
* <p>
* Example:
* <pre><code class='java'> // assertion will pass
   * assertThat(new float[] { 1.0f, 2.0f }).contains(new Float[] { 1.0f, 2.0f });
* assertThat(new float[] { 1.0f, 2.0f }).contains(new Float[] { 1.0f });
*
* // assertion will fail
* assertThat(new float[] { 1.0f, 2.0f }).contains(new Float[] { 3.0f });</code></pre>
*
* @param values the given values.
* @return {@code this} assertion object.
* @throws NullPointerException if the given argument is {@code null}.
* @throws AssertionError if the given argument is an empty array and the actual array is not empty.
* @throws AssertionError if the actual array is {@code null}.
* @throws AssertionError if the actual array does not contain the given values.
* @since 3.19.0
*/
public SELF contains(Float[] values) {
requireNonNullParameter(values, "values");
arrays.assertContains(info, actual, toPrimitiveFloatArray(values));
return myself;
}
/**
* Verifies that the actual array contains the given values, in any order,
* the comparison is done at the given precision/offset set with {@link Assertions#withPrecision(Float)}.
* <p>
* Examples :
* <pre><code class='java'> float[] values = new float[] {1.0f, 2.0f, 3.0f};
*
* // assertion will pass
* assertThat(values).contains(new float[] { 1.01f, 3.01f, 2.0f }, withPrecision(0.02f));
*
* // assertions will fail
* assertThat(values).contains(new float[] { 1.0f, 4.0f }, withPrecision(0.5f));
* assertThat(values).contains(new float[] { 4.0f, 7.0f }, withPrecision(2f));</code></pre>
*
* @param values the given values.
* @param precision the precision under which the values may vary.
* @return {@code this} assertion object.
* @throws NullPointerException if the given argument is {@code null}.
* @throws AssertionError if the given argument is an empty array and the actual array is not empty.
* @throws AssertionError if the actual array is {@code null}.
* @throws AssertionError if the actual array does not contain the given values.
*/
public SELF contains(float[] values, Offset<Float> precision) {
return usingComparatorWithPrecision(precision.value).contains(values);
}
/**
* Verifies that the actual array contains the values of the given array, in any order,
* the comparison is done at the given precision/offset set with {@link Assertions#withPrecision(Float)}.
* <p>
* Examples :
* <pre><code class='java'> float[] values = new float[] { 1.0f, 2.0f, 3.0f };
*
* // assertion will pass
* assertThat(values).contains(new Float[] { 1.01f, 3.01f, 2.0f }, withPrecision(0.02f));
*
* // assertions will fail
* assertThat(values).contains(new Float[] { 1.0f, 4.0f }, withPrecision(0.5f));
* assertThat(values).contains(new Float[] { 4.0f, 7.0f }, withPrecision(2f));</code></pre>
*
* @param values the given values.
* @param precision the precision under which the values may vary.
* @return {@code this} assertion object.
* @throws NullPointerException if the given argument is {@code null}.
* @throws AssertionError if the given argument is an empty array and the actual array is not empty.
* @throws AssertionError if the actual array is {@code null}.
* @throws AssertionError if the actual array does not contain the given values.
* @since 3.19.0
*/
public SELF contains(Float[] values, Offset<Float> precision) {
return usingComparatorWithPrecision(precision.value).contains(toPrimitiveFloatArray(values));
}
/**
* Verifies that the actual array contains only the given values and nothing else, in any order.
* <p>
* If you want to set a precision for the comparison either use {@link #containsOnly(float[], Offset)}
* or {@link #usingComparatorWithPrecision(Float)} before calling the assertion.
* <p>
* Examples :
   * <pre><code class='java'> float[] values = new float[] {1.0f, 2.0f, 3.0f};
*
* // assertions will pass
* assertThat(values).containsOnly(1.0f, 2.0f, 3.0f)
* .containsOnly(2.0f, 3.0f, 1.0f)
* .usingComparatorWithPrecision(0.5f)
* .containsOnly(1.1f, 3.1f, 2.1f);
* // assertions will fail
* assertThat(values).containsOnly(1.0f, 4.0f, 2.0f, 3.0f);
* assertThat(values).containsOnly(4.0f, 7.0f);
* assertThat(values).containsOnly(1.1f, 2.1f, 3.1f);
* assertThat(values).usingComparatorWithPrecision(0.01f)
* .containsOnly(1.1f, 2.1f, 3.1f);</code></pre>
*
* @param values the given values.
* @return {@code this} assertion object.
* @throws NullPointerException if the given argument is {@code null}.
* @throws AssertionError if the given argument is an empty array and the actual array is not empty.
* @throws AssertionError if the actual array is {@code null}.
* @throws AssertionError if the actual array does not contain the given values, i.e. the actual array contains some
* or none of the given values, or the actual array contains more values than the given ones.
*/
  public SELF containsOnly(float... values) {
    // actual must contain each given value and nothing else (order ignored)
    arrays.assertContainsOnly(info, actual, values);
    return myself;
  }
/**
* Verifies that the actual array contains only the values of the given array and nothing else, in any order.
* <p>
* Example:
* <pre><code class='java'> // assertions will pass
* assertThat(new float[] { 1.0f, 2.0f }).containsOnly(new Float[] { 1.0f, 2.0f });
* assertThat(new float[] { 2.0f, 1.0f }).containsOnly(new Float[] { 1.0f, 2.0f });
* assertThat(new float[] { 1.0f, 1.0f, 2.0f }).containsOnly(new Float[] { 1.0f, 2.0f });
*
* // assertions will fail
* assertThat(new float[] { 1.0f, 2.0f }).containsOnly(new Float[] { 2.0f });
* assertThat(new float[] { 1.0f }).containsOnly(new Float[] { 1.0f, 2.0f });</code></pre>
*
* @param values the given values.
* @return {@code this} assertion object.
* @throws NullPointerException if the given argument is {@code null}.
* @throws AssertionError if the given argument is an empty array and the actual array is not empty.
* @throws AssertionError if the actual array is {@code null}.
* @throws AssertionError if the actual array does not contain the given values, i.e. the actual array contains some
* or none of the given values, or the actual array contains more values than the given ones.
* @since 3.19.0
*/
public SELF containsOnly(Float[] values) {
requireNonNullParameter(values, "values");
arrays.assertContainsOnly(info, actual, toPrimitiveFloatArray(values));
return myself;
}
/**
* Verifies that the actual array contains only the given values and nothing else, in any order.
* The comparison is done at the given precision/offset set with {@link Assertions#withPrecision(Float)}.
* <p>
* Examples :
* <pre><code class='java'> float[] values = new float[] {1.0f, 2.0f, 3.0f};
*
* // assertion will pass
* assertThat(values).containsOnly(new float[] {1.0f, 2.0f, 3.0f }, withPrecision(0.00001f))
   *                   .containsOnly(new float[] {2.0f, 3.0f, 0.7f}, withPrecision(0.5f));
*
* // assertions will fail
* assertThat(values).containsOnly(new float[] {1.0f, 4.0f, 2.0f, 3.0f}, withPrecision(0.5f));
* assertThat(values).containsOnly(new float[] {4.0f, 7.0f}, withPrecision(0.2f));</code></pre>
*
* @param values the given values.
* @param precision the precision under which the values may vary.
* @return {@code this} assertion object.
* @throws NullPointerException if the given argument is {@code null}.
* @throws AssertionError if the given argument is an empty array and the actual array is not empty.
* @throws AssertionError if the actual array is {@code null}.
* @throws AssertionError if the actual array does not contain the given values, i.e. the actual array contains some
* or none of the given values, or the actual array contains more values than the given ones.
*/
public SELF containsOnly(float[] values, Offset<Float> precision) {
return usingComparatorWithPrecision(precision.value).containsOnly(values);
}
/**
* Verifies that the actual array contains only the values of the given array and nothing else, in any order.
* The comparison is done at the given precision/offset set with {@link Assertions#withPrecision(Float)}.
* <p>
* Examples :
* <pre><code class='java'> float[] values = new float[] { 1.0f, 2.0f, 3.0f };
*
* // assertion will pass
* assertThat(values).containsOnly(new Float[] { 1.0f, 2.0f, 3.0f }, withPrecision(0.00001f))
   *                   .containsOnly(new Float[] { 2.0f, 3.0f, 0.7f }, withPrecision(0.5f));
*
* // assertions will fail
* assertThat(values).containsOnly(new Float[] { 1.0f, 4.0f, 2.0f, 3.0f }, withPrecision(0.5f));
* assertThat(values).containsOnly(new Float[] { 4.0f, 7.0f }, withPrecision(0.2f));</code></pre>
*
* @param values the given values.
* @param precision the precision under which the values may vary.
* @return {@code this} assertion object.
* @throws NullPointerException if the given argument is {@code null}.
* @throws AssertionError if the given argument is an empty array and the actual array is not empty.
* @throws AssertionError if the actual array is {@code null}.
* @throws AssertionError if the actual array does not contain the given values, i.e. the actual array contains some
* or none of the given values, or the actual array contains more values than the given ones.
* @since 3.19.0
*/
public SELF containsOnly(Float[] values, Offset<Float> precision) {
return usingComparatorWithPrecision(precision.value).containsOnly(toPrimitiveFloatArray(values));
}
/**
* Verifies that the actual array contains the given values only once.
* <p>
* If you want to set a precision for the comparison either use {@link #containsOnlyOnce(float[], Offset)}
* or {@link #usingComparatorWithPrecision(Float)} before calling the assertion.
* <p>
* Examples :
* <pre><code class='java'> // assertions will pass
* assertThat(new float[] { 1.0f, 2.0f, 3.0f }).containsOnlyOnce(1.0f, 2.0f)
* .usingComparatorWithPrecision(0.5f)
* .containsOnlyOnce(1.1f, 3.1f, 2.1f);
*
* // assertions will fail
* assertThat(new float[] { 1.0f, 2.0f, 1.0f }).containsOnlyOnce(1.0f);
* assertThat(new float[] { 1.0f, 2.0f, 1.0f }).containsOnlyOnce(1.0f, 2.0f);
* assertThat(new float[] { 1.0f, 2.0f, 3.0f }).containsOnlyOnce(4.0f);
* assertThat(new float[] { 1.0f, 2.0f, 3.0f }).usingComparatorWithPrecision(0.05f)
* .containsOnlyOnce(1.1f, 2.1f);</code></pre>
*
* @param values the given values.
* @return {@code this} assertion object.
* @throws NullPointerException if the given argument is {@code null}.
* @throws AssertionError if the given argument is an empty array and the actual array is not empty.
* @throws AssertionError if the actual array is {@code null}.
* @throws AssertionError if the actual group does not contain the given values, i.e. the actual group contains some
* or none of the given values, or the actual group contains more than once these values.
*/
  public SELF containsOnlyOnce(float... values) {
    // each given value must appear exactly once in the actual array
    arrays.assertContainsOnlyOnce(info, actual, values);
    return myself;
  }
/**
* Verifies that the actual array contains the values of the given array only once.
* <p>
* Examples :
* <pre><code class='java'> // assertion will pass
* assertThat(new float[] { 1.0f, 2.0f }).containsOnlyOnce(new Float[] { 1.0f, 2.0f });
*
* // assertions will fail
* assertThat(new float[] { 1.0f, 2.0f, 1.0f }).containsOnlyOnce(new Float[] { 1.0f });
* assertThat(new float[] { 1.0f }).containsOnlyOnce(new Float[] { 2.0f });
* assertThat(new float[] { 1.0f }).containsOnlyOnce(new Float[] { 1.0f, 2.0f });</code></pre>
*
* @param values the given values.
* @return {@code this} assertion object.
* @throws NullPointerException if the given argument is {@code null}.
* @throws AssertionError if the given argument is an empty array and the actual array is not empty.
* @throws AssertionError if the actual array is {@code null}.
* @throws AssertionError if the actual group does not contain the given values, i.e. the actual group contains some
* or none of the given values, or the actual group contains more than once these values.
* @since 3.19.0
*/
public SELF containsOnlyOnce(Float[] values) {
requireNonNullParameter(values, "values");
arrays.assertContainsOnlyOnce(info, actual, toPrimitiveFloatArray(values));
return myself;
}
/**
* Verifies that the actual array contains the given values only once.
* The comparison is done at the given precision/offset set with {@link Assertions#withPrecision(Float)}.
* <p>
* Examples :
* <pre><code class='java'> // assertion will pass
* assertThat(new float[] { 1.0f, 2.0f, 3.0f }).containsOnlyOnce(new float[] {1.1f, 2.0f}, withPrecision(0.2f));
*
* // assertions will fail
* assertThat(new float[] { 1.0f, 2.0f, 1.0f }).containsOnlyOnce(new float[] {1.05f}, withPrecision(0.1f));
* assertThat(new float[] { 1.0f, 2.0f, 3.0f }).containsOnlyOnce(new float[] {4.0f}, withPrecision(0.1f));
* assertThat(new float[] { 1.0f, 2.0f, 3.0f, 3.0f }).containsOnlyOnce(new float[] {0.1f, 0.9f, 2.0f, 3.11f, 4.0f, 5.0f}, withPrecision(0.2f));</code></pre>
*
* @param values the given values.
* @param precision the precision under which the values may vary.
* @return {@code this} assertion object.
* @throws NullPointerException if the given argument is {@code null}.
* @throws AssertionError if the given argument is an empty array and the actual array is not empty.
* @throws AssertionError if the actual array is {@code null}.
* @throws AssertionError if the actual group does not contain the given values, i.e. the actual group contains some
* or none of the given values, or the actual group contains more than once these values.
*/
public SELF containsOnlyOnce(float[] values, Offset<Float> precision) {
return usingComparatorWithPrecision(precision.value).containsOnlyOnce(values);
}
/**
* Verifies that the actual array contains the values of the given array only once.
* The comparison is done at the given precision/offset set with {@link Assertions#withPrecision(Float)}.
* <p>
* Examples :
* <pre><code class='java'> // assertion will pass
* assertThat(new float[] { 1.0f, 2.0f, 3.0f }).containsOnlyOnce(new Float[] { 1.1f, 2.0f }, withPrecision(0.2f));
*
* // assertions will fail
* assertThat(new float[] { 1.0f, 2.0f, 1.0f }).containsOnlyOnce(new Float[] { 1.05f }, withPrecision(0.1f));
* assertThat(new float[] { 1.0f, 2.0f, 3.0f }).containsOnlyOnce(new Float[] { 4.0f }, withPrecision(0.1f));
* assertThat(new float[] { 1.0f, 2.0f, 3.0f, 3.0f }).containsOnlyOnce(new Float[] { 0.1f, 0.9f, 2.0f, 3.11f, 4.0f, 5.0f }, withPrecision(0.2f));</code></pre>
*
* @param values the given values.
* @param precision the precision under which the values may vary.
* @return {@code this} assertion object.
* @throws NullPointerException if the given argument is {@code null}.
* @throws AssertionError if the given argument is an empty array and the actual array is not empty.
* @throws AssertionError if the actual array is {@code null}.
* @throws AssertionError if the actual group does not contain the given values, i.e. the actual group contains some
* or none of the given values, or the actual group contains more than once these values.
* @since 3.19.0
*/
public SELF containsOnlyOnce(Float[] values, Offset<Float> precision) {
return usingComparatorWithPrecision(precision.value).containsOnlyOnce(toPrimitiveFloatArray(values));
}
/**
* Verifies that the actual array contains the given sequence, without any other values between them.
* <p>
* If you want to set a precision for the comparison either use {@link #containsSequence(float[], Offset)}
* or {@link #usingComparatorWithPrecision(Float)} before calling the assertion.
* <p>
* Examples :
* <pre><code class='java'> float[] values = new float[] { 1.0f, 2.0f, 3.0f };
*
* // assertion will pass
* assertThat(values).containsSequence(1.0f, 2.0f)
* .containsSequence(1.0f, 2.0f, 3.0f)
* .containsSequence(2.0f, 3.0f)
* .usingComparatorWithPrecision(0.5f)
* .containsSequence(1.1f, 2.1f);
*
* // assertions will fail
* assertThat(values).containsSequence(1.0f, 3.0f);
* assertThat(values).containsSequence(4.0f, 7.0f);
* assertThat(values).usingComparatorWithPrecision(0.01f)
* .containsSequence(1.1f, 2.0f, 3.0f);</code></pre>
*
* @param sequence the sequence of values to look for.
* @return {@code this} assertion object.
* @throws AssertionError if the actual array is {@code null}.
* @throws AssertionError if the given array is {@code null}.
* @throws AssertionError if the given argument is an empty array and the actual array is not empty.
* @throws AssertionError if the actual array does not contain the given sequence.
*/
  public SELF containsSequence(float... sequence) {
    // the given values must appear consecutively in the actual array, in the given order
    arrays.assertContainsSequence(info, actual, sequence);
    return myself;
  }
/**
* Verifies that the actual array contains the given sequence, without any other values between them.
* <p>
* Example:
* <pre><code class='java'> // assertion will pass
* assertThat(new float[] { 1.0f, 2.0f }).containsSequence(new Float[] { 1.0f, 2.0f });
* assertThat(new float[] { 1.0f, 2.0f, 2.0f, 1.0f }).containsSequence(new Float[] { 2.0f, 1.0f });
*
* // assertion will fail
* assertThat(new float[] { 1.0f, 2.0f, 3.0f }).containsSequence(new Float[] { 3.0f, 1.0f });</code></pre>
*
* @param sequence the sequence of values to look for.
   * @return {@code this} assertion object.
* @throws AssertionError if the actual array is {@code null}.
* @throws AssertionError if the given array is {@code null}.
* @throws AssertionError if the given argument is an empty array and the actual array is not empty.
* @throws AssertionError if the actual array does not contain the given sequence.
* @since 3.19.0
*/
public SELF containsSequence(Float[] sequence) {
requireNonNullParameter(sequence, "sequence");
arrays.assertContainsSequence(info, actual, toPrimitiveFloatArray(sequence));
return myself;
}
/**
* Verifies that the actual array contains the given sequence, without any other values between them.
* The comparison is done at the given precision/offset set with {@link Assertions#withPrecision(Float)}.
* <p>
* Examples :
* <pre><code class='java'> float[] values = new float[] {1.0f, 2.0f, 3.0f};
*
* // assertions will pass
* assertThat(values).containsSequence(new float[] {1.07f, 2.0f}, withPrecision(0.1f))
* .containsSequence(new float[] {1.1f, 2.1f, 3.0f}, withPrecision(0.2f))
* .containsSequence(new float[] {2.2f, 3.0f}, withPrecision(0.3f));
*
* // assertions will fail
* assertThat(values).containsSequence(new float[] {1.0f, 3.0f}, withPrecision(0.2f));
* assertThat(values).containsSequence(new float[] {4.0f, 7.0f}, withPrecision(0.1f));</code></pre>
*
* @param sequence the sequence of values to look for.
* @param precision the precision under which the values may vary.
* @return {@code this} assertion object.
* @throws AssertionError if the actual array is {@code null}.
* @throws AssertionError if the given array is {@code null}.
* @throws AssertionError if the given argument is an empty array and the actual array is not empty.
* @throws AssertionError if the actual array does not contain the given sequence.
*/
public SELF containsSequence(float[] sequence, Offset<Float> precision) {
return usingComparatorWithPrecision(precision.value).containsSequence(sequence);
}
/**
* Verifies that the actual array contains the given sequence, without any other values between them.
* The comparison is done at the given precision/offset set with {@link Assertions#withPrecision(Float)}.
* <p>
* Examples :
* <pre><code class='java'> float[] values = new float[] { 1.0f, 2.0f, 3.0f };
*
* // assertions will pass
* assertThat(values).containsSequence(new Float[] { 1.07f, 2.0f }, withPrecision(0.1f))
* .containsSequence(new Float[] { 1.1f, 2.1f, 3.0f }, withPrecision(0.2f))
* .containsSequence(new Float[] { 2.2f, 3.0f }, withPrecision(0.3f));
*
* // assertions will fail
* assertThat(values).containsSequence(new Float[] { 1.0f, 3.0f }, withPrecision(0.2f));
* assertThat(values).containsSequence(new Float[] { 4.0f, 7.0f }, withPrecision(0.1f));</code></pre>
*
* @param sequence the sequence of values to look for.
* @param precision the precision under which the values may vary.
* @return {@code this} assertion object.
* @throws AssertionError if the actual array is {@code null}.
* @throws AssertionError if the given array is {@code null}.
* @throws AssertionError if the given argument is an empty array and the actual array is not empty.
* @throws AssertionError if the actual array does not contain the given sequence.
* @since 3.19.0
*/
public SELF containsSequence(Float[] sequence, Offset<Float> precision) {
return usingComparatorWithPrecision(precision.value).containsSequence(toPrimitiveFloatArray(sequence));
}
/**
* Verifies that the actual array contains the given subsequence (possibly with other values between them).
* <p>
* If you want to set a precision for the comparison either use {@link #containsSubsequence(float[], Offset)}
* or {@link #usingComparatorWithPrecision(Float)} before calling the assertion.
* <p>
* Examples :
* <pre><code class='java'> float[] values = new float[] { 1.0f, 2.0f, 3.0f };
*
* // assertion will pass
   * assertThat(values).containsSubsequence(1.0f, 2.0f)
* .containsSubsequence(1.0f, 2.0f, 3.0f)
* .containsSubsequence(1.0f, 3.0f)
* .usingComparatorWithPrecision(0.5f)
* .containsSubsequence(1.1f, 2.1f);
*
* // assertions will fail
* assertThat(values).containsSubsequence(3.0f, 1.0f);
* assertThat(values).containsSubsequence(4.0f, 7.0f);
* assertThat(values).usingComparatorWithPrecision(0.01f)
* .containsSubsequence(1.1f, 2.0f);</code></pre>
*
* @param subsequence the subsequence of values to look for.
* @return {@code this} assertion object.
* @throws AssertionError if the actual array is {@code null}.
* @throws AssertionError if the given array is {@code null}.
* @throws AssertionError if the given argument is an empty array and the actual array is not empty.
* @throws AssertionError if the actual array does not contain the given subsequence.
*/
  public SELF containsSubsequence(float... subsequence) {
    // values must appear in the given relative order; other values may occur in between
    arrays.assertContainsSubsequence(info, actual, subsequence);
    return myself;
  }
/**
* Verifies that the actual array contains the given subsequence (possibly with other values between them).
* <p>
* Example:
* <pre><code class='java'> // assertion will pass
* assertThat(new float[] { 1.0f, 2.0f }).containsSubsequence(new Float[] { 1.0f, 2.0f });
* assertThat(new float[] { 1.0f, 2.0f, 3.0f, 4.0f }).containsSubsequence(new Float[] { 1.0f, 4.0f });
*
* // assertion will fail
* assertThat(new float[] { 1.0f, 2.0f, 3.0f }).containsSubsequence(new Float[] { 3.0f, 1.0f });</code></pre>
*
* @param subsequence the subsequence of values to look for.
   * @return {@code this} assertion object.
* @throws AssertionError if the actual array is {@code null}.
* @throws AssertionError if the given array is {@code null}.
* @throws AssertionError if the given argument is an empty array and the actual array is not empty.
* @throws AssertionError if the actual array does not contain the given subsequence.
* @since 3.19.0
*/
public SELF containsSubsequence(Float[] subsequence) {
requireNonNullParameter(subsequence, "subsequence");
arrays.assertContainsSubsequence(info, actual, toPrimitiveFloatArray(subsequence));
return myself;
}
/**
* Verifies that the actual array contains the given subsequence (possibly with other values between them).
* The comparison is done at the given precision/offset set with {@link Assertions#withPrecision(Float)}.
* <p>
* Examples :
* <pre><code class='java'> float[] values = new float[] {1.0f, 2.0f, 3.0f};
*
* // assertions will pass
* assertThat(values).containsSubsequence(new float[] {1.0f, 2.0f}, withPrecision(0.1f))
* .containsSubsequence(new float[] {1.0f, 2.07f, 3.0f}, withPrecision(0.1f))
* .containsSubsequence(new float[] {2.1f, 2.9f}, withPrecision(0.2f));
*
* // assertions will fail
* assertThat(values).containsSubsequence(new float[] {1.0f, 3.0f}, withPrecision(0.1f));
* assertThat(values).containsSubsequence(new float[] {4.0f, 7.0f}, withPrecision(0.1f));</code></pre>
*
* @param subsequence the subsequence of values to look for.
* @param precision the precision under which the values may vary.
* @return {@code this} assertion object.
* @throws AssertionError if the actual array is {@code null}.
* @throws AssertionError if the given array is {@code null}.
* @throws AssertionError if the given argument is an empty array and the actual array is not empty.
* @throws AssertionError if the actual array does not contain the given subsequence.
*/
public SELF containsSubsequence(float[] subsequence, Offset<Float> precision) {
return usingComparatorWithPrecision(precision.value).containsSubsequence(subsequence);
}
/**
* Verifies that the actual array contains the given subsequence (possibly with other values between them).
* The comparison is done at the given precision/offset set with {@link Assertions#withPrecision(Float)}.
* <p>
* Examples :
* <pre><code class='java'> float[] values = new float[] { 1.0f, 2.0f, 3.0f };
*
* // assertions will pass
* assertThat(values).containsSubsequence(new Float[] { 1.0f, 2.0f }, withPrecision(0.1f))
* .containsSubsequence(new Float[] { 1.0f, 2.07f, 3.0f }, withPrecision(0.1f))
* .containsSubsequence(new Float[] { 2.1f, 2.9f }, withPrecision(0.2f));
*
* // assertions will fail
* assertThat(values).containsSubsequence(new Float[] { 1.0f, 3.0f }, withPrecision(0.1f));
* assertThat(values).containsSubsequence(new Float[] { 4.0f, 7.0f }, withPrecision(0.1f));</code></pre>
*
* @param subsequence the subsequence of values to look for.
* @param precision the precision under which the values may vary.
* @return {@code this} assertion object.
* @throws AssertionError if the actual array is {@code null}.
* @throws AssertionError if the given array is {@code null}.
* @throws AssertionError if the given argument is an empty array and the actual array is not empty.
* @throws AssertionError if the actual array does not contain the given subsequence.
* @since 3.19.0
*/
public SELF containsSubsequence(Float[] subsequence, Offset<Float> precision) {
return usingComparatorWithPrecision(precision.value).containsSubsequence(toPrimitiveFloatArray(subsequence));
}
/**
* Verifies that the actual array contains the given value at the given index.
* <p>
* If you want to set a precision for the comparison either use {@link #contains(float, Index, Offset)}
* or {@link #usingComparatorWithPrecision(Float)} before calling the assertion.
* <p>
* Example:
* <pre><code class='java'> float[] values = new float[] { 1.0f, 2.0f, 3.0f };
*
* // assertion will pass
   * assertThat(values).contains(1.0f, atIndex(0))
* .contains(3.0f, atIndex(2))
* .usingComparatorWithPrecision(0.5f)
* .contains(3.1f, atIndex(2));
*
* // assertions will fail
* assertThat(values).contains(1.0f, atIndex(1));
* assertThat(values).contains(4.0f, atIndex(2));
* assertThat(values).usingComparatorWithPrecision(0.01f)
* .contains(3.1f, atIndex(2));</code></pre>
*
* @param value the value to look for.
* @param index the index where the value should be stored in the actual array.
* @return {@code this} assertion object.
* @throws AssertionError if the actual array is {@code null} or empty.
* @throws NullPointerException if the given {@code Index} is {@code null}.
* @throws IndexOutOfBoundsException if the value of the given {@code Index} is equal to or greater than the size of
* the actual array.
* @throws AssertionError if the actual array does not contain the given value at the given index.
*/
  public SELF contains(float value, Index index) {
    // positional check: the element stored at the given index must match the given value
    arrays.assertContains(info, actual, value, index);
    return myself;
  }
/**
* Verifies that the actual array contains the given value at the given index.
* The comparison is done at the given precision/offset set with {@link Assertions#withPrecision(Float)}.
* <p>
* Example:
* <pre><code class='java'> float[] values = new float[] {1.0f, 2.0f, 3.0f};
*
* // assertions will pass
   * assertThat(values).contains(1.0f, atIndex(0), withPrecision(0.01f))
* .contains(3.3f, atIndex(2), withPrecision(0.5f));
*
* // assertions will fail
* assertThat(values).contains(1.0f, atIndex(1), withPrecision(0.2f));
* assertThat(values).contains(4.5f, atIndex(2), withPrecision(0.1f));</code></pre>
*
* @param value the value to look for.
* @param index the index where the value should be stored in the actual array.
* @param precision the precision which the value may vary.
* @return {@code this} assertion object.
* @throws AssertionError if the actual array is {@code null} or empty.
* @throws NullPointerException if the given {@code Index} is {@code null}.
* @throws IndexOutOfBoundsException if the value of the given {@code Index} is equal to or greater than the size of
* the actual array.
* @throws AssertionError if the actual array does not contain the given value at the given index.
*/
public SELF contains(float value, Index index, Offset<Float> precision) {
return usingComparatorWithPrecision(precision.value).contains(value, index);
}
/**
* Verifies that the actual array does not contain the given values.
* <p>
* If you want to set a precision for the comparison either use {@link #doesNotContain(float[], Offset)}
* or {@link #usingComparatorWithPrecision(Float)} before calling the assertion.
* <p>
* Example:
* <pre><code class='java'> float[] values = new float[] { 1.0f, 2.0f, 3.0f };
*
* // assertion will pass
* assertThat(values).doesNotContain(4.0f, 8.0f)
* .usingComparatorWithPrecision(0.0001f)
* .doesNotContain(1.01f, 2.01f);
*
* // assertions will fail
* assertThat(values).doesNotContain(1.0f, 4.0f, 8.0f);
* assertThat(values).usingComparatorWithPrecision(0.1f)
* .doesNotContain(1.001f, 2.001f);</code></pre>
*
* @param values the given values.
* @return {@code this} assertion object.
* @throws NullPointerException if the given argument is {@code null}.
* @throws IllegalArgumentException if the given argument is an empty array.
* @throws AssertionError if the actual array is {@code null}.
* @throws AssertionError if the actual array contains any of the given values.
*/
  public SELF doesNotContain(float... values) {
    // fails if any of the given values is found in the actual array
    arrays.assertDoesNotContain(info, actual, values);
    return myself;
  }
/**
* Verifies that the actual array does not contain the values of the given array.
* <p>
* Example:
* <pre><code class='java'> // assertion will pass
* assertThat(new float[] { 1.0f, 2.0f }).doesNotContain(new Float[] { 3.0f });
*
* // assertion will fail
* assertThat(new float[] { 1.0f, 2.0f, 3.0f }).doesNotContain(new Float[] { 1.0f });</code></pre>
*
* @param values the given values.
* @return {@code this} assertion object.
* @throws NullPointerException if the given argument is {@code null}.
   * @throws IllegalArgumentException if the given argument is an empty array.
* @throws AssertionError if the actual array is {@code null}.
* @throws AssertionError if the actual array contains any of the given values.
* @since 3.19.0
*/
public SELF doesNotContain(Float[] values) {
requireNonNullParameter(values, "values");
arrays.assertDoesNotContain(info, actual, toPrimitiveFloatArray(values));
return myself;
}
/**
* Verifies that the actual array does not contain the given values.
* The comparison is done at the given precision/offset set with {@link Assertions#withPrecision(Float)}.
* <p>
* Example:
* <pre><code class='java'> float[] values = new float[] {1.0f, 2.0f, 3.0f};
*
* // assertion will pass
* assertThat(values).doesNotContain(new float[] {4.0f, 8.0f}, withPrecision(0.5f));
*
* // assertion will fail
* assertThat(values).doesNotContain(new float[] {1.05f, 4.0f, 8.0f}, withPrecision(0.1f));</code></pre>
*
* @param values the given values.
* @param precision the precision under which the values may vary.
* @return {@code this} assertion object.
* @throws NullPointerException if the given argument is {@code null}.
* @throws IllegalArgumentException if the given argument is an empty array.
* @throws AssertionError if the actual array is {@code null}.
* @throws AssertionError if the actual array contains any of the given values.
*/
public SELF doesNotContain(float[] values, Offset<Float> precision) {
return usingComparatorWithPrecision(precision.value).doesNotContain(values);
}
/**
* Verifies that the actual array does not contain the values of the given array.
* The comparison is done at the given precision/offset set with {@link Assertions#withPrecision(Float)}.
* <p>
* Example:
* <pre><code class='java'> float[] values = new float[] { 1.0f, 2.0f, 3.0f };
*
* // assertion will pass
* assertThat(values).doesNotContain(new Float[] { 4.0f, 8.0f }, withPrecision(0.5f));
*
* // assertion will fail
* assertThat(values).doesNotContain(new Float[] { 1.05f, 4.0f, 8.0f }, withPrecision(0.1f));</code></pre>
*
* @param values the given values.
* @param precision the precision under which the values may vary.
* @return {@code this} assertion object.
* @throws NullPointerException if the given argument is {@code null}.
* @throws IllegalArgumentException if the given argument is an empty array.
* @throws AssertionError if the actual array is {@code null}.
* @throws AssertionError if the actual array contains any of the given values.
* @since 3.19.0
*/
public SELF doesNotContain(Float[] values, Offset<Float> precision) {
return usingComparatorWithPrecision(precision.value).doesNotContain(toPrimitiveFloatArray(values));
}
/**
* Verifies that the actual array does not contain the given value at the given index.
* <p>
* If you want to set a precision for the comparison either use {@link #doesNotContain(float, Index, Offset)}
* or {@link #usingComparatorWithPrecision(Float)} before calling the assertion.
* <p>
* Example:
* <pre><code class='java'> float[] values = new float[] { 1.0f, 2.0f, 3.0f };
*
* // assertion will pass
* assertThat(values).doesNotContain(1.0f, atIndex(1))
* .doesNotContain(2.0f, atIndex(0))
   *                   .usingComparatorWithPrecision(0.001f)
* .doesNotContain(1.1f, atIndex(0));
*
* // assertions will fail
* assertThat(values).doesNotContain(1.0f, atIndex(0));
   * assertThat(values).usingComparatorWithPrecision(0.1f)
* .doesNotContain(1.001f, atIndex(0));</code></pre>
*
* @param value the value to look for.
* @param index the index where the value should be stored in the actual array.
* @return {@code this} assertion object.
* @throws AssertionError if the actual array is {@code null}.
* @throws NullPointerException if the given {@code Index} is {@code null}.
* @throws AssertionError if the actual array contains the given value at the given index.
*/
  public SELF doesNotContain(float value, Index index) {
    // only the element stored at the given index is compared against the value
    arrays.assertDoesNotContain(info, actual, value, index);
    return myself;
  }
/**
* Verifies that the actual array does not contain the given value at the given index.
* The comparison is done at the given precision/offset set with {@link Assertions#withPrecision(Float)}.
* <p>
* Example:
* <pre><code class='java'> float[] values = new float[] {1.0f, 2.0f, 3.0f};
*
* // assertions will pass
* assertThat(values).doesNotContain(1.01f, atIndex(1), withPrecision(0.0001f))
* .doesNotContain(2.05f, atIndex(0), withPrecision(0.1f));
*
* // assertion will fail
* assertThat(values).doesNotContain(1.01f, atIndex(0), withPrecision(0.1f));</code></pre>
*
* @param value the value to look for.
* @param index the index where the value should be stored in the actual array.
* @param precision the precision under which the value may vary.
* @return {@code this} assertion object.
* @throws AssertionError if the actual array is {@code null}.
* @throws NullPointerException if the given {@code Index} is {@code null}.
* @throws AssertionError if the actual array contains the given value at the given index.
*/
public SELF doesNotContain(float value, Index index, Offset<Float> precision) {
return usingComparatorWithPrecision(precision.value).doesNotContain(value, index);
}
/**
* Verifies that the actual array does not contain duplicates.
* <p>
* If you want to set a precision for the comparison either use {@link #doesNotHaveDuplicates(Offset)}
* or {@link #usingComparatorWithPrecision(Float)} before calling the assertion.
* <p>
* Example:
* <pre><code class='java'> // assertions will pass
* assertThat(new float[] { 1.0f, 2.0f, 3.0f }).doesNotHaveDuplicates();
* assertThat(new float[] { 1.0f, 1.1f }).usingComparatorWithPrecision(0.01f)
* .doesNotHaveDuplicates();
*
* // assertions will fail
* assertThat(new float[] { 1.0f, 1.0f, 2.0f, 3.0f }).doesNotHaveDuplicates();
* assertThat(new float[] { 1.0f, 1.1f }).usingComparatorWithPrecision(0.5f)
* .doesNotHaveDuplicates();</code></pre>
*
* @return {@code this} assertion object.
* @throws AssertionError if the actual array is {@code null}.
* @throws AssertionError if the actual array contains duplicates.
*/
  public SELF doesNotHaveDuplicates() {
    // duplicates are detected with the currently configured comparison strategy
    arrays.assertDoesNotHaveDuplicates(info, actual);
    return myself;
  }
/**
* Verifies that the actual array does not contain duplicates.
* The comparison is done at the given precision/offset set with {@link Assertions#withPrecision(Float)}.
* <p>
* Example:
* <pre><code class='java'> // assertions will pass
* assertThat(new float[] {1.0f, 2.0f, 3.0f}).doesNotHaveDuplicates(withPrecision(0.1f));
* assertThat(new float[] {1.1f, 1.2f, 1.3f}).doesNotHaveDuplicates(withPrecision(0.05f));
*
* // assertion will fail
* assertThat(new float[] {1.0f, 1.01f, 2.0f}).doesNotHaveDuplicates(withPrecision(0.1f));</code></pre>
*
* @param precision the precision under which the values may vary.
* @return {@code this} assertion object.
* @throws AssertionError if the actual array is {@code null}.
* @throws AssertionError if the actual array contains duplicates.
*/
public SELF doesNotHaveDuplicates(Offset<Float> precision) {
return usingComparatorWithPrecision(precision.value).doesNotHaveDuplicates();
}
/**
* Verifies that the actual array starts with the given sequence of values, without any other values between them.
* Similar to <code>{@link #containsSequence(float...)}</code>, but it also verifies that the first element in the
* sequence is also first element of the actual array.
* <p>
* If you want to set a precision for the comparison either use {@link #startsWith(float[], Offset)}
* or {@link #usingComparatorWithPrecision(Float)} before calling the assertion.
* <p>
* Example:
* <pre><code class='java'> float[] values = new float[] { 1.0f, 2.0f, 3.0f };
*
* // assertion will pass
* assertThat(values).startsWith(1.0f, 2.0f)
* .usingComparatorWithPrecision(0.5f)
* .startsWith(1.1f, 2.1f);
*
* // assertion will fail
* assertThat(values).startsWith(2.0f, 3.0f);</code></pre>
*
* @param sequence the sequence of values to look for.
* @return {@code this} assertion object.
* @throws NullPointerException if the given argument is {@code null}.
* @throws AssertionError if the given argument is an empty array and the actual array is not empty.
* @throws AssertionError if the actual array is {@code null}.
* @throws AssertionError if the actual array does not start with the given sequence.
*/
  public SELF startsWith(float... sequence) {
    // the sequence must match the first elements of the actual array, in order
    arrays.assertStartsWith(info, actual, sequence);
    return myself;
  }
/**
* Verifies that the actual array starts with the given sequence of values, without any other values between them.
* Similar to <code>{@link #containsSequence(Float[])}</code>, but it also verifies that the first element in the
* sequence is also first element of the actual array.
* <p>
* Example:
* <pre><code class='java'> // assertion will pass
* assertThat(new float[] { 1.0f, 2.0f, 3.0f, 4.0f }).startsWith(new Float[] { 1.0f, 2.0f });
*
* // assertion will fail
* assertThat(new float[] { 1.0f, 2.0f, 3.0f, 4.0f }).startsWith(new Float[] { 2.0f, 3.0f, 4.0f });</code></pre>
*
* @param sequence the sequence of values to look for.
* @return myself assertion object.
* @throws NullPointerException if the given argument is {@code null}.
* @throws AssertionError if the given argument is an empty array and the actual array is not empty.
* @throws AssertionError if the actual array is {@code null}.
* @throws AssertionError if the actual array does not start with the given sequence.
* @since 3.19.0
*/
  public SELF startsWith(Float[] sequence) {
    requireNonNullParameter(sequence, "sequence");
    // unbox to float[] so the primitive-array assertion logic can be reused
    arrays.assertStartsWith(info, actual, toPrimitiveFloatArray(sequence));
    return myself;
  }
/**
* Verifies that the actual array starts with the given sequence of values, without any other values between them.
* Similar to <code>{@link #containsSequence(float...)}</code>, but it also verifies that the first element in the
* sequence is also first element of the actual array.
* <p>
* The comparison is done at the given precision/offset set with {@link Assertions#withPrecision(Float)}.
* <p>
* Example:
* <pre><code class='java'> float[] values = new float[] {1.0f, 2.0f, 3.0f};
*
* // assertion will pass
* assertThat(values).startsWith(new float[] {1.01f, 2.01f}, withPrecision(0.1f));
*
* // assertions will fail
   * assertThat(values).startsWith(new float[] {2.0f, 1.0f}, withPrecision(0.1f));
   * assertThat(values).startsWith(new float[] {1.1f, 2.1f}, withPrecision(0.5f));</code></pre>
*
* @param values the sequence of values to look for.
* @param precision the precision under which the values may vary.
* @return {@code this} assertion object.
* @throws NullPointerException if the given argument is {@code null}.
* @throws AssertionError if the given argument is an empty array and the actual array is not empty.
* @throws AssertionError if the actual array is {@code null}.
* @throws AssertionError if the actual array does not end with the given sequence.
*/
public SELF startsWith(float[] values, Offset<Float> precision) {
return usingComparatorWithPrecision(precision.value).startsWith(values);
}
/**
* Verifies that the actual array starts with the given sequence of values, without any other values between them.
* Similar to <code>{@link #containsSequence(float...)}</code>, but it also verifies that the first element in the
* sequence is also first element of the actual array.
* <p>
* The comparison is done at the given precision/offset set with {@link Assertions#withPrecision(Float)}.
* <p>
* Example:
* <pre><code class='java'> float[] values = new float[] { 1.0f, 2.0f, 3.0f };
*
* // assertion will pass
* assertThat(values).startsWith(new Float[] { 1.01f, 2.01f }, withPrecision(0.1f));
*
* // assertions will fail
   * assertThat(values).startsWith(new Float[] { 2.0f, 1.0f }, withPrecision(0.1f));
   * assertThat(values).startsWith(new Float[] { 1.1f, 2.1f }, withPrecision(0.5f));</code></pre>
*
* @param values the sequence of values to look for.
* @param precision the precision under which the values may vary.
* @return {@code this} assertion object.
* @throws NullPointerException if the given argument is {@code null}.
* @throws AssertionError if the given argument is an empty array and the actual array is not empty.
* @throws AssertionError if the actual array is {@code null}.
* @throws AssertionError if the actual array does not end with the given sequence.
* @since 3.19.0
*/
public SELF startsWith(Float[] values, Offset<Float> precision) {
return usingComparatorWithPrecision(precision.value).startsWith(toPrimitiveFloatArray(values));
}
/**
* Verifies that the actual array ends with the given sequence of values, without any other values between them.
* Similar to <code>{@link #containsSequence(float...)}</code>, but it also verifies that the last element in the
* sequence is also last element of the actual array.
* <p>
* If you want to set a precision for the comparison either use {@link #endsWith(float[], Offset)}
* or {@link #usingComparatorWithPrecision(Float)} before calling the assertion.
* <p>
* Example:
* <pre><code class='java'> float[] values = new float[] { 1.0f, 2.0f, 3.0f };
*
* // assertion will pass
* assertThat(values).endsWith(2.0f, 3.0f)
* .usingComparatorWithPrecision(0.5f)
* .endsWith(2.1f, 3.1f);
*
* // assertion will fail
* assertThat(values).endsWith(1.0f, 3.0f);</code></pre>
*
* @param sequence the sequence of values to look for.
* @return {@code this} assertion object.
* @throws NullPointerException if the given argument is {@code null}.
* @throws AssertionError if the actual array is {@code null}.
* @throws AssertionError if the actual array does not end with the given sequence.
*/
  public SELF endsWith(float... sequence) {
    // the sequence must match the last elements of the actual array, in order
    arrays.assertEndsWith(info, actual, sequence);
    return myself;
  }
/**
* Verifies that the actual array ends with the given sequence of values, without any other values between them.
* Similar to <code>{@link #containsSequence(Float[])}</code>, but it also verifies that the last element in the
* sequence is also last element of the actual array.
* <p>
* Example:
* <pre><code class='java'> // assertion will pass
   * assertThat(new float[] { 1.0f, 2.0f, 3.0f, 4.0f }).endsWith(new Float[] { 3.0f, 4.0f });
   *
   * // assertion will fail
   * assertThat(new float[] { 1.0f, 2.0f, 3.0f, 4.0f }).endsWith(new Float[] { 2.0f, 3.0f });</code></pre>
*
* @param sequence the sequence of values to look for.
* @return myself assertion object.
* @throws NullPointerException if the given argument is {@code null}.
* @throws AssertionError if the actual array is {@code null}.
* @throws AssertionError if the actual array does not end with the given sequence.
* @since 3.19.0
*/
  public SELF endsWith(Float[] sequence) {
    requireNonNullParameter(sequence, "sequence");
    // unbox to float[] so the primitive-array assertion logic can be reused
    arrays.assertEndsWith(info, actual, toPrimitiveFloatArray(sequence));
    return myself;
  }
/**
* Verifies that the actual array ends with the given sequence of values, without any other values between them.
* Similar to <code>{@link #containsSequence(float...)}</code>, but it also verifies that the last element in the
* sequence is also last element of the actual array.
* <p>
* The comparison is done at the given precision/offset set with {@link Assertions#withPrecision(Float)}.
* <p>
* Example:
* <pre><code class='java'> float[] values = new float[] {1.0f, 2.0f, 3.0f};
*
* // assertion will pass
* assertThat(values).endsWith(new float[] {2.01f, 3.01f}, withPrecision(0.1f));
*
* // assertions will fail
   * assertThat(values).endsWith(new float[] {3.0f, 2.0f}, withPrecision(0.1f));
   * assertThat(values).endsWith(new float[] {2.1f, 3.1f}, withPrecision(0.5f));</code></pre>
*
* @param values the sequence of values to look for.
* @param precision the precision under which the values may vary.
* @return {@code this} assertion object.
* @throws NullPointerException if the given argument is {@code null}.
* @throws AssertionError if the actual array is {@code null}.
* @throws AssertionError if the actual array does not end with the given sequence.
*/
public SELF endsWith(float[] values, Offset<Float> precision) {
return usingComparatorWithPrecision(precision.value).endsWith(values);
}
/**
* Verifies that the actual array ends with the given sequence of values, without any other values between them.
* Similar to <code>{@link #containsSequence(float...)}</code>, but it also verifies that the last element in the
* sequence is also last element of the actual array.
* <p>
* The comparison is done at the given precision/offset set with {@link Assertions#withPrecision(Float)}.
* <p>
* Example:
* <pre><code class='java'> float[] values = new float[] { 1.0f, 2.0f, 3.0f };
*
* // assertion will pass
* assertThat(values).endsWith(new Float[] { 2.01f, 3.01f }, withPrecision(0.1f));
*
* // assertions will fail
   * assertThat(values).endsWith(new Float[] { 3.0f, 2.0f }, withPrecision(0.1f));
   * assertThat(values).endsWith(new Float[] { 2.1f, 3.1f }, withPrecision(0.5f));</code></pre>
*
* @param values the sequence of values to look for.
* @param precision the precision under which the values may vary.
* @return {@code this} assertion object.
* @throws NullPointerException if the given argument is {@code null}.
* @throws AssertionError if the actual array is {@code null}.
* @throws AssertionError if the actual array does not end with the given sequence.
* @since 3.19.0
*/
public SELF endsWith(Float[] values, Offset<Float> precision) {
return usingComparatorWithPrecision(precision.value).endsWith(toPrimitiveFloatArray(values));
}
  /** {@inheritDoc} */
  @Override
  public SELF isSorted() {
    // natural-ordering check, delegated to the shared FloatArrays implementation
    arrays.assertIsSorted(info, actual);
    return myself;
  }
  /** {@inheritDoc} */
  @Override
  public SELF isSortedAccordingTo(Comparator<? super Float> comparator) {
    // order is verified against the supplied comparator, not natural ordering
    arrays.assertIsSortedAccordingToComparator(info, actual, comparator);
    return myself;
  }
  /** {@inheritDoc} */
  @Override
  @CheckReturnValue
  public SELF usingElementComparator(Comparator<? super Float> customComparator) {
    // swap the comparison strategy used by all subsequent assertions on this instance
    this.arrays = new FloatArrays(new ComparatorBasedComparisonStrategy(customComparator));
    return myself;
  }
  /** {@inheritDoc} */
  @Override
  @CheckReturnValue
  public SELF usingDefaultElementComparator() {
    // restore the shared default (equality-based) comparison strategy
    this.arrays = FloatArrays.instance();
    return myself;
  }
/**
* Verifies that the actual group contains only the given values and nothing else, <b>in order</b>.
* <p>
* If you want to set a precision for the comparison either use {@link #containsExactly(float[], Offset)}
* or {@link #usingComparatorWithPrecision(Float)} before calling the assertion.
* <p>
* Example :
* <pre><code class='java'> float[] values = new float[] { 1.0f, 2.0f, 3.0f };
*
* // assertion will pass
* assertThat(values).containsExactly(1.0f, 2.0f, 3.0f)
* .usingComparatorWithPrecision(0.2f)
* .containsExactly(1.1f, 2.1f, 2.9f);
*
* // assertion will fail as actual and expected order differ
* assertThat(values).containsExactly(2.0f, 1.0f, 3.0f);</code></pre>
*
* @param values the given values.
* @return {@code this} assertion object.
* @throws NullPointerException if the given argument is {@code null}.
* @throws AssertionError if the actual group is {@code null}.
* @throws AssertionError if the given argument is an empty array and the actual array is not empty.
* @throws AssertionError if the actual group does not contain the given values with same order, i.e. the actual group
* contains some or none of the given values, or the actual group contains more values than the given ones
* or values are the same but the order is not.
*/
  public SELF containsExactly(float... values) {
    // exact content AND order check, using the currently configured comparison strategy
    arrays.assertContainsExactly(info, actual, values);
    return myself;
  }
/**
* Verifies that the actual group contains only the values of the given array and nothing else, <b>in order</b>.
* <p>
* Example :
* <pre><code class='java'> // assertion will pass
* assertThat(new float[] { 1.0f, 2.0f, 3.0f, 4.0f }).containsExactly(new Float[] { 1.0f, 2.0f, 3.0f, 4.0f });
*
* // assertion will fail as actual and expected order differ
* assertThat(new float[] { 1.0f, 2.0f, 3.0f, 4.0f }).containsExactly(new Float[] { 1.0f, 5.0f });</code></pre>
*
* @param values the given values.
* @return {@code this} assertion object.
* @throws NullPointerException if the given argument is {@code null}.
* @throws AssertionError if the actual group is {@code null}.
* @throws AssertionError if the given argument is an empty array and the actual array is not empty.
* @throws AssertionError if the actual group does not contain the given values with same order, i.e. the actual group
* contains some or none of the given values, or the actual group contains more values than the given ones
* or values are the same but the order is not.
* @since 3.19.0
*/
  public SELF containsExactly(Float[] values) {
    requireNonNullParameter(values, "values");
    // unbox to float[] so the primitive-array assertion logic can be reused
    arrays.assertContainsExactly(info, actual, toPrimitiveFloatArray(values));
    return myself;
  }
/**
* Verifies that the actual group contains only the given values and nothing else, <b>in order</b>.
* The values may vary with a specified precision.
* <p>
* Example :
* <pre><code class='java'> float[] values = new float[] {1.0f, 2.0f, 3.0f};
*
* // assertion will pass
* assertThat(values).containsExactly(new float[] {1.0f, 1.98f, 3.01f}, withPrecision(0.05f));
*
* // assertion fails because |1.0 - 1.1| > 0.05 (precision)
* assertThat(values).containsExactly(new float[] {1.1f, 2.0f, 3.01f}, withPrecision(0.05f));
*
* // assertion will fail as actual and expected order differ
* assertThat(values).containsExactly(new float[] {1.98f, 1.0f, 3.01f}, withPrecision(0.05f));</code></pre>
*
* @param values the given values.
* @param precision the precision under which the values may vary.
* @return {@code this} assertion object.
* @throws NullPointerException if the given argument is {@code null}.
* @throws AssertionError if the actual group is {@code null}.
* @throws AssertionError if the given argument is an empty array and the actual array is not empty.
* @throws AssertionError if the actual group does not contain the given values within the specified precision
* with same order, i.e. the actual group contains some or none of the given values, or the actual group contains
* more values than the given ones or values are the same but the order is not.
*/
public SELF containsExactly(float[] values, Offset<Float> precision) {
return usingComparatorWithPrecision(precision.value).containsExactly(values);
}
/**
* Verifies that the actual group contains only the values of the given array and nothing else, <b>in order</b>.
* The values may vary with a specified precision.
* <p>
* Example :
* <pre><code class='java'> float[] values = new float[] { 1.0f, 2.0f, 3.0f };
*
* // assertion will pass
* assertThat(values).containsExactly(new Float[] { 1.0f, 1.98f, 3.01f }, withPrecision(0.05f));
*
* // assertion fails because |1.0 - 1.1| > 0.05 (precision)
* assertThat(values).containsExactly(new Float[] { 1.1f, 2.0f, 3.01f }, withPrecision(0.05f));
*
* // assertion will fail as actual and expected order differ
* assertThat(values).containsExactly(new Float[] { 1.98f, 1.0f, 3.01f }, withPrecision(0.05f));</code></pre>
*
* @param values the given values.
* @param precision the precision under which the values may vary.
* @return {@code this} assertion object.
* @throws NullPointerException if the given argument is {@code null}.
* @throws AssertionError if the actual group is {@code null}.
* @throws AssertionError if the given argument is an empty array and the actual array is not empty.
* @throws AssertionError if the actual group does not contain the given values within the specified precision
* with same order, i.e. the actual group contains some or none of the given values, or the actual group contains
* more values than the given ones or values are the same but the order is not.
* @since 3.19.0
*/
public SELF containsExactly(Float[] values, Offset<Float> precision) {
return usingComparatorWithPrecision(precision.value).containsExactly(toPrimitiveFloatArray(values));
}
/**
* Verifies that the actual group contains exactly the given values and nothing else, <b>in any order</b>.<br>
* <p>
* Example :
* <pre><code class='java'> // assertions will pass
* assertThat(new float[] { 1.0F, 2.0F }).containsExactlyInAnyOrder(1.0F, 2.0F);
* assertThat(new float[] { 1.0F, 2.0F, 1.0F }).containsExactlyInAnyOrder(1.0F, 1.0F, 2.0F);
*
* // assertions will fail
* assertThat(new float[] { 1.0F, 2.0F }).containsExactlyInAnyOrder(1.0F);
* assertThat(new float[] { 1.0F }).containsExactlyInAnyOrder(1.0F, 2.0F);
* assertThat(new float[] { 1.0F, 2.0F, 1.0F }).containsExactlyInAnyOrder(1.0F, 2.0F);</code></pre>
*
* @param values the given values.
* @return {@code this} assertion object.
* @throws NullPointerException if the given argument is {@code null}.
* @throws AssertionError if the actual group is {@code null}.
* @throws AssertionError if the given argument is an empty array and the actual array is not empty.
* @throws AssertionError if the actual group does not contain the given values, i.e. the actual group
* contains some or none of the given values, or the actual group contains more values than the given ones.
* @since 2.6.0 / 3.6.0
*/
  public SELF containsExactlyInAnyOrder(float... values) {
    // exact content check ignoring order; duplicate counts are still significant
    arrays.assertContainsExactlyInAnyOrder(info, actual, values);
    return myself;
  }
/**
* Verifies that the actual group contains exactly the values of the given array and nothing else, <b>in any order</b>.<br>
* <p>
* Example :
* <pre><code class='java'> // assertions will pass
* assertThat(new float[] { 1.0f, 2.0f }).containsExactlyInAnyOrder(new Float[] { 2.0f, 1.0f });
* assertThat(new float[] { 1.0f, 2.0f, 3.0f }).containsExactlyInAnyOrder(new Float[] { 3.0f, 1.0f, 2.0f });
*
* // assertions will fail
* assertThat(new float[] { 1.0f, 2.0f }).containsExactlyInAnyOrder(new Float[] { 1.0f });
* assertThat(new float[] { 1.0f}).containsExactlyInAnyOrder(new Float[] { 2.0f, 1.0f });
* assertThat(new float[] { 1.0f, 2.0f, 3.0f }).containsExactlyInAnyOrder(new Float[] { 2.0f, 1.0f });</code></pre>
*
* @param values the given values.
* @return {@code this} assertion object.
* @throws NullPointerException if the given argument is {@code null}.
* @throws AssertionError if the actual group is {@code null}.
* @throws AssertionError if the given argument is an empty array and the actual array is not empty.
* @throws AssertionError if the actual group does not contain the given values, i.e. the actual group
* contains some or none of the given values, or the actual group contains more values than the given ones.
* @since 3.19.0
*/
  public SELF containsExactlyInAnyOrder(Float[] values) {
    requireNonNullParameter(values, "values");
    // unbox to float[] so the primitive-array assertion logic can be reused
    arrays.assertContainsExactlyInAnyOrder(info, actual, toPrimitiveFloatArray(values));
    return myself;
  }
/**
* Create a {@link Float} comparator which compares floats at the given precision and pass it to {@link #usingElementComparator(Comparator)}.
* All the following assertions will use this comparator to compare float[] elements.
*
* @param precision precision used to compare {@link Float}.
* @return {@code this} assertion object.
*/
@CheckReturnValue
public SELF usingComparatorWithPrecision(Float precision) {
return usingElementComparator(ComparatorFactory.INSTANCE.floatComparatorWithPrecision(precision));
}
/**
* Verifies that the actual array contains at least one of the given values.
* <p>
* Example :
* <pre><code class='java'> float[] oneTwoThree = { 1.0f, 2.0f, 3.0f };
*
* // assertions will pass
* assertThat(oneTwoThree).containsAnyOf(2.0f)
* .containsAnyOf(2.0f, 3.0f)
* .containsAnyOf(1.0f, 2.0f, 3.0f)
* .containsAnyOf(1.0f, 2.0f, 3.0f, 4.0f)
* .containsAnyOf(5.0f, 6.0f, 7.0f, 2.0f);
*
* // assertions will fail
* assertThat(oneTwoThree).containsAnyOf(4.0f);
* assertThat(oneTwoThree).containsAnyOf(4.0f, 5.0f, 6.0f, 7.0f);</code></pre>
*
* @param values the values whose at least one which is expected to be in the array under test.
* @return {@code this} assertion object.
* @throws NullPointerException if the array of values is {@code null}.
* @throws AssertionError if the array of values is empty and the array under test is not empty.
* @throws AssertionError if the array under test is {@code null}.
* @throws AssertionError if the array under test does not contain any of the given {@code values}.
* @since 2.9.0 / 3.9.0
*/
  public SELF containsAnyOf(float... values) {
    // passes as soon as at least one of the given values is found in the actual array
    arrays.assertContainsAnyOf(info, actual, values);
    return myself;
  }
/**
* Verifies that the actual array contains at least one of the values of the given array.
* <p>
* Example :
* <pre><code class='java'> float[] soFloats = { 1.0f, 2.0f, 3.0f };
*
* // assertions will pass
* assertThat(soFloats).containsAnyOf(new Float[] { 1.0f })
* .containsAnyOf(new Float[] { 3.0f, 4.0f, 5.0f, 6.0f });
*
* // assertions will fail
* assertThat(soFloats).containsAnyOf(new Float[] { 8.0f });
* assertThat(soFloats).containsAnyOf(new Float[] { 11.0f, 15.0f, 420.0f });</code></pre>
*
* @param values the values whose at least one which is expected to be in the array under test.
* @return {@code this} assertion object.
* @throws NullPointerException if the array of values is {@code null}.
* @throws AssertionError if the array of values is empty and the array under test is not empty.
* @throws AssertionError if the array under test is {@code null}.
* @throws AssertionError if the array under test does not contain any of the given {@code values}.
* @since 3.19.0
*/
  public SELF containsAnyOf(Float[] values) {
    requireNonNullParameter(values, "values");
    // unbox to float[] so the primitive-array assertion logic can be reused
    arrays.assertContainsAnyOf(info, actual, toPrimitiveFloatArray(values));
    return myself;
  }
private static float[] toPrimitiveFloatArray(Float[] values) {
float[] floats = new float[values.length];
range(0, values.length).forEach(i -> floats[i] = values[i]);
return floats;
}
}
| AbstractFloatArrayAssert |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/dialect/aggregate/AggregateSupport.java | {
"start": 1017,
"end": 7465
} | interface ____ {
	/**
	 * Returns the custom read expression to use for {@code column}.
	 * Replaces the given {@code placeholder} in the given {@code template}
	 * by the custom read expression to use for {@code column}.
	 *
	 * @param template The custom read expression template of the column
	 * @param placeholder The placeholder to replace with the actual read expression
	 * @param aggregateParentReadExpression The expression to the aggregate column, which contains the column
	 * @param columnExpression The column within the aggregate type, for which to return the read expression
	 * @param aggregateColumn The type information for the aggregate column
	 * @param column The column within the aggregate type, for which to return the read expression
	 * @return the template with the placeholder replaced by the dialect-specific read expression
	 */
	default String aggregateComponentCustomReadExpression(
			String template,
			String placeholder,
			String aggregateParentReadExpression,
			String columnExpression,
			AggregateColumn aggregateColumn,
			Column column) {
		final int sqlTypeCode = aggregateColumn.getType().getJdbcType().getDefaultSqlTypeCode();
		return aggregateComponentCustomReadExpression(
				template,
				placeholder,
				aggregateParentReadExpression,
				columnExpression,
				// We need to know what array this is STRUCT_ARRAY/JSON_ARRAY/XML_ARRAY,
				// which we can easily get from the type code of the aggregate column
				sqlTypeCode == SqlTypes.ARRAY ? aggregateColumn.getTypeCode() : sqlTypeCode,
				// wrap the column's type details so the overload sees a self-contained typed mapping
				new SqlTypedMappingImpl(
						column.getTypeName(),
						column.getLength(),
						column.getArrayLength(),
						column.getPrecision(),
						column.getScale(),
						column.getTemporalPrecision(),
						column.getType()
				),
				aggregateColumn.getComponent().getMetadata().getTypeConfiguration()
		);
	}
	/**
	 * Returns the custom read expression to use for {@code column}.
	 * Replaces the given {@code placeholder} in the given {@code template}
	 * by the custom read expression to use for {@code column}.
	 *
	 * @param template The custom read expression template of the column
	 * @param placeholder The placeholder to replace with the actual read expression
	 * @param aggregateParentReadExpression The expression to the aggregate column, which contains the column
	 * @param columnExpression The column within the aggregate type, for which to return the read expression
	 * @param aggregateColumnTypeCode The SQL type code of the aggregate column
	 * @param column The column within the aggregate type, for which to return the read expression
	 * @param typeConfiguration The type configuration
	 * @return the template with the placeholder replaced by the dialect-specific read expression
	 * @since 7.0
	 */
	String aggregateComponentCustomReadExpression(
			String template,
			String placeholder,
			String aggregateParentReadExpression,
			String columnExpression,
			int aggregateColumnTypeCode,
			SqlTypedMapping column,
			TypeConfiguration typeConfiguration);
	/**
	 * Returns the assignment expression to use for {@code column},
	 * which is part of the aggregate type of {@code aggregatePath}.
	 *
	 * @param aggregateParentAssignmentExpression The expression to the aggregate column, which contains the column
	 * @param columnExpression The column within the aggregate type, for which to return the assignment expression
	 * @param aggregateColumn The type information for the aggregate column
	 * @param column The column within the aggregate type, for which to return the assignment expression
	 * @return the dialect-specific assignment expression for the aggregated column
	 */
	default String aggregateComponentAssignmentExpression(
			String aggregateParentAssignmentExpression,
			String columnExpression,
			AggregateColumn aggregateColumn,
			Column column) {
		final int sqlTypeCode = aggregateColumn.getType().getJdbcType().getDefaultSqlTypeCode();
		return aggregateComponentAssignmentExpression(
				aggregateParentAssignmentExpression,
				columnExpression,
				// We need to know what array this is STRUCT_ARRAY/JSON_ARRAY/XML_ARRAY,
				// which we can easily get from the type code of the aggregate column
				sqlTypeCode == SqlTypes.ARRAY ? aggregateColumn.getTypeCode() : sqlTypeCode,
				column
		);
	}
/**
* Returns the assignment expression to use for {@code column},
* which is part of the aggregate type of {@code aggregatePath}.
*
* @param aggregateParentAssignmentExpression The expression to the aggregate column, which contains the column
* @param columnExpression The column within the aggregate type, for which to return the assignment expression
* @param aggregateColumnTypeCode The SQL type code of the aggregate column
* @param column The column within the aggregate type, for which to return the assignment expression
*
* @since 7.0
*/
String aggregateComponentAssignmentExpression(
String aggregateParentAssignmentExpression,
String columnExpression,
int aggregateColumnTypeCode,
Column column);
/**
* Returns the custom write expression to use for an aggregate column
* of the given column type, containing the given aggregated columns.
*
* @param aggregateColumn The type information for the aggregate column
* @param aggregatedColumns The columns of the aggregate type
*/
String aggregateCustomWriteExpression(AggregateColumn aggregateColumn, List<Column> aggregatedColumns);
/**
* Whether {@link #aggregateCustomWriteExpressionRenderer(SelectableMapping, SelectableMapping[], TypeConfiguration)} is needed
* when assigning an expression to individual aggregated columns in an update statement.
*/
boolean requiresAggregateCustomWriteExpressionRenderer(int aggregateSqlTypeCode);
/**
* Whether to prefer selecting the aggregate column as a whole instead of individual parts.
*/
boolean preferSelectAggregateMapping(int aggregateSqlTypeCode);
/**
* Whether to prefer binding the aggregate column as a whole instead of individual parts.
*/
boolean preferBindAggregateMapping(int aggregateSqlTypeCode);
/**
* @param aggregateColumn The mapping of the aggregate column
* @param columnsToUpdate The mappings of the columns that should be updated
* @param typeConfiguration The type configuration
*/
WriteExpressionRenderer aggregateCustomWriteExpressionRenderer(
SelectableMapping aggregateColumn,
SelectableMapping[] columnsToUpdate,
TypeConfiguration typeConfiguration);
/**
* Contract for rendering the custom write expression that updates a selected set of aggregated columns
* within an aggregate column to the value expressions as given by the {@code aggregateColumnWriteExpression}.
*/
| AggregateSupport |
java | dropwizard__dropwizard | dropwizard-health/src/main/java/io/dropwizard/health/response/ServletHealthResponder.java | {
"start": 472,
"end": 2346
} | class ____ extends HttpServlet {
private static final Logger LOGGER = LoggerFactory.getLogger(ServletHealthResponder.class);
private final HealthResponseProvider healthResponseProvider;
private final boolean cacheControlEnabled;
private final String cacheControlValue;
public ServletHealthResponder(final HealthResponseProvider healthResponseProvider,
final boolean cacheControlEnabled, final String cacheControlValue) {
this.healthResponseProvider = requireNonNull(healthResponseProvider);
this.cacheControlEnabled = cacheControlEnabled;
this.cacheControlValue = requireNonNull(cacheControlValue);
}
@Override
protected void doGet(final HttpServletRequest request, final HttpServletResponse response) throws IOException {
if (cacheControlEnabled) {
response.setHeader(HttpHeaders.CACHE_CONTROL, cacheControlValue);
}
final Map<String, Collection<String>> queryParameters = request.getParameterMap()
.entrySet()
.stream()
.collect(Collectors.toMap(
Map.Entry::getKey,
entry -> Arrays.asList(entry.getValue())
));
final HealthResponse healthResponse = healthResponseProvider.healthResponse(queryParameters);
response.setContentType(healthResponse.getContentType());
try {
response.getWriter()
.write(healthResponse.getMessage());
response.setStatus(healthResponse.getStatus());
} catch (IOException ioException) {
LOGGER.error("Failed to write response", ioException);
if (!response.isCommitted()) {
response.reset();
response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
}
}
}
}
| ServletHealthResponder |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/DeadLetterChannelOnExceptionOnRedeliveryTest.java | {
"start": 5326,
"end": 5651
} | class ____ implements Processor {
@Override
public void process(Exchange exchange) throws Exception {
// force some error so Camel will do redelivery
if (++counter <= 3) {
throw new IOException("Cannot connect");
}
}
}
}
| ThrowIOExceptionProcessor |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/issue_3300/Issue3343.java | {
"start": 126,
"end": 768
} | class ____ extends TestCase {
public void test_for_issue() throws Exception {
assertFalse(
JSONValidator.from("{\"name\":\"999}")
.validate());
assertTrue(
JSONValidator.from("false")
.validate());
assertEquals(JSONValidator.Type.Value,
JSONValidator.from("false")
.getType());
assertTrue(
JSONValidator.from("999").validate());
assertEquals(JSONValidator.Type.Value,
JSONValidator.from("999")
.getType());
}
}
| Issue3343 |
java | google__guava | android/guava-testlib/test/com/google/common/collect/testing/MinimalCollectionTest.java | {
"start": 1008,
"end": 1742
} | class ____ extends TestCase {
public static Test suite() {
return CollectionTestSuiteBuilder.using(
new TestStringCollectionGenerator() {
@Override
public Collection<String> create(String[] elements) {
// TODO: MinimalCollection should perhaps throw
for (Object element : elements) {
if (element == null) {
throw new NullPointerException();
}
}
return MinimalCollection.of(elements);
}
})
.named("MinimalCollection")
.withFeatures(CollectionFeature.NONE, CollectionSize.ANY)
.createTestSuite();
}
}
| MinimalCollectionTest |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/text/FormatFactory.java | {
"start": 1251,
"end": 1757
} | interface ____ {
/**
* Create or retrieve a format instance.
*
* @param name The format type name.
* @param arguments Arguments used to create the format instance. This allows the {@link FormatFactory} to implement the "format style" concept from
* {@link java.text.MessageFormat}.
* @param locale The locale, may be null.
* @return The format instance.
*/
Format getFormat(String name, String arguments, Locale locale);
}
| FormatFactory |
java | google__guava | android/guava/src/com/google/common/collect/Iterables.java | {
"start": 2056,
"end": 2623
} | class ____ not being deprecated, but we gently encourage you to migrate to
* streams.
*
* <p><i>Performance notes:</i> Unless otherwise noted, all of the iterables produced in this class
* are <i>lazy</i>, which means that their iterators only advance the backing iteration when
* absolutely necessary.
*
* <p>See the Guava User Guide article on <a href=
* "https://github.com/google/guava/wiki/CollectionUtilitiesExplained#iterables">{@code
* Iterables}</a>.
*
* @author Kevin Bourrillion
* @author Jared Levy
* @since 2.0
*/
@GwtCompatible
public final | is |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/ast/statement/SQLAlterDatabaseItem.java | {
"start": 714,
"end": 767
} | interface ____ extends SQLObject {
}
| SQLAlterDatabaseItem |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/CancellableRunnable.java | {
"start": 2720,
"end": 2814
} | enum ____ {
AWAITING_EXECUTION,
EXECUTING,
TIMED_OUT
}
}
| RunnableState |
java | apache__camel | components/camel-aws/camel-aws-cloudtrail/src/main/java/org/apache/camel/component/aws/cloudtrail/client/impl/CloudtrailClientIAMProfileOptimizedImpl.java | {
"start": 1915,
"end": 4823
} | class ____ implements CloudtrailInternalClient {
private static final Logger LOG = LoggerFactory.getLogger(CloudtrailClientIAMProfileOptimizedImpl.class);
private CloudtrailConfiguration configuration;
/**
* Constructor that uses the config file.
*/
public CloudtrailClientIAMProfileOptimizedImpl(CloudtrailConfiguration configuration) {
LOG.trace("Creating an AWS Cloudtrail client for an ec2 instance with IAM temporary credentials (normal for ec2s).");
this.configuration = configuration;
}
/**
* Getting the Cloudtrail client that is used.
*
* @return Amazon Cloudtrail Client.
*/
@Override
public CloudTrailClient getCloudtrailClient() {
CloudTrailClient client = null;
CloudTrailClientBuilder clientBuilder = CloudTrailClient.builder();
ProxyConfiguration.Builder proxyConfig = null;
ApacheHttpClient.Builder httpClientBuilder = null;
if (ObjectHelper.isNotEmpty(configuration.getProxyHost()) && ObjectHelper.isNotEmpty(configuration.getProxyPort())) {
proxyConfig = ProxyConfiguration.builder();
URI proxyEndpoint = URI.create(configuration.getProxyProtocol() + "://" + configuration.getProxyHost() + ":"
+ configuration.getProxyPort());
proxyConfig.endpoint(proxyEndpoint);
httpClientBuilder = ApacheHttpClient.builder().proxyConfiguration(proxyConfig.build());
clientBuilder = clientBuilder.httpClientBuilder(httpClientBuilder);
}
if (configuration.getProfileCredentialsName() != null) {
clientBuilder = clientBuilder.httpClientBuilder(httpClientBuilder)
.credentialsProvider(ProfileCredentialsProvider.create(configuration.getProfileCredentialsName()));
}
if (ObjectHelper.isNotEmpty(configuration.getRegion())) {
clientBuilder = clientBuilder.region(Region.of(configuration.getRegion()));
}
if (configuration.isOverrideEndpoint()) {
clientBuilder.endpointOverride(URI.create(configuration.getUriEndpointOverride()));
}
if (configuration.isTrustAllCertificates()) {
if (httpClientBuilder == null) {
httpClientBuilder = ApacheHttpClient.builder();
}
SdkHttpClient ahc = httpClientBuilder.buildWithDefaults(AttributeMap
.builder()
.put(
SdkHttpConfigurationOption.TRUST_ALL_CERTIFICATES,
Boolean.TRUE)
.build());
// set created http client to use instead of builder
clientBuilder.httpClient(ahc);
clientBuilder.httpClientBuilder(null);
}
client = clientBuilder.build();
return client;
}
}
| CloudtrailClientIAMProfileOptimizedImpl |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/UnusedMethodTest.java | {
"start": 14278,
"end": 14527
} | class ____ {
private Test() {}
}
""")
.doTest();
}
@Test
public void annotationProperty_assignedByname() {
helper
.addSourceLines(
"Test.java",
"""
| Test |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/aop/aspectj/DeclarationOrderIndependenceTests.java | {
"start": 3453,
"end": 3522
} | interface ____ {
void doSomething();
int getX();
}
| TopsyTurvyTarget |
java | quarkusio__quarkus | core/deployment/src/test/java/io/quarkus/deployment/util/JandexUtilTest.java | {
"start": 10218,
"end": 10295
} | class ____ extends GenericArrayRepo<Integer> {
}
public static | ArrayRepo |
java | quarkusio__quarkus | integration-tests/jaxb/src/main/java/io/quarkus/it/jaxb/BookIBANField.java | {
"start": 296,
"end": 527
} | class ____ {
@XmlElement
private String IBAN;
public BookIBANField() {
}
public void setIBAN(String IBAN) {
this.IBAN = IBAN;
}
public String getIBAN() {
return IBAN;
}
}
| BookIBANField |
java | redisson__redisson | redisson/src/main/java/org/redisson/api/geo/GeoSearchArgs.java | {
"start": 977,
"end": 1594
} | interface ____ {
/**
* Defines search from defined member
*
* @param member - object
* @return search conditions object
*/
static <V> ShapeGeoSearch from(V member) {
return new GeoSearchParams(member);
}
/**
* Defines search from defined longitude and latitude coordinates
*
* @param longitude - longitude of object
* @param latitude - latitude of object
* @return search conditions object
*/
static ShapeGeoSearch from(double longitude, double latitude) {
return new GeoSearchParams(longitude, latitude);
}
}
| GeoSearchArgs |
java | apache__dubbo | dubbo-registry/dubbo-registry-api/src/main/java/org/apache/dubbo/registry/client/metadata/ServiceInstanceNotificationCustomizer.java | {
"start": 994,
"end": 1106
} | interface ____ {
void customize(List<ServiceInstance> serviceInstance);
}
| ServiceInstanceNotificationCustomizer |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/mixed/ConcatMapXMainSubscriber.java | {
"start": 1323,
"end": 4398
} | class ____<T> extends AtomicInteger
implements FlowableSubscriber<T> {
private static final long serialVersionUID = -3214213361171757852L;
final AtomicThrowable errors;
final int prefetch;
final ErrorMode errorMode;
SimpleQueue<T> queue;
Subscription upstream;
volatile boolean done;
volatile boolean cancelled;
boolean syncFused;
public ConcatMapXMainSubscriber(int prefetch, ErrorMode errorMode) {
this.errorMode = errorMode;
this.errors = new AtomicThrowable();
this.prefetch = prefetch;
}
@Override
public final void onSubscribe(Subscription s) {
if (SubscriptionHelper.validate(upstream, s)) {
upstream = s;
if (s instanceof QueueSubscription) {
@SuppressWarnings("unchecked")
QueueSubscription<T> qs = (QueueSubscription<T>)s;
int mode = qs.requestFusion(QueueFuseable.ANY | QueueFuseable.BOUNDARY);
if (mode == QueueFuseable.SYNC) {
queue = qs;
syncFused = true;
done = true;
onSubscribeDownstream();
drain();
return;
}
else if (mode == QueueFuseable.ASYNC) {
queue = qs;
onSubscribeDownstream();
upstream.request(prefetch);
return;
}
}
queue = new SpscArrayQueue<>(prefetch);
onSubscribeDownstream();
upstream.request(prefetch);
}
}
@Override
public final void onNext(T t) {
// In async fusion mode, t is a drain indicator
if (t != null) {
if (!queue.offer(t)) {
upstream.cancel();
onError(new QueueOverflowException());
return;
}
}
drain();
}
@Override
public final void onError(Throwable t) {
if (errors.tryAddThrowableOrReport(t)) {
if (errorMode == ErrorMode.IMMEDIATE) {
disposeInner();
}
done = true;
drain();
}
}
@Override
public final void onComplete() {
done = true;
drain();
}
final void stop() {
cancelled = true;
upstream.cancel();
disposeInner();
errors.tryTerminateAndReport();
if (getAndIncrement() == 0) {
queue.clear();
clearValue();
}
}
/**
* Override this to clear values when the downstream disposes.
*/
void clearValue() {
}
/**
* Typically, this should be {@code downstream.onSubscribe(this);}.
*/
abstract void onSubscribeDownstream();
/**
* Typically, this should be {@code inner.dispose()}.
*/
abstract void disposeInner();
/**
* Implement the serialized inner subscribing and value emission here.
*/
abstract void drain();
}
| ConcatMapXMainSubscriber |
java | elastic__elasticsearch | x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/async/AsyncTaskManagementServiceTests.java | {
"start": 4123,
"end": 14468
} | class ____ implements AsyncTaskManagementService.AsyncOperation<TestRequest, TestResponse, TestTask> {
@Override
public TestTask createTask(
TestRequest request,
long id,
String type,
String action,
TaskId parentTaskId,
Map<String, String> headers,
Map<String, String> originHeaders,
AsyncExecutionId asyncExecutionId
) {
return new TestTask(
id,
type,
action,
request.getDescription(),
parentTaskId,
headers,
originHeaders,
asyncExecutionId,
TimeValue.timeValueDays(5)
);
}
@Override
public void execute(TestRequest request, TestTask task, ActionListener<TestResponse> listener) {
if (request.string.equals("die")) {
listener.onFailure(new IllegalArgumentException("test exception"));
} else {
listener.onResponse(new TestResponse("response for [" + request.string + "]", task.getExecutionId().getEncoded()));
}
}
@Override
public TestResponse initialResponse(TestTask task) {
return new TestResponse(null, task.getExecutionId().getEncoded());
}
@Override
public TestResponse readResponse(StreamInput inputStream) throws IOException {
return new TestResponse(inputStream);
}
}
public String index = "test-index";
@Before
public void setup() {
clusterService = getInstanceFromNode(ClusterService.class);
transportService = getInstanceFromNode(TransportService.class);
BigArrays bigArrays = getInstanceFromNode(BigArrays.class);
AsyncTaskIndexService<StoredAsyncResponse<TestResponse>> store = new AsyncTaskIndexService<>(
index,
clusterService,
transportService.getThreadPool().getThreadContext(),
client(),
"test",
in -> new StoredAsyncResponse<>(TestResponse::new, in),
writableRegistry(),
bigArrays
);
results = new AsyncResultsService<>(
store,
true,
TestTask.class,
(task, listener, timeout) -> addCompletionListener(transportService.getThreadPool(), task, listener, timeout),
transportService.getTaskManager(),
clusterService
);
}
/**
* Shutdown the executor so we don't leak threads into other test runs.
*/
@After
public void shutdownExec() {
executorService.shutdown();
}
private AsyncTaskManagementService<TestRequest, TestResponse, TestTask> createManagementService(
AsyncTaskManagementService.AsyncOperation<TestRequest, TestResponse, TestTask> operation
) {
BigArrays bigArrays = getInstanceFromNode(BigArrays.class);
return new AsyncTaskManagementService<>(
index,
client(),
"test_origin",
writableRegistry(),
transportService.getTaskManager(),
"test_action",
operation,
TestTask.class,
clusterService,
transportService.getThreadPool(),
bigArrays
);
}
public void testReturnBeforeTimeout() throws Exception {
AsyncTaskManagementService<TestRequest, TestResponse, TestTask> service = createManagementService(new TestOperation());
boolean success = randomBoolean();
boolean keepOnCompletion = randomBoolean();
CountDownLatch latch = new CountDownLatch(1);
TestRequest request = new TestRequest(success ? randomAlphaOfLength(10) : "die");
service.asyncExecute(
request,
TimeValue.timeValueMinutes(1),
TimeValue.timeValueMinutes(10),
keepOnCompletion,
ActionListener.wrap(r -> {
assertThat(success, equalTo(true));
assertThat(r.string, equalTo("response for [" + request.string + "]"));
assertThat(r.id, notNullValue());
latch.countDown();
}, e -> {
assertThat(success, equalTo(false));
assertThat(e.getMessage(), equalTo("test exception"));
latch.countDown();
})
);
assertThat(latch.await(10, TimeUnit.SECONDS), equalTo(true));
}
public void testReturnAfterTimeout() throws Exception {
CountDownLatch executionLatch = new CountDownLatch(1);
AsyncTaskManagementService<TestRequest, TestResponse, TestTask> service = createManagementService(new TestOperation() {
@Override
public void execute(TestRequest request, TestTask task, ActionListener<TestResponse> listener) {
executorService.submit(() -> {
try {
assertThat(executionLatch.await(10, TimeUnit.SECONDS), equalTo(true));
} catch (InterruptedException ex) {
fail("Shouldn't be here");
}
super.execute(request, task, listener);
});
}
});
boolean success = randomBoolean();
boolean keepOnCompletion = randomBoolean();
boolean timeoutOnFirstAttempt = randomBoolean();
boolean waitForCompletion = randomBoolean();
CountDownLatch latch = new CountDownLatch(1);
TestRequest request = new TestRequest(success ? randomAlphaOfLength(10) : "die");
AtomicReference<TestResponse> responseHolder = new AtomicReference<>();
service.asyncExecute(
request,
TimeValue.timeValueMillis(1),
TimeValue.timeValueMinutes(10),
keepOnCompletion,
ActionTestUtils.assertNoFailureListener(r -> {
assertThat(r.string, nullValue());
assertThat(r.id, notNullValue());
assertThat(responseHolder.getAndSet(r), nullValue());
latch.countDown();
})
);
assertThat(latch.await(20, TimeUnit.SECONDS), equalTo(true));
if (timeoutOnFirstAttempt) {
logger.trace("Getting an in-flight response");
// try getting results, but fail with timeout because it is not ready yet
StoredAsyncResponse<TestResponse> response = getResponse(responseHolder.get().id, TimeValue.timeValueMillis(2));
assertThat(response.getException(), nullValue());
assertThat(response.getResponse(), notNullValue());
assertThat(response.getResponse().id, equalTo(responseHolder.get().id));
assertThat(response.getResponse().string, nullValue());
}
if (waitForCompletion) {
// now we are waiting for the task to finish
logger.trace("Waiting for response to complete");
AtomicReference<StoredAsyncResponse<TestResponse>> responseRef = new AtomicReference<>();
CountDownLatch getResponseCountDown = getResponse(
responseHolder.get().id,
TimeValue.timeValueSeconds(5),
ActionTestUtils.assertNoFailureListener(responseRef::set)
);
executionLatch.countDown();
assertThat(getResponseCountDown.await(10, TimeUnit.SECONDS), equalTo(true));
StoredAsyncResponse<TestResponse> response = responseRef.get();
if (success) {
assertThat(response.getException(), nullValue());
assertThat(response.getResponse(), notNullValue());
assertThat(response.getResponse().id, equalTo(responseHolder.get().id));
assertThat(response.getResponse().string, equalTo("response for [" + request.string + "]"));
} else {
assertThat(response.getException(), notNullValue());
assertThat(response.getResponse(), nullValue());
assertThat(response.getException().getMessage(), equalTo("test exception"));
}
} else {
executionLatch.countDown();
}
// finally wait until the task disappears and get the response from the index
logger.trace("Wait for task to disappear ");
assertBusy(() -> {
Task task = transportService.getTaskManager().getTask(AsyncExecutionId.decode(responseHolder.get().id).getTaskId().getId());
assertThat(task, nullValue());
});
logger.trace("Getting the final response from the index");
StoredAsyncResponse<TestResponse> response = getResponse(responseHolder.get().id, TimeValue.ZERO);
if (success) {
assertThat(response.getException(), nullValue());
assertThat(response.getResponse(), notNullValue());
assertThat(response.getResponse().string, equalTo("response for [" + request.string + "]"));
} else {
assertThat(response.getException(), notNullValue());
assertThat(response.getResponse(), nullValue());
assertThat(response.getException().getMessage(), equalTo("test exception"));
}
}
private StoredAsyncResponse<TestResponse> getResponse(String id, TimeValue timeout) throws InterruptedException {
AtomicReference<StoredAsyncResponse<TestResponse>> response = new AtomicReference<>();
assertThat(
getResponse(id, timeout, ActionTestUtils.assertNoFailureListener(response::set)).await(10, TimeUnit.SECONDS),
equalTo(true)
);
return response.get();
}
private CountDownLatch getResponse(String id, TimeValue timeout, ActionListener<StoredAsyncResponse<TestResponse>> listener) {
CountDownLatch responseLatch = new CountDownLatch(1);
GetAsyncResultRequest getResultsRequest = new GetAsyncResultRequest(id).setWaitForCompletionTimeout(timeout);
results.retrieveResult(getResultsRequest, ActionListener.wrap(r -> {
listener.onResponse(r);
responseLatch.countDown();
}, e -> {
listener.onFailure(e);
responseLatch.countDown();
}));
return responseLatch;
}
}
| TestOperation |
java | google__guice | extensions/assistedinject/src/com/google/inject/assistedinject/FactoryModuleBuilder.java | {
"start": 1997,
"end": 2664
} | class ____ implements Payment {
* {@literal @}Inject
* public RealPayment(
* CreditService creditService,
* AuthService authService,
* <strong>{@literal @}Assisted Date startDate</strong>,
* <strong>{@literal @}Assisted Money amount</strong>) {
* ...
* }
* }</pre>
*
* <h3>Multiple factory methods for the same type</h3>
*
* If the factory contains many methods that return the same type, you can create multiple
* constructors in your concrete class, each constructor marked with with {@literal @}{@link
* AssistedInject}, in order to match the different parameters types of the factory methods.
*
* <pre>public | RealPayment |
java | quarkusio__quarkus | extensions/vertx/runtime/src/main/java/io/quarkus/vertx/core/runtime/graal/VertxSubstitutions.java | {
"start": 2025,
"end": 2380
} | class ____ {
@Substitute
public static boolean isAvailable() {
return false;
}
@Substitute
public static boolean isAlpnAvailable() {
return false;
}
}
@SuppressWarnings("rawtypes")
@TargetClass(className = "io.vertx.core.eventbus.impl.clustered.ClusteredEventBus")
final | Target_io_vertx_core_net_OpenSSLEngineOptions |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/type/java/LocaleMappingTests.java | {
"start": 4274,
"end": 5677
} | class ____ {
private Integer id;
private Locale locale;
private String name;
private Set<Locale> locales = new HashSet<>();
private Map<Locale, Integer> countByLocale = new HashMap<>();
public LocaleMappingTestEntity() {
}
public LocaleMappingTestEntity(Integer id, Locale locale, String name) {
this.id = id;
this.locale = locale;
this.name = name;
}
@Id
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Locale getLocale() {
return locale;
}
public void setLocale(Locale locale) {
this.locale = locale;
}
@ElementCollection
@CollectionTable(
name = "entity_locale",
joinColumns = @JoinColumn(name = "entity_id")
)
@Column(name = "locales")
public Set<Locale> getLocales() {
return locales;
}
public void setLocales(Set<Locale> locales) {
this.locales = locales;
}
@ElementCollection
@CollectionTable(name = "count_by_locale", joinColumns = @JoinColumn(name = "entity_id"))
@MapKeyColumn(name = "locl")
@Column(name = "cnt")
public Map<Locale, Integer> getCountByLocale() {
return countByLocale;
}
public void setCountByLocale(Map<Locale, Integer> countByLocale) {
this.countByLocale = countByLocale;
}
}
}
| LocaleMappingTestEntity |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/dev/devservices/DevServicesConfig.java | {
"start": 963,
"end": 1257
} | class ____ implements BooleanSupplier {
final DevServicesConfig config;
public Enabled(DevServicesConfig config) {
this.config = config;
}
@Override
public boolean getAsBoolean() {
return config.enabled();
}
}
}
| Enabled |
java | processing__processing4 | java/src/processing/mode/java/debug/LineBreakpoint.java | {
"start": 6189,
"end": 6611
} | class ____
*/
protected String className() {
if (line.fileName().endsWith(".pde")) {
// pure pde tab
return line.fileName().substring(0, line.fileName().lastIndexOf(".pde"));
}
if (line.fileName().endsWith(".java")) {
// pure java tab
return line.fileName().substring(0, line.fileName().lastIndexOf(".java"));
}
return null;
}
/**
* Event handler called when a | name |
java | apache__logging-log4j2 | log4j-jakarta-smtp/src/test/java/org/apache/logging/log4j/smtp/SmtpManagerTest.java | {
"start": 1512,
"end": 4156
} | class ____ {
private void testAdd(final LogEvent event) {
final SmtpAppender appender = SmtpAppender.newBuilder()
.setName("smtp")
.setTo("to")
.setCc("cc")
.setBcc("bcc")
.setFrom("from")
.setReplyTo("replyTo")
.setSubject("subject")
.setSmtpProtocol("smtp")
.setSmtpHost("host")
.setSmtpPort(0)
.setSmtpUsername("username")
.setSmtpPassword("password")
.setSmtpDebug(false)
.setFilter(null)
.setBufferSize(10)
.build();
final MailManager mailManager = appender.getManager();
assertThat(mailManager).isInstanceOf(SmtpManager.class);
final SmtpManager smtpManager = (SmtpManager) mailManager;
smtpManager.removeAllBufferedEvents(); // in case this smtpManager is reused
smtpManager.add(event);
final LogEvent[] bufferedEvents = smtpManager.removeAllBufferedEvents();
assertThat(bufferedEvents).as("Buffered events").hasSize(1);
assertThat(bufferedEvents[0].getMessage()).as("Immutable message").isNotInstanceOf(ReusableMessage.class);
}
// LOG4J2-3172: make sure existing protections are not violated
@Test
void testAdd_WhereLog4jLogEventWithReusableMessage() {
final LogEvent event = new Log4jLogEvent.Builder()
.setMessage(getReusableMessage("test message"))
.build();
testAdd(event);
}
// LOG4J2-3172: make sure existing protections are not violated
@Test
void testAdd_WhereMutableLogEvent() {
final MutableLogEvent event = new MutableLogEvent(new StringBuilder("test message"), null);
testAdd(event);
}
// LOG4J2-3172
@Test
void testAdd_WhereRingBufferLogEvent() {
final RingBufferLogEvent event = new RingBufferLogEvent();
event.setValues(
null,
null,
null,
null,
null,
getReusableMessage("test message"),
null,
null,
null,
0,
null,
0,
null,
ClockFactory.getClock(),
new DummyNanoClock());
testAdd(event);
}
private ReusableMessage getReusableMessage(final String text) {
final ReusableSimpleMessage message = new ReusableSimpleMessage();
message.set(text);
return message;
}
}
| SmtpManagerTest |
java | dropwizard__dropwizard | dropwizard-logging/src/main/java/io/dropwizard/logging/common/AppenderFactory.java | {
"start": 606,
"end": 866
} | class ____ implements {@link AppenderFactory}.</li>
* <li>Annotate it with {@code @JsonTypeName} and give it a unique type name.</li>
* <li>add a {@code META-INF/services/io.dropwizard.logging.common.AppenderFactory} file with your
* implementation's full | which |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/contexts/dependent/DependentCreationalContextTest.java | {
"start": 845,
"end": 2811
} | class ____ {
@RegisterExtension
ArcTestContainer container = ArcTestContainer.builder()
.beanClasses(NoPreDestroy.class, HasDestroy.class, HasDependency.class,
ProducerNoDisposer.class, ProducerWithDisposer.class, String.class, Boolean.class)
.beanRegistrars(new BeanRegistrar() {
@Override
public void register(RegistrationContext context) {
context.configure(SyntheticOne.class).addType(SyntheticOne.class).creator(SyntheticOne.class).done();
context.configure(SyntheticTwo.class).addType(SyntheticTwo.class).creator(SyntheticTwo.class)
.destroyer(SyntheticTwo.class).done();
}
})
.build();
@Test
public void testCreationalContextOptimization() {
InstanceImpl<Object> instance = (InstanceImpl<Object>) Arc.container().beanManager().createInstance();
assertBeanType(instance, NoPreDestroy.class, false);
assertBeanType(instance, HasDestroy.class, true);
assertBeanType(instance, HasDependency.class, true);
// ProducerNoDisposer
assertBeanType(instance, boolean.class, false);
// ProducerWithDisposer
assertBeanType(instance, String.class, true);
// Synthetic bean
assertBeanType(instance, SyntheticOne.class, false);
// Synthetic bean with destruction logic
assertBeanType(instance, SyntheticTwo.class, true);
}
<T> void assertBeanType(InstanceImpl<Object> instance, Class<T> beanType, boolean shouldBeStored) {
T bean = instance.select(beanType).get();
assertNotNull(bean);
if (shouldBeStored) {
assertTrue(instance.hasDependentInstances());
} else {
assertFalse(instance.hasDependentInstances());
}
instance.destroy(bean);
}
@Dependent
static | DependentCreationalContextTest |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/model/source/spi/TableSpecificationSource.java | {
"start": 252,
"end": 647
} | interface ____ {
/**
* Obtain the supplied schema name
*
* @return The schema name. If {@code null}, the binder will apply the default.
*/
String getExplicitSchemaName();
/**
* Obtain the supplied catalog name
*
* @return The catalog name. If {@code null}, the binder will apply the default.
*/
String getExplicitCatalogName();
String getComment();
}
| TableSpecificationSource |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/collections/MapKeyClassTest.java | {
"start": 912,
"end": 2012
} | class ____ {
@Test
public void testLifecycle(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
//tag::collections-map-key-class-persist-example[]
Person person = new Person();
person.setId(1L);
person.getCallRegister().put(new MobilePhone("01", "234", "567"), 101);
person.getCallRegister().put(new MobilePhone("01", "234", "789"), 102);
entityManager.persist(person);
//end::collections-map-key-class-persist-example[]
});
scope.inTransaction( entityManager -> {
//tag::collections-map-key-class-fetch-example[]
Person person = entityManager.find(Person.class, 1L);
assertEquals(2, person.getCallRegister().size());
assertEquals(
Integer.valueOf(101),
person.getCallRegister().get(MobilePhone.fromString("01-234-567"))
);
assertEquals(
Integer.valueOf(102),
person.getCallRegister().get(MobilePhone.fromString("01-234-789"))
);
//end::collections-map-key-class-fetch-example[]
});
}
//tag::collections-map-key-class-mapping-example[]
@Entity
@Table(name = "person")
public static | MapKeyClassTest |
java | resilience4j__resilience4j | resilience4j-core/src/main/java/io/github/resilience4j/core/registry/AbstractRegistry.java | {
"start": 6994,
"end": 8491
} | class ____ extends EventProcessor<RegistryEvent> implements
EventConsumer<RegistryEvent>, EventPublisher<E> {
private RegistryEventProcessor() {
}
private RegistryEventProcessor(List<RegistryEventConsumer<E>> registryEventConsumers) {
registryEventConsumers.forEach(consumer -> {
onEntryAdded(consumer::onEntryAddedEvent);
onEntryRemoved(consumer::onEntryRemovedEvent);
onEntryReplaced(consumer::onEntryReplacedEvent);
});
}
@Override
public EventPublisher<E> onEntryAdded(
EventConsumer<EntryAddedEvent<E>> onSuccessEventConsumer) {
registerConsumer(EntryAddedEvent.class.getName(), onSuccessEventConsumer);
return this;
}
@Override
public EventPublisher<E> onEntryRemoved(
EventConsumer<EntryRemovedEvent<E>> onErrorEventConsumer) {
registerConsumer(EntryRemovedEvent.class.getName(), onErrorEventConsumer);
return this;
}
@Override
public EventPublisher<E> onEntryReplaced(
EventConsumer<EntryReplacedEvent<E>> onStateTransitionEventConsumer) {
registerConsumer(EntryReplacedEvent.class.getName(), onStateTransitionEventConsumer);
return this;
}
@Override
public void consumeEvent(RegistryEvent event) {
super.processEvent(event);
}
}
}
| RegistryEventProcessor |
java | lettuce-io__lettuce-core | src/test/java/io/lettuce/test/RoutingInvocationHandler.java | {
"start": 142,
"end": 731
} | class ____ extends ConnectionDecoratingInvocationHandler {
private final InvocationHandler delegate;
public RoutingInvocationHandler(Object target, InvocationHandler delegate) {
super(target);
this.delegate = delegate;
}
@Override
protected Object handleInvocation(Object proxy, Method method, Object[] args) throws Throwable {
if (method.getName().equals("getStatefulConnection")) {
return super.handleInvocation(proxy, method, args);
}
return delegate.invoke(proxy, method, args);
}
}
| RoutingInvocationHandler |
java | apache__camel | components/camel-freemarker/src/test/java/org/apache/camel/component/freemarker/FreemarkerFileLetterTest.java | {
"start": 905,
"end": 1331
} | class ____ extends FreemarkerLetterTest {
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("direct:a")
.to("freemarker:file:src/test/resources/org/apache/camel/component/freemarker/letter.ftl")
.to("mock:result");
}
};
}
}
| FreemarkerFileLetterTest |
java | dropwizard__dropwizard | dropwizard-jersey/src/test/java/io/dropwizard/jersey/errors/EofExceptionWriterInterceptorJerseyTest.java | {
"start": 1693,
"end": 2102
} | class ____ {
@GET
public Response streamForever() {
final StreamingOutput output = os -> {
//noinspection InfiniteLoopStatement
while (true) {
os.write('a');
os.flush();
}
};
return Response.ok(output).build();
}
}
@Provider
public static | TestResource |
java | apache__flink | flink-metrics/flink-metrics-prometheus/src/main/java/org/apache/flink/metrics/prometheus/PrometheusPushGatewayReporterOptions.java | {
"start": 1485,
"end": 5576
} | class ____ {
public static final ConfigOption<String> HOST_URL =
ConfigOptions.key("hostUrl")
.stringType()
.noDefaultValue()
.withDescription(
"The PushGateway server host URL including scheme, host name, and port.");
public static final ConfigOption<String> JOB_NAME =
ConfigOptions.key("jobName")
.stringType()
.defaultValue("")
.withDescription("The job name under which metrics will be pushed");
public static final ConfigOption<Boolean> RANDOM_JOB_NAME_SUFFIX =
ConfigOptions.key("randomJobNameSuffix")
.booleanType()
.defaultValue(true)
.withDescription(
"Specifies whether a random suffix should be appended to the job name.");
public static final ConfigOption<Boolean> DELETE_ON_SHUTDOWN =
ConfigOptions.key("deleteOnShutdown")
.booleanType()
.defaultValue(true)
.withDescription(
Description.builder()
.text(
"Specifies whether to delete metrics from the PushGateway on shutdown."
+ " Flink will try its best to delete the metrics but this is not guaranteed. See %s for more details.",
LinkElement.link(
"https://issues.apache.org/jira/browse/FLINK-13787",
"here"))
.build());
public static final ConfigOption<Boolean> FILTER_LABEL_VALUE_CHARACTER =
ConfigOptions.key("filterLabelValueCharacters")
.booleanType()
.defaultValue(true)
.withDescription(
Description.builder()
.text(
"Specifies whether to filter label value characters."
+ " If enabled, all characters not matching [a-zA-Z0-9:_] will be removed,"
+ " otherwise no characters will be removed."
+ " Before disabling this option please ensure that your"
+ " label values meet the %s.",
LinkElement.link(
"https://prometheus.io/docs/concepts/data_model/#metric-names-and-labels",
"Prometheus requirements"))
.build());
public static final ConfigOption<String> GROUPING_KEY =
ConfigOptions.key("groupingKey")
.stringType()
.defaultValue("")
.withDescription(
Description.builder()
.text(
"Specifies the grouping key which is the group and global labels of all metrics."
+ " The label name and value are separated by '=', and labels are separated by ';', e.g., %s."
+ " Please ensure that your grouping key meets the %s.",
TextElement.code("k1=v1;k2=v2"),
LinkElement.link(
"https://prometheus.io/docs/concepts/data_model/#metric-names-and-labels",
"Prometheus requirements"))
.build());
}
| PrometheusPushGatewayReporterOptions |
java | quarkusio__quarkus | extensions/websockets-next/deployment/src/test/java/io/quarkus/websockets/next/test/client/TlsBasicConnectorRuntimeTlsConfigurationTest.java | {
"start": 1047,
"end": 2556
} | class ____ {
@RegisterExtension
public static final QuarkusUnitTest test = new QuarkusUnitTest()
.withApplicationRoot(jar -> jar
.addClasses(ServerEndpoint.class)
.addAsResource(new File("target/certs/ssl-test-keystore.jks"), "keystore.jks")
.addAsResource(new File("target/certs/ssl-test-truststore.jks"), "truststore.jks"))
.overrideConfigKey("quarkus.tls.key-store.jks.path", "keystore.jks")
.overrideConfigKey("quarkus.tls.key-store.jks.password", "secret");
@Inject
BasicWebSocketConnector connector;
@Inject
TlsConfigurationRegistry tlsRegistry;
@TestHTTPResource(value = "/end", tls = true)
URI uri;
@Test
void testClient() {
tlsRegistry.register("ws-client", new BaseTlsConfiguration() {
@Override
public TrustOptions getTrustStoreOptions() {
return new JksOptions().setPath("truststore.jks").setPassword("secret");
}
});
WebSocketClientConnection connection = connector
.tlsConfigurationName("ws-client")
.baseUri(uri)
.path("/{name}")
.pathParam("name", "Lu")
.connectAndAwait();
assertTrue(connection.isOpen());
assertTrue(connection.isSecure());
connection.closeAndAwait();
}
@WebSocket(path = "/end/{name}")
public static | TlsBasicConnectorRuntimeTlsConfigurationTest |
java | spring-projects__spring-framework | spring-beans/src/testFixtures/java/org/springframework/beans/testfixture/beans/factory/aot/CustomBean.java | {
"start": 786,
"end": 1086
} | class ____ {
private CustomPropertyValue customPropertyValue;
public CustomPropertyValue getCustomPropertyValue() {
return this.customPropertyValue;
}
public void setCustomPropertyValue(CustomPropertyValue customPropertyValue) {
this.customPropertyValue = customPropertyValue;
}
}
| CustomBean |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/engine/NestedTestClassesTests.java | {
"start": 16726,
"end": 16823
} | class ____ extends AbstractSuperClass {
// empty on purpose
}
static | TestCaseWithInheritedNested |
java | processing__processing4 | app/src/processing/app/syntax/PdeTextAreaDefaults.java | {
"start": 1304,
"end": 3880
} | class ____ extends TextAreaDefaults {
public PdeTextAreaDefaults() {
document = new SyntaxDocument();
// Set to 0 for revision 0215 because it causes strange jumps
// https://github.com/processing/processing/issues/1093
electricScroll = 0;
caretVisible = true;
caretBlinks = Preferences.getBoolean("editor.caret.blink");
blockCaret = Preferences.getBoolean("editor.caret.block");
cols = 80;
// Set the number of rows lower to avoid layout badness with large fonts
// https://github.com/processing/processing/issues/1313
rows = 5;
styles = new SyntaxStyle[Token.ID_COUNT];
updateTheme();
}
/**
* Deprecated since 4.0 beta 5, because the Mode is no longer used;
* simply use the default constructor instead.
*/
@Deprecated
public PdeTextAreaDefaults(Mode ignoredMode) {
this();
}
protected void updateTheme() {
fgcolor = Theme.getColor("editor.fgcolor");
bgcolor = Theme.getColor("editor.bgcolor");
styles[Token.COMMENT1] = Theme.getStyle("comment1");
styles[Token.COMMENT2] = Theme.getStyle("comment2");
styles[Token.KEYWORD1] = Theme.getStyle("keyword1");
styles[Token.KEYWORD2] = Theme.getStyle("keyword2");
styles[Token.KEYWORD3] = Theme.getStyle("keyword3");
styles[Token.KEYWORD4] = Theme.getStyle("keyword4");
styles[Token.KEYWORD5] = Theme.getStyle("keyword5");
styles[Token.KEYWORD6] = Theme.getStyle("keyword6");
styles[Token.FUNCTION1] = Theme.getStyle("function1");
styles[Token.FUNCTION2] = Theme.getStyle("function2");
styles[Token.FUNCTION3] = Theme.getStyle("function3");
styles[Token.FUNCTION4] = Theme.getStyle("function4");
styles[Token.LITERAL1] = Theme.getStyle("literal1");
styles[Token.LITERAL2] = Theme.getStyle("literal2");
styles[Token.LABEL] = Theme.getStyle("label");
styles[Token.OPERATOR] = Theme.getStyle("operator");
// area that's not in use by the text (replaced with tildes)
styles[Token.INVALID] = Theme.getStyle("invalid");
caretColor = Theme.getColor("editor.caret.color");
selectionColor = Theme.getColor("editor.selection.color");
lineHighlight = Theme.getBoolean("editor.line.highlight");
lineHighlightColor = Theme.getColor("editor.line.highlight.color");
bracketHighlight = Theme.getBoolean("editor.bracket.highlight");
bracketHighlightColor = Theme.getColor("editor.bracket.highlight.color");
eolMarkers = Theme.getBoolean("editor.eol_markers");
eolMarkerColor = Theme.getColor("editor.eol_markers.color");
}
}
| PdeTextAreaDefaults |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/context/properties/source/ConfigurationPropertySourcesTests.java | {
"start": 9003,
"end": 9642
} | class ____ extends MapPropertySource implements PropertySourceInfo {
private final boolean immutable;
TestPropertySource(int index, boolean immutable) {
super("test-" + index, createProperties(index));
this.immutable = immutable;
}
private static Map<String, Object> createProperties(int index) {
Map<String, Object> map = new LinkedHashMap<>();
for (int i = 0; i < 1000; i++) {
String name = "test-" + index + "-property-" + i;
String value = name + "-value";
map.put(name, value);
}
return map;
}
@Override
public boolean isImmutable() {
return this.immutable;
}
}
}
| TestPropertySource |
java | apache__camel | components/camel-platform-http-main/src/test/java/org/apache/camel/component/platform/http/main/authentication/AuthenticationConfigurationMainHttpServerTest.java | {
"start": 1506,
"end": 1863
} | class ____ extends RouteBuilder {
@Override
public void configure() throws Exception {
from("platform-http:/main-http-test")
.log("Received request with headers: ${headers}\nWith body: ${body}")
.setBody(simple("main-http-auth-basic-test-response"));
}
}
}
| PlatformHttpRouteBuilder |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/observers/DisposableSingleObserverTest.java | {
"start": 1051,
"end": 2921
} | class ____<T> extends DisposableSingleObserver<T> {
int start;
final List<T> values = new ArrayList<>();
final List<Throwable> errors = new ArrayList<>();
@Override
protected void onStart() {
super.onStart();
start++;
}
@Override
public void onSuccess(T value) {
values.add(value);
}
@Override
public void onError(Throwable e) {
errors.add(e);
}
}
@Test
public void normal() {
TestSingle<Integer> tc = new TestSingle<>();
assertFalse(tc.isDisposed());
assertEquals(0, tc.start);
assertTrue(tc.values.isEmpty());
assertTrue(tc.errors.isEmpty());
Single.just(1).subscribe(tc);
assertFalse(tc.isDisposed());
assertEquals(1, tc.start);
assertEquals(1, tc.values.get(0).intValue());
assertTrue(tc.errors.isEmpty());
}
@Test
public void startOnce() {
List<Throwable> error = TestHelper.trackPluginErrors();
try {
TestSingle<Integer> tc = new TestSingle<>();
tc.onSubscribe(Disposable.empty());
Disposable d = Disposable.empty();
tc.onSubscribe(d);
assertTrue(d.isDisposed());
assertEquals(1, tc.start);
TestHelper.assertError(error, 0, IllegalStateException.class, EndConsumerHelper.composeMessage(tc.getClass().getName()));
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void dispose() {
TestSingle<Integer> tc = new TestSingle<>();
tc.dispose();
assertTrue(tc.isDisposed());
Disposable d = Disposable.empty();
tc.onSubscribe(d);
assertTrue(d.isDisposed());
assertEquals(0, tc.start);
}
}
| TestSingle |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/bugs/_3809/Issue3809Mapper.java | {
"start": 720,
"end": 950
} | class ____ {
private String param1;
public String getParam1() {
return param1;
}
public void setParam1(String param1) {
this.param1 = param1;
}
}
| NestedSource |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFileInputFormat.java | {
"start": 18248,
"end": 21159
} | class ____ extends RawLocalFileSystem {
int numListLocatedStatusCalls = 0;
@Override
public FileStatus[] listStatus(Path f) throws FileNotFoundException,
IOException {
if (f.toString().equals("test:/a1")) {
return new FileStatus[] {
new FileStatus(0, true, 1, 150, 150, new Path("test:/a1/a2")),
new FileStatus(10, false, 1, 150, 150, new Path("test:/a1/file1")) };
} else if (f.toString().equals("test:/a1/a2")) {
return new FileStatus[] {
new FileStatus(10, false, 1, 150, 150,
new Path("test:/a1/a2/file2")),
new FileStatus(10, false, 1, 151, 150,
new Path("test:/a1/a2/file3")) };
}
return new FileStatus[0];
}
@Override
public FileStatus[] globStatus(Path pathPattern, PathFilter filter)
throws IOException {
return new FileStatus[] { new FileStatus(10, true, 1, 150, 150,
pathPattern) };
}
@Override
public FileStatus[] listStatus(Path f, PathFilter filter)
throws FileNotFoundException, IOException {
return this.listStatus(f);
}
@Override
public BlockLocation[] getFileBlockLocations(FileStatus file, long start, long len)
throws IOException {
DatanodeInfo[] ds = new DatanodeInfo[2];
ds[0] = new DatanodeDescriptor(
new DatanodeID("127.0.0.1", "localhost", "abcd",
9866, 9867, 9868, 9869));
ds[1] = new DatanodeDescriptor(
new DatanodeID("1.0.0.1", "otherhost", "efgh",
9866, 9867, 9868, 9869));
long blockLen = len / 3;
ExtendedBlock b1 = new ExtendedBlock("bpid", 0, blockLen, 0);
ExtendedBlock b2 = new ExtendedBlock("bpid", 1, blockLen, 1);
ExtendedBlock b3 = new ExtendedBlock("bpid", 2, len - 2 * blockLen, 2);
String[] names = new String[]{ "localhost:9866", "otherhost:9866" };
String[] hosts = new String[]{ "localhost", "otherhost" };
String[] cachedHosts = {"localhost"};
BlockLocation loc1 = new BlockLocation(names, hosts, cachedHosts,
new String[0], 0, blockLen, false);
BlockLocation loc2 = new BlockLocation(names, hosts, cachedHosts,
new String[0], blockLen, blockLen, false);
BlockLocation loc3 = new BlockLocation(names, hosts, cachedHosts,
new String[0], 2 * blockLen, len - 2 * blockLen, false);
return new BlockLocation[]{
new HdfsBlockLocation(loc1, new LocatedBlock(b1, ds)),
new HdfsBlockLocation(loc2, new LocatedBlock(b2, ds)),
new HdfsBlockLocation(loc3, new LocatedBlock(b3, ds)) };
}
@Override
protected RemoteIterator<LocatedFileStatus> listLocatedStatus(Path f,
PathFilter filter) throws FileNotFoundException, IOException {
++numListLocatedStatusCalls;
return super.listLocatedStatus(f, filter);
}
}
}
| MockFileSystem |
java | quarkusio__quarkus | extensions/reactive-routes/deployment/src/test/java/io/quarkus/vertx/web/validation/SyncValidationTest.java | {
"start": 940,
"end": 5417
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(MyRoutes.class));
@Test
public void test() {
// Valid result
get("/valid").then().statusCode(200)
.body("name", is("luke"))
.body("welcome", is("hello"));
// Valid parameter
given()
.queryParam("name", "neo")
.when()
.get("/query")
.then().statusCode(200);
// Invalid parameter
given()
.when()
.get("/invalid-param")
.then()
.statusCode(400)
.body("title", containsString("Constraint Violation"))
.body("status", is(400))
.body("detail", containsString("validation constraint violations"))
.body("violations[0].field", containsString("name"))
.body("violations[0].message", is(not(emptyString())));
// Invalid parameter - HTML output
get("/invalid-param-html")
.then()
// the return value is ok but the param is invalid
.statusCode(400)
.body(containsString("ConstraintViolation"), is(not(emptyString())));
// JSON output
given()
.header("Accept", "application/json")
.when()
.get("/invalid")
.then()
.statusCode(500)
.body("title", containsString("Constraint Violation"))
.body("status", is(500))
.body("detail", containsString("validation constraint violations"))
.body("violations[0].field", containsString("name"))
.body("violations[0].message", is(not(emptyString())));
given()
.header("Accept", "application/json")
.when()
.get("/invalid2").then().statusCode(500)
.body("title", containsString("Constraint Violation"))
.body("status", is(500))
.body("detail", containsString("validation constraint violations"))
.body("violations[0].field", anyOf(containsString("name"), containsString("welcome")))
.body("violations[0].message", is(not(emptyString())))
.body("violations[1].field", anyOf(containsString("name"), containsString("welcome")))
.body("violations[1].message", is(not(emptyString())));
// Input parameter violation - JSON
given()
.header("Accept", "application/json")
.queryParam("name", "doesNotMatch")
.when()
.get("/query")
.then().statusCode(400)
.body("title", containsString("Constraint Violation"))
.body("status", is(400))
.body("detail", containsString("validation constraint violations"))
.body("violations[0].field", containsString("name"))
.body("violations[0].message", is(not(emptyString())));
// Input parameter violation - JSON
given()
.queryParam("name", "doesNotMatch")
.when()
.get("/query")
.then().statusCode(400)
.body("title", containsString("Constraint Violation"))
.body("status", is(400))
.body("detail", containsString("validation constraint violations"));
given()
.contentType("application/json")
.body(new JsonObject().put("name", "foo").put("message", "bar").encode())
.when()
.post("/echo")
.then()
.statusCode(500)
.body("title", is("Constraint Violation"))
.body("detail", is("validation constraint violations"))
.body("status", is(500));
given()
.contentType("application/json")
.body(new JsonObject().put("name", "foobar").put("message", "quux").encode())
.when()
.post("/echo")
.then()
.statusCode(200)
.body("name", is("foobar"))
.body("welcome", is("quux"));
}
@ApplicationScoped
public static | SyncValidationTest |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/internal/creation/bytebuddy/SubclassByteBuddyMockMaker.java | {
"start": 1147,
"end": 2426
} | class ____ implements ClassCreatingMockMaker {
private final BytecodeGenerator cachingMockBytecodeGenerator;
public SubclassByteBuddyMockMaker() {
this(ModuleHandler.make());
}
public SubclassByteBuddyMockMaker(ModuleHandler handler) {
cachingMockBytecodeGenerator =
new TypeCachingBytecodeGenerator(new SubclassBytecodeGenerator(handler), false);
}
@Override
public <T> T createMock(MockCreationSettings<T> settings, MockHandler handler) {
Class<? extends T> mockedProxyType = createMockType(settings);
Instantiator instantiator = Plugins.getInstantiatorProvider().getInstantiator(settings);
T mockInstance = null;
try {
mockInstance = instantiator.newInstance(mockedProxyType);
MockAccess mockAccess = (MockAccess) mockInstance;
mockAccess.setMockitoInterceptor(new MockMethodInterceptor(handler, settings));
return ensureMockIsAssignableToMockedType(settings, mockInstance);
} catch (ClassCastException cce) {
throw new MockitoException(
join(
"ClassCastException occurred while creating the mockito mock :",
" | SubclassByteBuddyMockMaker |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.