language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReplaceDatanodeFailureReplication.java
|
{
"start": 2148,
"end": 8059
}
|
class ____ {
static final Logger LOG = LoggerFactory
.getLogger(TestReplaceDatanodeFailureReplication.class);
static final String DIR =
"/" + TestReplaceDatanodeFailureReplication.class.getSimpleName() + "/";
static final short REPLICATION = 3;
final private static String RACK0 = "/rack0";
/**
* Test fail last datanode in the pipeline.
*/
@Test
public void testLastDatanodeFailureInPipeline() throws Exception {
testWriteFileAndVerifyAfterDNStop(2, 1, 10, false);
}
/**
* Test fail first datanode in the pipeline.
*/
@Test
public void testFirstDatanodeFailureInPipeline() throws Exception {
testWriteFileAndVerifyAfterDNStop(2, 0, 10, false);
}
/**
* Test fail all the datanodes except first in the pipeline.
*/
@Test
public void testWithOnlyFirstDatanodeIsAlive() throws Exception {
testWriteFileAndVerifyAfterDNStop(1, 1, 1, true);
}
/**
* Test fail all the datanodes except lastnode in the pipeline.
*/
@Test
public void testWithOnlyLastDatanodeIsAlive() throws Exception {
testWriteFileAndVerifyAfterDNStop(1, 0, 1, true);
}
/**
* Test when number of live nodes are less than the
* "dfs.client.block.write.replace-datanode-on-failure.min.replication".
*/
@Test
public void testLessNumberOfLiveDatanodesThanWriteReplaceDatanodeOnFailureRF()
throws Exception {
final MiniDFSCluster cluster = setupCluster(2);
try {
final DistributedFileSystem fs = cluster.getFileSystem();
final Path dir = new Path(DIR);
final SlowWriter[] slowwriters = new SlowWriter[1];
for (int i = 1; i <= slowwriters.length; i++) {
// create slow writers in different speed
slowwriters[i - 1] = new SlowWriter(fs, new Path(dir, "file" + i),
i * 200L);
}
for (SlowWriter s : slowwriters) {
s.start();
}
// Let slow writers write something.
// Some of them are too slow and will be not yet started.
sleepSeconds(1);
// stop an old datanode
cluster.stopDataNode(0);
cluster.stopDataNode(0);
// Let the slow writer writes a few more seconds
// Everyone should have written something.
sleepSeconds(20);
// check replication and interrupt.
for (SlowWriter s : slowwriters) {
try {
s.out.getCurrentBlockReplication();
Assertions.fail(
"Must throw exception as failed to add a new datanode for write "
+ "pipeline, minimum failure replication");
} catch (IOException e) {
// expected
}
s.interruptRunning();
}
// close files
for (SlowWriter s : slowwriters) {
s.joinAndClose();
}
// Verify the file
verifyFileContent(fs, slowwriters);
} finally {
if (cluster != null) {
cluster.shutdown();
}
}
}
private MiniDFSCluster setupCluster(int failRF) throws IOException {
final Configuration conf = new HdfsConfiguration();
conf.setInt(HdfsClientConfigKeys.BlockWrite.ReplaceDatanodeOnFailure.
MIN_REPLICATION, failRF);
// always replace a datanode
ReplaceDatanodeOnFailure.write(Policy.ALWAYS, false, conf);
final String[] racks = new String[REPLICATION];
Arrays.fill(racks, RACK0);
return new MiniDFSCluster.Builder(conf).racks(racks)
.numDataNodes(REPLICATION).build();
}
private void testWriteFileAndVerifyAfterDNStop(int failRF, int dnindex,
int slowWrites, boolean failPipeLine)
throws IOException, InterruptedException, TimeoutException {
final MiniDFSCluster cluster = setupCluster(failRF);
try {
final DistributedFileSystem fs = cluster.getFileSystem();
final Path dir = new Path(DIR);
final SlowWriter[] slowwriters = new SlowWriter[slowWrites];
for (int i = 1; i <= slowwriters.length; i++) {
// create slow writers in different speed
slowwriters[i - 1] = new SlowWriter(fs, new Path(dir, "file" + i),
i * 200L);
}
for (SlowWriter s : slowwriters) {
s.start();
}
// Let slow writers write something.
// Some of them are too slow and will be not yet started.
sleepSeconds(3);
// stop an datanode
cluster.stopDataNode(dnindex);
if (failPipeLine) {
cluster.stopDataNode(dnindex);
}
// Let the slow writer writes a few more seconds
// Everyone should have written something.
sleepSeconds(5);
cluster.waitFirstBRCompleted(0, 10000);
// check replication and interrupt.
for (SlowWriter s : slowwriters) {
Assertions.assertEquals(failRF, s.out.getCurrentBlockReplication());
s.interruptRunning();
}
// close files
for (SlowWriter s : slowwriters) {
s.joinAndClose();
}
// Verify the file
verifyFileContent(fs, slowwriters);
} finally {
if (cluster != null) {
cluster.shutdown();
}
}
}
private void verifyFileContent(DistributedFileSystem fs,
SlowWriter[] slowwriters) throws IOException {
LOG.info("Verify the file");
for (int i = 0; i < slowwriters.length; i++) {
LOG.info(slowwriters[i].filepath + ": length=" + fs
.getFileStatus(slowwriters[i].filepath).getLen());
FSDataInputStream in = null;
try {
in = fs.open(slowwriters[i].filepath);
for (int j = 0, x;; j++) {
x = in.read();
if ((x) != -1) {
Assertions.assertEquals(j, x);
} else {
return;
}
}
} finally {
IOUtils.closeStream(in);
}
}
}
static void sleepSeconds(final int waittime) throws InterruptedException {
LOG.info("Wait " + waittime + " seconds");
Thread.sleep(waittime * 1000L);
}
static
|
TestReplaceDatanodeFailureReplication
|
java
|
apache__logging-log4j2
|
log4j-1.2-api/src/test/java/org/apache/log4j/PropertyConfiguratorTest.java
|
{
"start": 2102,
"end": 2253
}
|
class ____ {
/**
* Mock definition of FilterBasedTriggeringPolicy from extras companion.
*/
public static final
|
PropertyConfiguratorTest
|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/internal/subscribers/QueueDrainSubscriber.java
|
{
"start": 1399,
"end": 5076
}
|
class ____<T, U, V> extends QueueDrainSubscriberPad4 implements FlowableSubscriber<T>, QueueDrain<U, V> {
protected final Subscriber<? super V> downstream;
protected final SimplePlainQueue<U> queue;
protected volatile boolean cancelled;
protected volatile boolean done;
protected Throwable error;
public QueueDrainSubscriber(Subscriber<? super V> actual, SimplePlainQueue<U> queue) {
this.downstream = actual;
this.queue = queue;
}
@Override
public final boolean cancelled() {
return cancelled;
}
@Override
public final boolean done() {
return done;
}
@Override
public final boolean enter() {
return wip.getAndIncrement() == 0;
}
public final boolean fastEnter() {
return wip.get() == 0 && wip.compareAndSet(0, 1);
}
protected final void fastPathEmitMax(U value, boolean delayError, Disposable dispose) {
final Subscriber<? super V> s = downstream;
final SimplePlainQueue<U> q = queue;
if (fastEnter()) {
long r = requested.get();
if (r != 0L) {
if (accept(s, value)) {
if (r != Long.MAX_VALUE) {
produced(1);
}
}
if (leave(-1) == 0) {
return;
}
} else {
dispose.dispose();
s.onError(MissingBackpressureException.createDefault());
return;
}
} else {
q.offer(value);
if (!enter()) {
return;
}
}
QueueDrainHelper.drainMaxLoop(q, s, delayError, dispose, this);
}
protected final void fastPathOrderedEmitMax(U value, boolean delayError, Disposable dispose) {
final Subscriber<? super V> s = downstream;
final SimplePlainQueue<U> q = queue;
if (fastEnter()) {
long r = requested.get();
if (r != 0L) {
if (q.isEmpty()) {
if (accept(s, value)) {
if (r != Long.MAX_VALUE) {
produced(1);
}
}
if (leave(-1) == 0) {
return;
}
} else {
q.offer(value);
}
} else {
cancelled = true;
dispose.dispose();
s.onError(MissingBackpressureException.createDefault());
return;
}
} else {
q.offer(value);
if (!enter()) {
return;
}
}
QueueDrainHelper.drainMaxLoop(q, s, delayError, dispose, this);
}
@Override
public boolean accept(Subscriber<? super V> a, U v) {
return false;
}
@Override
public final Throwable error() {
return error;
}
@Override
public final int leave(int m) {
return wip.addAndGet(m);
}
@Override
public final long requested() {
return requested.get();
}
@Override
public final long produced(long n) {
return requested.addAndGet(-n);
}
public final void requested(long n) {
if (SubscriptionHelper.validate(n)) {
BackpressureHelper.add(requested, n);
}
}
}
// -------------------------------------------------------------------
// Padding superclasses
//-------------------------------------------------------------------
/** Pads the header away from other fields. */
|
QueueDrainSubscriber
|
java
|
hibernate__hibernate-orm
|
tooling/metamodel-generator/src/test/java/org/hibernate/processor/test/embeddable/SimpleEntity.java
|
{
"start": 502,
"end": 1063
}
|
class ____ {
@Id
@GeneratedValue
private Integer id;
@NotNullAllowed
@Embedded
private SimpleEmbeddable simpleEmbeddable;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public SimpleEmbeddable getSimpleEmbeddable() {
return simpleEmbeddable;
}
public void setSimpleEmbeddable(SimpleEmbeddable simpleEmbeddable) {
this.simpleEmbeddable = simpleEmbeddable;
}
// represents a mock TYPE_USE based annotation
@Target({ ElementType.TYPE_USE })
@Retention(RetentionPolicy.RUNTIME)
public @
|
SimpleEntity
|
java
|
square__javapoet
|
src/test/java/com/squareup/javapoet/JavaFileTest.java
|
{
"start": 10480,
"end": 10937
}
|
class ____ {\n"
+ "}\n");
}
@Test public void singleImport() throws Exception {
String source = JavaFile.builder("com.squareup.tacos",
TypeSpec.classBuilder("Taco")
.addField(Date.class, "madeFreshDate")
.build())
.build()
.toString();
assertThat(source).isEqualTo(""
+ "package com.squareup.tacos;\n"
+ "\n"
+ "import java.util.Date;\n"
+ "\n"
+ "
|
Taco
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/sql/results/graph/entity/internal/AbstractBatchEntitySelectFetchInitializer.java
|
{
"start": 1770,
"end": 12949
}
|
class ____ extends EntitySelectFetchInitializerData {
final boolean batchDisabled;
// per-row state
protected @Nullable EntityKey entityKey;
public AbstractBatchEntitySelectFetchInitializerData(
AbstractBatchEntitySelectFetchInitializer<?> initializer,
RowProcessingState rowProcessingState) {
super( initializer, rowProcessingState );
batchDisabled = isBatchDisabled( initializer, rowProcessingState );
}
private static boolean isBatchDisabled(
AbstractBatchEntitySelectFetchInitializer<?> initializer,
RowProcessingState rowProcessingState) {
return rowProcessingState.isScrollResult()
|| !rowProcessingState.getLoadQueryInfluencers()
.effectivelyBatchLoadable( initializer.toOneMapping.getEntityMappingType().getEntityPersister() );
}
}
public AbstractBatchEntitySelectFetchInitializer(
InitializerParent<?> parent,
ToOneAttributeMapping toOneMapping,
NavigablePath fetchedNavigable,
EntityPersister concreteDescriptor,
DomainResult<?> keyResult,
boolean affectedByFilter,
AssemblerCreationState creationState) {
super( parent, toOneMapping, fetchedNavigable, concreteDescriptor, keyResult, affectedByFilter, creationState );
//noinspection unchecked
owningEntityInitializer =
(EntityInitializer<InitializerData>)
Initializer.findOwningEntityInitializer( parent );
assert owningEntityInitializer != null : "This initializer requires an owning parent entity initializer";
}
protected abstract void registerResolutionListener(Data data);
@Override
public void resolveKey(Data data) {
if ( data.getState() == State.UNINITIALIZED ) {
data.entityKey = null;
data.setInstance( null );
final var rowProcessingState = data.getRowProcessingState();
//noinspection unchecked
final var initializer = (Initializer<InitializerData>) keyAssembler.getInitializer();
if ( initializer != null ) {
final var subData = initializer.getData( rowProcessingState );
initializer.resolveKey( subData );
data.entityIdentifier = null;
data.setState( subData.getState() == State.MISSING ? State.MISSING : State.KEY_RESOLVED );
}
else {
data.entityIdentifier = keyAssembler.assemble( rowProcessingState );
data.setState( data.entityIdentifier == null ? State.MISSING : State.KEY_RESOLVED );
}
}
}
@Override
public void resolveInstance(Data data) {
if ( data.getState() == State.KEY_RESOLVED ) {
data.setState( State.RESOLVED );
final var rowProcessingState = data.getRowProcessingState();
if ( data.entityIdentifier == null ) {
// entityIdentifier can be null if the identifier is based on an initializer
data.entityIdentifier = keyAssembler.assemble( rowProcessingState );
if ( data.entityIdentifier == null ) {
data.entityKey = null;
data.setInstance( null );
data.setState( State.MISSING );
return;
}
}
resolveInstanceFromIdentifier( data );
}
}
protected void resolveInstanceFromIdentifier(Data data) {
if ( data.batchDisabled ) {
initialize( data );
}
else {
data.entityKey = new EntityKey( data.entityIdentifier, concreteDescriptor );
data.setInstance( getExistingInitializedInstance( data ) );
if ( data.getInstance() == null ) {
// need to add the key to the batch queue only when the entity has not been already loaded or
// there isn't another initializer that is loading it
registerToBatchFetchQueue( data );
}
}
}
@Override
public void resolveInstance(Object instance, Data data) {
if ( instance == null ) {
data.setState( State.MISSING );
data.entityKey = null;
data.setInstance( null );
}
else {
resolve( instance, data );
}
}
private void resolve(Object instance, Data data) {
final var rowProcessingState = data.getRowProcessingState();
final var session = rowProcessingState.getSession();
final var persistenceContext = session.getPersistenceContextInternal();
// Only need to extract the identifier if the identifier has a many to one
final var lazyInitializer = extractLazyInitializer( instance );
data.entityIdentifier = null;
if ( lazyInitializer == null ) {
// Entity is most probably initialized
data.setInstance( instance );
if ( concreteDescriptor.getBytecodeEnhancementMetadata().isEnhancedForLazyLoading()
&& isPersistentAttributeInterceptable( instance )
&& getAttributeInterceptor( instance )
instanceof EnhancementAsProxyLazinessInterceptor enhancementInterceptor ) {
if ( enhancementInterceptor.isInitialized() ) {
data.setState( State.INITIALIZED );
}
else {
data.setState( State.RESOLVED );
data.entityIdentifier = enhancementInterceptor.getIdentifier();
}
if ( data.entityIdentifier == null ) {
data.entityIdentifier = concreteDescriptor.getIdentifier( instance, session );
}
}
else {
// If the entity initializer is null, we know the entity is fully initialized;
// otherwise, it will be initialized by some other initializer
data.setState( State.RESOLVED );
data.entityIdentifier = concreteDescriptor.getIdentifier( instance, session );
}
}
else if ( lazyInitializer.isUninitialized() ) {
data.setState( State.RESOLVED );
data.entityIdentifier = lazyInitializer.getInternalIdentifier();
}
else {
// Entity is initialized
data.setState( State.INITIALIZED );
data.entityIdentifier = lazyInitializer.getInternalIdentifier();
data.setInstance( lazyInitializer.getImplementation() );
}
data.entityKey = new EntityKey( data.entityIdentifier, concreteDescriptor );
final var entityHolder = persistenceContext.getEntityHolder(
data.entityKey
);
if ( entityHolder == null || entityHolder.getEntity() != instance && entityHolder.getProxy() != instance ) {
// the existing entity instance is detached or transient
if ( entityHolder != null ) {
final var managed = entityHolder.getManagedObject();
data.setInstance( managed );
data.entityKey = entityHolder.getEntityKey();
data.entityIdentifier = data.entityKey.getIdentifier();
if ( entityHolder.isInitialized() ) {
data.setState( State.INITIALIZED );
}
else {
data.setState( State.RESOLVED );
}
}
else {
data.setState( State.RESOLVED );
}
}
if ( data.getState() == State.RESOLVED ) {
// similar to resolveInstanceFromIdentifier, but we already have the holder here
if ( data.batchDisabled ) {
initialize( data, entityHolder, session, persistenceContext );
}
else if ( entityHolder == null || !entityHolder.isEventuallyInitialized() ) {
// need to add the key to the batch queue only when the entity has not been already loaded or
// there isn't another initializer that is loading it
registerResolutionListener( data );
registerToBatchFetchQueue( data );
}
}
if ( keyIsEager ) {
final var initializer = keyAssembler.getInitializer();
assert initializer != null;
initializer.resolveInstance( data.entityIdentifier, rowProcessingState );
}
else if ( rowProcessingState.needsResolveState() ) {
// Resolve the state of the identifier if result caching is enabled and this is not a query cache hit
keyAssembler.resolveState( rowProcessingState );
}
}
@Override
public void initializeInstance(Data data) {
if ( data.getState() == State.RESOLVED ) {
data.setState( State.INITIALIZED );
if ( data.batchDisabled ) {
Hibernate.initialize( data.getInstance() );
}
}
}
protected Object getExistingInitializedInstance(Data data) {
final var session = data.getRowProcessingState().getSession();
final var persistenceContext = session.getPersistenceContextInternal();
final var holder = persistenceContext.getEntityHolder( data.entityKey );
if ( holder != null ) {
final Object entity = holder.getEntity();
if ( entity != null && holder.isEventuallyInitialized() ) {
return entity;
}
}
// we need to register a resolution listener only if there is not an already initialized instance
// or an instance that another initializer is loading
registerResolutionListener( data );
return null;
}
protected void registerToBatchFetchQueue(Data data) {
assert data.entityKey != null;
data.getRowProcessingState().getSession().getPersistenceContextInternal()
.getBatchFetchQueue().addBatchLoadableEntityKey( data.entityKey );
}
@Override
public void initializeInstanceFromParent(Object parentInstance, Data data) {
final var attributeMapping = getInitializedPart().asAttributeMapping();
final Object instance =
attributeMapping != null
? attributeMapping.getValue( parentInstance )
: parentInstance;
// No need to initialize these fields
data.entityKey = null;
data.entityIdentifier = null;
data.setInstance( null );
if ( instance == null ) {
data.setState( State.MISSING );
}
else {
final var lazyInitializer = extractLazyInitializer( instance );
if ( lazyInitializer != null && lazyInitializer.isUninitialized() ) {
data.entityKey = new EntityKey( lazyInitializer.getInternalIdentifier(), concreteDescriptor );
registerToBatchFetchQueue( data );
}
data.setState( State.INITIALIZED );
}
}
protected static Object loadInstance(
EntityKey entityKey,
ToOneAttributeMapping toOneMapping,
boolean affectedByFilter,
SharedSessionContractImplementor session) {
final String entityName = entityKey.getEntityName();
final Object identifier = entityKey.getIdentifier();
final Object instance =
session.internalLoad( entityName, identifier, true,
toOneMapping.isInternalLoadNullable() );
if ( instance == null ) {
checkNotFound( toOneMapping, affectedByFilter, entityName, identifier );
}
return instance;
}
protected AttributeMapping[] getParentEntityAttributes(String attributeName) {
final var entityDescriptor = owningEntityInitializer.getEntityDescriptor();
final int size =
entityDescriptor.getRootEntityDescriptor()
.getSubclassEntityNames().size();
final var parentEntityAttributes = new AttributeMapping[size];
parentEntityAttributes[ entityDescriptor.getSubclassId() ] =
getParentEntityAttribute( entityDescriptor, toOneMapping, attributeName );
for ( EntityMappingType subMappingType : entityDescriptor.getSubMappingTypes() ) {
parentEntityAttributes[ subMappingType.getSubclassId() ] =
getParentEntityAttribute( subMappingType, toOneMapping, attributeName );
}
return parentEntityAttributes;
}
protected static AttributeMapping getParentEntityAttribute(
EntityMappingType subMappingType,
ToOneAttributeMapping referencedModelPart,
String attributeName) {
final var parentAttribute = subMappingType.findAttributeMapping( attributeName );
// These checks are needed to avoid setting the instance using the wrong (child's) model part or
// setting it multiple times in case parent and child share the same attribute name for the association.
return parentAttribute != null
&& parentAttribute.getDeclaringType()
== referencedModelPart.getDeclaringType().findContainingEntityMapping()
? parentAttribute
: null;
}
}
|
AbstractBatchEntitySelectFetchInitializerData
|
java
|
google__auto
|
value/src/test/java/com/google/auto/value/processor/PropertyAnnotationsTest.java
|
{
"start": 1710,
"end": 1901
}
|
enum ____ {
A,
B;
@Override
public String toString() {
// used to prove that the method we determine the value does not use the `toString()` method
// of the
|
TestEnum
|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/internal/operators/observable/ObservableToListSingle.java
|
{
"start": 2459,
"end": 3661
}
|
class ____<T, U extends Collection<? super T>> implements Observer<T>, Disposable {
final SingleObserver<? super U> downstream;
U collection;
Disposable upstream;
ToListObserver(SingleObserver<? super U> actual, U collection) {
this.downstream = actual;
this.collection = collection;
}
@Override
public void onSubscribe(Disposable d) {
if (DisposableHelper.validate(this.upstream, d)) {
this.upstream = d;
downstream.onSubscribe(this);
}
}
@Override
public void dispose() {
upstream.dispose();
}
@Override
public boolean isDisposed() {
return upstream.isDisposed();
}
@Override
public void onNext(T t) {
collection.add(t);
}
@Override
public void onError(Throwable t) {
collection = null;
downstream.onError(t);
}
@Override
public void onComplete() {
U c = collection;
collection = null;
downstream.onSuccess(c);
}
}
}
|
ToListObserver
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java
|
{
"start": 7960,
"end": 266024
}
|
class ____ extends AbstractStatementParserTests {
private static final LogicalPlan PROCESSING_CMD_INPUT = new Row(EMPTY, List.of(new Alias(EMPTY, "a", integer(1))));
public void testRowCommand() {
assertEqualsIgnoringIds(
new Row(EMPTY, List.of(new Alias(EMPTY, "a", integer(1)), new Alias(EMPTY, "b", integer(2)))),
statement("row a = 1, b = 2")
);
}
public void testRowCommandImplicitFieldName() {
assertEqualsIgnoringIds(
new Row(
EMPTY,
List.of(new Alias(EMPTY, "1", integer(1)), new Alias(EMPTY, "2", integer(2)), new Alias(EMPTY, "c", integer(3)))
),
statement("row 1, 2, c = 3")
);
}
public void testRowCommandLong() {
assertEqualsIgnoringIds(new Row(EMPTY, List.of(new Alias(EMPTY, "c", literalLong(2147483648L)))), statement("row c = 2147483648"));
}
public void testRowCommandHugeInt() {
assertEqualsIgnoringIds(
new Row(EMPTY, List.of(new Alias(EMPTY, "c", literalUnsignedLong("9223372036854775808")))),
statement("row c = 9223372036854775808")
);
assertEqualsIgnoringIds(
new Row(EMPTY, List.of(new Alias(EMPTY, "c", literalDouble(18446744073709551616.)))),
statement("row c = 18446744073709551616")
);
}
public void testRowCommandHugeNegativeInt() {
assertEqualsIgnoringIds(
new Row(EMPTY, List.of(new Alias(EMPTY, "c", literalDouble(-92233720368547758080d)))),
statement("row c = -92233720368547758080")
);
assertEqualsIgnoringIds(
new Row(EMPTY, List.of(new Alias(EMPTY, "c", literalDouble(-18446744073709551616d)))),
statement("row c = -18446744073709551616")
);
}
public void testRowCommandDouble() {
assertEqualsIgnoringIds(new Row(EMPTY, List.of(new Alias(EMPTY, "c", literalDouble(1.0)))), statement("row c = 1.0"));
}
public void testRowCommandMultivalueInt() {
assertEqualsIgnoringIds(new Row(EMPTY, List.of(new Alias(EMPTY, "c", integers(1, 2, -5)))), statement("row c = [1, 2, -5]"));
}
public void testRowCommandMultivalueLong() {
assertEqualsIgnoringIds(
new Row(EMPTY, List.of(new Alias(EMPTY, "c", literalLongs(2147483648L, 2147483649L, -434366649L)))),
statement("row c = [2147483648, 2147483649, -434366649]")
);
}
public void testRowCommandMultivalueLongAndInt() {
assertEqualsIgnoringIds(
new Row(EMPTY, List.of(new Alias(EMPTY, "c", literalLongs(2147483648L, 1L)))),
statement("row c = [2147483648, 1]")
);
}
public void testRowCommandMultivalueHugeInts() {
assertEqualsIgnoringIds(
new Row(EMPTY, List.of(new Alias(EMPTY, "c", literalDoubles(18446744073709551616., 18446744073709551617.)))),
statement("row c = [18446744073709551616, 18446744073709551617]")
);
assertEqualsIgnoringIds(
new Row(EMPTY, List.of(new Alias(EMPTY, "c", literalUnsignedLongs("9223372036854775808", "9223372036854775809")))),
statement("row c = [9223372036854775808, 9223372036854775809]")
);
}
public void testRowCommandMultivalueHugeIntAndNormalInt() {
assertEqualsIgnoringIds(
new Row(EMPTY, List.of(new Alias(EMPTY, "c", literalDoubles(18446744073709551616., 1.0)))),
statement("row c = [18446744073709551616, 1]")
);
assertEqualsIgnoringIds(
new Row(EMPTY, List.of(new Alias(EMPTY, "c", literalUnsignedLongs("9223372036854775808", "1")))),
statement("row c = [9223372036854775808, 1]")
);
}
public void testRowCommandMultivalueDouble() {
assertEqualsIgnoringIds(
new Row(EMPTY, List.of(new Alias(EMPTY, "c", literalDoubles(1.0, 2.0, -3.4)))),
statement("row c = [1.0, 2.0, -3.4]")
);
}
public void testRowCommandBoolean() {
assertEqualsIgnoringIds(new Row(EMPTY, List.of(new Alias(EMPTY, "c", literalBoolean(false)))), statement("row c = false"));
}
public void testRowCommandMultivalueBoolean() {
assertEqualsIgnoringIds(
new Row(EMPTY, List.of(new Alias(EMPTY, "c", literalBooleans(false, true)))),
statement("row c = [false, true]")
);
}
public void testRowCommandString() {
assertEqualsIgnoringIds(new Row(EMPTY, List.of(new Alias(EMPTY, "c", literalString("chicken")))), statement("row c = \"chicken\""));
}
public void testRowCommandMultivalueString() {
assertEqualsIgnoringIds(
new Row(EMPTY, List.of(new Alias(EMPTY, "c", literalStrings("cat", "dog")))),
statement("row c = [\"cat\", \"dog\"]")
);
}
public void testRowCommandWithEscapedFieldName() {
assertEqualsIgnoringIds(
new Row(
EMPTY,
List.of(
new Alias(EMPTY, "a.b.c", integer(1)),
new Alias(EMPTY, "b", integer(2)),
new Alias(EMPTY, "@timestamp", Literal.keyword(EMPTY, "2022-26-08T00:00:00"))
)
),
statement("row a.b.c = 1, `b` = 2, `@timestamp`=\"2022-26-08T00:00:00\"")
);
}
public void testCompositeCommand() {
assertEqualsIgnoringIds(
new Filter(EMPTY, new Row(EMPTY, List.of(new Alias(EMPTY, "a", integer(1)))), TRUE),
statement("row a = 1 | where true")
);
}
public void testMultipleCompositeCommands() {
assertEqualsIgnoringIds(
new Filter(
EMPTY,
new Filter(EMPTY, new Filter(EMPTY, new Row(EMPTY, List.of(new Alias(EMPTY, "a", integer(1)))), TRUE), FALSE),
TRUE
),
statement("row a = 1 | where true | where false | where true")
);
}
public void testEval() {
assertEqualsIgnoringIds(
new Eval(EMPTY, PROCESSING_CMD_INPUT, List.of(new Alias(EMPTY, "b", attribute("a")))),
processingCommand("eval b = a")
);
assertEqualsIgnoringIds(
new Eval(
EMPTY,
PROCESSING_CMD_INPUT,
List.of(new Alias(EMPTY, "b", attribute("a")), new Alias(EMPTY, "c", new Add(EMPTY, attribute("a"), integer(1))))
),
processingCommand("eval b = a, c = a + 1")
);
}
public void testEvalImplicitNames() {
assertEqualsIgnoringIds(
new Eval(EMPTY, PROCESSING_CMD_INPUT, List.of(new Alias(EMPTY, "a", attribute("a")))),
processingCommand("eval a")
);
assertEqualsIgnoringIds(
new Eval(
EMPTY,
PROCESSING_CMD_INPUT,
List.of(
new Alias(
EMPTY,
"fn(a + 1)",
new UnresolvedFunction(EMPTY, "fn", DEFAULT, List.of(new Add(EMPTY, attribute("a"), integer(1))))
)
)
),
processingCommand("eval fn(a + 1)")
);
}
public void testStatsWithGroups() {
assertEqualsIgnoringIds(
new Aggregate(
EMPTY,
PROCESSING_CMD_INPUT,
List.of(attribute("c"), attribute("d.e")),
List.of(
new Alias(EMPTY, "b", new UnresolvedFunction(EMPTY, "min", DEFAULT, List.of(attribute("a")))),
attribute("c"),
attribute("d.e")
)
),
processingCommand("stats b = min(a) by c, d.e")
);
}
public void testStatsWithoutGroups() {
assertEqualsIgnoringIds(
new Aggregate(
EMPTY,
PROCESSING_CMD_INPUT,
List.of(),
List.of(
new Alias(EMPTY, "min(a)", new UnresolvedFunction(EMPTY, "min", DEFAULT, List.of(attribute("a")))),
new Alias(EMPTY, "c", integer(1))
)
),
processingCommand("stats min(a), c = 1")
);
}
public void testStatsWithoutAggs() {
assertEqualsIgnoringIds(
new Aggregate(EMPTY, PROCESSING_CMD_INPUT, List.of(attribute("a")), List.of(attribute("a"))),
processingCommand("stats by a")
);
}
public void testStatsWithoutAggsOrGroup() {
expectError("from text | stats", "At least one aggregation or grouping expression required in [stats]");
}
public void testAggsWithGroupKeyAsAgg() {
var queries = new String[] { """
row a = 1, b = 2
| stats a by a
""", """
row a = 1, b = 2
| stats a by a
| sort a
""", """
row a = 1, b = 2
| stats a = a by a
""", """
row a = 1, b = 2
| stats x = a by a
""" };
for (String query : queries) {
expectVerificationError(query, "grouping key [a] already specified in the STATS BY clause");
}
}
public void testStatsWithGroupKeyAndAggFilter() {
var a = attribute("a");
var f = new UnresolvedFunction(EMPTY, "min", DEFAULT, List.of(a));
var filter = new Alias(EMPTY, "min(a) where a > 1", new FilteredExpression(EMPTY, f, new GreaterThan(EMPTY, a, integer(1))));
assertEqualsIgnoringIds(
new Aggregate(EMPTY, PROCESSING_CMD_INPUT, List.of(a), List.of(filter, a)),
processingCommand("stats min(a) where a > 1 by a")
);
}
public void testStatsWithGroupKeyAndMixedAggAndFilter() {
var a = attribute("a");
var min = new UnresolvedFunction(EMPTY, "min", DEFAULT, List.of(a));
var max = new UnresolvedFunction(EMPTY, "max", DEFAULT, List.of(a));
var avg = new UnresolvedFunction(EMPTY, "avg", DEFAULT, List.of(a));
var min_alias = new Alias(EMPTY, "min", min);
var max_filter_ex = new Or(
EMPTY,
new GreaterThan(EMPTY, new Mod(EMPTY, a, integer(3)), integer(10)),
new GreaterThan(EMPTY, new Div(EMPTY, a, integer(2)), integer(100))
);
var max_filter = new Alias(EMPTY, "max", new FilteredExpression(EMPTY, max, max_filter_ex));
var avg_filter_ex = new GreaterThan(EMPTY, new Div(EMPTY, a, integer(2)), integer(100));
var avg_filter = new Alias(EMPTY, "avg", new FilteredExpression(EMPTY, avg, avg_filter_ex));
assertEqualsIgnoringIds(
new Aggregate(EMPTY, PROCESSING_CMD_INPUT, List.of(a), List.of(min_alias, max_filter, avg_filter, a)),
processingCommand("""
stats
min = min(a),
max = max(a) WHERE (a % 3 > 10 OR a / 2 > 100),
avg = avg(a) WHERE a / 2 > 100
BY a
""")
);
}
public void testStatsWithoutGroupKeyMixedAggAndFilter() {
var a = attribute("a");
var f = new UnresolvedFunction(EMPTY, "min", DEFAULT, List.of(a));
var filter = new Alias(EMPTY, "min(a) where a > 1", new FilteredExpression(EMPTY, f, new GreaterThan(EMPTY, a, integer(1))));
assertEqualsIgnoringIds(
new Aggregate(EMPTY, PROCESSING_CMD_INPUT, List.of(), List.of(filter)),
processingCommand("stats min(a) where a > 1")
);
}
public void testInlineStatsWithGroups() {
if (releaseBuildForInlineStats(null)) {
return;
}
for (var cmd : List.of("INLINE STATS", "INLINESTATS")) {
var query = cmd + " b = MIN(a) BY c, d.e";
assertThat(
processingCommand(query),
equalToIgnoringIds(
new InlineStats(
EMPTY,
new Aggregate(
EMPTY,
PROCESSING_CMD_INPUT,
List.of(attribute("c"), attribute("d.e")),
List.of(
new Alias(EMPTY, "b", new UnresolvedFunction(EMPTY, "MIN", DEFAULT, List.of(attribute("a")))),
attribute("c"),
attribute("d.e")
)
)
)
)
);
}
}
public void testInlineStatsWithoutGroups() {
if (releaseBuildForInlineStats(null)) {
return;
}
for (var cmd : List.of("INLINE STATS", "INLINESTATS")) {
var query = cmd + " MIN(a), c = 1";
assertEqualsIgnoringIds(
processingCommand(query),
new InlineStats(
EMPTY,
new Aggregate(
EMPTY,
PROCESSING_CMD_INPUT,
List.of(),
List.of(
new Alias(EMPTY, "MIN(a)", new UnresolvedFunction(EMPTY, "MIN", DEFAULT, List.of(attribute("a")))),
new Alias(EMPTY, "c", integer(1))
)
)
)
);
}
}
@Override
protected List<String> filteredWarnings() {
return withInlinestatsWarning(super.filteredWarnings());
}
/**
 * INLINE must be followed by STATS: any other continuation ("INLINE INLINE",
 * "INLINE FOO") fails in the lexer with a token recognition error at the first
 * offending character.
 */
public void testInlineStatsParsing() {
    // Skipped on release builds where INLINESTATS is not available.
    if (releaseBuildForInlineStats(null)) {
        return;
    }
    expectThrows(
        ParsingException.class,
        containsString("line 1:19: token recognition error at: 'I'"),
        () -> statement("FROM foo | INLINE INLINE STATS COUNT(*)")
    );
    expectThrows(
        ParsingException.class,
        containsString("line 1:19: token recognition error at: 'F'"),
        () -> statement("FROM foo | INLINE FOO COUNT(*)")
    );
}
/*
* Fork[[]]
* |_Eval[[fork1[KEYWORD] AS _fork#3]]
* | \_Limit[11[INTEGER],false]
* | \_Filter[:(?a,baz[KEYWORD])]
* | \_UnresolvedRelation[foo*]
* |_Eval[[fork2[KEYWORD] AS _fork#3]]
* | \_Aggregate[[],[?COUNT[*] AS COUNT(*)#4]]
* | \_UnresolvedRelation[foo*]
* \_Eval[[fork3[KEYWORD] AS _fork#3]]
* \_InlineStats[]
* \_Aggregate[[],[?COUNT[*] AS COUNT(*)#5]]
* \_UnresolvedRelation[foo*]
*/
/**
 * Parses a FORK with three branches (filter+limit, STATS, INLINE STATS) and
 * walks each resulting subplan. Every branch is tagged with a synthetic
 * {@code _fork} discriminator column via an Eval node; the expected tree is
 * sketched in the comment above this method.
 */
public void testInlineStatsWithinFork() {
    // Skipped on release builds where INLINESTATS is not available.
    if (releaseBuildForInlineStats(null)) {
        return;
    }
    var query = """
        FROM foo*
        | FORK ( WHERE a:"baz" | LIMIT 11 )
        ( STATS COUNT(*) )
        ( INLINE STATS COUNT(*) )
        """;
    var plan = statement(query);
    var fork = as(plan, Fork.class);
    var subPlans = fork.children();
    // first subplan: WHERE a:"baz" | LIMIT 11, tagged _fork = "fork1"
    var eval = as(subPlans.get(0), Eval.class);
    assertThat(as(eval.fields().get(0), Alias.class), equalToIgnoringIds(alias("_fork", literalString("fork1"))));
    var limit = as(eval.child(), Limit.class);
    assertThat(limit.limit(), instanceOf(Literal.class));
    assertThat(((Literal) limit.limit()).value(), equalTo(11));
    var filter = as(limit.child(), Filter.class);
    var match = (MatchOperator) filter.condition();
    var matchField = (UnresolvedAttribute) match.field();
    assertThat(matchField.name(), equalTo("a"));
    assertThat(match.query().fold(FoldContext.small()), equalTo(BytesRefs.toBytesRef("baz")));
    // second subplan: STATS COUNT(*), tagged _fork = "fork2"
    eval = as(subPlans.get(1), Eval.class);
    assertThat(as(eval.fields().get(0), Alias.class), equalToIgnoringIds(alias("_fork", literalString("fork2"))));
    var aggregate = as(eval.child(), Aggregate.class);
    assertThat(aggregate.aggregates().size(), equalTo(1));
    var alias = as(aggregate.aggregates().get(0), Alias.class);
    assertThat(alias.name(), equalTo("COUNT(*)"));
    var countFn = as(alias.child(), UnresolvedFunction.class);
    assertThat(countFn.children().get(0), instanceOf(Literal.class));
    assertThat(countFn.children().get(0).fold(FoldContext.small()), equalTo(BytesRefs.toBytesRef("*")));
    // third subplan: INLINE STATS COUNT(*), tagged _fork = "fork3" — the aggregate
    // sits under an InlineStats node rather than directly under the Eval
    eval = as(subPlans.get(2), Eval.class);
    assertThat(as(eval.fields().get(0), Alias.class), equalToIgnoringIds(alias("_fork", literalString("fork3"))));
    var inlineStats = as(eval.child(), InlineStats.class);
    aggregate = as(inlineStats.child(), Aggregate.class);
    assertThat(aggregate.aggregates().size(), equalTo(1));
    alias = as(aggregate.aggregates().get(0), Alias.class);
    assertThat(alias.name(), equalTo("COUNT(*)"));
    countFn = as(alias.child(), UnresolvedFunction.class);
    assertThat(countFn.children().get(0), instanceOf(Literal.class));
    assertThat(countFn.children().get(0).fold(FoldContext.small()), equalTo(BytesRefs.toBytesRef("*")));
}
/**
 * Exercises index patterns for FROM (and TS on snapshot builds) in every quoting
 * style: unquoted, double-quoted, triple-quoted, backquoted, comma-separated
 * lists, date-math expressions, remote-cluster qualifiers and — when the
 * capability is enabled — {@code ::data}/{@code ::failures} selectors. Accepted
 * spellings are checked against the exact index string the parser should
 * produce; invalid combinations are checked for their exact error messages.
 */
public void testStringAsIndexPattern() {
    List<String> commands = new ArrayList<>();
    commands.add("FROM");
    if (Build.current().isSnapshot()) {
        commands.add("TS");
    }
    for (String command : commands) {
        assertStringAsIndexPattern("foo", command + " \"foo\"");
        assertStringAsIndexPattern("foo,test-*", command + """
            "foo","test-*"
            """);
        assertStringAsIndexPattern("foo,test-*", command + " foo,test-*");
        assertStringAsIndexPattern("123-test@foo_bar+baz1", command + " 123-test@foo_bar+baz1");
        assertStringAsIndexPattern("foo,test-*,abc", command + """
            "foo","test-*",abc
            """);
        // a single quoted string keeps its embedded spaces verbatim
        assertStringAsIndexPattern("foo, test-*, abc, xyz", command + """
            "foo, test-*, abc, xyz"
            """);
        assertStringAsIndexPattern("foo, test-*, abc, xyz,test123", command + """
            "foo, test-*, abc, xyz", test123
            """);
        assertStringAsIndexPattern("foo,test,xyz", command + " foo, test,xyz");
        assertStringAsIndexPattern("<logstash-{now/M{yyyy.MM}}>", command + " <logstash-{now/M{yyyy.MM}}>");
        assertStringAsIndexPattern(
            "<logstash-{now/M{yyyy.MM}}>,<logstash-{now/d{yyyy.MM.dd|+12:00}}>",
            command + " <logstash-{now/M{yyyy.MM}}>, \"<logstash-{now/d{yyyy.MM.dd|+12:00}}>\""
        );
        assertStringAsIndexPattern("<logstash-{now/d{yyyy.MM.dd|+12:00}}>", command + " \"<logstash-{now/d{yyyy.MM.dd|+12:00}}>\"");
        assertStringAsIndexPattern(
            "-<logstash-{now/M{yyyy.MM}}>,-<-logstash-{now/M{yyyy.MM}}>,"
                + "-<logstash-{now/d{yyyy.MM.dd|+12:00}}>,-<-logstash-{now/d{yyyy.MM.dd|+12:00}}>",
            command
                + " -<logstash-{now/M{yyyy.MM}}>, -<-logstash-{now/M{yyyy.MM}}>, "
                + "\"-<logstash-{now/d{yyyy.MM.dd|+12:00}}>\", \"-<-logstash-{now/d{yyyy.MM.dd|+12:00}}>\""
        );
        assertStringAsIndexPattern("foo,test,xyz", command + " \"\"\"foo\"\"\", test,\"xyz\"");
        assertStringAsIndexPattern("`backtick`,``multiple`back``ticks```", command + " `backtick`, ``multiple`back``ticks```");
        assertStringAsIndexPattern("test,metadata,metaata,.metadata", command + " test,\"metadata\", metaata, .metadata");
        assertStringAsIndexPattern(".dot", command + " .dot");
        // "FROM " is 5 chars, "TS " is 3: error offsets shift by the command length
        String lineNumber = command.equals("FROM") ? "line 1:14: " : "line 1:12: ";
        expectErrorWithLineNumber(
            command + " cluster:\"index|pattern\"",
            " cluster:\"index|pattern\"",
            lineNumber,
            "mismatched input '\"index|pattern\"' expecting UNQUOTED_SOURCE"
        );
        // Entire index pattern is quoted. So it's not a parse error but a semantic error where the index name
        // is invalid.
        expectError(command + " \"*:index|pattern\"", "Invalid index name [index|pattern], must not contain the following characters");
        clusterAndIndexAsIndexPattern(command, "cluster:index");
        clusterAndIndexAsIndexPattern(command, "cluster:.index");
        clusterAndIndexAsIndexPattern(command, "cluster*:index*");
        clusterAndIndexAsIndexPattern(command, "cluster*:<logstash-{now/d}>*");
        clusterAndIndexAsIndexPattern(command, "cluster*:*");
        clusterAndIndexAsIndexPattern(command, "*:index*");
        clusterAndIndexAsIndexPattern(command, "*:*");
        expectError(
            command + " \"cluster:index|pattern\"",
            "Invalid index name [index|pattern], must not contain the following characters"
        );
        expectError(command + " *:\"index|pattern\"", "expecting UNQUOTED_SOURCE");
        if (EsqlCapabilities.Cap.INDEX_COMPONENT_SELECTORS.isEnabled()) {
            assertStringAsIndexPattern("foo::data", command + " foo::data");
            assertStringAsIndexPattern("foo::failures", command + " foo::failures");
            expectErrorWithLineNumber(
                command + " *,\"-foo\"::data",
                "*,-foo::data",
                lineNumber,
                "mismatched input '::' expecting {<EOF>, '|', ',', 'metadata'}"
            );
            expectErrorWithLineNumber(
                command + " cluster:\"foo::data\"",
                " cluster:\"foo::data\"",
                lineNumber,
                "mismatched input '\"foo::data\"' expecting UNQUOTED_SOURCE"
            );
            expectErrorWithLineNumber(
                command + " cluster:\"foo::failures\"",
                " cluster:\"foo::failures\"",
                lineNumber,
                "mismatched input '\"foo::failures\"' expecting UNQUOTED_SOURCE"
            );
            // the extra space before "-foo" shifts the error offset by one
            lineNumber = command.equals("FROM") ? "line 1:15: " : "line 1:13: ";
            expectErrorWithLineNumber(
                command + " *, \"-foo\"::data",
                " *, \"-foo\"::data",
                lineNumber,
                "mismatched input '::' expecting {<EOF>, '|', ',', 'metadata'}"
            );
            assertStringAsIndexPattern("*,-foo::data", command + " *, \"-foo::data\"");
            assertStringAsIndexPattern("*::data", command + " *::data");
            lineNumber = command.equals("FROM") ? "line 1:79: " : "line 1:77: ";
            // a selector cannot follow a quoted pattern
            expectErrorWithLineNumber(
                command + " \"<logstash-{now/M{yyyy.MM}}>::data,<logstash-{now/d{yyyy.MM.dd|+12:00}}>\"::failures",
                " \"<logstash-{now/M{yyyy.MM}}>::data,<logstash-{now/d{yyyy.MM.dd|+12:00}}>\"::failures",
                lineNumber,
                "mismatched input '::' expecting {<EOF>, '|', ',', 'metadata'}"
            );
            assertStringAsIndexPattern(
                "<logstash-{now/M{yyyy.MM}}>::data,<logstash-{now/d{yyyy.MM.dd|+12:00}}>::failures",
                command + " <logstash-{now/M{yyyy.MM}}>::data, \"<logstash-{now/d{yyyy.MM.dd|+12:00}}>::failures\""
            );
        }
    }
}
/**
 * Asserts that a {@code cluster:index} qualified pattern parses to the same
 * index string whether it is written bare or wrapped in double quotes.
 */
private void clusterAndIndexAsIndexPattern(String command, String clusterAndIndex) {
    String unquoted = command + " " + clusterAndIndex;
    String quoted = command + " \"" + clusterAndIndex + "\"";
    assertStringAsIndexPattern(clusterAndIndex, unquoted);
    assertStringAsIndexPattern(clusterAndIndex, quoted);
}
/**
 * Exercises index patterns for the snapshot-only LOOKUP_🐔 command in the same
 * quoting styles as {@code testStringAsIndexPattern}: quoted, unquoted,
 * triple-quoted, backquoted, date-math and remote-cluster qualified.
 */
public void testStringAsLookupIndexPattern() {
    assumeTrue("requires snapshot build", Build.current().isSnapshot());
    assertStringAsLookupIndexPattern("foo", "ROW x = 1 | LOOKUP_🐔 \"foo\" ON j");
    assertStringAsLookupIndexPattern("test-*", """
        ROW x = 1 | LOOKUP_🐔 "test-*" ON j
        """);
    assertStringAsLookupIndexPattern("test-*", "ROW x = 1 | LOOKUP_🐔 test-* ON j");
    assertStringAsLookupIndexPattern("123-test@foo_bar+baz1", "ROW x = 1 | LOOKUP_🐔 123-test@foo_bar+baz1 ON j");
    // embedded spaces survive when the whole pattern is quoted
    assertStringAsLookupIndexPattern("foo, test-*, abc, xyz", """
        ROW x = 1 | LOOKUP_🐔 "foo, test-*, abc, xyz" ON j
        """);
    assertStringAsLookupIndexPattern("<logstash-{now/M{yyyy.MM}}>", "ROW x = 1 | LOOKUP_🐔 <logstash-{now/M{yyyy.MM}}> ON j");
    assertStringAsLookupIndexPattern(
        "<logstash-{now/d{yyyy.MM.dd|+12:00}}>",
        "ROW x = 1 | LOOKUP_🐔 \"<logstash-{now/d{yyyy.MM.dd|+12:00}}>\" ON j"
    );
    assertStringAsLookupIndexPattern("foo", "ROW x = 1 | LOOKUP_🐔 \"\"\"foo\"\"\" ON j");
    assertStringAsLookupIndexPattern("`backtick`", "ROW x = 1 | LOOKUP_🐔 `backtick` ON j");
    assertStringAsLookupIndexPattern("``multiple`back``ticks```", "ROW x = 1 | LOOKUP_🐔 ``multiple`back``ticks``` ON j");
    assertStringAsLookupIndexPattern(".dot", "ROW x = 1 | LOOKUP_🐔 .dot ON j");
    clusterAndIndexAsLookupIndexPattern("cluster:index");
    clusterAndIndexAsLookupIndexPattern("cluster:.index");
    clusterAndIndexAsLookupIndexPattern("cluster*:index*");
    clusterAndIndexAsLookupIndexPattern("cluster*:*");
    clusterAndIndexAsLookupIndexPattern("*:index*");
    clusterAndIndexAsLookupIndexPattern("*:*");
}
/**
 * Asserts that a {@code cluster:index} qualified pattern used with LOOKUP_🐔
 * parses identically with and without surrounding double quotes.
 */
private void clusterAndIndexAsLookupIndexPattern(String clusterAndIndex) {
    String prefix = "ROW x = 1 | LOOKUP_🐔 ";
    assertStringAsLookupIndexPattern(clusterAndIndex, prefix + clusterAndIndex + " ON j");
    assertStringAsLookupIndexPattern(clusterAndIndex, prefix + "\"" + clusterAndIndex + "\" ON j");
}
/**
 * Verifies the validation errors for index patterns containing characters that
 * are illegal in index names ({@code #}, {@code |}, spaces, leading {@code _}/
 * {@code +}/{@code --}, malformed date math, bad {@code ::} selectors, ...).
 * Three passes are made over each applicable command:
 * <ol>
 *   <li>single invalid patterns, for FROM/TS/LOOKUP_🐔 alike;</li>
 *   <li>comma-separated lists with exclusions where no wildcard pattern precedes
 *       the invalid name — validation must fail;</li>
 *   <li>lists where a wildcard pattern precedes the invalid name — the invalid
 *       name is tolerated, except for invalid date math and invalid selectors
 *       which always throw.</li>
 * </ol>
 * Error line numbers differ per command, so each command maps to its expected
 * position prefix.
 */
public void testInvalidCharacterInIndexPattern() {
    // maps a command template (pattern spliced at "{}") to the expected error position
    Map<String, String> commands = new HashMap<>();
    commands.put("FROM {}", "line 1:6: ");
    if (Build.current().isSnapshot()) {
        commands.put("TS {}", "line 1:4: ");
        commands.put("ROW x = 1 | LOOKUP_🐔 {} ON j", "line 1:22: ");
    }
    String lineNumber;
    for (String command : commands.keySet()) {
        lineNumber = commands.get(command);
        expectInvalidIndexNameErrorWithLineNumber(command, "index|pattern", lineNumber);
        expectInvalidIndexNameErrorWithLineNumber(command, "index pattern", lineNumber);
        expectInvalidIndexNameErrorWithLineNumber(command, "index#pattern", lineNumber);
        expectInvalidIndexNameErrorWithLineNumber(command, "index?pattern", lineNumber);
        expectInvalidIndexNameErrorWithLineNumber(command, "index>pattern", lineNumber);
        expectInvalidIndexNameErrorWithLineNumber(command, "index<pattern", lineNumber);
        expectInvalidIndexNameErrorWithLineNumber(command, "index/pattern", lineNumber);
        expectInvalidIndexNameErrorWithLineNumber(command, "_indexpattern", lineNumber);
        expectInvalidIndexNameErrorWithLineNumber(command, "+indexpattern", lineNumber);
        expectInvalidIndexNameErrorWithLineNumber(command, "..", lineNumber);
        expectInvalidIndexNameErrorWithLineNumber(command, "+<logstash-{now/d}>", lineNumber);
        expectInvalidIndexNameErrorWithLineNumber(command, "_<logstash-{now/d}>", lineNumber);
        expectInvalidIndexNameErrorWithLineNumber(command, "index\\pattern", lineNumber, "index\\pattern");
        expectInvalidIndexNameErrorWithLineNumber(command, "\"index\\\\pattern\"", lineNumber, "index\\pattern");
        // a double exclusion "--name" is reported as the invalid name "-name"
        expectInvalidIndexNameErrorWithLineNumber(command, "\"--indexpattern\"", lineNumber, "-indexpattern");
        expectInvalidIndexNameErrorWithLineNumber(command, "--indexpattern", lineNumber, "-indexpattern");
        expectInvalidIndexNameErrorWithLineNumber(command, "<--logstash-{now/M{yyyy.MM}}>", lineNumber, "-logstash-");
        expectInvalidIndexNameErrorWithLineNumber(
            command,
            "\"--<logstash-{now/M{yyyy.MM}}>\"",
            lineNumber,
            "-<logstash-{now/M{yyyy.MM}}>"
        );
        expectInvalidIndexNameErrorWithLineNumber(command, "<logstash#{now/d}>", lineNumber, "logstash#");
        expectInvalidIndexNameErrorWithLineNumber(command, "\"<logstash#{now/d}>\"", lineNumber, "logstash#");
        expectInvalidIndexNameErrorWithLineNumber(command, "<<logstash{now/d}>>", lineNumber, "<logstash");
        expectInvalidIndexNameErrorWithLineNumber(command, "\"<<logstash{now/d}>>\"", lineNumber, "<logstash");
        expectInvalidIndexNameErrorWithLineNumber(command, "<<logstash<{now/d}>>>", lineNumber, "<logstash<");
        expectInvalidIndexNameErrorWithLineNumber(command, "\"<<logstash<{now/d}>>>\"", lineNumber, "<logstash<");
        expectInvalidIndexNameErrorWithLineNumber(command, "\"-<logstash- {now/d{yyyy.MM.dd|+12:00}}>\"", lineNumber, "logstash- ");
        // selector errors don't apply to LOOKUP_🐔, which has no selector support
        if (EsqlCapabilities.Cap.INDEX_COMPONENT_SELECTORS.isEnabled() && command.contains("LOOKUP_🐔") == false) {
            expectInvalidIndexNameErrorWithLineNumber(command, "index::dat", lineNumber);
            expectInvalidIndexNameErrorWithLineNumber(command, "index::failure", lineNumber);
            // Cluster name cannot be combined with selector yet.
            int parseLineNumber = 6;
            if (command.startsWith("TS")) {
                parseLineNumber = 4;
            }
            expectDoubleColonErrorWithLineNumber(command, "cluster:foo::data", parseLineNumber + 11);
            expectDoubleColonErrorWithLineNumber(command, "cluster:foo::failures", parseLineNumber + 11);
            // Index pattern cannot be quoted if cluster string is present.
            expectErrorWithLineNumber(
                command,
                "cluster:\"foo\"::data",
                command.startsWith("FROM") ? "line 1:14: " : "line 1:12: ",
                "mismatched input '\"foo\"' expecting UNQUOTED_SOURCE"
            );
            expectErrorWithLineNumber(
                command,
                "cluster:\"foo\"::failures",
                command.startsWith("FROM") ? "line 1:14: " : "line 1:12: ",
                "mismatched input '\"foo\"' expecting UNQUOTED_SOURCE"
            );
            expectDoubleColonErrorWithLineNumber(command, "\"cluster:foo\"::data", parseLineNumber + 13);
            expectDoubleColonErrorWithLineNumber(command, "\"cluster:foo\"::failures", parseLineNumber + 13);
            expectErrorWithLineNumber(
                command,
                "\"cluster:foo::data\"",
                lineNumber,
                "Invalid index name [cluster:foo::data], Selectors are not yet supported on remote cluster patterns"
            );
            expectErrorWithLineNumber(
                command,
                "\"cluster:foo::failures\"",
                lineNumber,
                "Invalid index name [cluster:foo::failures], Selectors are not yet supported on remote cluster patterns"
            );
            // Wildcards
            expectDoubleColonErrorWithLineNumber(command, "cluster:*::data", parseLineNumber + 9);
            expectDoubleColonErrorWithLineNumber(command, "cluster:*::failures", parseLineNumber + 9);
            expectDoubleColonErrorWithLineNumber(command, "*:index::data", parseLineNumber + 7);
            expectDoubleColonErrorWithLineNumber(command, "*:index::failures", parseLineNumber + 7);
            expectDoubleColonErrorWithLineNumber(command, "*:index*::data", parseLineNumber + 8);
            expectDoubleColonErrorWithLineNumber(command, "*:index*::failures", parseLineNumber + 8);
            expectDoubleColonErrorWithLineNumber(command, "*:*::data", parseLineNumber + 3);
            expectDoubleColonErrorWithLineNumber(command, "*:*::failures", parseLineNumber + 3);
            // Too many colons
            expectInvalidIndexNameErrorWithLineNumber(
                command,
                "\"index:::data\"",
                lineNumber,
                "index:::data",
                "Selectors are not yet supported on remote cluster patterns"
            );
            expectInvalidIndexNameErrorWithLineNumber(
                command,
                "\"index::::data\"",
                lineNumber,
                "index::::data",
                "Invalid usage of :: separator"
            );
            expectErrorWithLineNumber(
                command,
                "cluster:\"index,index2\"::failures",
                command.startsWith("FROM") ? "line 1:14: " : "line 1:12: ",
                "mismatched input '\"index,index2\"' expecting UNQUOTED_SOURCE"
            );
        }
    }
    // comma separated indices, with exclusions
    // Invalid index names after removing exclusion fail, when there is no index name with wildcard before it
    for (String command : commands.keySet()) {
        if (command.contains("LOOKUP_🐔") || command.contains("TS")) {
            continue;
        }
        lineNumber = command.contains("FROM") ? "line 1:20: " : "line 1:23: ";
        expectInvalidIndexNameErrorWithLineNumber(command, "indexpattern, --indexpattern", lineNumber, "-indexpattern");
        expectInvalidIndexNameErrorWithLineNumber(command, "indexpattern, \"--indexpattern\"", lineNumber, "-indexpattern");
        expectInvalidIndexNameErrorWithLineNumber(command, "\"indexpattern, --indexpattern\"", commands.get(command), "-indexpattern");
        expectInvalidIndexNameErrorWithLineNumber(command, "\"- , -\"", commands.get(command), "", "must not be empty");
        expectInvalidIndexNameErrorWithLineNumber(command, "\"indexpattern,-\"", commands.get(command), "", "must not be empty");
        clustersAndIndices(command, "indexpattern", "*-");
        clustersAndIndices(command, "indexpattern", "-indexpattern");
        if (EsqlCapabilities.Cap.INDEX_COMPONENT_SELECTORS.isEnabled()) {
            expectInvalidIndexNameErrorWithLineNumber(
                command,
                "indexpattern, --index::data",
                lineNumber,
                "-index",
                "must not start with '_', '-', or '+'"
            );
            expectErrorWithLineNumber(
                command,
                "indexpattern, \"--index\"::data",
                "line 1:29: ",
                "mismatched input '::' expecting {<EOF>, '|', ',', 'metadata'}"
            );
            expectInvalidIndexNameErrorWithLineNumber(
                command,
                "\"indexpattern, --index::data\"",
                commands.get(command),
                "-index",
                "must not start with '_', '-', or '+'"
            );
        }
    }
    // Invalid index names, except invalid DateMath, are ignored if there is an index name with wildcard before it
    String dateMathError = "unit [D] not supported for date math [/D]";
    for (String command : commands.keySet()) {
        if (command.contains("LOOKUP_🐔") || command.contains("TS")) {
            continue;
        }
        lineNumber = command.contains("FROM") ? "line 1:9: " : "line 1:12: ";
        String indexStarLineNumber = command.contains("FROM") ? "line 1:14: " : "line 1:17: ";
        clustersAndIndices(command, "*", "-index#pattern");
        clustersAndIndices(command, "index*", "-index#pattern");
        clustersAndIndices(command, "*", "-<--logstash-{now/M{yyyy.MM}}>");
        clustersAndIndices(command, "index*", "-<--logstash#-{now/M{yyyy.MM}}>");
        expectInvalidIndexNameErrorWithLineNumber(command, "*, index#pattern", lineNumber, "index#pattern", "must not contain '#'");
        expectInvalidIndexNameErrorWithLineNumber(
            command,
            "index*, index#pattern",
            indexStarLineNumber,
            "index#pattern",
            "must not contain '#'"
        );
        expectDateMathErrorWithLineNumber(command, "cluster*:<logstash-{now/D}*>", commands.get(command), dateMathError);
        expectDateMathErrorWithLineNumber(command, "*, \"-<-logstash-{now/D}>\"", lineNumber, dateMathError);
        expectDateMathErrorWithLineNumber(command, "*, -<-logstash-{now/D}>", lineNumber, dateMathError);
        expectDateMathErrorWithLineNumber(command, "\"*, -<-logstash-{now/D}>\"", commands.get(command), dateMathError);
        expectDateMathErrorWithLineNumber(command, "\"*, -<-logst:ash-{now/D}>\"", commands.get(command), dateMathError);
        if (EsqlCapabilities.Cap.INDEX_COMPONENT_SELECTORS.isEnabled()) {
            clustersAndIndices(command, "*", "-index::data");
            clustersAndIndices(command, "*", "-index::failures");
            clustersAndIndices(command, "*", "-index*pattern::data");
            clustersAndIndices(command, "*", "-index*pattern::failures");
            // This is by existing design: refer to the comment in IdentifierBuilder#resolveAndValidateIndex() in the last
            // catch clause. If there's an index with a wildcard before an invalid index, we don't error out.
            clustersAndIndices(command, "index*", "-index#pattern::data");
            clustersAndIndices(command, "*", "-<--logstash-{now/M{yyyy.MM}}>::data");
            clustersAndIndices(command, "index*", "-<--logstash#-{now/M{yyyy.MM}}>::data");
            expectError(command + "index1,<logstash-{now+-/d}>", "unit [-] not supported for date math [+-/d]");
            // Throw on invalid date math
            expectDateMathErrorWithLineNumber(
                command,
                "*, \"-<-logstash-{now/D}>\"::data",
                "line 1:31: ",
                "mismatched input '::' expecting {<EOF>, '|', ',', 'metadata'}"
            );
            expectDateMathErrorWithLineNumber(command, "*, -<-logstash-{now/D}>::data", lineNumber, dateMathError);
            // Check that invalid selectors throw (they're resolved first in /_search, and always validated)
            expectInvalidIndexNameErrorWithLineNumber(
                command,
                "*, -index::garbage",
                lineNumber,
                "-index::garbage",
                "invalid usage of :: separator, [garbage] is not a recognized selector"
            );
            expectInvalidIndexNameErrorWithLineNumber(
                command,
                "index*, -index::garbage",
                indexStarLineNumber,
                "-index::garbage",
                "invalid usage of :: separator, [garbage] is not a recognized selector"
            );
            expectInvalidIndexNameErrorWithLineNumber(
                command,
                "*, -<logstash-{now/M{yyyy.MM}}>::garbage",
                lineNumber,
                "-<logstash-{now/M{yyyy.MM}}>::garbage",
                "invalid usage of :: separator, [garbage] is not a recognized selector"
            );
            expectInvalidIndexNameErrorWithLineNumber(
                command,
                "index*, -<logstash-{now/M{yyyy.MM}}>::garbage",
                indexStarLineNumber,
                "-<logstash-{now/M{yyyy.MM}}>::garbage",
                "invalid usage of :: separator, [garbage] is not a recognized selector"
            );
            // Invalid selectors will throw validation errors before invalid date math
            expectInvalidIndexNameErrorWithLineNumber(
                command,
                "\"*, -<-logstash-{now/D}>::d\"",
                commands.get(command),
                "-<-logstash-{now/D}>::d",
                "invalid usage of :: separator, [d] is not a recognized selector"
            );
            expectInvalidIndexNameErrorWithLineNumber(
                command,
                "\"*, -<-logstash-{now/D}>::\"",
                commands.get(command),
                "-<-logstash-{now/D}>::",
                "invalid usage of :: separator, [] is not a recognized selector"
            );
        }
    }
}
/**
 * Asserts how a pair of comma-separated index expressions parses in three
 * quoting styles: both unquoted, second quoted, and the whole list quoted.
 * When the whole list is quoted, the blank after the comma is preserved
 * verbatim in the resulting index string ("a, b" rather than "a,b").
 */
private void clustersAndIndices(String command, String indexString1, String indexString2) {
    // both parts unquoted: separator whitespace is normalized away
    assertEqualsIgnoringIds(
        unresolvedRelation(indexString1 + "," + indexString2),
        statement(command, indexString1 + ", " + indexString2)
    );
    // second part quoted: still normalized
    assertEqualsIgnoringIds(
        unresolvedRelation(indexString1 + "," + indexString2),
        statement(command, indexString1 + ", \"" + indexString2 + "\"")
    );
    // whole list quoted: the space after the comma survives into the index string
    assertEqualsIgnoringIds(
        unresolvedRelation(indexString1 + ", " + indexString2),
        statement(command, "\"" + indexString1 + ", " + indexString2 + "\"")
    );
}
/**
 * Unbalanced or mixed quoting in a FROM index pattern fails in the lexer
 * (token recognition) or parser (extraneous/mismatched input), with the error
 * reported at the offending quote.
 */
public void testInvalidQuotingAsFromIndexPattern() {
    expectError("FROM \"foo", ": token recognition error at: '\"foo'");
    expectError("FROM \"foo | LIMIT 1", ": token recognition error at: '\"foo | LIMIT 1'");
    expectError("FROM \"\"\"foo", ": token recognition error at: '\"foo'");
    expectError("FROM foo\"", ": token recognition error at: '\"'");
    expectError("FROM foo\" | LIMIT 2", ": token recognition error at: '\" | LIMIT 2'");
    expectError("FROM foo\"\"\"", ": token recognition error at: '\"'");
    expectError("FROM \"foo\"bar\"", ": token recognition error at: '\"'");
    expectError("FROM \"foo\"\"bar\"", ": extraneous input '\"bar\"' expecting <EOF>");
    expectError("FROM \"\"\"foo\"\"\"bar\"\"\"", ": mismatched input 'bar' expecting {<EOF>, '|', ',', 'metadata'}");
    expectError("FROM \"\"\"foo\"\"\"\"\"\"bar\"\"\"", ": mismatched input '\"bar\"' expecting {<EOF>, '|', ',', 'metadata'}");
}
/**
 * Same unbalanced-quoting cases as {@code testInvalidQuotingAsFromIndexPattern},
 * but for the snapshot-only LOOKUP_🐔 command, where the parser expects 'on'
 * after the pattern.
 */
public void testInvalidQuotingAsLookupIndexPattern() {
    assumeTrue("requires snapshot builds", Build.current().isSnapshot());
    expectError("ROW x = 1 | LOOKUP_🐔 \"foo ON j", ": token recognition error at: '\"foo ON j'");
    expectError("ROW x = 1 | LOOKUP_🐔 \"\"\"foo ON j", ": token recognition error at: '\"foo ON j'");
    expectError("ROW x = 1 | LOOKUP_🐔 foo\" ON j", ": token recognition error at: '\" ON j'");
    expectError("ROW x = 1 | LOOKUP_🐔 foo\"\"\" ON j", ": token recognition error at: '\" ON j'");
    expectError("ROW x = 1 | LOOKUP_🐔 \"foo\"bar\" ON j", ": token recognition error at: '\" ON j'");
    expectError("ROW x = 1 | LOOKUP_🐔 \"foo\"\"bar\" ON j", ": extraneous input '\"bar\"' expecting 'on'");
    expectError("ROW x = 1 | LOOKUP_🐔 \"\"\"foo\"\"\"bar\"\"\" ON j", ": mismatched input 'bar' expecting 'on'");
    expectError("ROW x = 1 | LOOKUP_🐔 \"\"\"foo\"\"\"\"\"\"bar\"\"\" ON j", ": mismatched input '\"bar\"' expecting 'on'");
}
/**
 * WHERE comparisons accept identifiers in several spellings (plain, backquoted,
 * underscored, dotted, @-prefixed). For every (operator, identifier) pair the
 * parse must produce the expected comparison node; {@code !=} parses to
 * {@code Not(Equals(...))} rather than a dedicated node. The {@code operators}
 * and {@code expectedOperators} arrays are parallel, as are {@code identifiers}
 * and {@code expectedIdentifiers} (backticks are stripped from the name).
 */
public void testIdentifierAsFieldName() {
    String[] operators = new String[] { "==", "!=", ">", "<", ">=", "<=" };
    Class<?>[] expectedOperators = new Class<?>[] {
        Equals.class,
        Not.class,
        GreaterThan.class,
        LessThan.class,
        GreaterThanOrEqual.class,
        LessThanOrEqual.class };
    String[] identifiers = new String[] { "abc", "`abc`", "ab_c", "a.b.c", "@a", "a.@b", "`a@b.c`" };
    String[] expectedIdentifiers = new String[] { "abc", "abc", "ab_c", "a.b.c", "@a", "a.@b", "a@b.c" };
    LogicalPlan where;
    for (int i = 0; i < operators.length; i++) {
        for (int j = 0; j < identifiers.length; j++) {
            where = processingCommand("where " + identifiers[j] + operators[i] + "123");
            assertThat(where, instanceOf(Filter.class));
            Filter filter = (Filter) where;
            assertThat(filter.children().size(), equalTo(1));
            assertThat(filter.condition(), instanceOf(expectedOperators[i]));
            BinaryComparison comparison;
            if (filter.condition() instanceof Not not) {
                // "!=" parses as a negated equality
                assertThat(not.children().get(0), instanceOf(Equals.class));
                comparison = (BinaryComparison) (not.children().get(0));
            } else {
                comparison = (BinaryComparison) filter.condition();
            }
            assertThat(comparison.left(), instanceOf(UnresolvedAttribute.class));
            assertThat(((UnresolvedAttribute) comparison.left()).name(), equalTo(expectedIdentifiers[j]));
            assertThat(comparison.right(), instanceOf(Literal.class));
            assertThat(((Literal) comparison.right()).value(), equalTo(123));
            assertThat(filter.child(), equalToIgnoringIds(PROCESSING_CMD_INPUT));
        }
    }
}
/**
 * WHERE with a bare boolean literal parses into a Filter whose condition is the
 * literal itself.
 */
public void testBooleanLiteralCondition() {
    LogicalPlan plan = processingCommand("where true");
    assertThat(plan, instanceOf(Filter.class));
    Filter filter = (Filter) plan;
    assertThat(filter.child(), equalToIgnoringIds(PROCESSING_CMD_INPUT));
    assertThat(filter.condition(), equalTo(TRUE));
}
/**
 * A "from | where | limit" pipeline parses into a Limit(Filter(UnresolvedRelation))
 * chain with the literal limit value preserved.
 */
public void testBasicLimitCommand() {
    LogicalPlan plan = statement("from text | where true | limit 5");
    assertThat(plan, instanceOf(Limit.class));
    Limit limit = (Limit) plan;
    // the limit value is kept as a literal 5
    assertThat(limit.limit(), instanceOf(Literal.class));
    assertThat(((Literal) limit.limit()).value(), equalTo(5));
    assertThat(limit.children().size(), equalTo(1));
    LogicalPlan filter = limit.children().get(0);
    assertThat(filter, instanceOf(Filter.class));
    assertThat(filter.children().size(), equalTo(1));
    assertThat(filter.children().get(0), instanceOf(UnresolvedRelation.class));
}
/**
 * Multiple SORT commands stack: the last one in the query becomes the outermost
 * OrderBy. This walks the three nested OrderBy nodes ("sort z desc" outermost,
 * then "sort y asc", then the two-key "sort a+b asc nulls first, x desc nulls
 * last") down to the Filter over the relation.
 */
public void testBasicSortCommand() {
    LogicalPlan plan = statement("from text | where true | sort a+b asc nulls first, x desc nulls last | sort y asc | sort z desc");
    // outermost: "sort z desc" (last in the pipeline)
    assertThat(plan, instanceOf(OrderBy.class));
    OrderBy orderBy = (OrderBy) plan;
    assertThat(orderBy.order().size(), equalTo(1));
    Order order = orderBy.order().get(0);
    assertThat(order.direction(), equalTo(Order.OrderDirection.DESC));
    // DESC defaults to nulls first
    assertThat(order.nullsPosition(), equalTo(Order.NullsPosition.FIRST));
    assertThat(order.child(), instanceOf(UnresolvedAttribute.class));
    assertThat(((UnresolvedAttribute) order.child()).name(), equalTo("z"));
    assertThat(orderBy.children().size(), equalTo(1));
    assertThat(orderBy.children().get(0), instanceOf(OrderBy.class));
    // next: "sort y asc"
    orderBy = (OrderBy) orderBy.children().get(0);
    assertThat(orderBy.order().size(), equalTo(1));
    order = orderBy.order().get(0);
    assertThat(order.direction(), equalTo(Order.OrderDirection.ASC));
    // ASC defaults to nulls last
    assertThat(order.nullsPosition(), equalTo(Order.NullsPosition.LAST));
    assertThat(order.child(), instanceOf(UnresolvedAttribute.class));
    assertThat(((UnresolvedAttribute) order.child()).name(), equalTo("y"));
    assertThat(orderBy.children().size(), equalTo(1));
    assertThat(orderBy.children().get(0), instanceOf(OrderBy.class));
    // innermost: "sort a+b asc nulls first, x desc nulls last" with two keys
    orderBy = (OrderBy) orderBy.children().get(0);
    assertThat(orderBy.order().size(), equalTo(2));
    order = orderBy.order().get(0);
    assertThat(order.direction(), equalTo(Order.OrderDirection.ASC));
    assertThat(order.nullsPosition(), equalTo(Order.NullsPosition.FIRST));
    assertThat(order.child(), instanceOf(Add.class));
    Add add = (Add) order.child();
    assertThat(add.left(), instanceOf(UnresolvedAttribute.class));
    assertThat(((UnresolvedAttribute) add.left()).name(), equalTo("a"));
    assertThat(add.right(), instanceOf(UnresolvedAttribute.class));
    assertThat(((UnresolvedAttribute) add.right()).name(), equalTo("b"));
    order = orderBy.order().get(1);
    assertThat(order.direction(), equalTo(Order.OrderDirection.DESC));
    assertThat(order.nullsPosition(), equalTo(Order.NullsPosition.LAST));
    assertThat(order.child(), instanceOf(UnresolvedAttribute.class));
    assertThat(((UnresolvedAttribute) order.child()).name(), equalTo("x"));
    assertThat(orderBy.children().size(), equalTo(1));
    assertThat(orderBy.children().get(0), instanceOf(Filter.class));
    assertThat(orderBy.children().get(0).children().size(), equalTo(1));
    assertThat(orderBy.children().get(0).children().get(0), instanceOf(UnresolvedRelation.class));
}
/**
 * EXPLAIN around a trivial ROW subquery parses into an Explain node over it.
 */
public void testSubquery() {
    assumeTrue("Requires EXPLAIN capability", EsqlCapabilities.Cap.EXPLAIN.isEnabled());
    LogicalPlan expected = new Explain(EMPTY, PROCESSING_CMD_INPUT);
    assertEqualsIgnoringIds(expected, statement("explain ( row a = 1 )"));
}
/**
 * A block comment may replace any whitespace character in the query without
 * changing the parse — even when the comment body itself contains query-like
 * text spanning multiple lines.
 */
public void testBlockComments() {
    assumeTrue("Requires EXPLAIN capability", EsqlCapabilities.Cap.EXPLAIN.isEnabled());
    String query = " explain ( from foo )";
    LogicalPlan expected = statement(query);
    // substitute the comment for each space in turn (the query starts with one,
    // so the loop always runs at least once)
    for (int ws = query.indexOf(' '); ws >= 0; ws = query.indexOf(' ', ws + 1)) {
        String withComment = query.substring(0, ws) + "/*explain ( \nfrom bar ) */" + query.substring(ws + 1);
        assertEqualsIgnoringIds(expected, statement(withComment));
    }
}
/**
 * A line comment (terminated by a newline) may replace any whitespace character
 * in the query without changing the parse.
 */
public void testSingleLineComments() {
    assumeTrue("Requires EXPLAIN capability", EsqlCapabilities.Cap.EXPLAIN.isEnabled());
    String query = " explain ( from foo ) ";
    LogicalPlan expected = statement(query);
    // substitute the comment for each space in turn (the query starts with one,
    // so the loop always runs at least once)
    for (int ws = query.indexOf(' '); ws >= 0; ws = query.indexOf(' ', ws + 1)) {
        String withComment = query.substring(0, ws) + "//explain ( from bar ) \n" + query.substring(ws + 1);
        assertEqualsIgnoringIds(expected, statement(withComment));
    }
}
/**
 * CR, LF and CRLF are all accepted as statement whitespace: a query interleaved
 * with any of them parses to the same plan as the same query with none.
 */
public void testNewLines() {
    String[] delims = new String[] { "", "\r", "\n", "\r\n" };
    Function<String, String> queryFun = d -> d + "from " + d + " foo " + d + "| eval " + d + " x = concat(bar, \"baz\")" + d;
    // the delimiter-free variant is the reference plan
    LogicalPlan reference = statement(queryFun.apply(delims[0]));
    for (int idx = 1; idx < delims.length; idx++) {
        assertThat(statement(queryFun.apply(delims[idx])), equalToIgnoringIds(reference));
    }
}
/**
 * An unknown leading command must produce a parse error that names the
 * offending token and suggests the valid source commands (FROM, ROW, ...) —
 * including inside an EXPLAIN subquery when that capability is enabled.
 */
public void testSuggestAvailableSourceCommandsOnParsingError() {
    var badQueries = new ArrayList<Tuple<String, String>>();
    badQueries.add(Tuple.tuple("frm foo", "frm"));
    badQueries.add(Tuple.tuple("expln[from bar]", "expln"));
    badQueries.add(Tuple.tuple("not-a-thing logs", "not-a-thing"));
    badQueries.add(Tuple.tuple("high5 a", "high5"));
    badQueries.add(Tuple.tuple("a+b = c", "a+b"));
    badQueries.add(Tuple.tuple("a//hi", "a"));
    badQueries.add(Tuple.tuple("a/*hi*/", "a"));
    if (EsqlCapabilities.Cap.EXPLAIN.isEnabled()) {
        badQueries.add(Tuple.tuple("explain ( frm a )", "frm"));
    }
    for (Tuple<String, String> queryAndBadToken : badQueries) {
        String query = queryAndBadToken.v1();
        String badToken = queryAndBadToken.v2();
        expectThrows(
            ParsingException.class,
            allOf(containsString("mismatched input '" + badToken + "'"), containsString("'from'"), containsString("'row'")),
            () -> statement(query)
        );
    }
}
/**
 * An unknown command after a pipe must produce a parse error that names the
 * offending token and suggests the valid processing commands (EVAL, LIMIT,
 * WHERE, ...). Note that "explain" is rejected here because it is only valid
 * as a source command.
 */
public void testSuggestAvailableProcessingCommandsOnParsingError() {
    for (Tuple<String, String> queryWithUnexpectedCmd : List.of(
        Tuple.tuple("from a | filter b > 1", "filter"),
        Tuple.tuple("from a | explain ( row 1 )", "explain"),
        Tuple.tuple("from a | not-a-thing", "not-a-thing"),
        Tuple.tuple("from a | high5 a", "high5"),
        Tuple.tuple("from a | a+b = c", "a+b"),
        Tuple.tuple("from a | a//hi", "a"),
        Tuple.tuple("from a | a/*hi*/", "a")
    )) {
        expectThrows(
            ParsingException.class,
            allOf(
                containsString("mismatched input '" + queryWithUnexpectedCmd.v2() + "'"),
                containsString("'eval'"),
                containsString("'limit'"),
                containsString("'where'")
            ),
            () -> statement(queryWithUnexpectedCmd.v1())
        );
    }
}
/**
 * The removed {@code is_null} function must fail with a pointer to the
 * 'is null'/'is not null' predicates — also when the function name arrives via
 * a {@code ?fn} identifier parameter (snapshot builds) or a {@code ??fn}
 * constant parameter (when double parameter markers are enabled).
 */
public void testDeprecatedIsNullFunction() {
    expectError(
        "from test | eval x = is_null(f)",
        "line 1:22: is_null function is not supported anymore, please use 'is null'/'is not null' predicates instead"
    );
    expectError(
        "row x = is_null(f)",
        "line 1:9: is_null function is not supported anymore, please use 'is null'/'is not null' predicates instead"
    );
    if (Build.current().isSnapshot()) {
        // function name supplied through a named identifier parameter
        expectError(
            "from test | eval x = ?fn1(f)",
            List.of(paramAsIdentifier("fn1", "IS_NULL")),
            "line 1:22: is_null function is not supported anymore, please use 'is null'/'is not null' predicates instead"
        );
    }
    if (EsqlCapabilities.Cap.DOUBLE_PARAMETER_MARKERS_FOR_IDENTIFIERS.isEnabled()) {
        // function name supplied through a double-marker constant parameter
        expectError(
            "from test | eval x = ??fn1(f)",
            List.of(paramAsConstant("fn1", "IS_NULL")),
            "line 1:22: is_null function is not supported anymore, please use 'is null'/'is not null' predicates instead"
        );
    }
}
public void testMetadataFieldOnOtherSources() {
expectError("row a = 1 metadata _index", "line 1:20: extraneous input '_index' expecting <EOF>");
expectError("show info metadata _index", "line 1:11: token recognition error at: 'm'");
if (EsqlCapabilities.Cap.EXPLAIN.isEnabled()) {
expectError("explain ( from foo ) metadata _index", "line 1:22: token recognition error at: 'm'");
}
}
public void testMetadataFieldMultipleDeclarations() {
expectError("from test metadata _index, _version, _index", "1:38: metadata field [_index] already declared [@1:20]");
}
public void testMetadataFieldUnsupportedPrimitiveType() {
expectError("from test metadata _tier", "line 1:20: unsupported metadata field [_tier]");
}
public void testMetadataFieldUnsupportedCustomType() {
expectError("from test metadata _feature", "line 1:20: unsupported metadata field [_feature]");
}
public void testMetadataFieldNotFoundNonExistent() {
expectError("from test metadata _doesnot_compute", "line 1:20: unsupported metadata field [_doesnot_compute]");
}
public void testMetadataFieldNotFoundNormalField() {
expectError("from test metadata emp_no", "line 1:20: unsupported metadata field [emp_no]");
}
public void testDissectPattern() {
LogicalPlan cmd = processingCommand("dissect a \"%{foo}\"");
assertEquals(Dissect.class, cmd.getClass());
Dissect dissect = (Dissect) cmd;
assertEquals("%{foo}", dissect.parser().pattern());
assertEquals("", dissect.parser().appendSeparator());
assertEqualsIgnoringIds(List.of(referenceAttribute("foo", KEYWORD)), dissect.extractedFields());
for (String separatorName : List.of("append_separator", "APPEND_SEPARATOR", "AppEnd_SeparAtor")) {
cmd = processingCommand("dissect a \"%{foo}\" " + separatorName + "=\",\"");
assertEquals(Dissect.class, cmd.getClass());
dissect = (Dissect) cmd;
assertEquals("%{foo}", dissect.parser().pattern());
assertEquals(",", dissect.parser().appendSeparator());
assertEqualsIgnoringIds(List.of(referenceAttribute("foo", KEYWORD)), dissect.extractedFields());
}
for (Tuple<String, String> queryWithUnexpectedCmd : List.of(
Tuple.tuple("from a | dissect foo \"\"", "[]"),
Tuple.tuple("from a | dissect foo \" \"", "[ ]"),
Tuple.tuple("from a | dissect foo \"no fields\"", "[no fields]")
)) {
expectError(queryWithUnexpectedCmd.v1(), "Invalid pattern for dissect: " + queryWithUnexpectedCmd.v2());
}
expectError("from a | dissect foo \"%{*a}:%{&a}\"", "Reference keys not supported in dissect patterns: [%{*a}]");
expectError("from a | dissect foo \"%{bar}\" invalid_option=3", "Invalid option for dissect: [invalid_option]");
expectError(
"from a | dissect foo \"%{bar}\" append_separator=3",
"Invalid value for dissect append_separator: expected a string, but was [3]"
);
expectError("from a | dissect foo \"%{}\"", "Invalid pattern for dissect: [%{}]");
}
    /**
     * GROK: pattern parsing into extracted fields plus rejection of invalid
     * patterns — unknown dictionary entries, duplicate attributes with
     * conflicting types, undefined group options, and an unbalanced single
     * pattern inside a multi-pattern GROK.
     */
    public void testGrokPattern() {
        // happy path: one WORD capture becomes one KEYWORD extracted field
        LogicalPlan cmd = processingCommand("grok a \"%{WORD:foo}\"");
        assertEquals(Grok.class, cmd.getClass());
        Grok grok = (Grok) cmd;
        assertEquals("%{WORD:foo}", grok.parser().pattern());
        assertEqualsIgnoringIds(List.of(referenceAttribute("foo", KEYWORD)), grok.extractedFields());
        // pattern name missing from Grok's built-in dictionary
        expectThrows(
            ParsingException.class,
            containsString("Invalid pattern [%{_invalid_:x}] for grok: Unable to find pattern [_invalid_] in Grok's pattern dictionary"),
            () -> statement("row a = \"foo bar\" | grok a \"%{_invalid_:x}\"")
        );
        // the same attribute twice with the same type yields a single extracted field
        cmd = processingCommand("grok a \"%{WORD:foo} %{WORD:foo}\"");
        assertEquals(Grok.class, cmd.getClass());
        grok = (Grok) cmd;
        assertEquals("%{WORD:foo} %{WORD:foo}", grok.parser().pattern());
        assertEqualsIgnoringIds(List.of(referenceAttribute("foo", KEYWORD)), grok.extractedFields());
        // the same attribute with different types (NUMBER vs WORD) is rejected
        expectError(
            "row a = \"foo bar\" | GROK a \"%{NUMBER:foo} %{WORD:foo}\"",
            "line 1:21: Invalid GROK pattern [%{NUMBER:foo} %{WORD:foo}]:"
                + " the attribute [foo] is defined multiple times with different types"
        );
        // (?P<name>...) group option is not supported by Grok
        expectError(
            "row a = \"foo\" | GROK a \"(?P<justification>.+)\"",
            "line 1:24: Invalid GROK pattern [(?P<justification>.+)]: [undefined group option]"
        );
        // the same validations apply across a comma-separated list of patterns
        expectError(
            "row a = \"foo bar\" | GROK a \"%{NUMBER:foo}\", \"%{WORD:foo}\"",
            "line 1:21: Invalid GROK patterns [%{NUMBER:foo}, %{WORD:foo}]:"
                + " the attribute [foo] is defined multiple times with different types"
        );
        expectError(
            "row a = \"foo\" | GROK a \"%{WORD:foo}\", \"(?P<justification>.+)\"",
            "line 1:39: Invalid GROK pattern [(?P<justification>.+)]: [undefined group option]"
        );
        // when combining the pattern, the resulting string could be valid, but the single patterns are invalid
        expectError("""
            ROW a = "foo bar"
            | GROK a "(%{WORD:word}", "x)"
            """, "line 2:10: Invalid GROK pattern [(%{WORD:word}]: [end pattern with unmatched parenthesis]");
    }
public void testLikeRLike() {
LogicalPlan cmd = processingCommand("where foo like \"*bar*\"");
assertEquals(Filter.class, cmd.getClass());
Filter filter = (Filter) cmd;
assertEquals(WildcardLike.class, filter.condition().getClass());
WildcardLike like = (WildcardLike) filter.condition();
assertEquals("*bar*", like.pattern().pattern());
cmd = processingCommand("where foo rlike \".*bar.*\"");
assertEquals(Filter.class, cmd.getClass());
filter = (Filter) cmd;
assertEquals(RLike.class, filter.condition().getClass());
RLike rlike = (RLike) filter.condition();
assertEquals(".*bar.*", rlike.pattern().asJavaRegex());
expectError("from a | where foo like 12", "no viable alternative at input 'foo like 12'");
expectError("from a | where foo rlike 12", "no viable alternative at input 'foo rlike 12'");
expectError(
"from a | where foo like \"(?i)(^|[^a-zA-Z0-9_-])nmap($|\\\\.)\"",
"line 1:16: Invalid pattern for LIKE [(?i)(^|[^a-zA-Z0-9_-])nmap($|\\.)]: "
+ "[Invalid sequence - escape character is not followed by special wildcard char]"
);
}
public void testLikeParam() {
if (EsqlCapabilities.Cap.LIKE_PARAMETER_SUPPORT.isEnabled()) {
LogicalPlan anonymous = statement(
// comment keeps following arguments on separate lines like other tests
"row a = \"abc\" | where a like ?",
new QueryParams(List.of(paramAsConstant(null, "a*")))
);
Filter filter = as(anonymous, Filter.class);
WildcardLike like = as(filter.condition(), WildcardLike.class);
assertEquals("a*", like.pattern().pattern());
expectError(
"row a = \"abc\" | where a like ?",
List.of(paramAsConstant(null, 1)),
"Invalid pattern parameter type for like [?]: expected string, found integer"
);
expectError(
"row a = \"abc\" | where a like ?",
List.of(paramAsConstant(null, List.of("a*", "b*"))),
"Invalid pattern parameter type for like [?]: expected string, found list"
);
}
}
public void testLikeListParam() {
if (EsqlCapabilities.Cap.LIKE_PARAMETER_SUPPORT.isEnabled()) {
LogicalPlan positional = statement(
"row a = \"abc\" | where a like ( ?1, ?2 )",
new QueryParams(List.of(paramAsConstant(null, "a*"), paramAsConstant(null, "b*")))
);
Filter filter = as(positional, Filter.class);
WildcardLikeList likelist = as(filter.condition(), WildcardLikeList.class);
WildcardPatternList patternlist = as(likelist.pattern(), WildcardPatternList.class);
assertEquals("(\"a*\", \"b*\")", patternlist.pattern());
expectError(
"row a = \"abc\" | where a like ( ?1, ?2 )",
List.of(paramAsConstant(null, "a*"), paramAsConstant(null, 1)),
"Invalid pattern parameter type for like [?2]: expected string, found integer"
);
expectError(
"row a = \"abc\" | where a like ( ?1, ?3 )",
List.of(paramAsConstant(null, "a*"), paramAsConstant(null, 1)),
"No parameter is defined for position 3, did you mean any position between 1 and 2?"
);
}
}
public void testRLikeParam() {
if (EsqlCapabilities.Cap.LIKE_PARAMETER_SUPPORT.isEnabled()) {
LogicalPlan named = statement(
"row a = \"abc\" | where a rlike ?pattern",
new QueryParams(List.of(paramAsConstant("pattern", "a*")))
);
Filter filter = as(named, Filter.class);
RLike rlike = as(filter.condition(), RLike.class);
assertEquals("a*", rlike.pattern().pattern());
expectError(
"row a = \"abc\" | where a rlike ?pattern",
List.of(paramAsConstant("pattern", 1)),
"Invalid pattern parameter type for rlike [?pattern]: expected string, found integer"
);
expectError(
"row a = \"abc\" | where a rlike ?pattern1",
List.of(paramAsConstant("pattern", 1)),
"Unknown query parameter [pattern1], did you mean [pattern]?"
);
}
}
public void testRLikeListParam() {
if (EsqlCapabilities.Cap.LIKE_PARAMETER_SUPPORT.isEnabled()) {
LogicalPlan named = statement(
"row a = \"abc\" | where a rlike ( ?p1, ?p2 )",
new QueryParams(List.of(paramAsConstant("p1", "a*"), paramAsConstant("p2", "b*")))
);
Filter filter = as(named, Filter.class);
RLikeList rlikelist = as(filter.condition(), RLikeList.class);
RLikePatternList patternlist = as(rlikelist.pattern(), RLikePatternList.class);
assertEquals("(\"a*\", \"b*\")", patternlist.pattern());
expectError(
"row a = \"abc\" | where a rlike ( ?p1, ?p2 )",
List.of(paramAsConstant("p1", "a*"), paramAsConstant("p2", 1)),
"Invalid pattern parameter type for rlike [?p2]: expected string, found integer"
);
expectError(
"row a = \"abc\" | where a rlike ( ?p1, ?p3 )",
List.of(paramAsConstant("p1", "a*"), paramAsConstant("p2", 1)),
"Unknown query parameter [p3], did you mean any of [p1, p2]?"
);
}
}
public void testIdentifierPatternTooComplex() {
// It is incredibly unlikely that we will see this limit hit in practice
// The repetition value 2450 was a ballpark estimate and validated experimentally
String explodingWildcard = "a*".repeat(2450);
expectError("FROM a | KEEP " + explodingWildcard, "Pattern was too complex to determinize");
}
    /**
     * ENRICH parsing: default (empty) match field, explicit ON field, an
     * optional mode prefix on the policy name, and errors for wildcards in
     * ON/WITH projections and for unknown mode qualifiers.
     */
    public void testEnrich() {
        // no ON clause: the match field is an EmptyAttribute, resolved later
        assertEqualsIgnoringIds(
            new Enrich(
                EMPTY,
                PROCESSING_CMD_INPUT,
                null,
                Literal.keyword(EMPTY, "countries"),
                new EmptyAttribute(EMPTY),
                null,
                Map.of(),
                List.of()
            ),
            processingCommand("enrich countries")
        );
        // explicit ON clause names the match field
        assertEqualsIgnoringIds(
            new Enrich(
                EMPTY,
                PROCESSING_CMD_INPUT,
                null,
                Literal.keyword(EMPTY, "index-policy"),
                new UnresolvedAttribute(EMPTY, "field_underscore"),
                null,
                Map.of(),
                List.of()
            ),
            processingCommand("enrich index-policy ON field_underscore")
        );
        // a _<MODE>: prefix on the policy name selects the enrich mode
        Enrich.Mode mode = randomFrom(Enrich.Mode.values());
        assertEqualsIgnoringIds(
            new Enrich(
                EMPTY,
                PROCESSING_CMD_INPUT,
                mode,
                Literal.keyword(EMPTY, "countries"),
                new UnresolvedAttribute(EMPTY, "country_code"),
                null,
                Map.of(),
                List.of()
            ),
            processingCommand("enrich _" + mode.name() + ":countries ON country_code")
        );
        // wildcards are not allowed anywhere in ON or WITH projections
        expectError("from a | enrich countries on foo* ", "Using wildcards [*] in ENRICH WITH projections is not allowed, found [foo*]");
        expectError("from a | enrich countries on * ", "Using wildcards [*] in ENRICH WITH projections is not allowed, found [*]");
        expectError(
            "from a | enrich countries on foo with bar*",
            "Using wildcards [*] in ENRICH WITH projections is not allowed, found [bar*]"
        );
        expectError("from a | enrich countries on foo with *", "Using wildcards [*] in ENRICH WITH projections is not allowed, found [*]");
        expectError(
            "from a | enrich countries on foo with x = bar* ",
            "Using wildcards [*] in ENRICH WITH projections is not allowed, found [bar*]"
        );
        expectError(
            "from a | enrich countries on foo with x = * ",
            "Using wildcards [*] in ENRICH WITH projections is not allowed, found [*]"
        );
        expectError(
            "from a | enrich countries on foo with x* = bar ",
            "Using wildcards [*] in ENRICH WITH projections is not allowed, found [x*]"
        );
        expectError(
            "from a | enrich countries on foo with * = bar ",
            "Using wildcards [*] in ENRICH WITH projections is not allowed, found [*]"
        );
        // a dotted name ending in * counts as a wildcard too
        expectError(
            "from a | enrich countries on foo . * ",
            "Using wildcards [*] in ENRICH WITH projections is not allowed, found [foo.*]"
        );
        // unknown mode qualifier before the policy name
        expectError(
            "from a | enrich typo:countries on foo",
            "line 1:17: Unrecognized value [typo], ENRICH policy qualifier needs to be one of [_ANY, _COORDINATOR, _REMOTE]"
        );
    }
public void testMvExpand() {
LogicalPlan cmd = processingCommand("mv_expand a");
assertEquals(MvExpand.class, cmd.getClass());
MvExpand expand = (MvExpand) cmd;
assertThat(expand.target(), equalToIgnoringIds(attribute("a")));
}
// see https://github.com/elastic/elasticsearch/issues/103331
public void testKeepStarMvExpand() {
try {
String query = "from test | keep * | mv_expand a";
var plan = statement(query);
} catch (UnresolvedException e) {
fail(e, "Regression: https://github.com/elastic/elasticsearch/issues/103331");
}
}
public void testUsageOfProject() {
String query = "from test | project foo, bar";
expectThrows(ParsingException.class, containsString("mismatched input 'project' expecting"), () -> statement(query));
}
    /**
     * Anonymous parameters (?) substitute positionally into ROW fields.
     * String params stay strings (BytesRef) — the "duration"/"version"/"IP"
     * looking values are not implicitly converted — and params participate in
     * arithmetic (?-1, ?+1 fold to constants).
     */
    public void testInputParams() {
        LogicalPlan stm = statement(
            "row x = ?, y = ?, a = ?, b = ?, c = ?, d = ?, e = ?-1, f = ?+1",
            new QueryParams(
                List.of(
                    paramAsConstant(null, 1),
                    paramAsConstant(null, "2"),
                    paramAsConstant(null, "2 days"),
                    paramAsConstant(null, "4 hours"),
                    paramAsConstant(null, "1.2.3"),
                    paramAsConstant(null, "127.0.0.1"),
                    paramAsConstant(null, 10),
                    paramAsConstant(null, 10)
                )
            )
        );
        assertThat(stm, instanceOf(Row.class));
        Row row = (Row) stm;
        assertThat(row.fields().size(), is(8));
        // x: integer param folds to the integer constant
        NamedExpression field = row.fields().get(0);
        assertThat(field.name(), is("x"));
        assertThat(field, instanceOf(Alias.class));
        Alias alias = (Alias) field;
        assertThat(alias.child().fold(FoldContext.small()), is(1));
        // y: string param stays a string
        field = row.fields().get(1);
        assertThat(field.name(), is("y"));
        assertThat(field, instanceOf(Alias.class));
        alias = (Alias) field;
        assertThat(alias.child().fold(FoldContext.small()), is(BytesRefs.toBytesRef("2")));
        // a: "2 days" is NOT parsed as a date period
        field = row.fields().get(2);
        assertThat(field.name(), is("a"));
        assertThat(field, instanceOf(Alias.class));
        alias = (Alias) field;
        assertThat(alias.child().fold(FoldContext.small()), is(BytesRefs.toBytesRef("2 days")));
        // b: "4 hours" is NOT parsed as a time duration
        field = row.fields().get(3);
        assertThat(field.name(), is("b"));
        assertThat(field, instanceOf(Alias.class));
        alias = (Alias) field;
        assertThat(alias.child().fold(FoldContext.small()), is(BytesRefs.toBytesRef("4 hours")));
        // c: a version-looking string folds to a BytesRef, not a version type
        field = row.fields().get(4);
        assertThat(field.name(), is("c"));
        assertThat(field, instanceOf(Alias.class));
        alias = (Alias) field;
        assertThat(alias.child().fold(FoldContext.small()).getClass(), is(BytesRef.class));
        assertThat(alias.child().fold(FoldContext.small()), is(BytesRefs.toBytesRef("1.2.3")));
        // d: an IP-looking string folds to a BytesRef, not an IP type
        field = row.fields().get(5);
        assertThat(field.name(), is("d"));
        assertThat(field, instanceOf(Alias.class));
        alias = (Alias) field;
        assertThat(alias.child().fold(FoldContext.small()).getClass(), is(BytesRef.class));
        assertThat(alias.child().fold(FoldContext.small()), is(BytesRefs.toBytesRef("127.0.0.1")));
        // e: ?-1 with ?=10 folds to 9
        field = row.fields().get(6);
        assertThat(field.name(), is("e"));
        assertThat(field, instanceOf(Alias.class));
        alias = (Alias) field;
        assertThat(alias.child().fold(FoldContext.small()), is(9));
        // f: ?+1 with ?=10 folds to 11
        field = row.fields().get(7);
        assertThat(field.name(), is("f"));
        assertThat(field, instanceOf(Alias.class));
        alias = (Alias) field;
        assertThat(alias.child().fold(FoldContext.small()), is(11));
    }
public void testMissingInputParams() {
expectError("row x = ?, y = ?", List.of(paramAsConstant(null, 1)), "Not enough actual parameters 1");
if (EsqlCapabilities.Cap.DOUBLE_PARAMETER_MARKERS_FOR_IDENTIFIERS.isEnabled()) {
expectError("from test | eval x = ??, y = ??", List.of(paramAsConstant(null, 1)), "Not enough actual parameters 1");
expectError("from test | eval x = ??, y = ?", List.of(paramAsConstant(null, 1)), "Not enough actual parameters 1");
}
}
public void testNamedParams() {
LogicalPlan stm = statement("row x=?name1, y = ?name1", new QueryParams(List.of(paramAsConstant("name1", 1))));
assertThat(stm, instanceOf(Row.class));
Row row = (Row) stm;
assertThat(row.fields().size(), is(2));
NamedExpression field = row.fields().get(0);
assertThat(field.name(), is("x"));
assertThat(field, instanceOf(Alias.class));
Alias alias = (Alias) field;
assertThat(alias.child().fold(FoldContext.small()), is(1));
field = row.fields().get(1);
assertThat(field.name(), is("y"));
assertThat(field, instanceOf(Alias.class));
alias = (Alias) field;
assertThat(alias.child().fold(FoldContext.small()), is(1));
}
    /**
     * Invalid named parameters: unknown names get did-you-mean suggestions;
     * names the lexer cannot tokenize (@, #, non-ASCII) are reported as token
     * recognition errors; a stray extra '?' marker is flagged as extraneous.
     */
    public void testInvalidNamedParams() {
        // one candidate -> singular suggestion
        expectError(
            "from test | where x < ?n1 | eval y = ?n2",
            List.of(paramAsConstant("n1", 5)),
            "Unknown query parameter [n2], did you mean [n1]?"
        );
        // several candidates -> "any of" suggestion
        expectError(
            "from test | where x < ?n1 | eval y = ?n2",
            List.of(paramAsConstant("n1", 5), paramAsConstant("n3", 5)),
            "Unknown query parameter [n2], did you mean any of [n3, n1]?"
        );
        expectError("from test | where x < ?@1", List.of(paramAsConstant("@1", 5)), "extraneous input '@1' expecting <EOF>");
        expectError("from test | where x < ?#1", List.of(paramAsConstant("#1", 5)), "token recognition error at: '#'");
        expectError("from test | where x < ?Å", List.of(paramAsConstant("Å", 5)), "line 1:24: token recognition error at: 'Å'");
        expectError("from test | eval x = ?Å", List.of(paramAsConstant("Å", 5)), "line 1:23: token recognition error at: 'Å'");
        // with double markers enabled, ??? has one marker too many; otherwise ?? does
        if (EsqlCapabilities.Cap.DOUBLE_PARAMETER_MARKERS_FOR_IDENTIFIERS.isEnabled()) {
            expectError(
                "from test | where x < ???",
                List.of(paramAsConstant("n_1", 5), paramAsConstant("n_2", 5)),
                "extraneous input '?' expecting <EOF>"
            );
        } else {
            expectError(
                "from test | where x < ??",
                List.of(paramAsConstant("n_1", 5), paramAsConstant("n_2", 5)),
                "extraneous input '?' expecting <EOF>"
            );
        }
    }
public void testPositionalParams() {
LogicalPlan stm = statement("row x=?1, y=?1", new QueryParams(List.of(paramAsConstant(null, 1))));
assertThat(stm, instanceOf(Row.class));
Row row = (Row) stm;
assertThat(row.fields().size(), is(2));
NamedExpression field = row.fields().get(0);
assertThat(field.name(), is("x"));
assertThat(field, instanceOf(Alias.class));
Alias alias = (Alias) field;
assertThat(alias.child().fold(FoldContext.small()), is(1));
field = row.fields().get(1);
assertThat(field.name(), is("y"));
assertThat(field, instanceOf(Alias.class));
alias = (Alias) field;
assertThat(alias.child().fold(FoldContext.small()), is(1));
}
public void testInvalidPositionalParams() {
expectError(
"from test | where x < ?0",
List.of(paramAsConstant(null, 5)),
"No parameter is defined for position 0, did you mean position 1"
);
expectError(
"from test | where x < ?2",
List.of(paramAsConstant(null, 5)),
"No parameter is defined for position 2, did you mean position 1"
);
expectError(
"from test | where x < ?0 and y < ?2",
List.of(paramAsConstant(null, 5)),
"line 1:23: No parameter is defined for position 0, did you mean position 1?; "
+ "line 1:34: No parameter is defined for position 2, did you mean position 1?"
);
expectError(
"from test | where x < ?0",
List.of(paramAsConstant(null, 5), paramAsConstant(null, 10)),
"No parameter is defined for position 0, did you mean any position between 1 and 2?"
);
}
public void testParamInWhere() {
LogicalPlan plan = statement("from test | where x < ? | limit 10", new QueryParams(List.of(paramAsConstant(null, 5))));
assertThat(plan, instanceOf(Limit.class));
Limit limit = (Limit) plan;
assertThat(limit.limit(), instanceOf(Literal.class));
assertThat(((Literal) limit.limit()).value(), equalTo(10));
assertThat(limit.children().size(), equalTo(1));
assertThat(limit.children().get(0), instanceOf(Filter.class));
Filter w = (Filter) limit.children().get(0);
assertThat(((Literal) w.condition().children().get(1)).value(), equalTo(5));
assertThat(limit.children().get(0).children().size(), equalTo(1));
assertThat(limit.children().get(0).children().get(0), instanceOf(UnresolvedRelation.class));
plan = statement("from test | where x < ?n1 | limit 10", new QueryParams(List.of(paramAsConstant("n1", 5))));
assertThat(plan, instanceOf(Limit.class));
limit = (Limit) plan;
assertThat(limit.limit(), instanceOf(Literal.class));
assertThat(((Literal) limit.limit()).value(), equalTo(10));
assertThat(limit.children().size(), equalTo(1));
assertThat(limit.children().get(0), instanceOf(Filter.class));
w = (Filter) limit.children().get(0);
assertThat(((Literal) w.condition().children().get(1)).value(), equalTo(5));
assertThat(limit.children().get(0).children().size(), equalTo(1));
assertThat(limit.children().get(0).children().get(0), instanceOf(UnresolvedRelation.class));
plan = statement("from test | where x < ?_n1 | limit 10", new QueryParams(List.of(paramAsConstant("_n1", 5))));
assertThat(plan, instanceOf(Limit.class));
limit = (Limit) plan;
assertThat(limit.limit(), instanceOf(Literal.class));
assertThat(((Literal) limit.limit()).value(), equalTo(10));
assertThat(limit.children().size(), equalTo(1));
assertThat(limit.children().get(0), instanceOf(Filter.class));
w = (Filter) limit.children().get(0);
assertThat(((Literal) w.condition().children().get(1)).value(), equalTo(5));
assertThat(limit.children().get(0).children().size(), equalTo(1));
assertThat(limit.children().get(0).children().get(0), instanceOf(UnresolvedRelation.class));
plan = statement("from test | where x < ?1 | limit 10", new QueryParams(List.of(paramAsConstant(null, 5))));
assertThat(plan, instanceOf(Limit.class));
limit = (Limit) plan;
assertThat(limit.limit(), instanceOf(Literal.class));
assertThat(((Literal) limit.limit()).value(), equalTo(10));
assertThat(limit.children().size(), equalTo(1));
assertThat(limit.children().get(0), instanceOf(Filter.class));
w = (Filter) limit.children().get(0);
assertThat(((Literal) w.condition().children().get(1)).value(), equalTo(5));
assertThat(limit.children().get(0).children().size(), equalTo(1));
assertThat(limit.children().get(0).children().get(0), instanceOf(UnresolvedRelation.class));
plan = statement("from test | where x < ?__1 | limit 10", new QueryParams(List.of(paramAsConstant("__1", 5))));
assertThat(plan, instanceOf(Limit.class));
limit = (Limit) plan;
assertThat(limit.limit(), instanceOf(Literal.class));
assertThat(((Literal) limit.limit()).value(), equalTo(10));
assertThat(limit.children().size(), equalTo(1));
assertThat(limit.children().get(0), instanceOf(Filter.class));
w = (Filter) limit.children().get(0);
assertThat(((Literal) w.condition().children().get(1)).value(), equalTo(5));
assertThat(limit.children().get(0).children().size(), equalTo(1));
assertThat(limit.children().get(0).children().get(0), instanceOf(UnresolvedRelation.class));
}
public void testParamInEval() {
LogicalPlan plan = statement(
"from test | where x < ? | eval y = ? + ? | limit 10",
new QueryParams(List.of(paramAsConstant(null, 5), paramAsConstant(null, -1), paramAsConstant(null, 100)))
);
assertThat(plan, instanceOf(Limit.class));
Limit limit = (Limit) plan;
assertThat(limit.limit(), instanceOf(Literal.class));
assertThat(((Literal) limit.limit()).value(), equalTo(10));
assertThat(limit.children().size(), equalTo(1));
assertThat(limit.children().get(0), instanceOf(Eval.class));
Eval eval = (Eval) limit.children().get(0);
assertThat(((Literal) ((Add) eval.fields().get(0).child()).left()).value(), equalTo(-1));
assertThat(((Literal) ((Add) eval.fields().get(0).child()).right()).value(), equalTo(100));
Filter f = (Filter) eval.children().get(0);
assertThat(((Literal) f.condition().children().get(1)).value(), equalTo(5));
assertThat(f.children().size(), equalTo(1));
assertThat(f.children().get(0), instanceOf(UnresolvedRelation.class));
plan = statement(
"from test | where x < ?n1 | eval y = ?n2 + ?n3 | limit 10",
new QueryParams(List.of(paramAsConstant("n1", 5), paramAsConstant("n2", -1), paramAsConstant("n3", 100)))
);
assertThat(plan, instanceOf(Limit.class));
limit = (Limit) plan;
assertThat(limit.limit(), instanceOf(Literal.class));
assertThat(((Literal) limit.limit()).value(), equalTo(10));
assertThat(limit.children().size(), equalTo(1));
assertThat(limit.children().get(0), instanceOf(Eval.class));
eval = (Eval) limit.children().get(0);
assertThat(((Literal) ((Add) eval.fields().get(0).child()).left()).value(), equalTo(-1));
assertThat(((Literal) ((Add) eval.fields().get(0).child()).right()).value(), equalTo(100));
f = (Filter) eval.children().get(0);
assertThat(((Literal) f.condition().children().get(1)).value(), equalTo(5));
assertThat(f.children().size(), equalTo(1));
assertThat(f.children().get(0), instanceOf(UnresolvedRelation.class));
plan = statement(
"from test | where x < ?_n1 | eval y = ?_n2 + ?_n3 | limit 10",
new QueryParams(List.of(paramAsConstant("_n1", 5), paramAsConstant("_n2", -1), paramAsConstant("_n3", 100)))
);
assertThat(plan, instanceOf(Limit.class));
limit = (Limit) plan;
assertThat(limit.limit(), instanceOf(Literal.class));
assertThat(((Literal) limit.limit()).value(), equalTo(10));
assertThat(limit.children().size(), equalTo(1));
assertThat(limit.children().get(0), instanceOf(Eval.class));
eval = (Eval) limit.children().get(0);
assertThat(((Literal) ((Add) eval.fields().get(0).child()).left()).value(), equalTo(-1));
assertThat(((Literal) ((Add) eval.fields().get(0).child()).right()).value(), equalTo(100));
f = (Filter) eval.children().get(0);
assertThat(((Literal) f.condition().children().get(1)).value(), equalTo(5));
assertThat(f.children().size(), equalTo(1));
assertThat(f.children().get(0), instanceOf(UnresolvedRelation.class));
plan = statement(
"from test | where x < ?1 | eval y = ?2 + ?1 | limit 10",
new QueryParams(List.of(paramAsConstant(null, 5), paramAsConstant(null, -1)))
);
assertThat(plan, instanceOf(Limit.class));
limit = (Limit) plan;
assertThat(limit.limit(), instanceOf(Literal.class));
assertThat(((Literal) limit.limit()).value(), equalTo(10));
assertThat(limit.children().size(), equalTo(1));
assertThat(limit.children().get(0), instanceOf(Eval.class));
eval = (Eval) limit.children().get(0);
assertThat(((Literal) ((Add) eval.fields().get(0).child()).left()).value(), equalTo(-1));
assertThat(((Literal) ((Add) eval.fields().get(0).child()).right()).value(), equalTo(5));
f = (Filter) eval.children().get(0);
assertThat(((Literal) f.condition().children().get(1)).value(), equalTo(5));
assertThat(f.children().size(), equalTo(1));
assertThat(f.children().get(0), instanceOf(UnresolvedRelation.class));
plan = statement(
"from test | where x < ?_1 | eval y = ?_2 + ?_1 | limit 10",
new QueryParams(List.of(paramAsConstant("_1", 5), paramAsConstant("_2", -1)))
);
assertThat(plan, instanceOf(Limit.class));
limit = (Limit) plan;
assertThat(limit.limit(), instanceOf(Literal.class));
assertThat(((Literal) limit.limit()).value(), equalTo(10));
assertThat(limit.children().size(), equalTo(1));
assertThat(limit.children().get(0), instanceOf(Eval.class));
eval = (Eval) limit.children().get(0);
assertThat(((Literal) ((Add) eval.fields().get(0).child()).left()).value(), equalTo(-1));
assertThat(((Literal) ((Add) eval.fields().get(0).child()).right()).value(), equalTo(5));
f = (Filter) eval.children().get(0);
assertThat(((Literal) f.condition().children().get(1)).value(), equalTo(5));
assertThat(f.children().size(), equalTo(1));
assertThat(f.children().get(0), instanceOf(UnresolvedRelation.class));
}
public void testParamInAggFunction() {
LogicalPlan plan = statement(
"from test | where x < ? | eval y = ? + ? | stats count(?) by z",
new QueryParams(
List.of(paramAsConstant(null, 5), paramAsConstant(null, -1), paramAsConstant(null, 100), paramAsConstant(null, "*"))
)
);
assertThat(plan, instanceOf(Aggregate.class));
Aggregate agg = (Aggregate) plan;
assertThat(((Literal) agg.aggregates().get(0).children().get(0).children().get(0)).value(), equalTo(BytesRefs.toBytesRef("*")));
assertThat(agg.child(), instanceOf(Eval.class));
assertThat(agg.children().size(), equalTo(1));
assertThat(agg.children().get(0), instanceOf(Eval.class));
Eval eval = (Eval) agg.children().get(0);
assertThat(((Literal) ((Add) eval.fields().get(0).child()).left()).value(), equalTo(-1));
assertThat(((Literal) ((Add) eval.fields().get(0).child()).right()).value(), equalTo(100));
Filter f = (Filter) eval.children().get(0);
assertThat(((Literal) f.condition().children().get(1)).value(), equalTo(5));
assertThat(f.children().size(), equalTo(1));
assertThat(f.children().get(0), instanceOf(UnresolvedRelation.class));
plan = statement(
"from test | where x < ?n1 | eval y = ?n2 + ?n3 | stats count(?n4) by z",
new QueryParams(
List.of(paramAsConstant("n1", 5), paramAsConstant("n2", -1), paramAsConstant("n3", 100), paramAsConstant("n4", "*"))
)
);
assertThat(plan, instanceOf(Aggregate.class));
agg = (Aggregate) plan;
assertThat(((Literal) agg.aggregates().get(0).children().get(0).children().get(0)).value(), equalTo(BytesRefs.toBytesRef("*")));
assertThat(agg.child(), instanceOf(Eval.class));
assertThat(agg.children().size(), equalTo(1));
assertThat(agg.children().get(0), instanceOf(Eval.class));
eval = (Eval) agg.children().get(0);
assertThat(((Literal) ((Add) eval.fields().get(0).child()).left()).value(), equalTo(-1));
assertThat(((Literal) ((Add) eval.fields().get(0).child()).right()).value(), equalTo(100));
f = (Filter) eval.children().get(0);
assertThat(((Literal) f.condition().children().get(1)).value(), equalTo(5));
assertThat(f.children().size(), equalTo(1));
assertThat(f.children().get(0), instanceOf(UnresolvedRelation.class));
plan = statement(
"from test | where x < ?_n1 | eval y = ?_n2 + ?_n3 | stats count(?_n4) by z",
new QueryParams(
List.of(paramAsConstant("_n1", 5), paramAsConstant("_n2", -1), paramAsConstant("_n3", 100), paramAsConstant("_n4", "*"))
)
);
assertThat(plan, instanceOf(Aggregate.class));
agg = (Aggregate) plan;
assertThat(((Literal) agg.aggregates().get(0).children().get(0).children().get(0)).value(), equalTo(BytesRefs.toBytesRef("*")));
assertThat(agg.child(), instanceOf(Eval.class));
assertThat(agg.children().size(), equalTo(1));
assertThat(agg.children().get(0), instanceOf(Eval.class));
eval = (Eval) agg.children().get(0);
assertThat(((Literal) ((Add) eval.fields().get(0).child()).left()).value(), equalTo(-1));
assertThat(((Literal) ((Add) eval.fields().get(0).child()).right()).value(), equalTo(100));
f = (Filter) eval.children().get(0);
assertThat(((Literal) f.condition().children().get(1)).value(), equalTo(5));
assertThat(f.children().size(), equalTo(1));
assertThat(f.children().get(0), instanceOf(UnresolvedRelation.class));
plan = statement(
"from test | where x < ?1 | eval y = ?2 + ?1 | stats count(?3) by z",
new QueryParams(List.of(paramAsConstant(null, 5), paramAsConstant(null, -1), paramAsConstant(null, "*")))
);
assertThat(plan, instanceOf(Aggregate.class));
agg = (Aggregate) plan;
assertThat(((Literal) agg.aggregates().get(0).children().get(0).children().get(0)).value(), equalTo(BytesRefs.toBytesRef("*")));
assertThat(agg.child(), instanceOf(Eval.class));
assertThat(agg.children().size(), equalTo(1));
assertThat(agg.children().get(0), instanceOf(Eval.class));
eval = (Eval) agg.children().get(0);
assertThat(((Literal) ((Add) eval.fields().get(0).child()).left()).value(), equalTo(-1));
assertThat(((Literal) ((Add) eval.fields().get(0).child()).right()).value(), equalTo(5));
f = (Filter) eval.children().get(0);
assertThat(((Literal) f.condition().children().get(1)).value(), equalTo(5));
assertThat(f.children().size(), equalTo(1));
assertThat(f.children().get(0), instanceOf(UnresolvedRelation.class));
plan = statement(
"from test | where x < ?_1 | eval y = ?_2 + ?_1 | stats count(?_3) by z",
new QueryParams(List.of(paramAsConstant("_1", 5), paramAsConstant("_2", -1), paramAsConstant("_3", "*")))
);
assertThat(plan, instanceOf(Aggregate.class));
agg = (Aggregate) plan;
assertThat(((Literal) agg.aggregates().get(0).children().get(0).children().get(0)).value(), equalTo(BytesRefs.toBytesRef("*")));
assertThat(agg.child(), instanceOf(Eval.class));
assertThat(agg.children().size(), equalTo(1));
assertThat(agg.children().get(0), instanceOf(Eval.class));
eval = (Eval) agg.children().get(0);
assertThat(((Literal) ((Add) eval.fields().get(0).child()).left()).value(), equalTo(-1));
assertThat(((Literal) ((Add) eval.fields().get(0).child()).right()).value(), equalTo(5));
f = (Filter) eval.children().get(0);
assertThat(((Literal) f.condition().children().get(1)).value(), equalTo(5));
assertThat(f.children().size(), equalTo(1));
assertThat(f.children().get(0), instanceOf(UnresolvedRelation.class));
}
    /**
     * Mixing parameter-marker styles (anonymous {@code ?}, positional {@code ?1}, named {@code ?name})
     * within a single query must be rejected, with an error naming the two styles that clashed.
     * Double-marker variants ({@code ??}) are only exercised when the capability is enabled.
     */
    public void testParamMixed() {
        // key = the three markers injected into the query, value = the style pair expected in the error message
        Map<List<String>, String> mixedParams = new HashMap<>(
            Map.ofEntries(
                Map.entry(List.of("?", "?n2", "?n3"), "named and anonymous"),
                Map.entry(List.of("?", "?_n2", "?n3"), "named and anonymous"),
                Map.entry(List.of("?1", "?n2", "?_n3"), "named and positional"),
                Map.entry(List.of("?", "?2", "?n3"), "positional and anonymous")
            )
        );
        if (EsqlCapabilities.Cap.DOUBLE_PARAMETER_MARKERS_FOR_IDENTIFIERS.isEnabled()) {
            // double markers (for identifiers) count as the same style as their single-marker counterpart
            mixedParams.put(List.of("??", "??n2", "??n3"), "named and anonymous");
            mixedParams.put(List.of("?", "??_n2", "?n3"), "named and anonymous");
            mixedParams.put(List.of("??1", "?n2", "?_n3"), "named and positional");
            mixedParams.put(List.of("?", "??2", "?n3"), "positional and anonymous");
        }
        for (Map.Entry<List<String>, String> mixedParam : mixedParams.entrySet()) {
            List<String> params = mixedParam.getKey();
            String errorMessage = mixedParam.getValue();
            String query = LoggerMessageFormat.format(
                null,
                "from test | where x < {} | eval y = {}() + {}",
                params.get(0),
                params.get(1),
                params.get(2)
            );
            expectError(
                query,
                List.of(paramAsConstant("n1", "f1"), paramAsConstant("n2", "fn2"), paramAsConstant("n3", "f3")),
                "Inconsistent parameter declaration, "
                    + "use one of positional, named or anonymous params but not a combination of "
                    + errorMessage
            );
        }
    }
    /**
     * A positional parameter can be inline-cast ({@code ?1::datetime}, {@code ?2::date_period});
     * the cast node wraps the original string constant, which stays reachable as the cast's child.
     */
    public void testIntervalParam() {
        LogicalPlan stm = statement(
            "row x = ?1::datetime | eval y = ?1::datetime + ?2::date_period",
            new QueryParams(List.of(paramAsConstant("datetime", "2024-01-01"), paramAsConstant("date_period", "3 days")))
        );
        assertThat(stm, instanceOf(Eval.class));
        Eval eval = (Eval) stm;
        assertThat(eval.fields().size(), is(1));
        NamedExpression field = eval.fields().get(0);
        assertThat(field.name(), is("y"));
        assertThat(field, instanceOf(Alias.class));
        // children().get(0) of each Add operand unwraps the cast to reach the raw constant
        assertThat(
            ((Literal) ((Add) eval.fields().get(0).child()).left().children().get(0)).value(),
            equalTo(BytesRefs.toBytesRef("2024-01-01"))
        );
        assertThat(
            ((Literal) ((Add) eval.fields().get(0).child()).right().children().get(0)).value(),
            equalTo(BytesRefs.toBytesRef("3 days"))
        );
    }
    /**
     * Named parameters declared as identifiers can substitute field and function names in every
     * command that accepts a field reference. Dotted pairs like {@code ?f1.?f2} are joined with a
     * dot into a single attribute name.
     */
    public void testParamForIdentifier() {
        // TODO will be replaced by testDoubleParamsForIdentifier after providing an identifier with a single parameter marker is deprecated
        // field names can appear in eval/where/stats/sort/keep/drop/rename/dissect/grok/enrich/mvexpand
        // eval, where
        assertEqualsIgnoringIds(
            new Limit(
                EMPTY,
                new Literal(EMPTY, 1, INTEGER),
                new Filter(
                    EMPTY,
                    new Eval(EMPTY, relation("test"), List.of(new Alias(EMPTY, "x", function("toString", List.of(attribute("f1.")))))),
                    new Equals(EMPTY, attribute("f1."), attribute("f.2"))
                )
            ),
            statement(
                """
                    from test
                    | eval ?f0 = ?fn1(?f1)
                    | where ?f1 == ?f2
                    | limit 1""",
                new QueryParams(
                    List.of(
                        paramAsIdentifier("f0", "x"),
                        paramAsIdentifier("f1", "f1."),
                        paramAsIdentifier("f2", "f.2"),
                        paramAsIdentifier("fn1", "toString")
                    )
                )
            )
        );
        // dotted parameter pairs (?f1.?f2) concatenate into one attribute name
        assertEqualsIgnoringIds(
            new Limit(
                EMPTY,
                new Literal(EMPTY, 1, INTEGER),
                new Filter(
                    EMPTY,
                    new Eval(EMPTY, relation("test"), List.of(new Alias(EMPTY, "x", function("toString", List.of(attribute("f1..f.2")))))),
                    new Equals(EMPTY, attribute("f3.*.f.4."), attribute("f.5.*.f.*.6"))
                )
            ),
            statement(
                """
                    from test
                    | eval ?f0 = ?fn1(?f1.?f2)
                    | where ?f3.?f4 == ?f5.?f6
                    | limit 1""",
                new QueryParams(
                    List.of(
                        paramAsIdentifier("f0", "x"),
                        paramAsIdentifier("f1", "f1."),
                        paramAsIdentifier("f2", "f.2"),
                        paramAsIdentifier("f3", "f3.*"),
                        paramAsIdentifier("f4", "f.4."),
                        paramAsIdentifier("f5", "f.5.*"),
                        paramAsIdentifier("f6", "f.*.6"),
                        paramAsIdentifier("fn1", "toString")
                    )
                )
            )
        );
        // stats, sort, mv_expand
        assertEqualsIgnoringIds(
            new MvExpand(
                EMPTY,
                new OrderBy(
                    EMPTY,
                    new Aggregate(
                        EMPTY,
                        relation("test"),
                        List.of(attribute("f.4.")),
                        List.of(new Alias(EMPTY, "y", function("count", List.of(attribute("f3.*")))), attribute("f.4."))
                    ),
                    List.of(new Order(EMPTY, attribute("f.5.*"), Order.OrderDirection.ASC, Order.NullsPosition.LAST))
                ),
                attribute("f.6*"),
                attribute("f.6*")
            ),
            statement(
                """
                    from test
                    | stats y = ?fn2(?f3) by ?f4
                    | sort ?f5
                    | mv_expand ?f6""",
                new QueryParams(
                    List.of(
                        paramAsIdentifier("f3", "f3.*"),
                        paramAsIdentifier("f4", "f.4."),
                        paramAsIdentifier("f5", "f.5.*"),
                        paramAsIdentifier("f6", "f.6*"),
                        paramAsIdentifier("fn2", "count")
                    )
                )
            )
        );
        assertEqualsIgnoringIds(
            new MvExpand(
                EMPTY,
                new OrderBy(
                    EMPTY,
                    new Aggregate(
                        EMPTY,
                        relation("test"),
                        List.of(attribute("f.9.f10.*")),
                        List.of(new Alias(EMPTY, "y", function("count", List.of(attribute("f.7*.f8.")))), attribute("f.9.f10.*"))
                    ),
                    List.of(new Order(EMPTY, attribute("f.11..f.12.*"), Order.OrderDirection.ASC, Order.NullsPosition.LAST))
                ),
                attribute("f.*.13.f.14*"),
                attribute("f.*.13.f.14*")
            ),
            statement(
                """
                    from test
                    | stats y = ?fn2(?f7.?f8) by ?f9.?f10
                    | sort ?f11.?f12
                    | mv_expand ?f13.?f14""",
                new QueryParams(
                    List.of(
                        paramAsIdentifier("f7", "f.7*"),
                        paramAsIdentifier("f8", "f8."),
                        paramAsIdentifier("f9", "f.9"),
                        paramAsIdentifier("f10", "f10.*"),
                        paramAsIdentifier("f11", "f.11."),
                        paramAsIdentifier("f12", "f.12.*"),
                        paramAsIdentifier("f13", "f.*.13"),
                        paramAsIdentifier("f14", "f.14*"),
                        paramAsIdentifier("fn2", "count")
                    )
                )
            )
        );
        // keep, drop, rename, grok, dissect
        LogicalPlan plan = statement(
            """
                from test | keep ?f1, ?f2 | drop ?f3, ?f4 | dissect ?f5 "%{bar}" | grok ?f6 "%{WORD:foo}" | rename ?f7 as ?f8 | limit 1""",
            new QueryParams(
                List.of(
                    paramAsIdentifier("f1", "f.1.*"),
                    paramAsIdentifier("f2", "f.2"),
                    paramAsIdentifier("f3", "f3."),
                    paramAsIdentifier("f4", "f4.*"),
                    paramAsIdentifier("f5", "f.5*"),
                    paramAsIdentifier("f6", "f.6."),
                    paramAsIdentifier("f7", "f7*."),
                    paramAsIdentifier("f8", "f.8")
                )
            )
        );
        // walk the plan top-down: limit -> rename -> grok -> dissect -> drop -> keep
        Limit limit = as(plan, Limit.class);
        Rename rename = as(limit.child(), Rename.class);
        assertEqualsIgnoringIds(rename.renamings(), List.of(new Alias(EMPTY, "f.8", attribute("f7*."))));
        Grok grok = as(rename.child(), Grok.class);
        assertEqualsIgnoringIds(grok.input(), attribute("f.6."));
        assertEquals("%{WORD:foo}", grok.parser().pattern());
        assertEqualsIgnoringIds(List.of(referenceAttribute("foo", KEYWORD)), grok.extractedFields());
        Dissect dissect = as(grok.child(), Dissect.class);
        assertEqualsIgnoringIds(dissect.input(), attribute("f.5*"));
        assertEquals("%{bar}", dissect.parser().pattern());
        assertEquals("", dissect.parser().appendSeparator());
        assertEqualsIgnoringIds(List.of(referenceAttribute("bar", KEYWORD)), dissect.extractedFields());
        Drop drop = as(dissect.child(), Drop.class);
        List<? extends NamedExpression> removals = drop.removals();
        assertEqualsIgnoringIds(removals, List.of(attribute("f3."), attribute("f4.*")));
        Keep keep = as(drop.child(), Keep.class);
        assertEqualsIgnoringIds(keep.projections(), List.of(attribute("f.1.*"), attribute("f.2")));
        plan = statement(
            """
                from test | keep ?f1.?f2 | drop ?f3.?f4
                | dissect ?f5.?f6 "%{bar}" | grok ?f7.?f8 "%{WORD:foo}"
                | rename ?f9.?f10 as ?f11.?f12
                | limit 1""",
            new QueryParams(
                List.of(
                    paramAsIdentifier("f1", "f.1.*"),
                    paramAsIdentifier("f2", "f.2"),
                    paramAsIdentifier("f3", "f3."),
                    paramAsIdentifier("f4", "f4.*"),
                    paramAsIdentifier("f5", "f.5*"),
                    paramAsIdentifier("f6", "f.6."),
                    paramAsIdentifier("f7", "f7*."),
                    paramAsIdentifier("f8", "f.8"),
                    paramAsIdentifier("f9", "f.9*"),
                    paramAsIdentifier("f10", "f.10."),
                    paramAsIdentifier("f11", "f11*."),
                    paramAsIdentifier("f12", "f.12")
                )
            )
        );
        limit = as(plan, Limit.class);
        rename = as(limit.child(), Rename.class);
        assertEqualsIgnoringIds(rename.renamings(), List.of(new Alias(EMPTY, "f11*..f.12", attribute("f.9*.f.10."))));
        grok = as(rename.child(), Grok.class);
        assertEqualsIgnoringIds(grok.input(), attribute("f7*..f.8"));
        assertEquals("%{WORD:foo}", grok.parser().pattern());
        assertEqualsIgnoringIds(List.of(referenceAttribute("foo", KEYWORD)), grok.extractedFields());
        dissect = as(grok.child(), Dissect.class);
        assertEqualsIgnoringIds(dissect.input(), attribute("f.5*.f.6."));
        assertEquals("%{bar}", dissect.parser().pattern());
        assertEquals("", dissect.parser().appendSeparator());
        assertEqualsIgnoringIds(List.of(referenceAttribute("bar", KEYWORD)), dissect.extractedFields());
        drop = as(dissect.child(), Drop.class);
        removals = drop.removals();
        assertEqualsIgnoringIds(removals, List.of(attribute("f3..f4.*")));
        keep = as(drop.child(), Keep.class);
        assertEqualsIgnoringIds(keep.projections(), List.of(attribute("f.1.*.f.2")));
        // enrich
        assertEqualsIgnoringIds(
            new Enrich(
                EMPTY,
                relation("idx1"),
                null,
                Literal.keyword(EMPTY, "idx2"),
                attribute("f.1.*"),
                null,
                Map.of(),
                List.of(new Alias(EMPTY, "f.2", attribute("f.3*")))
            ),
            statement(
                "from idx1 | ENRICH idx2 ON ?f1 WITH ?f2 = ?f3",
                new QueryParams(List.of(paramAsIdentifier("f1", "f.1.*"), paramAsIdentifier("f2", "f.2"), paramAsIdentifier("f3", "f.3*")))
            )
        );
        assertEqualsIgnoringIds(
            new Enrich(
                EMPTY,
                relation("idx1"),
                null,
                Literal.keyword(EMPTY, "idx2"),
                attribute("f.1.*.f.2"),
                null,
                Map.of(),
                List.of(new Alias(EMPTY, "f.3*.f.4.*", attribute("f.5.f.6*")))
            ),
            statement(
                "from idx1 | ENRICH idx2 ON ?f1.?f2 WITH ?f3.?f4 = ?f5.?f6",
                new QueryParams(
                    List.of(
                        paramAsIdentifier("f1", "f.1.*"),
                        paramAsIdentifier("f2", "f.2"),
                        paramAsIdentifier("f3", "f.3*"),
                        paramAsIdentifier("f4", "f.4.*"),
                        paramAsIdentifier("f5", "f.5"),
                        paramAsIdentifier("f6", "f.6*")
                    )
                )
            )
        );
    }
    /**
     * Named parameters declared as patterns substitute name patterns in KEEP and DROP.
     * Dotted pairs join into one pattern; a mixed pattern/identifier pair back-quotes the
     * identifier part in the resulting pattern string.
     */
    public void testParamForIdentifierPattern() {
        // name patterns can appear in keep and drop
        // all patterns
        LogicalPlan plan = statement(
            "from test | keep ?f1, ?f2 | drop ?f3, ?f4",
            new QueryParams(
                List.of(
                    paramAsPattern("f1", "f*1."),
                    paramAsPattern("f2", "f.2*"),
                    paramAsPattern("f3", "f3.*"),
                    paramAsPattern("f4", "f.4.*")
                )
            )
        );
        Drop drop = as(plan, Drop.class);
        List<? extends NamedExpression> removals = drop.removals();
        assertEquals(removals.size(), 2);
        UnresolvedNamePattern up = as(removals.get(0), UnresolvedNamePattern.class);
        assertEquals(up.name(), "f3.*");
        assertEquals(up.pattern(), "f3.*");
        up = as(removals.get(1), UnresolvedNamePattern.class);
        assertEquals(up.name(), "f.4.*");
        assertEquals(up.pattern(), "f.4.*");
        Keep keep = as(drop.child(), Keep.class);
        assertEquals(keep.projections().size(), 2);
        up = as(keep.projections().get(0), UnresolvedNamePattern.class);
        assertEquals(up.name(), "f*1.");
        assertEquals(up.pattern(), "f*1.");
        up = as(keep.projections().get(1), UnresolvedNamePattern.class);
        assertEquals(up.name(), "f.2*");
        assertEquals(up.pattern(), "f.2*");
        UnresolvedRelation ur = as(keep.child(), UnresolvedRelation.class);
        assertEqualsIgnoringIds(ur, relation("test"));
        // dotted pairs (?f1.?f2) concatenate into a single pattern
        plan = statement(
            "from test | keep ?f1.?f2 | drop ?f3.?f4",
            new QueryParams(
                List.of(
                    paramAsPattern("f1", "f*1."),
                    paramAsPattern("f2", "f.2*"),
                    paramAsPattern("f3", "f3.*"),
                    paramAsPattern("f4", "f.4.*")
                )
            )
        );
        drop = as(plan, Drop.class);
        removals = drop.removals();
        assertEquals(removals.size(), 1);
        up = as(removals.get(0), UnresolvedNamePattern.class);
        assertEquals(up.name(), "f3.*.f.4.*");
        assertEquals(up.pattern(), "f3.*.f.4.*");
        keep = as(drop.child(), Keep.class);
        assertEquals(keep.projections().size(), 1);
        up = as(keep.projections().get(0), UnresolvedNamePattern.class);
        assertEquals(up.name(), "f*1..f.2*");
        assertEquals(up.pattern(), "f*1..f.2*");
        ur = as(keep.child(), UnresolvedRelation.class);
        assertEqualsIgnoringIds(ur, relation("test"));
        // mixed names and patterns: the identifier part is back-quoted inside the pattern string
        plan = statement(
            "from test | keep ?f1.?f2 | drop ?f3.?f4",
            new QueryParams(
                List.of(
                    paramAsPattern("f1", "f*1."),
                    paramAsPattern("f2", "`f.2*`*"),
                    paramAsPattern("f3", "f3.*"),
                    paramAsIdentifier("f4", "f.4.*")
                )
            )
        );
        drop = as(plan, Drop.class);
        removals = drop.removals();
        assertEquals(removals.size(), 1);
        up = as(removals.get(0), UnresolvedNamePattern.class);
        assertEquals("f3.*.f.4.*", up.name());
        assertEquals("f3.*.`f.4.*`", up.pattern());
        keep = as(drop.child(), Keep.class);
        assertEquals(keep.projections().size(), 1);
        up = as(keep.projections().get(0), UnresolvedNamePattern.class);
        assertEquals("f*1..f.2**", up.name());
        assertEquals("f*1..`f.2*`*", up.pattern());
        ur = as(keep.child(), UnresolvedRelation.class);
        assertEqualsIgnoringIds(ur, relation("test"));
    }
    /**
     * Pattern- and constant-declared parameters are rejected in positions that require a plain
     * identifier (eval/stats/mv_expand/rename) and in ENRICH projections, with distinct error
     * messages per declaration kind; null parameter values are rejected outright.
     */
    public void testParamInInvalidPosition() {
        // param for pattern is not supported in eval/where/stats/sort/rename/dissect/grok/enrich/mvexpand
        // where/stats/sort/dissect/grok are covered in RestEsqlTestCase
        List<String> invalidParamPositions = List.of("eval ?f1 = 1", "stats x = ?f1(*)", "mv_expand ?f1", "rename ?f1 as ?f2");
        for (String invalidParamPosition : invalidParamPositions) {
            for (String pattern : List.of("f1*", "*", "`f1*`", "`*`")) {
                // pattern is not supported
                expectError(
                    "from test | " + invalidParamPosition,
                    List.of(paramAsPattern("f1", pattern), paramAsPattern("f2", "f*2")),
                    invalidParamPosition.contains("rename")
                        ? "Using wildcards [*] in RENAME is not allowed [?f1 as ?f2]"
                        : "Query parameter [?f1][" + pattern + "] declared as a pattern, cannot be used as an identifier"
                );
                // constant is not supported
                expectError(
                    "from test | " + invalidParamPosition,
                    List.of(paramAsConstant("f1", pattern), paramAsConstant("f2", "f*2")),
                    invalidParamPosition.contains("rename")
                        ? "Query parameter [?f2] with value [f*2] declared as a constant, cannot be used as an identifier or pattern"
                        : "Query parameter [?f1] with value [" + pattern + "] declared as a constant, cannot be used as an identifier"
                );
            }
            // nulls
            if (invalidParamPosition.contains("rename")) {
                // rename null as null is allowed, there is no ParsingException or VerificationException thrown
                // named parameter doesn't change this behavior, it will need to be revisited
                continue;
            }
            expectError(
                "from test | " + invalidParamPosition,
                List.of(paramAsConstant("f1", null), paramAsConstant("f2", null)),
                "Query parameter [?f1] is null or undefined"
            );
        }
        // enrich with wildcard as pattern or constant is not supported
        String enrich = "ENRICH idx2 ON ?f1 WITH ?f2 = ?f3";
        for (String pattern : List.of("f.1.*", "*")) {
            expectError(
                "from idx1 | " + enrich,
                List.of(paramAsPattern("f1", pattern), paramAsIdentifier("f2", "f.2"), paramAsIdentifier("f3", "f.3*")),
                "Using wildcards [*] in ENRICH WITH projections is not allowed, found [" + pattern + "]"
            );
            expectError(
                "from idx1 | " + enrich,
                List.of(paramAsConstant("f1", pattern), paramAsIdentifier("f2", "f.2"), paramAsIdentifier("f3", "f.3*")),
                "Query parameter [?f1] with value [" + pattern + "] declared as a constant, cannot be used as an identifier or pattern"
            );
        }
    }
    /**
     * Referencing an undeclared parameter produces a did-you-mean error; positions that require a
     * resolvable identifier (function name, mv_expand target) report null/undefined instead.
     */
    public void testMissingParam() {
        // cover all processing commands eval/where/stats/sort/rename/dissect/grok/enrich/mvexpand/keep/drop
        String error = "Unknown query parameter [f1], did you mean [f4]?";
        String errorMvExpandFunctionNameCommandOption = "Query parameter [?f1] is null or undefined, cannot be used as an identifier";
        List<String> missingParamGroupA = List.of(
            "eval x = ?f1",
            "where ?f1 == \"a\"",
            "stats x = count(?f1)",
            "sort ?f1",
            "rename ?f1 as ?f2",
            "dissect ?f1 \"%{bar}\"",
            "grok ?f1 \"%{WORD:foo}\"",
            "enrich idx2 ON ?f1 WITH ?f2 = ?f3",
            "keep ?f1",
            "drop ?f1"
        );
        // group B positions report the null/undefined message rather than a suggestion
        List<String> missingParamGroupB = List.of("eval x = ?f1(f1)", "mv_expand ?f1");
        for (String missingParam : Stream.concat(missingParamGroupA.stream(), missingParamGroupB.stream()).toList()) {
            for (String identifierOrPattern : List.of("identifier", "identifierpattern")) {
                expectError(
                    "from test | " + missingParam,
                    List.of(identifierOrPattern.equals("identifier") ? paramAsIdentifier("f4", "f1*") : paramAsPattern("f4", "f1*")),
                    missingParamGroupB.contains(missingParam) ? errorMvExpandFunctionNameCommandOption : error
                );
            }
            if (EsqlCapabilities.Cap.DOUBLE_PARAMETER_MARKERS_FOR_IDENTIFIERS.isEnabled()) {
                expectError("from test | " + missingParam.replace("?", "??"), List.of(paramAsConstant("f4", "f1*")), error);
            }
        }
    }
public void testFieldContainingDotsAndNumbers() {
LogicalPlan where = processingCommand("where `a.b.1m.4321`");
assertThat(where, instanceOf(Filter.class));
Filter w = (Filter) where;
assertThat(w.child(), equalToIgnoringIds(PROCESSING_CMD_INPUT));
assertThat(Expressions.name(w.condition()), equalTo("a.b.1m.4321"));
}
public void testFieldQualifiedName() {
LogicalPlan where = processingCommand("where a.b.`1m`.`4321`");
assertThat(where, instanceOf(Filter.class));
Filter w = (Filter) where;
assertThat(w.child(), equalToIgnoringIds(PROCESSING_CMD_INPUT));
assertThat(Expressions.name(w.condition()), equalTo("a.b.1m.4321"));
}
public void testQuotedName() {
// row `my-field`=123 | stats count(`my-field`) | eval x = `count(`my-field`)`
LogicalPlan plan = processingCommand("stats count(`my-field`) | keep `count(``my-field``)`");
var project = as(plan, Project.class);
assertThat(Expressions.names(project.projections()), contains("count(`my-field`)"));
}
    /**
     * Asserts that {@code statement} parses to an {@link UnresolvedRelation} whose index pattern
     * equals {@code string}. On non-snapshot builds the TS command is unavailable, so TS statements
     * are expected to fail parsing instead.
     *
     * @param string    expected index pattern
     * @param statement ESQL source to parse
     */
    private void assertStringAsIndexPattern(String string, String statement) {
        if (Build.current().isSnapshot() == false && statement.startsWith("TS ")) {
            expectThrows(ParsingException.class, containsString("mismatched input 'TS' expecting {"), () -> statement(statement));
            return;
        }
        LogicalPlan from = statement(statement);
        assertThat(from, instanceOf(UnresolvedRelation.class));
        UnresolvedRelation table = (UnresolvedRelation) from;
        assertThat(table.indexPattern().indexPattern(), is(string));
    }
    /**
     * Asserts that {@code statement} parses to a {@link Lookup} whose table name literal folds to
     * {@code string}. LOOKUP_🐔 is snapshot-only, so release builds expect a parsing error instead.
     *
     * @param string    expected lookup table name
     * @param statement ESQL source to parse
     */
    private void assertStringAsLookupIndexPattern(String string, String statement) {
        if (Build.current().isSnapshot() == false) {
            expectThrows(
                ParsingException.class,
                containsString("line 1:14: LOOKUP_🐔 is in preview and only available in SNAPSHOT build"),
                () -> statement(statement)
            );
            return;
        }
        var plan = statement(statement);
        var lookup = as(plan, Lookup.class);
        var tableName = as(lookup.tableName(), Literal.class);
        assertThat(tableName.fold(FoldContext.small()), equalTo(BytesRefs.toBytesRef(string)));
    }
public void testIdPatternUnquoted() {
var string = "regularString";
assertThat(breakIntoFragments(string), contains(string));
}
public void testIdPatternQuoted() {
var string = "`escaped string`";
assertThat(breakIntoFragments(string), contains(string));
}
public void testIdPatternQuotedWithDoubleBackticks() {
var string = "`escaped``string`";
assertThat(breakIntoFragments(string), contains(string));
}
public void testIdPatternUnquotedAndQuoted() {
var string = "this`is`a`mix`of`ids`";
assertThat(breakIntoFragments(string), contains("this", "`is`", "a", "`mix`", "of", "`ids`"));
}
public void testIdPatternQuotedTraling() {
var string = "`foo`*";
assertThat(breakIntoFragments(string), contains("`foo`", "*"));
}
public void testIdPatternWithDoubleQuotedStrings() {
var string = "`this``is`a`quoted `` string``with`backticks";
assertThat(breakIntoFragments(string), contains("`this``is`", "a", "`quoted `` string``with`", "backticks"));
}
public void testSpaceNotAllowedInIdPattern() {
expectError("ROW a = 1| RENAME a AS this is `not okay`", "mismatched input 'is' expecting {<EOF>, '|', ',', '.'}");
}
public void testEnrichOnMatchField() {
var plan = statement("ROW a = \"1\" | ENRICH languages_policy ON a WITH ```name``* = language_name`");
var enrich = as(plan, Enrich.class);
var lists = enrich.enrichFields();
assertThat(lists, hasSize(1));
var ua = as(lists.get(0), UnresolvedAttribute.class);
assertThat(ua.name(), is("`name`* = language_name"));
}
public void testInlineConvertWithNonexistentType() {
expectError("ROW 1::doesnotexist", "line 1:8: Unknown data type named [doesnotexist]");
expectError("ROW \"1\"::doesnotexist", "line 1:10: Unknown data type named [doesnotexist]");
expectError("ROW false::doesnotexist", "line 1:12: Unknown data type named [doesnotexist]");
expectError("ROW abs(1)::doesnotexist", "line 1:13: Unknown data type named [doesnotexist]");
expectError("ROW (1+2)::doesnotexist", "line 1:12: Unknown data type named [doesnotexist]");
}
    /**
     * LOOKUP_🐔 parses to a {@link Lookup} with the table name and match fields on snapshot builds,
     * and is a parsing error on release builds where the command is not registered.
     */
    public void testLookup() {
        String query = "ROW a = 1 | LOOKUP_🐔 t ON j";
        if (Build.current().isSnapshot() == false) {
            expectThrows(
                ParsingException.class,
                containsString("line 1:13: mismatched input 'LOOKUP_🐔' expecting {"),
                () -> statement(query)
            );
            return;
        }
        var plan = statement(query);
        var lookup = as(plan, Lookup.class);
        var tableName = as(lookup.tableName(), Literal.class);
        assertThat(tableName.fold(FoldContext.small()), equalTo(BytesRefs.toBytesRef("t")));
        assertThat(lookup.matchFields(), hasSize(1));
        var matchField = as(lookup.matchFields().get(0), UnresolvedAttribute.class);
        assertThat(matchField.name(), equalTo("j"));
    }
public void testInlineConvertUnsupportedType() {
expectError("ROW 3::BYTE", "line 1:5: Unsupported conversion to type [BYTE]");
}
public void testMetricsWithoutStats() {
assertStatement("TS foo", unresolvedTSRelation("foo"));
assertStatement("TS foo,bar", unresolvedTSRelation("foo,bar"));
assertStatement("TS foo*,bar", unresolvedTSRelation("foo*,bar"));
assertStatement("TS foo-*,bar", unresolvedTSRelation("foo-*,bar"));
assertStatement("TS foo-*,bar+*", unresolvedTSRelation("foo-*,bar+*"));
}
public void testMetricsIdentifiers() {
Map<String, String> patterns = Map.ofEntries(
Map.entry("ts foo,test-*", "foo,test-*"),
Map.entry("ts 123-test@foo_bar+baz1", "123-test@foo_bar+baz1"),
Map.entry("ts foo, test,xyz", "foo,test,xyz"),
Map.entry("ts <logstash-{now/M{yyyy.MM}}>", "<logstash-{now/M{yyyy.MM}}>")
);
for (Map.Entry<String, String> e : patterns.entrySet()) {
assertStatement(e.getKey(), unresolvedTSRelation(e.getValue()));
}
}
public void testSimpleMetricsWithStats() {
assertStatement(
"TS foo | STATS load=avg(cpu) BY ts",
new TimeSeriesAggregate(
EMPTY,
unresolvedTSRelation("foo"),
List.of(attribute("ts")),
List.of(
new Alias(EMPTY, "load", new UnresolvedFunction(EMPTY, "avg", DEFAULT, List.of(attribute("cpu")))),
attribute("ts")
),
null
)
);
assertStatement(
"TS foo,bar | STATS load=avg(cpu) BY ts",
new TimeSeriesAggregate(
EMPTY,
unresolvedTSRelation("foo,bar"),
List.of(attribute("ts")),
List.of(
new Alias(EMPTY, "load", new UnresolvedFunction(EMPTY, "avg", DEFAULT, List.of(attribute("cpu")))),
attribute("ts")
),
null
)
);
assertStatement(
"TS foo,bar | STATS load=avg(cpu),max(rate(requests)) BY ts",
new TimeSeriesAggregate(
EMPTY,
unresolvedTSRelation("foo,bar"),
List.of(attribute("ts")),
List.of(
new Alias(EMPTY, "load", new UnresolvedFunction(EMPTY, "avg", DEFAULT, List.of(attribute("cpu")))),
new Alias(
EMPTY,
"max(rate(requests))",
new UnresolvedFunction(
EMPTY,
"max",
DEFAULT,
List.of(new UnresolvedFunction(EMPTY, "rate", DEFAULT, List.of(attribute("requests"))))
)
),
attribute("ts")
),
null
)
);
assertStatement(
"TS foo* | STATS count(errors)",
new TimeSeriesAggregate(
EMPTY,
unresolvedTSRelation("foo*"),
List.of(),
List.of(new Alias(EMPTY, "count(errors)", new UnresolvedFunction(EMPTY, "count", DEFAULT, List.of(attribute("errors"))))),
null
)
);
assertStatement(
"TS foo* | STATS a(b)",
new TimeSeriesAggregate(
EMPTY,
unresolvedTSRelation("foo*"),
List.of(),
List.of(new Alias(EMPTY, "a(b)", new UnresolvedFunction(EMPTY, "a", DEFAULT, List.of(attribute("b"))))),
null
)
);
assertStatement(
"TS foo* | STATS a(b)",
new TimeSeriesAggregate(
EMPTY,
unresolvedTSRelation("foo*"),
List.of(),
List.of(new Alias(EMPTY, "a(b)", new UnresolvedFunction(EMPTY, "a", DEFAULT, List.of(attribute("b"))))),
null
)
);
assertStatement(
"TS foo* | STATS a1(b2)",
new TimeSeriesAggregate(
EMPTY,
unresolvedTSRelation("foo*"),
List.of(),
List.of(new Alias(EMPTY, "a1(b2)", new UnresolvedFunction(EMPTY, "a1", DEFAULT, List.of(attribute("b2"))))),
null
)
);
assertStatement(
"TS foo*,bar* | STATS b = min(a) by c, d.e",
new TimeSeriesAggregate(
EMPTY,
unresolvedTSRelation("foo*,bar*"),
List.of(attribute("c"), attribute("d.e")),
List.of(
new Alias(EMPTY, "b", new UnresolvedFunction(EMPTY, "min", DEFAULT, List.of(attribute("a")))),
attribute("c"),
attribute("d.e")
),
null
)
);
}
    /**
     * Non-ASCII letters such as 'Å' are not part of the identifier alphabet; the lexer reports a
     * token recognition error whose column points at the offending character in each command.
     */
    public void testInvalidAlias() {
        expectError("row Å = 1", "line 1:5: token recognition error at: 'Å'");
        expectError("from test | eval Å = 1", "line 1:18: token recognition error at: 'Å'");
        expectError("from test | where Å == 1", "line 1:19: token recognition error at: 'Å'");
        expectError("from test | keep Å", "line 1:18: token recognition error at: 'Å'");
        expectError("from test | drop Å", "line 1:18: token recognition error at: 'Å'");
        expectError("from test | sort Å", "line 1:18: token recognition error at: 'Å'");
        expectError("from test | rename Å as A", "line 1:20: token recognition error at: 'Å'");
        expectError("from test | rename A as Å", "line 1:25: token recognition error at: 'Å'");
        // with two bad characters the first one is reported
        expectError("from test | rename Å as Å", "line 1:20: token recognition error at: 'Å'");
        expectError("from test | stats Å = count(*)", "line 1:19: token recognition error at: 'Å'");
        expectError("from test | stats count(Å)", "line 1:25: token recognition error at: 'Å'");
        expectError("from test | eval A = coalesce(Å, null)", "line 1:31: token recognition error at: 'Å'");
        // inside a string literal 'Å' is fine; only the bare occurrence is flagged
        expectError("from test | eval A = coalesce(\"Å\", Å)", "line 1:36: token recognition error at: 'Å'");
    }
public void testInvalidRemoteClusterPattern() {
expectError("from \"rem:ote\":index", "mismatched input ':' expecting {<EOF>, '|', ',', 'metadata'}");
}
private LogicalPlan unresolvedRelation(String index) {
return new UnresolvedRelation(EMPTY, new IndexPattern(EMPTY, index), false, List.of(), IndexMode.STANDARD, null, "FROM");
}
private LogicalPlan unresolvedTSRelation(String index) {
return new UnresolvedRelation(EMPTY, new IndexPattern(EMPTY, index), false, List.of(), IndexMode.TIME_SERIES, null, "TS");
}
public void testMetricWithGroupKeyAsAgg() {
var queries = List.of("TS foo | STATS a BY a");
for (String query : queries) {
expectVerificationError(query, "grouping key [a] already specified in the STATS BY clause");
}
}
public void testMatchOperatorConstantQueryString() {
var plan = statement("FROM test | WHERE field:\"value\"");
var filter = as(plan, Filter.class);
var match = (MatchOperator) filter.condition();
var matchField = (UnresolvedAttribute) match.field();
assertThat(matchField.name(), equalTo("field"));
assertThat(match.query().fold(FoldContext.small()), equalTo(BytesRefs.toBytesRef("value")));
}
    /**
     * The ':' match operator requires a plain field on the left and a constant on the right;
     * anything else (missing query, function call, parenthesized expression, bare identifier,
     * arithmetic, or a non-field left-hand side) is a parsing error.
     */
    public void testInvalidMatchOperator() {
        // right-hand side must be a constant
        expectError("from test | WHERE field:", "line 1:25: mismatched input '<EOF>' expecting {QUOTED_STRING, ");
        expectError(
            "from test | WHERE field:CONCAT(\"hello\", \"world\")",
            "line 1:25: mismatched input 'CONCAT' expecting {QUOTED_STRING, INTEGER_LITERAL, DECIMAL_LITERAL, "
        );
        expectError(
            "from test | WHERE field:(true OR false)",
            "line 1:25: extraneous input '(' expecting {QUOTED_STRING, INTEGER_LITERAL, DECIMAL_LITERAL, "
        );
        expectError(
            "from test | WHERE field:another_field_or_value",
            "line 1:25: mismatched input 'another_field_or_value' expecting {QUOTED_STRING, INTEGER_LITERAL, DECIMAL_LITERAL, "
        );
        expectError("from test | WHERE field:2+3", "line 1:26: mismatched input '+'");
        // left-hand side must be a field reference, not a string or a function call
        expectError(
            "from test | WHERE \"field\":\"value\"",
            "line 1:26: mismatched input ':' expecting {<EOF>, '|', 'and', '::', 'or', '+', '-', '*', '/', '%'}"
        );
        expectError(
            "from test | WHERE CONCAT(\"field\", 1):\"value\"",
            "line 1:37: mismatched input ':' expecting {<EOF>, '|', 'and', '::', 'or', '+', '-', '*', '/', '%'}"
        );
    }
public void testMatchFunctionFieldCasting() {
var plan = statement("FROM test | WHERE match(field::int, \"value\")");
var filter = as(plan, Filter.class);
var function = (UnresolvedFunction) filter.condition();
var toInteger = (ToInteger) function.children().get(0);
var matchField = (UnresolvedAttribute) toInteger.field();
assertThat(matchField.name(), equalTo("field"));
assertThat(function.children().get(1).fold(FoldContext.small()), equalTo(BytesRefs.toBytesRef("value")));
}
public void testMatchOperatorFieldCasting() {
var plan = statement("FROM test | WHERE field::int : \"value\"");
var filter = as(plan, Filter.class);
var match = (MatchOperator) filter.condition();
var toInteger = (ToInteger) match.field();
var matchField = (UnresolvedAttribute) toInteger.field();
assertThat(matchField.name(), equalTo("field"));
assertThat(match.query().fold(FoldContext.small()), equalTo(BytesRefs.toBytesRef("value")));
}
public void testFailingMetadataWithSquareBrackets() {
expectError("FROM test [METADATA _index] | STATS count(*)", "line 1:11: token recognition error at: '['");
}
    /**
     * A trailing {@code {"option": value, ...}} map argument on a function call parses into a map
     * expression preserving entry order and value types (scalars and homogeneous arrays), across
     * eval/where/stats/by/sort/dissect/grok.
     */
    public void testFunctionNamedParameterInMap() {
        // functions can be scalar, grouping and aggregation
        // functions can be in eval/where/stats/sort/dissect/grok commands, commands in snapshot are not covered
        // positive
        // In eval and where clause as function named parameters
        LinkedHashMap<String, Object> expectedMap1 = new LinkedHashMap<>(4);
        expectedMap1.put("option1", "string");
        expectedMap1.put("option2", 1);
        expectedMap1.put("option3", List.of(2.0, 3.0, 4.0));
        expectedMap1.put("option4", List.of(true, false));
        LinkedHashMap<String, Object> expectedMap2 = new LinkedHashMap<>(4);
        expectedMap2.put("option1", List.of("string1", "string2"));
        expectedMap2.put("option2", List.of(1, 2, 3));
        expectedMap2.put("option3", 2.0);
        expectedMap2.put("option4", true);
        LinkedHashMap<String, Object> expectedMap3 = new LinkedHashMap<>(4);
        expectedMap3.put("option1", "string");
        expectedMap3.put("option2", 2.0);
        expectedMap3.put("option3", List.of(1, 2, 3));
        expectedMap3.put("option4", List.of(true, false));
        assertEqualsIgnoringIds(
            new Filter(
                EMPTY,
                new Eval(
                    EMPTY,
                    relation("test"),
                    List.of(
                        new Alias(
                            EMPTY,
                            "x",
                            function("fn1", List.of(attribute("f1"), Literal.keyword(EMPTY, "testString"), mapExpression(expectedMap1)))
                        )
                    )
                ),
                new Equals(
                    EMPTY,
                    attribute("y"),
                    function("fn2", List.of(Literal.keyword(EMPTY, "testString"), mapExpression(expectedMap2)))
                )
            ),
            statement("""
                from test
                | eval x = fn1(f1, "testString", {"option1":"string","option2":1,"option3":[2.0,3.0,4.0],"option4":[true,false]})
                | where y == fn2("testString", {"option1":["string1","string2"],"option2":[1,2,3],"option3":2.0,"option4":true})
                """)
        );
        // In stats, by and sort as function named parameters
        assertEqualsIgnoringIds(
            new OrderBy(
                EMPTY,
                new Aggregate(
                    EMPTY,
                    relation("test"),
                    List.of(
                        new Alias(
                            EMPTY,
                            "fn2(f3, {\"option1\":[\"string1\",\"string2\"],\"option2\":[1,2,3],\"option3\":2.0,\"option4\":true})",
                            function("fn2", List.of(attribute("f3"), mapExpression(expectedMap2)))
                        )
                    ),
                    List.of(
                        new Alias(EMPTY, "x", function("fn1", List.of(attribute("f1"), attribute("f2"), mapExpression(expectedMap1)))),
                        attribute("fn2(f3, {\"option1\":[\"string1\",\"string2\"],\"option2\":[1,2,3],\"option3\":2.0,\"option4\":true})")
                    )
                ),
                List.of(
                    new Order(
                        EMPTY,
                        function("fn3", List.of(attribute("f4"), mapExpression(expectedMap3))),
                        Order.OrderDirection.ASC,
                        Order.NullsPosition.LAST
                    )
                )
            ),
            statement("""
                from test
                | stats x = fn1(f1, f2, {"option1":"string","option2":1,"option3":[2.0,3.0,4.0],"option4":[true,false]})
                    by fn2(f3, {"option1":["string1","string2"],"option2":[1,2,3],"option3":2.0,"option4":true})
                | sort fn3(f4, {"option1":"string","option2":2.0,"option3":[1,2,3],"option4":[true,false]})
                """)
        );
        // In dissect and grok as function named parameter
        LogicalPlan plan = statement("""
            from test
            | dissect fn1(f1, f2, {"option1":"string", "option2":1,"option3":[2.0,3.0,4.0],"option4":[true,false]}) "%{bar}"
            | grok fn2(f3, {"option1":["string1","string2"],"option2":[1,2,3],"option3":2.0,"option4":true}) "%{WORD:foo}"
            """);
        Grok grok = as(plan, Grok.class);
        assertEqualsIgnoringIds(function("fn2", List.of(attribute("f3"), mapExpression(expectedMap2))), grok.input());
        assertEquals("%{WORD:foo}", grok.parser().pattern());
        assertEqualsIgnoringIds(List.of(referenceAttribute("foo", KEYWORD)), grok.extractedFields());
        Dissect dissect = as(grok.child(), Dissect.class);
        assertEqualsIgnoringIds(function("fn1", List.of(attribute("f1"), attribute("f2"), mapExpression(expectedMap1))), dissect.input());
        assertEquals("%{bar}", dissect.parser().pattern());
        assertEquals("", dissect.parser().appendSeparator());
        assertEqualsIgnoringIds(List.of(referenceAttribute("bar", KEYWORD)), dissect.extractedFields());
        UnresolvedRelation ur = as(dissect.child(), UnresolvedRelation.class);
        assertEqualsIgnoringIds(ur, relation("test"));
    }
    /**
     * Function named-parameter maps where the function names, field arguments, and scalar map
     * values are all supplied through query parameters ({@code ?name}). Array-valued entries are
     * still written inline because named query parameters do not support arrays yet.
     * Covers EVAL/WHERE, STATS/SORT, and DISSECT/GROK commands.
     */
    public void testFunctionNamedParameterInMapWithNamedParameters() {
        // map entry values provided in named parameter, arrays are not supported by named parameters yet
        LinkedHashMap<String, Object> expectedMap1 = new LinkedHashMap<>(4);
        expectedMap1.put("option1", "string");
        expectedMap1.put("option2", 1);
        expectedMap1.put("option3", List.of(2.0, 3.0, 4.0));
        expectedMap1.put("option4", List.of(true, false));
        LinkedHashMap<String, Object> expectedMap2 = new LinkedHashMap<>(4);
        expectedMap2.put("option1", List.of("string1", "string2"));
        expectedMap2.put("option2", List.of(1, 2, 3));
        expectedMap2.put("option3", 2.0);
        expectedMap2.put("option4", true);
        LinkedHashMap<String, Object> expectedMap3 = new LinkedHashMap<>(4);
        expectedMap3.put("option1", "string");
        expectedMap3.put("option2", 2.0);
        expectedMap3.put("option3", List.of(1, 2, 3));
        expectedMap3.put("option4", List.of(true, false));
        // EVAL + WHERE: parameters are substituted into both the function call and the map values
        assertEqualsIgnoringIds(
            new Filter(
                EMPTY,
                new Eval(
                    EMPTY,
                    relation("test"),
                    List.of(
                        new Alias(
                            EMPTY,
                            "x",
                            function("fn1", List.of(attribute("f1"), Literal.keyword(EMPTY, "testString"), mapExpression(expectedMap1)))
                        )
                    )
                ),
                new Equals(
                    EMPTY,
                    attribute("y"),
                    function("fn2", List.of(Literal.keyword(EMPTY, "testString"), mapExpression(expectedMap2)))
                )
            ),
            statement(
                """
                    from test
                    | eval x = ?fn1(?n1, ?n2, {"option1":?n3,"option2":?n4,"option3":[2.0,3.0,4.0],"option4":[true,false]})
                    | where y == ?fn2(?n2, {"option1":["string1","string2"],"option2":[1,2,3],"option3":?n5,"option4":?n6})
                    """,
                new QueryParams(
                    List.of(
                        paramAsIdentifier("fn1", "fn1"),
                        paramAsIdentifier("fn2", "fn2"),
                        paramAsIdentifier("n1", "f1"),
                        paramAsConstant("n2", "testString"),
                        paramAsConstant("n3", "string"),
                        paramAsConstant("n4", 1),
                        paramAsConstant("n5", 2.0),
                        paramAsConstant("n6", true)
                    )
                )
            )
        );
        // STATS ... BY + SORT: note the grouping alias keeps the raw (unsubstituted) parameter text
        assertEqualsIgnoringIds(
            new OrderBy(
                EMPTY,
                new Aggregate(
                    EMPTY,
                    relation("test"),
                    List.of(
                        new Alias(
                            EMPTY,
                            "?fn2(?n7, {\"option1\":[\"string1\",\"string2\"],\"option2\":[1,2,3],\"option3\":?n5,\"option4\":?n6})",
                            function("fn2", List.of(attribute("f3"), mapExpression(expectedMap2)))
                        )
                    ),
                    List.of(
                        new Alias(EMPTY, "x", function("fn1", List.of(attribute("f1"), attribute("f2"), mapExpression(expectedMap1)))),
                        attribute("?fn2(?n7, {\"option1\":[\"string1\",\"string2\"],\"option2\":[1,2,3],\"option3\":?n5,\"option4\":?n6})")
                    )
                ),
                List.of(
                    new Order(
                        EMPTY,
                        function("fn3", List.of(attribute("f4"), mapExpression(expectedMap3))),
                        Order.OrderDirection.ASC,
                        Order.NullsPosition.LAST
                    )
                )
            ),
            statement(
                """
                    from test
                    | stats x = ?fn1(?n1, ?n2, {"option1":?n3,"option2":?n4,"option3":[2.0,3.0,4.0],"option4":[true,false]})
                        by ?fn2(?n7, {"option1":["string1","string2"],"option2":[1,2,3],"option3":?n5,"option4":?n6})
                    | sort ?fn3(?n8, {"option1":?n3,"option2":?n5,"option3":[1,2,3],"option4":[true,false]})
                    """,
                new QueryParams(
                    List.of(
                        paramAsIdentifier("fn1", "fn1"),
                        paramAsIdentifier("fn2", "fn2"),
                        paramAsIdentifier("fn3", "fn3"),
                        paramAsIdentifier("n1", "f1"),
                        paramAsIdentifier("n2", "f2"),
                        paramAsConstant("n3", "string"),
                        paramAsConstant("n4", 1),
                        paramAsConstant("n5", 2.0),
                        paramAsConstant("n6", true),
                        paramAsIdentifier("n7", "f3"),
                        paramAsIdentifier("n8", "f4")
                    )
                )
            )
        );
        // DISSECT + GROK: the parameterized function call becomes the parsed input expression
        LogicalPlan plan = statement(
            """
                from test
                | dissect ?fn1(?n1, ?n2, {"option1":?n3,"option2":?n4,"option3":[2.0,3.0,4.0],"option4":[true,false]}) "%{bar}"
                | grok ?fn2(?n7, {"option1":["string1","string2"],"option2":[1,2,3],"option3":?n5,"option4":?n6}) "%{WORD:foo}"
                """,
            new QueryParams(
                List.of(
                    paramAsIdentifier("fn1", "fn1"),
                    paramAsIdentifier("fn2", "fn2"),
                    paramAsIdentifier("n1", "f1"),
                    paramAsIdentifier("n2", "f2"),
                    paramAsConstant("n3", "string"),
                    paramAsConstant("n4", 1),
                    paramAsConstant("n5", 2.0),
                    paramAsConstant("n6", true),
                    paramAsIdentifier("n7", "f3")
                )
            )
        );
        Grok grok = as(plan, Grok.class);
        assertEqualsIgnoringIds(function("fn2", List.of(attribute("f3"), mapExpression(expectedMap2))), grok.input());
        assertEquals("%{WORD:foo}", grok.parser().pattern());
        assertEqualsIgnoringIds(List.of(referenceAttribute("foo", KEYWORD)), grok.extractedFields());
        Dissect dissect = as(grok.child(), Dissect.class);
        assertEqualsIgnoringIds(function("fn1", List.of(attribute("f1"), attribute("f2"), mapExpression(expectedMap1))), dissect.input());
        assertEquals("%{bar}", dissect.parser().pattern());
        assertEquals("", dissect.parser().appendSeparator());
        assertEqualsIgnoringIds(List.of(referenceAttribute("bar", KEYWORD)), dissect.extractedFields());
        UnresolvedRelation ur = as(dissect.child(), UnresolvedRelation.class);
        assertEqualsIgnoringIds(ur, relation("test"));
    }
public void testFunctionNamedParameterWithCaseSensitiveKeys() {
LinkedHashMap<String, Object> expectedMap1 = new LinkedHashMap<>(3);
expectedMap1.put("option", "string");
expectedMap1.put("Option", 1);
expectedMap1.put("oPtion", List.of(2.0, 3.0, 4.0));
LinkedHashMap<String, Object> expectedMap2 = new LinkedHashMap<>(3);
expectedMap2.put("option", List.of("string1", "string2"));
expectedMap2.put("Option", List.of(1, 2, 3));
expectedMap2.put("oPtion", 2.0);
assertEqualsIgnoringIds(
new Filter(
EMPTY,
new Eval(
EMPTY,
relation("test"),
List.of(
new Alias(
EMPTY,
"x",
function("fn1", List.of(attribute("f1"), Literal.keyword(EMPTY, "testString"), mapExpression(expectedMap1)))
)
)
),
new Equals(
EMPTY,
attribute("y"),
function("fn2", List.of(Literal.keyword(EMPTY, "testString"), mapExpression(expectedMap2)))
)
),
statement("""
from test
| eval x = fn1(f1, "testString", {"option":"string","Option":1,"oPtion":[2.0,3.0,4.0]})
| where y == fn2("testString", {"option":["string1","string2"],"Option":[1,2,3],"oPtion":2.0})
""")
);
}
public void testMultipleFunctionNamedParametersNotAllowed() {
Map<String, String> commands = Map.ofEntries(
Map.entry("eval x = {}", "41"),
Map.entry("where {}", "38"),
Map.entry("stats {}", "38"),
Map.entry("stats agg() by {}", "47"),
Map.entry("sort {}", "37"),
Map.entry("dissect {} \"%{bar}\"", "40"),
Map.entry("grok {} \"%{WORD:foo}\"", "37")
);
for (Map.Entry<String, String> command : commands.entrySet()) {
String cmd = command.getKey();
String error = command.getValue();
String errorMessage = cmd.startsWith("dissect") || cmd.startsWith("grok")
? "mismatched input ',' expecting ')'"
: "no viable alternative at input 'fn(f1,";
expectError(
LoggerMessageFormat.format(null, "from test | " + cmd, "fn(f1, {\"option\":1}, {\"option\":2})"),
LoggerMessageFormat.format(null, "line 1:{}: {}", error, errorMessage)
);
}
}
public void testFunctionNamedParameterNotInMap() {
Map<String, String> commands = Map.ofEntries(
Map.entry("eval x = {}", "38"),
Map.entry("where {}", "35"),
Map.entry("stats {}", "35"),
Map.entry("stats agg() by {}", "44"),
Map.entry("sort {}", "34"),
Map.entry("dissect {} \"%{bar}\"", "37"),
Map.entry("grok {} \"%{WORD:foo}\"", "34")
);
for (Map.Entry<String, String> command : commands.entrySet()) {
String cmd = command.getKey();
String error = command.getValue();
String errorMessage = cmd.startsWith("dissect") || cmd.startsWith("grok")
? "extraneous input ':' expecting {',', ')'}"
: "no viable alternative at input 'fn(f1, \"option1\":'";
expectError(
LoggerMessageFormat.format(null, "from test | " + cmd, "fn(f1, \"option1\":\"string\")"),
LoggerMessageFormat.format(null, "line 1:{}: {}", error, errorMessage)
);
}
}
public void testFunctionNamedParameterNotConstant() {
Map<String, String[]> commands = Map.ofEntries(
Map.entry("eval x = {}", new String[] { "31", "35" }),
Map.entry("where {}", new String[] { "28", "32" }),
Map.entry("stats {}", new String[] { "28", "32" }),
Map.entry("stats agg() by {}", new String[] { "37", "41" }),
Map.entry("sort {}", new String[] { "27", "31" }),
Map.entry("dissect {} \"%{bar}\"", new String[] { "30", "34" }),
Map.entry("grok {} \"%{WORD:foo}\"", new String[] { "27", "31" })
);
for (Map.Entry<String, String[]> command : commands.entrySet()) {
String cmd = command.getKey();
String error1 = command.getValue()[0];
String error2 = command.getValue()[1];
String errorMessage1 = cmd.startsWith("dissect") || cmd.startsWith("grok")
? "mismatched input '1' expecting {QUOTED_STRING"
: "no viable alternative at input 'fn(f1, { 1'";
String errorMessage2 = cmd.startsWith("dissect") || cmd.startsWith("grok")
? "mismatched input 'string' expecting {QUOTED_STRING"
: "no viable alternative at input 'fn(f1, {";
expectError(
LoggerMessageFormat.format(null, "from test | " + cmd, "fn(f1, { 1:\"string\" })"),
LoggerMessageFormat.format(null, "line 1:{}: {}", error1, errorMessage1)
);
expectError(
LoggerMessageFormat.format(null, "from test | " + cmd, "fn(f1, { \"1\":string })"),
LoggerMessageFormat.format(null, "line 1:{}: {}", error2, errorMessage2)
);
}
}
public void testNamedFunctionNamedParametersEmptyMap() {
Map<String, String> commands = Map.ofEntries(
Map.entry("eval x = {}", "30"),
Map.entry("where {}", "27"),
Map.entry("stats {}", "27"),
Map.entry("stats agg() by {}", "36"),
Map.entry("sort {}", "26"),
Map.entry("dissect {} \"%{bar}\"", "29"),
Map.entry("grok {} \"%{WORD:foo}\"", "26")
);
for (Map.Entry<String, String> command : commands.entrySet()) {
String cmd = command.getKey();
String error = command.getValue();
statement(LoggerMessageFormat.format(null, "from test | " + cmd, "fn(f1, {})"));
}
}
public void testNamedFunctionNamedParametersMapWithNULL() {
Map<String, String> commands = Map.ofEntries(
Map.entry("eval x = {}", "29"),
Map.entry("where {}", "26"),
Map.entry("stats {}", "26"),
Map.entry("stats agg() by {}", "35"),
Map.entry("sort {}", "25"),
Map.entry("dissect {} \"%{bar}\"", "28"),
Map.entry("grok {} \"%{WORD:foo}\"", "25")
);
for (Map.Entry<String, String> command : commands.entrySet()) {
String cmd = command.getKey();
String error = command.getValue();
expectError(
LoggerMessageFormat.format(null, "from test | " + cmd, "fn(f1, {\"option\":null})"),
LoggerMessageFormat.format(null, "line 1:{}: {}", error, "Invalid named parameter [\"option\":null], NULL is not supported")
);
}
}
public void testNamedFunctionNamedParametersMapWithEmptyKey() {
Map<String, String> commands = Map.ofEntries(
Map.entry("eval x = {}", "29"),
Map.entry("where {}", "26"),
Map.entry("stats {}", "26"),
Map.entry("stats agg() by {}", "35"),
Map.entry("sort {}", "25"),
Map.entry("dissect {} \"%{bar}\"", "28"),
Map.entry("grok {} \"%{WORD:foo}\"", "25")
);
for (Map.Entry<String, String> command : commands.entrySet()) {
String cmd = command.getKey();
String error = command.getValue();
expectError(
LoggerMessageFormat.format(null, "from test | " + cmd, "fn(f1, {\"\":1})"),
LoggerMessageFormat.format(null, "line 1:{}: {}", error, "Invalid named parameter [\"\":1], empty key is not supported")
);
expectError(
LoggerMessageFormat.format(null, "from test | " + cmd, "fn(f1, {\" \":1})"),
LoggerMessageFormat.format(null, "line 1:{}: {}", error, "Invalid named parameter [\" \":1], empty key is not supported")
);
}
}
public void testNamedFunctionNamedParametersMapWithDuplicatedKey() {
Map<String, String> commands = Map.ofEntries(
Map.entry("eval x = {}", "29"),
Map.entry("where {}", "26"),
Map.entry("stats {}", "26"),
Map.entry("stats agg() by {}", "35"),
Map.entry("sort {}", "25"),
Map.entry("dissect {} \"%{bar}\"", "28"),
Map.entry("grok {} \"%{WORD:foo}\"", "25")
);
for (Map.Entry<String, String> command : commands.entrySet()) {
String cmd = command.getKey();
String error = command.getValue();
expectError(
LoggerMessageFormat.format(null, "from test | " + cmd, "fn(f1, {\"dup\":1,\"dup\":2})"),
LoggerMessageFormat.format(
null,
"line 1:{}: {}",
error,
"Duplicated named parameters with the same name [dup] is not supported"
)
);
}
}
    /**
     * The {@code { ... }} named-parameter map is only valid as a function argument; using it
     * anywhere else is a parse error whose exact message varies with the grammar rule hit.
     */
    public void testNamedFunctionNamedParametersInInvalidPositions() {
        // negative, named arguments are not supported outside of a functionExpression where booleanExpression or indexPattern is supported
        String map = "{\"option1\":\"string\", \"option2\":1}";
        // command template (with {} placeholder) -> full expected error message
        Map<String, String> commands = Map.ofEntries(
            Map.entry("from {}", "line 1:7: mismatched input '\"option1\"' expecting {<EOF>, '|', ',', 'metadata'}"),
            Map.entry("row x = {}", "line 1:9: extraneous input '{' expecting {QUOTED_STRING, INTEGER_LITERAL"),
            Map.entry("eval x = {}", "line 1:22: extraneous input '{' expecting {QUOTED_STRING, INTEGER_LITERAL"),
            Map.entry("where x > {}", "line 1:23: no viable alternative at input 'x > {'"),
            Map.entry("stats agg() by {}", "line 1:28: extraneous input '{' expecting {QUOTED_STRING, INTEGER_LITERAL"),
            Map.entry("sort {}", "line 1:18: extraneous input '{' expecting {QUOTED_STRING, INTEGER_LITERAL"),
            Map.entry("keep {}", "line 1:18: token recognition error at: '{'"),
            Map.entry("drop {}", "line 1:18: token recognition error at: '{'"),
            Map.entry("rename a as {}", "line 1:25: token recognition error at: '{'"),
            Map.entry("mv_expand {}", "line 1:23: token recognition error at: '{'"),
            Map.entry("limit {}", "line 1:19: extraneous input '{' expecting {QUOTED_STRING"),
            Map.entry("enrich idx2 on f1 with f2 = {}", "line 1:41: token recognition error at: '{'"),
            Map.entry("dissect {} \"%{bar}\"", "line 1:21: extraneous input '{' expecting {QUOTED_STRING, INTEGER_LITERAL"),
            Map.entry("grok {} \"%{WORD:foo}\"", "line 1:18: extraneous input '{' expecting {QUOTED_STRING, INTEGER_LITERAL")
        );
        for (Map.Entry<String, String> command : commands.entrySet()) {
            String cmd = command.getKey();
            String errorMessage = command.getValue();
            // row/from are source commands, so they get no "from test |" prefix
            String from = cmd.startsWith("row") || cmd.startsWith("from") ? "" : "from test | ";
            expectError(LoggerMessageFormat.format(null, from + cmd, map), errorMessage);
        }
    }
public void testNamedFunctionNamedParametersWithUnsupportedNamedParameterTypes() {
Map<String, String> commands = Map.ofEntries(
Map.entry("eval x = {}", "29"),
Map.entry("where {}", "26"),
Map.entry("stats {}", "26"),
Map.entry("stats agg() by {}", "35"),
Map.entry("sort {}", "25"),
Map.entry("dissect {} \"%{bar}\"", "28"),
Map.entry("grok {} \"%{WORD:foo}\"", "25")
);
for (Map.Entry<String, String> command : commands.entrySet()) {
String cmd = command.getKey();
String error = command.getValue();
expectError(
LoggerMessageFormat.format(null, "from test | " + cmd, "fn(f1, {\"option1\":?n1})"),
List.of(paramAsIdentifier("n1", "v1")),
LoggerMessageFormat.format(
null,
"line 1:{}: {}",
error,
"Invalid named parameter [\"option1\":?n1], only constant value is supported"
)
);
expectError(
LoggerMessageFormat.format(null, "from test | " + cmd, "fn(f1, {\"option1\":?n1})"),
List.of(paramAsPattern("n1", "v1")),
LoggerMessageFormat.format(
null,
"line 1:{}: {}",
error,
"Invalid named parameter [\"option1\":?n1], only constant value is supported"
)
);
}
}
public void testValidFromPattern() {
var basePattern = randomIndexPatterns();
var plan = statement("FROM " + basePattern);
assertThat(as(plan, UnresolvedRelation.class).indexPattern().indexPattern(), equalTo(unquoteIndexPattern(basePattern)));
}
public void testValidJoinPatternFieldJoin() {
var basePattern = randomIndexPatterns(without(CROSS_CLUSTER));
var joinPattern = randomIndexPattern(without(WILDCARD_PATTERN), without(CROSS_CLUSTER), without(INDEX_SELECTOR));
var numberOfOnFields = randomIntBetween(1, 5);
List<String> existingIdentifiers = new ArrayList<>();
StringBuilder onFields = new StringBuilder();
for (var i = 0; i < numberOfOnFields; i++) {
if (randomBoolean()) {
onFields.append(" ");
}
String onField = randomValueOtherThanMany(existingIdentifiers::contains, () -> randomIdentifier());
existingIdentifiers.add(onField);
onFields.append(onField);
if (randomBoolean()) {
onFields.append(" ");
}
if (i < numberOfOnFields - 1) {
onFields.append(", ");
}
}
var plan = statement("FROM " + basePattern + " | LOOKUP JOIN " + joinPattern + " ON " + onFields);
var join = as(plan, LookupJoin.class);
assertThat(as(join.left(), UnresolvedRelation.class).indexPattern().indexPattern(), equalTo(unquoteIndexPattern(basePattern)));
assertThat(as(join.right(), UnresolvedRelation.class).indexPattern().indexPattern(), equalTo(unquoteIndexPattern(joinPattern)));
assertThat(join.config().leftFields(), hasSize(numberOfOnFields));
for (int i = 0; i < numberOfOnFields; i++) {
assertThat(as(join.config().leftFields().get(i), UnresolvedAttribute.class).name(), equalTo(existingIdentifiers.get(i)));
}
assertThat(join.config().type().joinName(), equalTo("LEFT OUTER"));
}
/**
* Verify that both in snapshot and in release build the feature is enabled and the parsing works
* without checking for the capability
*/
public void testExpressionJoinEnabled() {
var plan = statement("FROM test | LOOKUP JOIN test2 ON left_field >= right_field");
var join = as(plan, LookupJoin.class);
}
    /**
     * LOOKUP JOIN with an AND-chain of binary comparison expressions in the ON clause: each
     * conjunct must come back as an {@code EsqlBinaryComparison} with the original operands and
     * operator, in query order, and the join must be LEFT OUTER.
     */
    public void testValidJoinPatternExpressionJoin() {
        assumeTrue("LOOKUP JOIN requires corresponding capability", EsqlCapabilities.Cap.LOOKUP_JOIN_ON_BOOLEAN_EXPRESSION.isEnabled());
        var basePattern = randomIndexPatterns(without(CROSS_CLUSTER));
        var joinPattern = randomIndexPattern(without(WILDCARD_PATTERN), without(CROSS_CLUSTER), without(INDEX_SELECTOR));
        var numberOfExpressions = randomIntBetween(1, 5);
        // remember ((left, right), op) per conjunct so the parsed plan can be checked one by one
        var expressions = new ArrayList<Tuple<Tuple<String, String>, EsqlBinaryComparison.BinaryComparisonOperation>>();
        StringBuilder onExpressionString = new StringBuilder();
        for (var i = 0; i < numberOfExpressions; i++) {
            var left = randomIdentifier();
            var right = randomIdentifier();
            var op = randomBinaryComparisonOperation();
            expressions.add(new Tuple<>(new Tuple<>(left, right), op));
            onExpressionString.append(left);
            // random optional whitespace around the operator exercises the lexer
            if (randomBoolean()) {
                onExpressionString.append(" ");
            }
            onExpressionString.append(op.symbol());
            if (randomBoolean()) {
                onExpressionString.append(" ");
            }
            onExpressionString.append(right);
            if (i < numberOfExpressions - 1) {
                onExpressionString.append(" AND ");
            }
        }
        String query = "FROM " + basePattern + " | LOOKUP JOIN " + joinPattern + " ON " + onExpressionString;
        var plan = statement(query);
        var join = as(plan, LookupJoin.class);
        assertThat(as(join.left(), UnresolvedRelation.class).indexPattern().indexPattern(), equalTo(unquoteIndexPattern(basePattern)));
        assertThat(as(join.right(), UnresolvedRelation.class).indexPattern().indexPattern(), equalTo(unquoteIndexPattern(joinPattern)));
        var joinType = join.config().type();
        assertThat(joinType.joinName(), startsWith("LEFT OUTER"));
        // split the parsed AND-chain back into its conjuncts and compare against what was generated
        List<Expression> actualExpressions = Predicates.splitAnd(join.config().joinOnConditions());
        assertThat(actualExpressions.size(), equalTo(numberOfExpressions));
        for (int i = 0; i < numberOfExpressions; i++) {
            var expected = expressions.get(i);
            var actual = actualExpressions.get(i);
            assertThat(actual, instanceOf(EsqlBinaryComparison.class));
            var actualComp = (EsqlBinaryComparison) actual;
            assertThat(((UnresolvedAttribute) actualComp.left()).name(), equalTo(expected.v1().v1()));
            assertThat(((UnresolvedAttribute) actualComp.right()).name(), equalTo(expected.v1().v2()));
            assertThat(actualComp.getFunctionType(), equalTo(expected.v2()));
        }
    }
private EsqlBinaryComparison.BinaryComparisonOperation randomBinaryComparisonOperation() {
return randomFrom(EsqlBinaryComparison.BinaryComparisonOperation.values());
}
public void testInvalidFromPatterns() {
var sourceCommands = new String[] { "FROM", "TS" };
var indexIsBlank = "Blank index specified in index pattern";
var remoteIsEmpty = "remote part is empty";
var invalidDoubleColonUsage = "invalid usage of :: separator";
expectError(randomFrom(sourceCommands) + " \"\"", indexIsBlank);
expectError(randomFrom(sourceCommands) + " \" \"", indexIsBlank);
expectError(randomFrom(sourceCommands) + " \",,,\"", indexIsBlank);
expectError(randomFrom(sourceCommands) + " \",,, \"", indexIsBlank);
expectError(randomFrom(sourceCommands) + " \", , ,,\"", indexIsBlank);
expectError(randomFrom(sourceCommands) + " \",,,\",*", indexIsBlank);
expectError(randomFrom(sourceCommands) + " \"*,\"", indexIsBlank);
expectError(randomFrom(sourceCommands) + " \"*,,,\"", indexIsBlank);
expectError(randomFrom(sourceCommands) + " \"index1,,,,\"", indexIsBlank);
expectError(randomFrom(sourceCommands) + " \"index1,index2,,\"", indexIsBlank);
expectError(randomFrom(sourceCommands) + " \"index1,<-+^,index2\",*", "must not contain the following characters");
expectError(randomFrom(sourceCommands) + " \"\",*", indexIsBlank);
expectError(randomFrom(sourceCommands) + " \"*: ,*,\"", indexIsBlank);
expectError(randomFrom(sourceCommands) + " \"*: ,*,\",validIndexName", indexIsBlank);
expectError(randomFrom(sourceCommands) + " \"\", \" \", \" \",validIndexName", indexIsBlank);
expectError(randomFrom(sourceCommands) + " \"index1\", \"index2\", \" ,index3,index4\"", indexIsBlank);
expectError(randomFrom(sourceCommands) + " \"index1,index2,,index3\"", indexIsBlank);
expectError(randomFrom(sourceCommands) + " \"index1,index2, ,index3\"", indexIsBlank);
expectError(randomFrom(sourceCommands) + " \"*, \"", indexIsBlank);
expectError(randomFrom(sourceCommands) + " \"*\", \"\"", indexIsBlank);
expectError(randomFrom(sourceCommands) + " \"*\", \" \"", indexIsBlank);
expectError(randomFrom(sourceCommands) + " \"*\", \":index1\"", remoteIsEmpty);
expectError(randomFrom(sourceCommands) + " \"index1,*,:index2\"", remoteIsEmpty);
expectError(randomFrom(sourceCommands) + " \"*\", \"::data\"", remoteIsEmpty);
expectError(randomFrom(sourceCommands) + " \"*\", \"::failures\"", remoteIsEmpty);
expectError(randomFrom(sourceCommands) + " \"*,index1::\"", invalidDoubleColonUsage);
expectError(randomFrom(sourceCommands) + " \"*\", index1, index2, \"index3:: \"", invalidDoubleColonUsage);
expectError(randomFrom(sourceCommands) + " \"*,index1::*\"", invalidDoubleColonUsage);
}
    /**
     * Mixing quoted and unquoted fragments in one index pattern (e.g. {@code "cluster":index})
     * must fail — either with an ANTLR syntax error or with index-name/selector validation,
     * depending on where the quotes fall.
     */
    public void testInvalidPatternsWithIntermittentQuotes() {
        // There are 3 ways of crafting invalid index patterns that conforms to the grammar defined through ANTLR.
        // 1. Not quoting the pattern,
        // 2. Quoting individual patterns ("index1", "index2", ...), and,
        // 3. Clubbing all the patterns into a single quoted string ("index1,index2,...).
        //
        // Note that in these tests, we unquote a pattern and then quote it immediately.
        // This is because when randomly generating an index pattern, it may look like: "foo"::data.
        // To convert it into a quoted string like "foo::data", we need to unquote and then re-quote it.
        // Prohibited char in a quoted cross cluster index pattern should result in an error.
        {
            var randomIndex = randomIndexPattern();
            // Select an invalid char to sneak in.
            // Note: some chars like '|' and '"' are excluded to generate a proper invalid name.
            Character[] invalidChars = { ' ', '/', '<', '>', '?' };
            var randomInvalidChar = randomFrom(invalidChars);
            // Construct the new invalid index pattern.
            var invalidIndexName = "foo" + randomInvalidChar + "bar";
            var remoteIndexWithInvalidChar = quote(randomIdentifier() + ":" + invalidIndexName);
            var query = "FROM " + randomIndex + "," + remoteIndexWithInvalidChar;
            expectError(
                query,
                "Invalid index name ["
                    + invalidIndexName
                    + "], must not contain the following characters [' ','\"',',','/','<','>','?','\\','|']"
            );
        }
        // Colon outside a quoted string should result in an ANTLR error: a comma is expected.
        {
            var randomIndex = randomIndexPattern();
            // In the form of: "*|cluster alias:random string".
            var malformedClusterAlias = quote((randomBoolean() ? "*" : randomIdentifier()) + ":" + randomIdentifier());
            // We do not generate a cross cluster pattern or else we'd be getting a different error (which is tested in
            // the next test).
            var remoteIndex = quote(unquoteIndexPattern(randomIndexPattern(without(CROSS_CLUSTER))));
            // Format: FROM <some index>, "<cluster alias: random string>":<remote index>
            var query = "FROM " + randomIndex + "," + malformedClusterAlias + ":" + remoteIndex;
            expectError(query, " mismatched input ':'");
        }
        // If an explicit cluster string is present, then we expect an unquoted string next.
        {
            var randomIndex = randomIndexPattern();
            var remoteClusterAlias = randomBoolean() ? "*" : randomIdentifier();
            // In the form of: random string:random string.
            var malformedRemoteIndex = quote(unquoteIndexPattern(randomIndexPattern(CROSS_CLUSTER)));
            // Format: FROM <some index>, <cluster alias>:"random string:random string"
            var query = "FROM " + randomIndex + "," + remoteClusterAlias + ":" + malformedRemoteIndex;
            // Since "random string:random string" is partially quoted, expect a ANTLR's parse error.
            expectError(query, "expecting UNQUOTED_SOURCE");
        }
        if (EsqlCapabilities.Cap.INDEX_COMPONENT_SELECTORS.isEnabled()) {
            // If a stream in on a remote and the pattern is entirely quoted, we should be able to validate it.
            // Note: invalid selector syntax is covered in a different test.
            {
                var fromPattern = randomIndexPattern();
                var malformedIndexSelectorPattern = quote(
                    (randomIdentifier()) + ":" + unquoteIndexPattern(randomIndexPattern(INDEX_SELECTOR, without(CROSS_CLUSTER)))
                );
                // Format: FROM <some index>, "<cluster alias>:<some index>::<data|failures>"
                var query = "FROM " + fromPattern + "," + malformedIndexSelectorPattern;
                expectError(query, "Selectors are not yet supported on remote cluster patterns");
            }
            // If a stream in on a remote and the cluster alias and index pattern are separately quoted, we should
            // still be able to validate it.
            // Note: invalid selector syntax is covered in a different test.
            {
                var fromPattern = randomIndexPattern();
                var malformedIndexSelectorPattern = quote(randomIdentifier())
                    + ":"
                    + quote(unquoteIndexPattern(randomIndexPattern(INDEX_SELECTOR, without(CROSS_CLUSTER))));
                // Format: FROM <some index>, "<cluster alias>":"<some index>::<data|failures>"
                var query = "FROM " + fromPattern + "," + malformedIndexSelectorPattern;
                // Everything after "<cluster alias>" is extraneous input and hence ANTLR's error.
                expectError(query, "mismatched input ':'");
            }
        }
    }
public void testValidJoinPatternWithRemoteFieldJoin() {
testValidJoinPatternWithRemote(randomIdentifier());
}
public void testValidJoinPatternWithRemoteExpressionJoin() {
assumeTrue(
"requires LOOKUP JOIN ON boolean expression capability",
EsqlCapabilities.Cap.LOOKUP_JOIN_ON_BOOLEAN_EXPRESSION.isEnabled()
);
testValidJoinPatternWithRemote(singleExpressionJoinClause());
}
private void testValidJoinPatternWithRemote(String onClause) {
var fromPatterns = randomIndexPatterns(CROSS_CLUSTER);
var joinPattern = randomIndexPattern(without(CROSS_CLUSTER), without(WILDCARD_PATTERN), without(INDEX_SELECTOR));
var plan = statement("FROM " + fromPatterns + " | LOOKUP JOIN " + joinPattern + " ON " + onClause);
var join = as(plan, LookupJoin.class);
assertThat(as(join.left(), UnresolvedRelation.class).indexPattern().indexPattern(), equalTo(unquoteIndexPattern(fromPatterns)));
assertThat(as(join.right(), UnresolvedRelation.class).indexPattern().indexPattern(), equalTo(unquoteIndexPattern(joinPattern)));
}
    // Invalid join patterns exercised with a single field-based ON clause.
    public void testInvalidJoinPatternsFieldJoin() {
        testInvalidJoinPatterns(randomIdentifier());
    }
    // Invalid join patterns exercised with two comma separated ON fields.
    public void testInvalidJoinPatternsFieldJoinTwo() {
        testInvalidJoinPatterns(randomIdentifier() + ", " + randomIdentifier());
    }
    // Invalid join patterns exercised with a single binary-comparison expression.
    public void testInvalidJoinPatternsExpressionJoin() {
        testInvalidJoinPatterns(singleExpressionJoinClause());
    }
    // Invalid join patterns exercised with two AND-ed expressions.
    public void testInvalidJoinPatternsExpressionJoinTwo() {
        testInvalidJoinPatterns(singleExpressionJoinClause() + " AND " + singleExpressionJoinClause());
    }
    // Invalid join patterns exercised with a field followed by an expression.
    public void testInvalidJoinPatternsExpressionJoinMix() {
        testInvalidJoinPatterns(randomIdentifier() + ", " + singleExpressionJoinClause());
    }
    // Invalid join patterns exercised with an expression AND-ed with a bare field.
    public void testInvalidJoinPatternsExpressionJoinMixTwo() {
        testInvalidJoinPatterns(singleExpressionJoinClause() + " AND " + randomIdentifier());
    }
    /**
     * Invalid ON clauses for expression-based LOOKUP JOIN: mixing fields with expressions,
     * multiple comma separated expressions, and AND-chains containing bare fields or OR are
     * all rejected with targeted validation messages.
     */
    public void testInvalidLookupJoinOnClause() {
        assumeTrue(
            "requires LOOKUP JOIN ON boolean expression capability",
            EsqlCapabilities.Cap.LOOKUP_JOIN_ON_BOOLEAN_EXPRESSION.isEnabled()
        );
        // comma-mixing a field with an expression (in either order) or two expressions
        expectError(
            "FROM test | LOOKUP JOIN test2 ON " + randomIdentifier() + " , " + singleExpressionJoinClause(),
            "JOIN ON clause must be a comma separated list of fields or a single expression, found"
        );
        expectError(
            "FROM test | LOOKUP JOIN test2 ON " + singleExpressionJoinClause() + " , " + randomIdentifier(),
            "JOIN ON clause with expressions only supports a single expression, found"
        );
        expectError(
            "FROM test | LOOKUP JOIN test2 ON " + singleExpressionJoinClause() + " , " + singleExpressionJoinClause(),
            "JOIN ON clause with expressions only supports a single expression, found"
        );
        // AND-chains may only combine binary comparison expressions, not bare fields or OR
        expectError(
            "FROM test | LOOKUP JOIN test2 ON " + singleExpressionJoinClause() + " AND " + randomIdentifier(),
            "JOIN ON clause only supports fields or AND of Binary Expressions at the moment, found"
        );
        expectError(
            "FROM test | LOOKUP JOIN test2 ON "
                + singleExpressionJoinClause()
                + " AND ("
                + randomIdentifier()
                + " OR "
                + singleExpressionJoinClause()
                + ")",
            "JOIN ON clause only supports fields or AND of Binary Expressions at the moment, found"
        );
        expectError(
            "FROM test | LOOKUP JOIN test2 ON "
                + singleExpressionJoinClause()
                + " AND ("
                + randomIdentifier()
                + "OR"
                + randomIdentifier()
                + ")",
            "JOIN ON clause only supports fields or AND of Binary Expressions at the moment, found"
        );
        expectError(
            "FROM test | LOOKUP JOIN test2 ON " + randomIdentifier() + " AND " + randomIdentifier(),
            "JOIN ON clause only supports fields or AND of Binary Expressions at the moment, found"
        );
        expectError(
            "FROM test | LOOKUP JOIN test2 ON " + randomIdentifier() + " AND " + singleExpressionJoinClause(),
            "JOIN ON clause only supports fields or AND of Binary Expressions at the moment, found "
        );
    }
private String singleExpressionJoinClause() {
var left = randomIdentifier();
var right = randomValueOtherThan(left, ESTestCase::randomIdentifier);
var op = randomBinaryComparisonOperation();
return left + (randomBoolean() ? " " : "") + op.symbol() + (randomBoolean() ? " " : "") + right;
}
private void testInvalidJoinPatterns(String onClause) {
{
// wildcard
var joinPattern = randomIndexPattern(WILDCARD_PATTERN, without(CROSS_CLUSTER), without(INDEX_SELECTOR));
expectError(
"FROM " + randomIndexPatterns() + " | LOOKUP JOIN " + joinPattern + " ON " + onClause,
"invalid index pattern [" + unquoteIndexPattern(joinPattern) + "], * is not allowed in LOOKUP JOIN"
);
}
{
// remote cluster on the right
var fromPatterns = randomIndexPatterns(without(CROSS_CLUSTER));
var joinPattern = randomIndexPattern(CROSS_CLUSTER, without(WILDCARD_PATTERN), without(INDEX_SELECTOR));
expectError(
"FROM " + fromPatterns + " | LOOKUP JOIN " + joinPattern + " ON " + onClause,
"invalid index pattern [" + unquoteIndexPattern(joinPattern) + "], remote clusters are not supported with LOOKUP JOIN"
);
}
{
// Generate a syntactically invalid (partial quoted) pattern.
var fromPatterns = quote(randomIdentifier()) + ":" + unquoteIndexPattern(randomIndexPattern(without(CROSS_CLUSTER)));
var joinPattern = randomIndexPattern();
expectError(
"FROM " + fromPatterns + " | LOOKUP JOIN " + joinPattern + " ON " + onClause,
// Since the from pattern is partially quoted, we get an error at the end of the partially quoted string.
" mismatched input ':'"
);
}
{
// Generate a syntactically invalid (partial quoted) pattern.
var fromPatterns = randomIdentifier() + ":" + quote(randomIndexPatterns(without(CROSS_CLUSTER)));
var joinPattern = randomIndexPattern();
expectError(
"FROM " + fromPatterns + " | LOOKUP JOIN " + joinPattern + " ON " + onClause,
// Since the from pattern is partially quoted, we get an error at the beginning of the partially quoted
// index name that we're expecting an unquoted string.
"expecting UNQUOTED_SOURCE"
);
}
{
var fromPatterns = randomIndexPattern();
// Generate a syntactically invalid (partial quoted) pattern.
var joinPattern = quote(randomIdentifier()) + ":" + unquoteIndexPattern(randomIndexPattern(without(CROSS_CLUSTER)));
expectError(
"FROM " + fromPatterns + " | LOOKUP JOIN " + joinPattern + " ON " + onClause,
// Since the join pattern is partially quoted, we get an error at the end of the partially quoted string.
"mismatched input ':'"
);
}
{
var fromPatterns = randomIndexPattern();
// Generate a syntactically invalid (partially quoted) pattern.
var joinPattern = randomIdentifier() + ":" + quote(randomIndexPattern(without(CROSS_CLUSTER)));
expectError(
"FROM " + fromPatterns + " | LOOKUP JOIN " + joinPattern + " ON " + onClause,
// Since the from pattern is partially quoted, we get an error at the beginning of the partially quoted
// index name that we're expecting an unquoted string.
"no viable alternative at input"
);
}
if (EsqlCapabilities.Cap.INDEX_COMPONENT_SELECTORS.isEnabled()) {
{
// Selectors are not supported on the left of the join query if used with cluster ids.
// Unquoted case: The language specification does not allow mixing `:` and `::` characters in an index expression
var fromPatterns = randomIndexPatterns(CROSS_CLUSTER, without(DATE_MATH));
// We do different validation based on the quotation of the pattern
// Autogenerated patterns will not mix cluster ids with selectors. Unquote it to ensure stable tests
fromPatterns = unquoteIndexPattern(fromPatterns) + "::data";
var joinPattern = randomIndexPattern(without(CROSS_CLUSTER), without(WILDCARD_PATTERN), without(INDEX_SELECTOR));
expectError(
"FROM " + fromPatterns + " | LOOKUP JOIN " + joinPattern + " ON " + onClause,
"mismatched input '::' expecting {"
);
}
{
// Selectors are not supported on the left of the join query if used with cluster ids.
// Quoted case: The language specification allows mixing `:` and `::` characters in a quoted expression, but this usage
// must cause a validation exception in the non-generated code.
var fromPatterns = randomIndexPatterns(CROSS_CLUSTER, without(INDEX_SELECTOR));
// We do different validation based on the quotation of the pattern
// Autogenerated patterns will not mix cluster ids with selectors. Unquote, modify, and requote it to ensure stable tests
fromPatterns = "\"" + unquoteIndexPattern(fromPatterns) + "::data\"";
var joinPattern = randomIndexPattern(without(CROSS_CLUSTER), without(WILDCARD_PATTERN), without(INDEX_SELECTOR));
expectError(
"FROM " + fromPatterns + " | LOOKUP JOIN " + joinPattern + " ON " + onClause,
"Selectors are not yet supported on remote cluster patterns"
);
}
{
// Selectors are not yet supported in join patterns on the right.
// Unquoted case: The language specification does not allow mixing `:` and `::` characters in an index expression
var joinPattern = randomIndexPattern(without(CROSS_CLUSTER), without(WILDCARD_PATTERN), without(DATE_MATH), INDEX_SELECTOR);
// We do different validation based on the quotation of the pattern, so forcefully unquote the expression instead of leaving
// it to chance.
joinPattern = unquoteIndexPattern(joinPattern);
expectError(
"FROM " + randomIndexPatterns(without(CROSS_CLUSTER)) + " | LOOKUP JOIN " + joinPattern + " ON " + onClause,
"no viable alternative at input "
);
}
{
// Selectors are not yet supported in join patterns on the right.
// Quoted case: The language specification allows `::` characters in a quoted expression, but this usage
// must cause a validation exception in the non-generated code.
var joinPattern = randomIndexPattern(without(CROSS_CLUSTER), without(WILDCARD_PATTERN), without(DATE_MATH), INDEX_SELECTOR);
// We do different validation based on the quotation of the pattern, so forcefully quote the expression instead of leaving
// it to chance.
joinPattern = "\"" + unquoteIndexPattern(joinPattern) + "\"";
expectError(
"FROM " + randomIndexPatterns(without(CROSS_CLUSTER)) + " | LOOKUP JOIN " + joinPattern + " ON " + onClause,
"invalid index pattern ["
+ unquoteIndexPattern(joinPattern)
+ "], index pattern selectors are not supported in LOOKUP JOIN"
);
}
{
// Although we don't support selector strings for remote indices, it's alright.
// The parser error message takes precedence.
var fromPatterns = randomIndexPatterns();
var joinPattern = quote(randomIdentifier()) + "::" + randomFrom("data", "failures");
// After the end of the partially quoted string, i.e. the index name, parser now expects "ON..." and not a selector string.
expectError(
"FROM " + fromPatterns + " | LOOKUP JOIN " + joinPattern + " ON " + onClause,
"mismatched input ':' expecting 'on'"
);
}
{
// Although we don't support selector strings for remote indices, it's alright.
// The parser error message takes precedence.
var fromPatterns = randomIndexPatterns();
var joinPattern = randomIdentifier() + "::" + quote(randomFrom("data", "failures"));
// After the index name and "::", parser expects an unquoted string, i.e. the selector string should not be
// partially quoted.
expectError("FROM " + fromPatterns + " | LOOKUP JOIN " + joinPattern + " ON " + onClause, "no viable alternative at input");
}
}
}
/**
 * LOOKUP JOIN ON a boolean expression that combines a field comparison with a
 * MATCH function whose query text is supplied through a named query parameter
 * ({@code ?search_term}). The parsed join condition must be a single AND
 * keeping both predicates in source order.
 */
public void testLookupJoinOnExpressionWithNamedQueryParameters() {
    assumeTrue(
        "requires LOOKUP JOIN ON boolean expression capability",
        EsqlCapabilities.Cap.LOOKUP_JOIN_WITH_FULL_TEXT_FUNCTION.isEnabled()
    );
    // Test LOOKUP JOIN ON expression with named query parameters and MATCH function
    var plan = statement(
        "FROM test | LOOKUP JOIN test2 ON left_field >= right_field AND match(left_field, ?search_term)",
        new QueryParams(List.of(paramAsConstant("search_term", "elasticsearch")))
    );
    var join = as(plan, LookupJoin.class);
    assertThat(as(join.left(), UnresolvedRelation.class).indexPattern().indexPattern(), equalTo("test"));
    assertThat(as(join.right(), UnresolvedRelation.class).indexPattern().indexPattern(), equalTo("test2"));
    // Verify the join condition contains both the comparison and MATCH function
    var condition = join.config().joinOnConditions();
    assertThat(condition, instanceOf(And.class));
    var andCondition = (And) condition;
    // Check that we have both conditions in the correct order
    assertThat(andCondition.children().size(), equalTo(2));
    // First child should be a binary comparison (left_field >= right_field)
    var firstChild = andCondition.children().get(0);
    assertThat("First condition should be binary comparison", firstChild, instanceOf(EsqlBinaryComparison.class));
    // Second child should be a MATCH function (match(left_field, ?search_term))
    var secondChild = andCondition.children().get(1);
    assertThat("Second condition should be UnresolvedFunction", secondChild, instanceOf(UnresolvedFunction.class));
    var function = (UnresolvedFunction) secondChild;
    // The function is still unresolved at parse time; only its name can be checked here.
    assertThat("Second condition should be MATCH function", function.name(), equalTo("match"))
    ;
}
/**
 * LOOKUP JOIN ON a boolean expression that combines a field comparison with a
 * MATCH function whose query text comes from a positional parameter
 * ({@code ?2}, the second supplied constant). The parsed join condition must
 * be a single AND keeping both predicates in source order, with the positional
 * parameter substituted into the MATCH call.
 */
public void testLookupJoinOnExpressionWithPositionalQueryParameters() {
    assumeTrue(
        "requires LOOKUP JOIN ON boolean expression capability",
        EsqlCapabilities.Cap.LOOKUP_JOIN_WITH_FULL_TEXT_FUNCTION.isEnabled()
    );
    // Test LOOKUP JOIN ON expression with positional query parameters and MATCH function
    var plan = statement(
        "FROM test | LOOKUP JOIN test2 ON left_field >= right_field AND match(left_field, ?2)",
        new QueryParams(List.of(paramAsConstant(null, "dummy"), paramAsConstant(null, "elasticsearch")))
    );
    var join = as(plan, LookupJoin.class);
    assertThat(as(join.left(), UnresolvedRelation.class).indexPattern().indexPattern(), equalTo("test"));
    assertThat(as(join.right(), UnresolvedRelation.class).indexPattern().indexPattern(), equalTo("test2"));
    // Verify the join condition contains both the comparison and MATCH function
    var condition = join.config().joinOnConditions();
    assertThat(condition, instanceOf(And.class));
    var andCondition = (And) condition;
    // Check that we have both conditions in the correct order
    assertThat(andCondition.children().size(), equalTo(2));
    // First child should be a binary comparison (left_field >= right_field)
    var firstChild = andCondition.children().get(0);
    assertThat("First condition should be binary comparison", firstChild, instanceOf(EsqlBinaryComparison.class));
    // Second child should be a MATCH function (match(left_field, ?))
    var secondChild = andCondition.children().get(1);
    assertThat("Second condition should be UnresolvedFunction", secondChild, instanceOf(UnresolvedFunction.class));
    var function = (UnresolvedFunction) secondChild;
    assertThat("Second condition should be MATCH function", function.name(), equalTo("match"));
    // ?2 must resolve to the SECOND positional constant ("elasticsearch"), not the first ("dummy").
    assertEquals(2, function.children().size());
    assertEquals("elasticsearch", function.children().get(1).toString());
}
/**
 * INSIST_🐔 rejects wildcard field patterns: both a bare {@code *} and a
 * prefixed {@code foo*} must produce a parse error naming the offending
 * pattern.
 */
public void testInvalidInsistAsterisk() {
    assumeTrue("requires snapshot build", Build.current().isSnapshot());
    // Both wildcard variants share the same query prefix and error shape.
    final String queryPrefix = "FROM text | EVAL x = 4 | INSIST_🐔 ";
    for (String wildcard : List.of("*", "foo*")) {
        expectError(queryPrefix + wildcard, "INSIST doesn't support wildcards, found [" + wildcard + "]");
    }
}
/**
 * FORK with six branches covering LIMIT, SORT, WHERE, DISSECT, STATS (with a
 * filtered aggregate) and EVAL. The parser wraps every branch in an Eval that
 * adds a synthetic {@code _fork} discriminator column ("fork1".."fork6"), so
 * each subplan is unwrapped from that Eval before its own commands are
 * verified bottom-up.
 */
public void testValidFork() {
    var plan = statement("""
        FROM foo*
        | FORK ( WHERE a:"baz" | LIMIT 11 )
               ( WHERE b:"bar" | SORT b )
               ( WHERE c:"bat" )
               ( SORT c )
               ( LIMIT 5 )
               ( DISSECT a "%{d} %{e} %{f}" | STATS x = MIN(a), y = MAX(b) WHERE d > 1000 | EVAL xyz = "abc")
        """);
    var fork = as(plan, Fork.class);
    var subPlans = fork.children();
    // first subplan: WHERE a:"baz" | LIMIT 11
    var eval = as(subPlans.get(0), Eval.class);
    assertThat(as(eval.fields().get(0), Alias.class), equalToIgnoringIds(alias("_fork", literalString("fork1"))));
    var limit = as(eval.child(), Limit.class);
    assertThat(limit.limit(), instanceOf(Literal.class));
    assertThat(((Literal) limit.limit()).value(), equalTo(11));
    var filter = as(limit.child(), Filter.class);
    var match = (MatchOperator) filter.condition();
    var matchField = (UnresolvedAttribute) match.field();
    assertThat(matchField.name(), equalTo("a"));
    assertThat(match.query().fold(FoldContext.small()), equalTo(BytesRefs.toBytesRef("baz")));
    // second subplan: WHERE b:"bar" | SORT b
    eval = as(subPlans.get(1), Eval.class);
    assertThat(as(eval.fields().get(0), Alias.class), equalToIgnoringIds(alias("_fork", literalString("fork2"))));
    var orderBy = as(eval.child(), OrderBy.class);
    assertThat(orderBy.order().size(), equalTo(1));
    Order order = orderBy.order().get(0);
    assertThat(order.child(), instanceOf(UnresolvedAttribute.class));
    assertThat(((UnresolvedAttribute) order.child()).name(), equalTo("b"));
    filter = as(orderBy.child(), Filter.class);
    match = (MatchOperator) filter.condition();
    matchField = (UnresolvedAttribute) match.field();
    assertThat(matchField.name(), equalTo("b"));
    assertThat(match.query().fold(FoldContext.small()), equalTo(BytesRefs.toBytesRef("bar")));
    // third subplan: WHERE c:"bat"
    eval = as(subPlans.get(2), Eval.class);
    assertThat(as(eval.fields().get(0), Alias.class), equalToIgnoringIds(alias("_fork", literalString("fork3"))));
    filter = as(eval.child(), Filter.class);
    match = (MatchOperator) filter.condition();
    matchField = (UnresolvedAttribute) match.field();
    assertThat(matchField.name(), equalTo("c"));
    assertThat(match.query().fold(FoldContext.small()), equalTo(BytesRefs.toBytesRef("bat")));
    // fourth subplan: SORT c
    eval = as(subPlans.get(3), Eval.class);
    assertThat(as(eval.fields().get(0), Alias.class), equalToIgnoringIds(alias("_fork", literalString("fork4"))));
    orderBy = as(eval.child(), OrderBy.class);
    assertThat(orderBy.order().size(), equalTo(1));
    order = orderBy.order().get(0);
    assertThat(order.child(), instanceOf(UnresolvedAttribute.class));
    assertThat(((UnresolvedAttribute) order.child()).name(), equalTo("c"));
    // fifth subplan: LIMIT 5
    eval = as(subPlans.get(4), Eval.class);
    assertThat(as(eval.fields().get(0), Alias.class), equalToIgnoringIds(alias("_fork", literalString("fork5"))));
    limit = as(eval.child(), Limit.class);
    assertThat(limit.limit(), instanceOf(Literal.class));
    assertThat(((Literal) limit.limit()).value(), equalTo(5));
    // sixth subplan: DISSECT | STATS ... WHERE ... | EVAL — verified from the top of the branch downwards
    eval = as(subPlans.get(5), Eval.class);
    assertThat(as(eval.fields().get(0), Alias.class), equalToIgnoringIds(alias("_fork", literalString("fork6"))));
    eval = as(eval.child(), Eval.class);
    assertThat(as(eval.fields().get(0), Alias.class), equalToIgnoringIds(alias("xyz", literalString("abc"))));
    Aggregate aggregate = as(eval.child(), Aggregate.class);
    assertThat(aggregate.aggregates().size(), equalTo(2));
    var alias = as(aggregate.aggregates().get(0), Alias.class);
    assertThat(alias.name(), equalTo("x"));
    assertThat(as(alias.child(), UnresolvedFunction.class).name(), equalTo("MIN"));
    alias = as(aggregate.aggregates().get(1), Alias.class);
    assertThat(alias.name(), equalTo("y"));
    // The per-aggregate WHERE clause is parsed into a FilteredExpression wrapping MAX(b).
    var filteredExp = as(alias.child(), FilteredExpression.class);
    assertThat(as(filteredExp.delegate(), UnresolvedFunction.class).name(), equalTo("MAX"));
    var greaterThan = as(filteredExp.filter(), GreaterThan.class);
    assertThat(as(greaterThan.left(), UnresolvedAttribute.class).name(), equalTo("d"));
    assertThat(as(greaterThan.right(), Literal.class).value(), equalTo(1000));
    var dissect = as(aggregate.child(), Dissect.class);
    assertThat(as(dissect.input(), UnresolvedAttribute.class).name(), equalTo("a"));
    assertThat(dissect.parser().pattern(), equalTo("%{d} %{e} %{f}"));
}
/**
 * Smoke test: every released command parses inside a FORK branch. Only the
 * overall plan shape (a trailing KEEP) is asserted; individual branch
 * contents are not inspected.
 */
public void testForkAllReleasedCommands() {
    var query = """
        FROM foo*
        | FORK
        ( SORT c )
        ( LIMIT 5 )
        ( DISSECT a "%{d} %{e} %{f}" )
        ( GROK a "%{WORD:foo}" )
        ( STATS x = MIN(a), y = MAX(b) WHERE d > 1000 )
        ( EVAL xyz = ( (a/b) * (b/a)) )
        ( WHERE a < 1 )
        ( KEEP a )
        | KEEP a
        """;
    var plan = statement(query);
    assertThat(plan, instanceOf(Keep.class));
    // Second batch, including a nested FORK and commands with options.
    query = """
        FROM foo*
        | FORK
        ( RENAME a as c )
        ( MV_EXPAND a )
        ( CHANGE_POINT a on b )
        ( LOOKUP JOIN idx2 ON f1 )
        ( ENRICH idx2 on f1 with f2 = f3 )
        ( FORK ( WHERE a:"baz" ) ( EVAL x = [ 1, 2, 3 ] ) )
        ( COMPLETION a=b WITH { "inference_id": "c" } )
        | KEEP a
        """;
    plan = statement(query);
    assertThat(plan, instanceOf(Keep.class));
}
/**
 * Smoke test: every command — including snapshot-only ones such as
 * INLINE STATS, INSIST_🐔 and LOOKUP_🐔 — parses inside a FORK branch.
 * Only the overall plan shape (a trailing KEEP) is asserted.
 */
public void testForkAllCommands() {
    assumeTrue("requires snapshot build", Build.current().isSnapshot());
    var query = """
        FROM foo*
        | FORK
        ( SORT c )
        ( LIMIT 5 )
        ( DISSECT a "%{d} %{e} %{f}" )
        ( GROK a "%{WORD:foo}" )
        ( STATS x = MIN(a), y = MAX(b) WHERE d > 1000 )
        ( EVAL xyz = ( (a/b) * (b/a)) )
        ( WHERE a < 1 )
        ( KEEP a )
        | KEEP a
        """;
    var plan = statement(query);
    assertThat(plan, instanceOf(Keep.class));
    // Second batch, including a nested FORK, a LOOKUP JOIN with an expression condition and SAMPLE.
    query = """
        FROM foo*
        | FORK
        ( RENAME a as c )
        ( MV_EXPAND a )
        ( CHANGE_POINT a on b )
        ( LOOKUP JOIN idx2 ON f1 | LOOKUP JOIN idx3 ON f1 > f3)
        ( ENRICH idx2 on f1 with f2 = f3 )
        ( FORK ( WHERE a:"baz" ) ( EVAL x = [ 1, 2, 3 ] ) )
        ( COMPLETION a=b WITH { "inference_id": "c" } )
        ( SAMPLE 0.99 )
        | KEEP a
        """;
    plan = statement(query);
    assertThat(plan, instanceOf(Keep.class));
    // Snapshot-only commands.
    query = """
        FROM foo*
        | FORK
        ( INLINE STATS x = MIN(a), y = MAX(b) WHERE d > 1000 )
        ( INSIST_🐔 a )
        ( LOOKUP_🐔 a on b )
        | KEEP a
        """;
    plan = statement(query);
    assertThat(plan, instanceOf(Keep.class));
}
/**
 * Invalid FORK usages: missing or empty branch lists, more than 8 branches,
 * branches that are bare expressions rather than commands, and — while remote
 * FORK is disabled — cross-cluster index patterns.
 */
public void testInvalidFork() {
    // FORK with no branches at all; '<EOF>' lands on line 2 because of the text block's trailing newline.
    expectError("""
        FROM foo* | FORK
        """, "line 2:1: mismatched input '<EOF>' expecting '('");
    expectError("""
        FROM foo* | FORK ()
        """, "line 1:19: mismatched input ')'");
    // 9 branches exceeds the maximum of 8.
    expectError("""
        FROM foo*
        | FORK (where true) (where true) (where true) (where true)
               (where true) (where true) (where true) (where true)
               (where true)
        """, "Fork supports up to 8 branches");
    // Branches must be commands, not bare expressions.
    expectError("FROM foo* | FORK ( x+1 ) ( WHERE y>2 )", "line 1:20: mismatched input 'x+1'");
    expectError("FROM foo* | FORK ( LIMIT 10 ) ( y+2 )", "line 1:33: mismatched input 'y+2'");
    expectError("FROM foo* | FORK (where true) ()", "line 1:32: mismatched input ')'");
    expectError("FROM foo* | FORK () (where true)", "line 1:19: mismatched input ')'");
    if (EsqlCapabilities.Cap.ENABLE_FORK_FOR_REMOTE_INDICES.isEnabled() == false) {
        var fromPatterns = randomIndexPatterns(CROSS_CLUSTER);
        expectError(
            "FROM " + fromPatterns + " | FORK (EVAL a = 1) (EVAL a = 2)",
            "invalid index pattern [" + unquoteIndexPattern(fromPatterns) + "], remote clusters are not supported with FORK"
        );
    }
}
/**
 * Command keywords must still be usable as plain field names:
 * {@code STATS avg(<keyword>)} has to parse into an Aggregate instead of the
 * keyword being rejected as reserved.
 *
 * @throws Exception never in practice; kept for signature compatibility
 */
public void testFieldNamesAsCommands() throws Exception {
    List<String> keywords = List.of(
        "dissect",
        "drop",
        "enrich",
        "eval",
        "explain",
        "from",
        "grok",
        "keep",
        "limit",
        "mv_expand",
        "rename",
        "sort",
        "stats"
    );
    for (String keyword : keywords) {
        var plan = statement("FROM test | STATS avg(" + keyword + ")");
        // as(...) both casts and asserts the plan type; its return value is not needed,
        // so the previously unused local has been dropped.
        as(plan, Aggregate.class);
    }
}
// [ and ( are used to trigger a double mode causing their symbol name (instead of text) to be used in error reporting
// this test checks that they are properly replaced in the error message
public void testPreserveParentheses() {
    // test for ( — error messages must show the literal '(' rather than the lexer token name
    expectError("row a = 1 not in", "line 1:17: mismatched input '<EOF>' expecting '('");
    expectError("row a = 1 | where a not in", "line 1:27: mismatched input '<EOF>' expecting '('");
    expectError("row a = 1 | where a not in (1", "line 1:30: mismatched input '<EOF>' expecting {',', ')'}");
    expectError("row a = 1 | where a not in [1", "line 1:28: missing '(' at '['");
    expectError("row a = 1 | where a not in 123", "line 1:28: missing '(' at '123'");
    // test for [ — exercised via EXPLAIN, which switches into the relevant lexer mode
    if (EsqlCapabilities.Cap.EXPLAIN.isEnabled()) {
        expectError("explain", "line 1:8: mismatched input '<EOF>' expecting '('");
        expectError("explain ]", "line 1:9: token recognition error at: ']'");
        expectError("explain ( row x = 1", "line 1:20: missing ')' at '<EOF>'");
    }
}
/**
 * EXPLAIN must be the last command of a query: any downstream command after
 * the closing parenthesis is rejected with a dedicated validation error.
 */
public void testExplainErrors() {
    assumeTrue("Requires EXPLAIN capability", EsqlCapabilities.Cap.EXPLAIN.isEnabled());
    // TODO this one is incorrect
    expectError("explain ( from test ) | limit 1", "line 1:1: EXPLAIN does not support downstream commands");
    expectError(
        "explain (row x=\"Elastic\" | eval y=concat(x,to_upper(\"search\"))) | mv_expand y",
        "line 1:1: EXPLAIN does not support downstream commands"
    );
}
/**
 * RERANK with neither a WITH clause nor a score attribute falls back to the
 * default inference endpoint ({@code .rerank-v1-elasticsearch}) and the
 * {@code _score} attribute; query text and rerank fields come straight from
 * the command.
 */
public void testRerankDefaultInferenceIdAndScoreAttribute() {
    var rerank = as(processingCommand("RERANK \"query text\" ON title"), Rerank.class);
    // Explicitly provided pieces.
    assertThat(rerank.queryText(), equalTo(literalString("query text")));
    assertThat(rerank.rerankFields(), equalToIgnoringIds(List.of(alias("title", attribute("title")))));
    // Defaults applied when nothing is specified.
    assertThat(rerank.inferenceId(), equalTo(literalString(".rerank-v1-elasticsearch")));
    assertThat(rerank.scoreAttribute(), equalToIgnoringIds(attribute("_score")));
}
/**
 * RERANK with an empty WITH map behaves exactly like RERANK with no WITH
 * clause: the default inference endpoint and {@code _score} attribute apply.
 */
public void testRerankEmptyOptions() {
    var rerank = as(processingCommand("RERANK \"query text\" ON title WITH {}"), Rerank.class);
    // Explicitly provided pieces.
    assertThat(rerank.queryText(), equalTo(literalString("query text")));
    assertThat(rerank.rerankFields(), equalToIgnoringIds(List.of(alias("title", attribute("title")))));
    // Defaults applied because the option map is empty.
    assertThat(rerank.inferenceId(), equalTo(literalString(".rerank-v1-elasticsearch")));
    assertThat(rerank.scoreAttribute(), equalToIgnoringIds(attribute("_score")));
}
/**
 * WITH { "inference_id": ... } overrides the default endpoint while the score
 * attribute still defaults to {@code _score}.
 */
public void testRerankInferenceId() {
    var plan = processingCommand("RERANK \"query text\" ON title WITH { \"inference_id\" : \"inferenceId\" }");
    var rerank = as(plan, Rerank.class);
    assertThat(rerank.inferenceId(), equalTo(literalString("inferenceId")));
    assertThat(rerank.queryText(), equalTo(literalString("query text")));
    assertThat(rerank.rerankFields(), equalToIgnoringIds(List.of(alias("title", attribute("title")))));
    assertThat(rerank.scoreAttribute(), equalToIgnoringIds(attribute("_score")));
}
/**
 * The {@code rerank_score=} prefix overrides the default {@code _score}
 * target attribute while the inference id still defaults.
 */
public void testRerankScoreAttribute() {
    var plan = processingCommand("RERANK rerank_score=\"query text\" ON title");
    var rerank = as(plan, Rerank.class);
    assertThat(rerank.inferenceId(), equalTo(literalString(".rerank-v1-elasticsearch")));
    assertThat(rerank.scoreAttribute(), equalToIgnoringIds(attribute("rerank_score")));
    assertThat(rerank.queryText(), equalTo(literalString("query text")));
    assertThat(rerank.rerankFields(), equalToIgnoringIds(List.of(alias("title", attribute("title")))));
}
/**
 * Both overrides at once: an explicit score attribute ({@code rerank_score=})
 * and an explicit inference id supplied via the WITH map.
 */
// NOTE(review): method name contains a typo ("IdAndd"); left as-is to avoid churning test identifiers.
public void testRerankInferenceIdAnddScoreAttribute() {
    var plan = processingCommand("RERANK rerank_score=\"query text\" ON title WITH { \"inference_id\" : \"inferenceId\" }");
    var rerank = as(plan, Rerank.class);
    assertThat(rerank.inferenceId(), equalTo(literalString("inferenceId")));
    assertThat(rerank.scoreAttribute(), equalToIgnoringIds(attribute("rerank_score")));
    assertThat(rerank.queryText(), equalTo(literalString("query text")));
    assertThat(rerank.rerankFields(), equalToIgnoringIds(List.of(alias("title", attribute("title")))));
}
/**
 * RERANK over a single field: the field is parsed into a self-referential
 * alias ({@code title = title}).
 */
public void testRerankSingleField() {
    var plan = processingCommand("RERANK \"query text\" ON title WITH { \"inference_id\" : \"inferenceID\" }");
    var rerank = as(plan, Rerank.class);
    assertThat(rerank.queryText(), equalTo(literalString("query text")));
    assertThat(rerank.inferenceId(), equalTo(literalString("inferenceID")));
    assertThat(rerank.rerankFields(), equalToIgnoringIds(List.of(alias("title", attribute("title")))));
    assertThat(rerank.scoreAttribute(), equalToIgnoringIds(attribute("_score")));
}
/**
 * RERANK over several fields, one of them renamed
 * ({@code authors_renamed=authors}): each entry becomes an alias, either
 * self-referential or carrying the rename.
 */
public void testRerankMultipleFields() {
    var plan = processingCommand(
        "RERANK \"query text\" ON title, description, authors_renamed=authors WITH { \"inference_id\" : \"inferenceID\" }"
    );
    var rerank = as(plan, Rerank.class);
    assertThat(rerank.queryText(), equalTo(literalString("query text")));
    assertThat(rerank.inferenceId(), equalTo(literalString("inferenceID")));
    assertThat(
        rerank.rerankFields(),
        equalToIgnoringIds(
            List.of(
                alias("title", attribute("title")),
                alias("description", attribute("description")),
                alias("authors_renamed", attribute("authors"))
            )
        )
    );
    assertThat(rerank.scoreAttribute(), equalToIgnoringIds(attribute("_score")));
}
/**
 * RERANK fields may be computed expressions as long as they are named:
 * {@code short_description = SUBSTRING(...)} parses into an alias over the
 * (still unresolved) function call.
 */
public void testRerankComputedFields() {
    var plan = processingCommand("""
        RERANK "query text" ON title, short_description = SUBSTRING(description, 0, 100) WITH { "inference_id": "inferenceID" }
        """);
    var rerank = as(plan, Rerank.class);
    assertThat(rerank.queryText(), equalTo(literalString("query text")));
    assertThat(rerank.inferenceId(), equalTo(literalString("inferenceID")));
    assertThat(
        rerank.rerankFields(),
        equalToIgnoringIds(
            List.of(
                alias("title", attribute("title")),
                alias("short_description", function("SUBSTRING", List.of(attribute("description"), integer(0), integer(100))))
            )
        )
    );
    assertThat(rerank.scoreAttribute(), equalToIgnoringIds(attribute("_score")));
}
/**
 * A computed RERANK field without an explicit name is a parse error —
 * unnamed aliases are forbidden.
 */
public void testRerankComputedFieldsWithoutName() {
    // Unnamed alias are forbidden
    expectError(
        "FROM books METADATA _score | RERANK \"food\" ON title, SUBSTRING(description, 0, 100), yearRenamed=year`",
        "line 1:63: mismatched input '(' expecting {<EOF>, '|', '=', ',', '.', 'with'}"
    );
}
/**
 * Positional parameters ({@code ?}) can supply both the RERANK query text and
 * the inference id inside the WITH map; they are consumed in order.
 */
public void testRerankWithPositionalParameters() {
    var queryParams = new QueryParams(List.of(paramAsConstant(null, "query text"), paramAsConstant(null, "reranker")));
    var rerank = as(
        parser.createStatement("row a = 1 | RERANK rerank_score = ? ON title WITH { \"inference_id\" : ? }", queryParams),
        Rerank.class
    );
    assertThat(rerank.queryText(), equalTo(literalString("query text")));
    assertThat(rerank.inferenceId(), equalTo(literalString("reranker")));
    assertThat(rerank.rerankFields(), equalToIgnoringIds(List.of(alias("title", attribute("title")))));
    assertThat(rerank.scoreAttribute(), equalToIgnoringIds(attribute("rerank_score")));
}
/**
 * Named parameters ({@code ?queryText}, {@code ?inferenceId}) can supply both
 * the RERANK query text and the inference id inside the WITH map.
 */
public void testRerankWithNamedParameters() {
    var queryParams = new QueryParams(List.of(paramAsConstant("queryText", "query text"), paramAsConstant("inferenceId", "reranker")));
    var rerank = as(
        parser.createStatement(
            "row a = 1 | RERANK rerank_score=?queryText ON title WITH { \"inference_id\": ?inferenceId }",
            queryParams
        ),
        Rerank.class
    );
    assertThat(rerank.queryText(), equalTo(literalString("query text")));
    assertThat(rerank.inferenceId(), equalTo(literalString("reranker")));
    assertThat(rerank.rerankFields(), equalToIgnoringIds(List.of(alias("title", attribute("title")))));
    assertThat(rerank.scoreAttribute(), equalToIgnoringIds(attribute("rerank_score")));
}
/**
 * Invalid RERANK usages: non-string or map-valued {@code inference_id},
 * unknown WITH options, missing query text / ON clause, and cross-cluster
 * source patterns.
 */
public void testInvalidRerank() {
    expectError(
        "FROM foo* | RERANK \"query text\" ON title WITH { \"inference_id\": 3 }",
        "line 1:65: Option [inference_id] must be a valid string, found [3]"
    );
    // NOTE(review): "Inavalid" mirrors the production error message's spelling — keep in sync with the parser.
    expectError(
        "FROM foo* | RERANK \"query text\" ON title WITH { \"inference_id\": \"inferenceId\", \"unknown_option\": 3 }",
        "line 1:42: Inavalid option [unknown_option] in RERANK, expected one of [[inference_id]]"
    );
    expectError("FROM foo* | RERANK ON title WITH inferenceId", "line 1:20: extraneous input 'ON' expecting {QUOTED_STRING");
    expectError("FROM foo* | RERANK \"query text\" WITH inferenceId", "line 1:33: mismatched input 'WITH' expecting 'on'");
    // Remote cluster patterns are not supported with RERANK.
    var fromPatterns = randomIndexPatterns(CROSS_CLUSTER);
    expectError(
        "FROM " + fromPatterns + " | RERANK \"query text\" ON title WITH { \"inference_id\" : \"inference_id\" }",
        "invalid index pattern [" + unquoteIndexPattern(fromPatterns) + "], remote clusters are not supported with RERANK"
    );
    expectError(
        "FROM foo* | RERANK \"query text\" ON title WITH { \"inference_id\": { \"a\": 123 } }",
        "Option [inference_id] must be a valid string, found [{ \"a\": 123 }]"
    );
}
/**
 * COMPLETION without a WITH clause must fail: [inference_id] is mandatory.
 */
public void testCompletionMissingOptions() {
    final String query = "FROM foo* | COMPLETION targetField = prompt";
    final String expectedError = "line 1:13: Missing mandatory option [inference_id] in COMPLETION";
    expectError(query, expectedError);
}
/**
 * COMPLETION with an empty WITH map fails the same way as a missing one:
 * [inference_id] is mandatory.
 */
public void testCompletionEmptyOptions() {
    final String query = "FROM foo* | COMPLETION targetField = prompt WITH { }";
    final String expectedError = "line 1:13: Missing mandatory option [inference_id] in COMPLETION";
    expectError(query, expectedError);
}
/**
 * COMPLETION with a plain field as prompt: the prompt becomes an unresolved
 * attribute and the explicit target field is preserved.
 */
public void testCompletionUsingFieldAsPrompt() {
    var plan = as(
        processingCommand("COMPLETION targetField=prompt_field WITH{ \"inference_id\" : \"inferenceID\" }"),
        Completion.class
    );
    assertThat(plan.prompt(), equalToIgnoringIds(attribute("prompt_field")));
    assertThat(plan.inferenceId(), equalTo(literalString("inferenceID")));
    assertThat(plan.targetField(), equalToIgnoringIds(attribute("targetField")));
}
/**
 * COMPLETION with a function call as prompt: the prompt is parsed into an
 * (unresolved) function expression over its argument attributes.
 */
public void testCompletionUsingFunctionAsPrompt() {
    var plan = as(
        processingCommand("COMPLETION targetField=CONCAT(fieldA, fieldB) WITH { \"inference_id\" : \"inferenceID\" }"),
        Completion.class
    );
    assertThat(plan.prompt(), equalToIgnoringIds(function("CONCAT", List.of(attribute("fieldA"), attribute("fieldB")))));
    assertThat(plan.inferenceId(), equalTo(literalString("inferenceID")));
    assertThat(plan.targetField(), equalToIgnoringIds(attribute("targetField")));
}
/**
 * COMPLETION without an explicit target field defaults the output attribute
 * name to {@code completion}.
 */
public void testCompletionDefaultFieldName() {
    var plan = as(processingCommand("COMPLETION prompt_field WITH{ \"inference_id\" : \"inferenceID\" }"), Completion.class);
    assertThat(plan.prompt(), equalToIgnoringIds(attribute("prompt_field")));
    assertThat(plan.inferenceId(), equalTo(literalString("inferenceID")));
    assertThat(plan.targetField(), equalToIgnoringIds(attribute("completion")));
}
/**
 * A positional parameter ({@code ?}) can supply the COMPLETION inference id
 * inside the WITH map.
 */
public void testCompletionWithPositionalParameters() {
    var queryParams = new QueryParams(List.of(paramAsConstant(null, "inferenceId")));
    var plan = as(
        parser.createStatement("row a = 1 | COMPLETION prompt_field WITH { \"inference_id\" : ? }", queryParams),
        Completion.class
    );
    assertThat(plan.prompt(), equalToIgnoringIds(attribute("prompt_field")));
    assertThat(plan.inferenceId(), equalTo(literalString("inferenceId")));
    assertThat(plan.targetField(), equalToIgnoringIds(attribute("completion")));
}
/**
 * A named parameter ({@code ?inferenceId}) can supply the COMPLETION
 * inference id inside the WITH map.
 */
public void testCompletionWithNamedParameters() {
    var queryParams = new QueryParams(List.of(paramAsConstant("inferenceId", "myInference")));
    var plan = as(
        parser.createStatement("row a = 1 | COMPLETION prompt_field WITH { \"inference_id\" : ?inferenceId }", queryParams),
        Completion.class
    );
    assertThat(plan.prompt(), equalToIgnoringIds(attribute("prompt_field")));
    assertThat(plan.inferenceId(), equalTo(literalString("myInference")));
    assertThat(plan.targetField(), equalToIgnoringIds(attribute("completion")));
}
/**
 * Invalid COMPLETION usages: non-string or map-valued {@code inference_id},
 * unknown WITH options, missing prompt, truncated WITH clause and
 * cross-cluster source patterns.
 */
public void testInvalidCompletion() {
    expectError(
        "FROM foo* | COMPLETION prompt WITH { \"inference_id\": 3 }",
        "line 1:54: Option [inference_id] must be a valid string, found [3]"
    );
    // NOTE(review): "Inavalid" mirrors the production error message's spelling — keep in sync with the parser.
    expectError(
        "FROM foo* | COMPLETION prompt WITH { \"inference_id\": \"inferenceId\", \"unknown_option\": 3 }",
        "line 1:31: Inavalid option [unknown_option] in COMPLETION, expected one of [[inference_id]]"
    );
    expectError("FROM foo* | COMPLETION WITH inferenceId", "line 1:24: extraneous input 'WITH' expecting {");
    // NOTE(review): expected text starts at "ine 1:46" — presumably matched by substring against "line 1:46"; confirm expectError semantics.
    expectError("FROM foo* | COMPLETION completion=prompt WITH", "ine 1:46: mismatched input '<EOF>' expecting '{'");
    var fromPatterns = randomIndexPatterns(CROSS_CLUSTER);
    expectError(
        "FROM " + fromPatterns + " | COMPLETION prompt_field WITH { \"inference_id\" : \"inference_id\" }",
        "invalid index pattern [" + unquoteIndexPattern(fromPatterns) + "], remote clusters are not supported with COMPLETION"
    );
    expectError(
        "FROM foo* | COMPLETION prompt WITH { \"inference_id\": { \"a\": 123 } }",
        "line 1:54: Option [inference_id] must be a valid string, found [{ \"a\": 123 }]"
    );
}
/**
 * SAMPLE argument validation: exactly one numeric probability strictly
 * between 0 and 1 is required; extra trailing tokens or a missing argument
 * are parse errors.
 */
public void testSample() {
    assumeTrue("SAMPLE requires corresponding capability", EsqlCapabilities.Cap.SAMPLE_V3.isEnabled());
    expectError("FROM test | SAMPLE .1 2", "line 1:23: extraneous input '2' expecting <EOF>");
    expectError("FROM test | SAMPLE .1 \"2\"", "line 1:23: extraneous input '\"2\"' expecting <EOF>");
    expectError(
        "FROM test | SAMPLE 1",
        "line 1:13: invalid value for SAMPLE probability [1], expecting a number between 0 and 1, exclusive"
    );
    // Missing argument entirely — checked via expectThrows since only the message prefix is stable.
    expectThrows(
        ParsingException.class,
        startsWith("line 1:19: mismatched input '<EOF>' expecting {"),
        () -> statement("FROM test | SAMPLE")
    );
}
/**
 * Shorthand for building an {@link Alias} node with an empty source.
 *
 * @param name  the alias name
 * @param value the aliased expression
 * @return a new {@code Alias} with {@code EMPTY} source information
 */
static Alias alias(String name, Expression value) {
    return new Alias(EMPTY, name, value);
}
/**
 * Valid FUSE variants after a FORK: defaults (keys {@code _id}/{@code _index},
 * discriminator {@code _fork}, score {@code _score}, type RRF), explicit RRF
 * and LINEAR fuse types, a WITH options map, explicit SCORE BY / KEY BY /
 * GROUP BY clauses in either order, a dotted GROUP BY field, and a GROUP BY
 * supplied through a double parameter marker ({@code ??p}).
 */
public void testValidFuse() {
    // Bare FUSE: everything defaults.
    LogicalPlan plan = statement("""
        FROM foo* METADATA _id, _index, _score
        | FORK ( WHERE a:"baz" )
               ( WHERE b:"bar" )
        | FUSE
        """);
    var fuse = as(plan, Fuse.class);
    assertThat(fuse.keys().size(), equalTo(2));
    assertThat(fuse.keys().get(0), instanceOf(UnresolvedAttribute.class));
    assertThat(fuse.keys().get(0).name(), equalTo("_id"));
    assertThat(fuse.keys().get(1), instanceOf(UnresolvedAttribute.class));
    assertThat(fuse.keys().get(1).name(), equalTo("_index"));
    assertThat(fuse.discriminator().name(), equalTo("_fork"));
    assertThat(fuse.score().name(), equalTo("_score"));
    assertThat(fuse.fuseType(), equalTo(Fuse.FuseType.RRF));
    assertThat(fuse.child(), instanceOf(Fork.class));
    // Explicit RRF fuse type.
    plan = statement("""
        FROM foo* METADATA _id, _index, _score
        | FORK ( WHERE a:"baz" )
               ( WHERE b:"bar" )
        | FUSE RRF
        """);
    fuse = as(plan, Fuse.class);
    assertThat(fuse.fuseType(), equalTo(Fuse.FuseType.RRF));
    assertThat(fuse.child(), instanceOf(Fork.class));
    // Explicit LINEAR fuse type.
    plan = statement("""
        FROM foo* METADATA _id, _index, _score
        | FORK ( WHERE a:"baz" )
               ( WHERE b:"bar" )
        | FUSE LINEAR
        """);
    fuse = as(plan, Fuse.class);
    assertThat(fuse.fuseType(), equalTo(Fuse.FuseType.LINEAR));
    assertThat(fuse.child(), instanceOf(Fork.class));
    // WITH options map, including a nested per-branch weights map.
    plan = statement("""
        FROM foo* METADATA _id, _index, _score
        | FORK ( WHERE a:"baz" )
               ( WHERE b:"bar" )
        | FUSE WITH {"rank_constant": 15, "weights": {"fork1": 0.33 } }
        """);
    fuse = as(plan, Fuse.class);
    assertThat(fuse.fuseType(), equalTo(Fuse.FuseType.RRF));
    MapExpression options = fuse.options();
    assertThat(options.get("rank_constant"), equalTo(Literal.integer(null, 15)));
    assertThat(options.get("weights"), instanceOf(MapExpression.class));
    assertThat(((MapExpression) options.get("weights")).get("fork1"), equalTo(Literal.fromDouble(null, 0.33)));
    assertThat(fuse.child(), instanceOf(Fork.class));
    // Explicit SCORE BY / KEY BY / GROUP BY clauses.
    plan = statement("""
        FROM foo* METADATA _id, _index, _score
        | FORK ( WHERE a:"baz" )
               ( WHERE b:"bar" )
        | FUSE SCORE BY my_score KEY BY my_key1,my_key2 GROUP BY my_group WITH {"rank_constant": 15 }
        """);
    fuse = as(plan, Fuse.class);
    assertThat(fuse.keys().size(), equalTo(2));
    assertThat(fuse.keys().get(0), instanceOf(UnresolvedAttribute.class));
    assertThat(fuse.keys().get(0).name(), equalTo("my_key1"));
    assertThat(fuse.keys().get(1), instanceOf(UnresolvedAttribute.class));
    assertThat(fuse.keys().get(1).name(), equalTo("my_key2"));
    assertThat(fuse.discriminator().name(), equalTo("my_group"));
    assertThat(fuse.score().name(), equalTo("my_score"));
    assertThat(fuse.fuseType(), equalTo(Fuse.FuseType.RRF));
    options = fuse.options();
    assertThat(options.get("rank_constant"), equalTo(Literal.integer(null, 15)));
    // Same clauses in a different order — clause order must not matter.
    plan = statement("""
        FROM foo* METADATA _id, _index, _score
        | FORK ( WHERE a:"baz" )
               ( WHERE b:"bar" )
        | FUSE GROUP BY my_group KEY BY my_key1,my_key2 SCORE BY my_score WITH {"rank_constant": 15 }
        """);
    fuse = as(plan, Fuse.class);
    assertThat(fuse.keys().size(), equalTo(2));
    assertThat(fuse.keys().get(0), instanceOf(UnresolvedAttribute.class));
    assertThat(fuse.keys().get(0).name(), equalTo("my_key1"));
    assertThat(fuse.keys().get(1), instanceOf(UnresolvedAttribute.class));
    assertThat(fuse.keys().get(1).name(), equalTo("my_key2"));
    assertThat(fuse.discriminator().name(), equalTo("my_group"));
    assertThat(fuse.score().name(), equalTo("my_score"));
    assertThat(fuse.fuseType(), equalTo(Fuse.FuseType.RRF));
    options = fuse.options();
    assertThat(options.get("rank_constant"), equalTo(Literal.integer(null, 15)));
    // GROUP BY over a dotted field name.
    plan = statement("""
        FROM foo* METADATA _id, _index, _score
        | EVAL a.b = my_group
        | FORK ( WHERE a:"baz" )
               ( WHERE b:"bar" )
        | FUSE GROUP BY a.b KEY BY my_key1,my_key2 SCORE BY my_score WITH {"rank_constant": 15 }
        """);
    fuse = as(plan, Fuse.class);
    assertThat(fuse.keys().size(), equalTo(2));
    assertThat(fuse.keys().get(0), instanceOf(UnresolvedAttribute.class));
    assertThat(fuse.keys().get(0).name(), equalTo("my_key1"));
    assertThat(fuse.keys().get(1), instanceOf(UnresolvedAttribute.class));
    assertThat(fuse.keys().get(1).name(), equalTo("my_key2"));
    assertThat(fuse.discriminator().name(), equalTo("a.b"));
    assertThat(fuse.score().name(), equalTo("my_score"));
    assertThat(fuse.fuseType(), equalTo(Fuse.FuseType.RRF));
    options = fuse.options();
    assertThat(options.get("rank_constant"), equalTo(Literal.integer(null, 15)));
    // GROUP BY via a double parameter marker (??p resolves to the field name "a.b").
    plan = statement("""
        FROM foo* METADATA _id, _index, _score
        | EVAL ??p = my_group
        | FORK ( WHERE a:"baz" )
               ( WHERE b:"bar" )
        | FUSE GROUP BY ??p KEY BY my_key1,my_key2 SCORE BY my_score WITH {"rank_constant": 15 }
        """, new QueryParams(List.of(paramAsConstant("p", "a.b"))));
    fuse = as(plan, Fuse.class);
    assertThat(fuse.keys().size(), equalTo(2));
    assertThat(fuse.keys().get(0), instanceOf(UnresolvedAttribute.class));
    assertThat(fuse.keys().get(0).name(), equalTo("my_key1"));
    assertThat(fuse.keys().get(1), instanceOf(UnresolvedAttribute.class));
    assertThat(fuse.keys().get(1).name(), equalTo("my_key2"));
    assertThat(fuse.discriminator().name(), equalTo("a.b"));
    assertThat(fuse.score().name(), equalTo("my_score"));
    assertThat(fuse.fuseType(), equalTo(Fuse.FuseType.RRF));
    options = fuse.options();
    assertThat(options.get("rank_constant"), equalTo(Literal.integer(null, 15)));
}
public void testInvalidFuse() {
    String queryPrefix = "from test metadata _score, _index, _id | fork (where true) (where true)";
    // Pair each malformed FUSE clause with the exact parser error it must produce.
    String[][] invalidFuseCases = {
        { " | FUSE BLA", "line 1:75: Fuse type BLA is not supported" },
        { " | FUSE WITH 1", "line 1:85: mismatched input '1' expecting '{'" },
        { " | FUSE WITH {\"rank_constant\": 15 } WITH {\"rank_constant\": 15 }", "line 1:110: Only one WITH can be specified" },
        { " | FUSE GROUP BY foo SCORE BY my_score GROUP BY bar", "line 1:111: Only one GROUP BY can be specified" },
        { " | FUSE SCORE BY my_score GROUP BY bar SCORE BY another_score", "line 1:111: Only one SCORE BY can be specified" },
        { " | FUSE KEY BY bar SCORE BY another_score KEY BY bar", "line 1:114: Only one KEY BY can be specified" },
        { " | FUSE GROUP BY foo SCORE BY my_score LINEAR", "line 1:111: extraneous input 'LINEAR' expecting <EOF>" } };
    for (String[] fuseCase : invalidFuseCases) {
        expectError(queryPrefix + fuseCase[0], fuseCase[1]);
    }
}
public void testDoubleParamsForIdentifier() {
    assumeTrue("double parameters markers for identifiers", EsqlCapabilities.Cap.DOUBLE_PARAMETER_MARKERS_FOR_IDENTIFIERS.isEnabled());
    assumeTrue(
        "requires LOOKUP JOIN ON boolean expression capability",
        EsqlCapabilities.Cap.LOOKUP_JOIN_ON_BOOLEAN_EXPRESSION.isEnabled()
    );
    // There are three variations of double parameters - named, positional or anonymous, e.g. ??n, ??1 or ??, covered.
    // Each query is executed three times with the three variations.
    // field names can appear in eval/where/stats/sort/keep/drop/rename/dissect/grok/enrich/mvexpand
    // eval, where
    List<List<String>> doubleParams = new ArrayList<>(3);
    List<String> namedDoubleParams = List.of("??f0", "??fn1", "??f1", "??f2", "??f3");
    List<String> positionalDoubleParams = List.of("??1", "??2", "??3", "??4", "??5");
    List<String> anonymousDoubleParams = List.of("??", "??", "??", "??", "??");
    doubleParams.add(namedDoubleParams);
    doubleParams.add(positionalDoubleParams);
    doubleParams.add(anonymousDoubleParams);
    for (List<String> params : doubleParams) {
        // Substitute the marker variation into the query template; the bound values include
        // identifiers containing dots (e.g. "f1.", "f.2") to prove they are taken verbatim.
        String query = LoggerMessageFormat.format(null, """
            from test
            | eval {} = {}({})
            | where {} == {}
            | limit 1""", params.get(0), params.get(1), params.get(2), params.get(3), params.get(4));
        assertEqualsIgnoringIds(
            new Limit(
                EMPTY,
                new Literal(EMPTY, 1, INTEGER),
                new Filter(
                    EMPTY,
                    new Eval(EMPTY, relation("test"), List.of(new Alias(EMPTY, "x", function("toString", List.of(attribute("f1.")))))),
                    new Equals(EMPTY, attribute("f.2"), attribute("f3"))
                )
            ),
            statement(
                query,
                new QueryParams(
                    List.of(
                        paramAsConstant("f0", "x"),
                        paramAsConstant("fn1", "toString"),
                        paramAsConstant("f1", "f1."),
                        paramAsConstant("f2", "f.2"),
                        paramAsConstant("f3", "f3")
                    )
                )
            )
        );
    }
    // Same eval/where commands, but each field reference is built from TWO parameters joined
    // with a '.', producing compound names mixing dots and wildcards.
    namedDoubleParams = List.of("??f0", "??fn1", "??f1", "??f2", "??f3", "??f4", "??f5", "??f6");
    positionalDoubleParams = List.of("??1", "??2", "??3", "??4", "??5", "??6", "??7", "??8");
    anonymousDoubleParams = List.of("??", "??", "??", "??", "??", "??", "??", "??");
    doubleParams.clear();
    doubleParams.add(namedDoubleParams);
    doubleParams.add(positionalDoubleParams);
    doubleParams.add(anonymousDoubleParams);
    for (List<String> params : doubleParams) {
        String query = LoggerMessageFormat.format(
            null,
            """
                from test
                | eval {} = {}({}.{})
                | where {}.{} == {}.{}
                | limit 1""",
            params.get(0),
            params.get(1),
            params.get(2),
            params.get(3),
            params.get(4),
            params.get(5),
            params.get(6),
            params.get(7)
        );
        assertEqualsIgnoringIds(
            new Limit(
                EMPTY,
                new Literal(EMPTY, 1, INTEGER),
                new Filter(
                    EMPTY,
                    new Eval(
                        EMPTY,
                        relation("test"),
                        // "f1." + "." + "f.2" concatenate into the single attribute name "f1..f.2"
                        List.of(new Alias(EMPTY, "x", function("toString", List.of(attribute("f1..f.2")))))
                    ),
                    new Equals(EMPTY, attribute("f3.*.f.4."), attribute("f.5.*.f.*.6"))
                )
            ),
            statement(
                query,
                new QueryParams(
                    List.of(
                        paramAsConstant("f0", "x"),
                        paramAsConstant("fn1", "toString"),
                        paramAsConstant("f1", "f1."),
                        paramAsConstant("f2", "f.2"),
                        paramAsConstant("f3", "f3.*"),
                        paramAsConstant("f4", "f.4."),
                        paramAsConstant("f5", "f.5.*"),
                        paramAsConstant("f6", "f.*.6")
                    )
                )
            )
        );
    }
    // stats, sort, mv_expand
    namedDoubleParams = List.of("??fn2", "??f3", "??f4", "??f5", "??f6");
    positionalDoubleParams = List.of("??1", "??2", "??3", "??4", "??5");
    anonymousDoubleParams = List.of("??", "??", "??", "??", "??");
    doubleParams.clear();
    doubleParams.add(namedDoubleParams);
    doubleParams.add(positionalDoubleParams);
    doubleParams.add(anonymousDoubleParams);
    for (List<String> params : doubleParams) {
        String query = LoggerMessageFormat.format(null, """
            from test
            | stats y = {}({}) by {}
            | sort {}
            | mv_expand {}""", params.get(0), params.get(1), params.get(2), params.get(3), params.get(4));
        assertEqualsIgnoringIds(
            new MvExpand(
                EMPTY,
                new OrderBy(
                    EMPTY,
                    new Aggregate(
                        EMPTY,
                        relation("test"),
                        List.of(attribute("f.4.")),
                        List.of(new Alias(EMPTY, "y", function("count", List.of(attribute("f3.*")))), attribute("f.4."))
                    ),
                    List.of(new Order(EMPTY, attribute("f.5.*"), Order.OrderDirection.ASC, Order.NullsPosition.LAST))
                ),
                attribute("f.6*"),
                attribute("f.6*")
            ),
            statement(
                query,
                new QueryParams(
                    List.of(
                        paramAsConstant("fn2", "count"),
                        paramAsConstant("f3", "f3.*"),
                        paramAsConstant("f4", "f.4."),
                        paramAsConstant("f5", "f.5.*"),
                        paramAsConstant("f6", "f.6*")
                    )
                )
            )
        );
    }
    // stats/sort/mv_expand again, with every field name assembled from two dotted parameters.
    namedDoubleParams = List.of("??fn2", "??f7", "??f8", "??f9", "??f10", "??f11", "??f12", "??f13", "??f14");
    positionalDoubleParams = List.of("??1", "??2", "??3", "??4", "??5", "??6", "??7", "??8", "??9");
    anonymousDoubleParams = List.of("??", "??", "??", "??", "??", "??", "??", "??", "??");
    doubleParams.clear();
    doubleParams.add(namedDoubleParams);
    doubleParams.add(positionalDoubleParams);
    doubleParams.add(anonymousDoubleParams);
    for (List<String> params : doubleParams) {
        String query = LoggerMessageFormat.format(
            null,
            """
                from test
                | stats y = {}({}.{}) by {}.{}
                | sort {}.{}
                | mv_expand {}.{}""",
            params.get(0),
            params.get(1),
            params.get(2),
            params.get(3),
            params.get(4),
            params.get(5),
            params.get(6),
            params.get(7),
            params.get(8)
        );
        assertEqualsIgnoringIds(
            new MvExpand(
                EMPTY,
                new OrderBy(
                    EMPTY,
                    new Aggregate(
                        EMPTY,
                        relation("test"),
                        List.of(attribute("f.9.f10.*")),
                        List.of(new Alias(EMPTY, "y", function("count", List.of(attribute("f.7*.f8.")))), attribute("f.9.f10.*"))
                    ),
                    List.of(new Order(EMPTY, attribute("f.11..f.12.*"), Order.OrderDirection.ASC, Order.NullsPosition.LAST))
                ),
                attribute("f.*.13.f.14*"),
                attribute("f.*.13.f.14*")
            ),
            statement(
                query,
                new QueryParams(
                    List.of(
                        paramAsConstant("fn2", "count"),
                        paramAsConstant("f7", "f.7*"),
                        paramAsConstant("f8", "f8."),
                        paramAsConstant("f9", "f.9"),
                        paramAsConstant("f10", "f10.*"),
                        paramAsConstant("f11", "f.11."),
                        paramAsConstant("f12", "f.12.*"),
                        paramAsConstant("f13", "f.*.13"),
                        paramAsConstant("f14", "f.14*")
                    )
                )
            )
        );
    }
    // keep, drop, rename, grok, dissect, lookup join
    namedDoubleParams = List.of("??f1", "??f2", "??f3", "??f4", "??f5", "??f6", "??f7", "??f8", "??f9");
    positionalDoubleParams = List.of("??1", "??2", "??3", "??4", "??5", "??6", "??7", "??8", "??9");
    anonymousDoubleParams = List.of("??", "??", "??", "??", "??", "??", "??", "??", "??");
    doubleParams.clear();
    doubleParams.add(namedDoubleParams);
    doubleParams.add(positionalDoubleParams);
    doubleParams.add(anonymousDoubleParams);
    for (List<String> params : doubleParams) {
        String query = LoggerMessageFormat.format(
            null,
            """
                from test
                | keep {}, {}
                | drop {}, {}
                | dissect {} "%{bar}"
                | grok {} "%{WORD:foo}"
                | rename {} as {}
                | lookup join idx on {}
                | limit 1""",
            params.get(0),
            params.get(1),
            params.get(2),
            params.get(3),
            params.get(4),
            params.get(5),
            params.get(6),
            params.get(7),
            params.get(8)
        );
        LogicalPlan plan = statement(
            query,
            new QueryParams(
                List.of(
                    paramAsConstant("f1", "f.1.*"),
                    paramAsConstant("f2", "f.2"),
                    paramAsConstant("f3", "f3."),
                    paramAsConstant("f4", "f4.*"),
                    paramAsConstant("f5", "f.5*"),
                    paramAsConstant("f6", "f.6."),
                    paramAsConstant("f7", "f7*."),
                    paramAsConstant("f8", "f.8"),
                    paramAsConstant("f9", "f9")
                )
            )
        );
        // Walk the plan from the outermost command (limit) down to the relation,
        // checking each command picked up the substituted identifier.
        Limit limit = as(plan, Limit.class);
        LookupJoin join = as(limit.child(), LookupJoin.class);
        UnresolvedRelation ur = as(join.right(), UnresolvedRelation.class);
        assertEquals(ur.indexPattern().indexPattern(), "idx");
        assertEquals(join.config().type().joinName(), "LEFT OUTER");
        assertEqualsIgnoringIds(join.config().leftFields(), List.of(attribute("f9")));
        Rename rename = as(join.left(), Rename.class);
        assertEqualsIgnoringIds(rename.renamings(), List.of(new Alias(EMPTY, "f.8", attribute("f7*."))));
        Grok grok = as(rename.child(), Grok.class);
        assertEqualsIgnoringIds(grok.input(), attribute("f.6."));
        assertEquals("%{WORD:foo}", grok.parser().pattern());
        assertEqualsIgnoringIds(List.of(referenceAttribute("foo", KEYWORD)), grok.extractedFields());
        Dissect dissect = as(grok.child(), Dissect.class);
        assertEqualsIgnoringIds(dissect.input(), attribute("f.5*"));
        assertEquals("%{bar}", dissect.parser().pattern());
        assertEquals("", dissect.parser().appendSeparator());
        assertEqualsIgnoringIds(List.of(referenceAttribute("bar", KEYWORD)), dissect.extractedFields());
        Drop drop = as(dissect.child(), Drop.class);
        List<? extends NamedExpression> removals = drop.removals();
        assertEqualsIgnoringIds(removals, List.of(attribute("f3."), attribute("f4.*")));
        Keep keep = as(drop.child(), Keep.class);
        assertEqualsIgnoringIds(keep.projections(), List.of(attribute("f.1.*"), attribute("f.2")));
    }
    // keep/drop/dissect/grok/rename/lookup-join once more with two-part dotted field names.
    namedDoubleParams = List.of(
        "??f1",
        "??f2",
        "??f3",
        "??f4",
        "??f5",
        "??f6",
        "??f7",
        "??f8",
        "??f9",
        "??f10",
        "??f11",
        "??f12",
        "??f13",
        "??f14"
    );
    positionalDoubleParams = List.of(
        "??1",
        "??2",
        "??3",
        "??4",
        "??5",
        "??6",
        "??7",
        "??8",
        "??9",
        "??10",
        "??11",
        "??12",
        "??13",
        "??14"
    );
    anonymousDoubleParams = List.of("??", "??", "??", "??", "??", "??", "??", "??", "??", "??", "??", "??", "??", "??");
    doubleParams.clear();
    doubleParams.add(namedDoubleParams);
    doubleParams.add(positionalDoubleParams);
    doubleParams.add(anonymousDoubleParams);
    for (List<String> params : doubleParams) {
        String query = LoggerMessageFormat.format(
            null,
            """
                from test
                | keep {}.{}
                | drop {}.{}
                | dissect {}.{} "%{bar}"
                | grok {}.{} "%{WORD:foo}"
                | rename {}.{} as {}.{}
                | lookup join idx on {}.{}
                | limit 1""",
            params.get(0),
            params.get(1),
            params.get(2),
            params.get(3),
            params.get(4),
            params.get(5),
            params.get(6),
            params.get(7),
            params.get(8),
            params.get(9),
            params.get(10),
            params.get(11),
            params.get(12),
            params.get(13)
        );
        LogicalPlan plan = statement(
            query,
            new QueryParams(
                List.of(
                    paramAsConstant("f1", "f.1.*"),
                    paramAsConstant("f2", "f.2"),
                    paramAsConstant("f3", "f3."),
                    paramAsConstant("f4", "f4.*"),
                    paramAsConstant("f5", "f.5*"),
                    paramAsConstant("f6", "f.6."),
                    paramAsConstant("f7", "f7*."),
                    paramAsConstant("f8", "f.8"),
                    paramAsConstant("f9", "f.9*"),
                    paramAsConstant("f10", "f.10."),
                    paramAsConstant("f11", "f11*."),
                    paramAsConstant("f12", "f.12"),
                    paramAsConstant("f13", "f13"),
                    paramAsConstant("f14", "f14")
                )
            )
        );
        Limit limit = as(plan, Limit.class);
        LookupJoin join = as(limit.child(), LookupJoin.class);
        UnresolvedRelation ur = as(join.right(), UnresolvedRelation.class);
        assertEquals(ur.indexPattern().indexPattern(), "idx");
        assertEquals(join.config().type().joinName(), "LEFT OUTER");
        assertEqualsIgnoringIds(join.config().leftFields(), List.of(attribute("f13.f14")))
        ;
        Rename rename = as(join.left(), Rename.class);
        assertEqualsIgnoringIds(rename.renamings(), List.of(new Alias(EMPTY, "f11*..f.12", attribute("f.9*.f.10."))));
        Grok grok = as(rename.child(), Grok.class);
        assertEqualsIgnoringIds(grok.input(), attribute("f7*..f.8"));
        assertEquals("%{WORD:foo}", grok.parser().pattern());
        assertEqualsIgnoringIds(List.of(referenceAttribute("foo", KEYWORD)), grok.extractedFields());
        Dissect dissect = as(grok.child(), Dissect.class);
        assertEqualsIgnoringIds(dissect.input(), attribute("f.5*.f.6."));
        assertEquals("%{bar}", dissect.parser().pattern());
        assertEquals("", dissect.parser().appendSeparator());
        assertEqualsIgnoringIds(List.of(referenceAttribute("bar", KEYWORD)), dissect.extractedFields());
        Drop drop = as(dissect.child(), Drop.class);
        List<? extends NamedExpression> removals = drop.removals();
        assertEqualsIgnoringIds(removals, List.of(attribute("f3..f4.*")));
        Keep keep = as(drop.child(), Keep.class);
        assertEqualsIgnoringIds(keep.projections(), List.of(attribute("f.1.*.f.2")));
    }
    // enrich, lookup join
    namedDoubleParams = List.of("??f1", "??f2", "??f3");
    positionalDoubleParams = List.of("??1", "??2", "??3");
    anonymousDoubleParams = List.of("??", "??", "??");
    doubleParams.clear();
    doubleParams.add(namedDoubleParams);
    doubleParams.add(positionalDoubleParams);
    doubleParams.add(anonymousDoubleParams);
    for (List<String> params : doubleParams) {
        String query = LoggerMessageFormat.format(
            null,
            "from idx1 | ENRICH idx2 ON {} WITH {} = {}",
            params.get(0),
            params.get(1),
            params.get(2)
        );
        assertEqualsIgnoringIds(
            new Enrich(
                EMPTY,
                relation("idx1"),
                null,
                Literal.keyword(EMPTY, "idx2"),
                attribute("f.1.*"),
                null,
                Map.of(),
                List.of(new Alias(EMPTY, "f.2", attribute("f.3*")))
            ),
            statement(
                query,
                new QueryParams(List.of(paramAsConstant("f1", "f.1.*"), paramAsConstant("f2", "f.2"), paramAsConstant("f3", "f.3*")))
            )
        );
    }
    // lookup join on expression
    namedDoubleParams = List.of("??f1", "??f2", "??f3", "??f4");
    positionalDoubleParams = List.of("??1", "??2", "??3", "??4");
    anonymousDoubleParams = List.of("??", "??", "??", "??");
    doubleParams.clear();
    doubleParams.add(namedDoubleParams);
    doubleParams.add(positionalDoubleParams);
    doubleParams.add(anonymousDoubleParams);
    for (List<String> params : doubleParams) {
        String query = LoggerMessageFormat.format(null, """
            from test
            | lookup join idx on {}.{} == {}.{}
            | limit 1""", params.get(0), params.get(1), params.get(2), params.get(3));
        LogicalPlan plan = statement(
            query,
            new QueryParams(
                List.of(
                    paramAsConstant("f1", "f.1"),
                    paramAsConstant("f2", "f.2"),
                    paramAsConstant("f3", "f.3"),
                    paramAsConstant("f4", "f.4")
                )
            )
        );
        Limit limit = as(plan, Limit.class);
        LookupJoin join = as(limit.child(), LookupJoin.class);
        UnresolvedRelation ur = as(join.right(), UnresolvedRelation.class);
        assertEquals(ur.indexPattern().indexPattern(), "idx");
        assertTrue(join.config().type().joinName().contains("LEFT OUTER"));
        // The join condition must survive as an equality between the two composed names.
        EsqlBinaryComparison on = as(join.config().joinOnConditions(), EsqlBinaryComparison.class);
        assertEquals(on.getFunctionType(), EsqlBinaryComparison.BinaryComparisonOperation.EQ);
        assertEquals(as(on.left(), UnresolvedAttribute.class).name(), "f.1.f.2");
        assertEquals(as(on.right(), UnresolvedAttribute.class).name(), "f.3.f.4");
    }
    // ENRICH with every name (match field, target, source) composed from dotted parameters.
    namedDoubleParams = List.of("??f1", "??f2", "??f3", "??f4", "??f5", "??f6");
    positionalDoubleParams = List.of("??1", "??2", "??3", "??4", "??5", "??6");
    anonymousDoubleParams = List.of("??", "??", "??", "??", "??", "??");
    doubleParams.clear();
    doubleParams.add(namedDoubleParams);
    doubleParams.add(positionalDoubleParams);
    doubleParams.add(anonymousDoubleParams);
    for (List<String> params : doubleParams) {
        String query = LoggerMessageFormat.format(
            null,
            "from idx1 | ENRICH idx2 ON {}.{} WITH {}.{} = {}.{}",
            params.get(0),
            params.get(1),
            params.get(2),
            params.get(3),
            params.get(4),
            params.get(5)
        );
        assertEqualsIgnoringIds(
            new Enrich(
                EMPTY,
                relation("idx1"),
                null,
                Literal.keyword(EMPTY, "idx2"),
                attribute("f.1.*.f.2"),
                null,
                Map.of(),
                List.of(new Alias(EMPTY, "f.3*.f.4.*", attribute("f.5.f.6*")))
            ),
            statement(
                query,
                new QueryParams(
                    List.of(
                        paramAsConstant("f1", "f.1.*"),
                        paramAsConstant("f2", "f.2"),
                        paramAsConstant("f3", "f.3*"),
                        paramAsConstant("f4", "f.4.*"),
                        paramAsConstant("f5", "f.5"),
                        paramAsConstant("f6", "f.6*")
                    )
                )
            )
        );
    }
}
public void testMixedSingleDoubleParams() {
    assumeTrue("double parameters markers for identifiers", EsqlCapabilities.Cap.DOUBLE_PARAMETER_MARKERS_FOR_IDENTIFIERS.isEnabled());
    assumeTrue(
        "requires LOOKUP JOIN ON boolean expression capability",
        EsqlCapabilities.Cap.LOOKUP_JOIN_ON_BOOLEAN_EXPRESSION.isEnabled()
    );
    // This is a subset of testDoubleParamsForIdentifier, with single and double parameter markers mixed in the queries
    // Single parameter markers represent a constant value or pattern
    // double parameter markers represent identifiers - field or function names
    // mixed constant and identifier, eval/where
    List<List<String>> doubleParams = new ArrayList<>(3);
    List<String> namedDoubleParams = List.of("??f0", "??fn1", "?v1", "??f2", "?v3");
    List<String> positionalDoubleParams = List.of("??1", "??2", "?3", "??4", "?5");
    List<String> anonymousDoubleParams = List.of("??", "??", "?", "??", "?");
    doubleParams.add(namedDoubleParams);
    doubleParams.add(positionalDoubleParams);
    doubleParams.add(anonymousDoubleParams);
    for (List<String> params : doubleParams) {
        String query = LoggerMessageFormat.format(null, """
            from test
            | eval {} = {}({})
            | where {} == {}
            | limit 1""", params.get(0), params.get(1), params.get(2), params.get(3), params.get(4));
        // Single markers must become literals ("constant_value", 100) while double markers
        // become attribute/function names in the same expression.
        assertEqualsIgnoringIds(
            new Limit(
                EMPTY,
                new Literal(EMPTY, 1, INTEGER),
                new Filter(
                    EMPTY,
                    new Eval(
                        EMPTY,
                        relation("test"),
                        List.of(new Alias(EMPTY, "x", function("toString", List.of(Literal.keyword(EMPTY, "constant_value")))))
                    ),
                    new Equals(EMPTY, attribute("f.2"), new Literal(EMPTY, 100, INTEGER))
                )
            ),
            statement(
                query,
                new QueryParams(
                    List.of(
                        paramAsConstant("f0", "x"),
                        paramAsConstant("fn1", "toString"),
                        paramAsConstant("v1", "constant_value"),
                        paramAsConstant("f2", "f.2"),
                        paramAsConstant("v3", 100)
                    )
                )
            )
        );
    }
    // mixed constant and identifier, stats/sort/mv_expand
    namedDoubleParams = List.of("??fn2", "?v3", "??f4", "??f5", "??f6");
    positionalDoubleParams = List.of("??1", "?2", "??3", "??4", "??5");
    anonymousDoubleParams = List.of("??", "?", "??", "??", "??");
    doubleParams.clear();
    doubleParams.add(namedDoubleParams);
    doubleParams.add(positionalDoubleParams);
    doubleParams.add(anonymousDoubleParams);
    for (List<String> params : doubleParams) {
        String query = LoggerMessageFormat.format(null, """
            from test
            | stats y = {}({}) by {}
            | sort {}
            | mv_expand {}""", params.get(0), params.get(1), params.get(2), params.get(3), params.get(4));
        assertEqualsIgnoringIds(
            new MvExpand(
                EMPTY,
                new OrderBy(
                    EMPTY,
                    new Aggregate(
                        EMPTY,
                        relation("test"),
                        List.of(attribute("f.4.")),
                        List.of(new Alias(EMPTY, "y", function("count", List.of(Literal.keyword(EMPTY, "*")))), attribute("f.4."))
                    ),
                    List.of(new Order(EMPTY, attribute("f.5.*"), Order.OrderDirection.ASC, Order.NullsPosition.LAST))
                ),
                attribute("f.6*"),
                attribute("f.6*")
            ),
            statement(
                query,
                new QueryParams(
                    List.of(
                        paramAsConstant("fn2", "count"),
                        paramAsConstant("v3", "*"),
                        paramAsConstant("f4", "f.4."),
                        paramAsConstant("f5", "f.5.*"),
                        paramAsConstant("f6", "f.6*")
                    )
                )
            )
        );
    }
    // mixed field name and field name pattern
    LogicalPlan plan = statement(
        "from test | keep ??f1, ?f2 | drop ?f3, ??f4 | lookup join idx on ??f5",
        new QueryParams(
            List.of(
                paramAsConstant("f1", "f*1."),
                paramAsPattern("f2", "f.2*"),
                paramAsPattern("f3", "f3.*"),
                paramAsConstant("f4", "f.4.*"),
                paramAsConstant("f5", "f5")
            )
        )
    );
    // Pattern params must surface as UnresolvedNamePattern, constant params as UnresolvedAttribute.
    LookupJoin join = as(plan, LookupJoin.class);
    UnresolvedRelation ur = as(join.right(), UnresolvedRelation.class);
    assertEquals(ur.indexPattern().indexPattern(), "idx");
    assertEquals(join.config().type().joinName(), "LEFT OUTER");
    assertEqualsIgnoringIds(join.config().leftFields(), List.of(attribute("f5")));
    Drop drop = as(join.left(), Drop.class);
    List<? extends NamedExpression> removals = drop.removals();
    assertEquals(removals.size(), 2);
    UnresolvedNamePattern up = as(removals.get(0), UnresolvedNamePattern.class);
    assertEquals(up.name(), "f3.*");
    assertEquals(up.pattern(), "f3.*");
    UnresolvedAttribute ua = as(removals.get(1), UnresolvedAttribute.class);
    assertEquals(ua.name(), "f.4.*");
    Keep keep = as(drop.child(), Keep.class);
    assertEquals(keep.projections().size(), 2);
    ua = as(keep.projections().get(0), UnresolvedAttribute.class);
    assertEquals(ua.name(), "f*1.");
    up = as(keep.projections().get(1), UnresolvedNamePattern.class);
    assertEquals(up.name(), "f.2*");
    assertEquals(up.pattern(), "f.2*");
    ur = as(keep.child(), UnresolvedRelation.class);
    assertEqualsIgnoringIds(ur, relation("test"));
    // test random single and double params
    // commands in group1 take both constants(?) and identifiers(??)
    List<String> commandWithRandomSingleOrDoubleParamsGroup1 = List.of(
        "eval x = {}f1, y = {}f2, z = {}f3",
        "eval x = fn({}f1), y = {}f2 + {}f3",
        "where {}f1 == \"a\" and {}f2 > 1 and {}f3 in (1, 2)",
        "stats x = fn({}f1) by {}f2, {}f3",
        "sort {}f1, {}f2, {}f3",
        "dissect {}f1 \"%{bar}\"",
        "grok {}f1 \"%{WORD:foo}\""
    );
    for (String command : commandWithRandomSingleOrDoubleParamsGroup1) {
        String param1 = randomBoolean() ? "?" : "??";
        String param2 = randomBoolean() ? "?" : "??";
        String param3 = randomBoolean() ? "?" : "??";
        // Any mix of ? and ?? must parse for group-1 commands; only that the plan builds is asserted.
        plan = statement(
            LoggerMessageFormat.format(null, "from test | " + command, param1, param2, param3),
            new QueryParams(List.of(paramAsConstant("f1", "f1"), paramAsConstant("f2", "f2"), paramAsConstant("f3", "f3")))
        );
        assertNotNull(plan);
    }
    // commands in group2 only take identifiers(??)
    List<String> commandWithRandomSingleOrDoubleParamsGroup2 = List.of(
        "eval x = {}f1(), y = {}f2(), z = {}f3()",
        "where {}f1 : \"b\" and {}f2() > 0 and {}f3()",
        "stats x = {}f1(), {}f2(), {}f3()",
        "rename {}f1 as {}f2, {}f3 as x",
        "enrich idx2 ON {}f1 WITH {}f2 = {}f3",
        "keep {}f1, {}f2, {}f3",
        "drop {}f1, {}f2, {}f3",
        "mv_expand {}f1 | mv_expand {}f2 | mv_expand {}f3",
        "lookup join idx1 on {}f1 | lookup join idx2 on {}f2 | lookup join idx3 on {}f3"
    );
    for (String command : commandWithRandomSingleOrDoubleParamsGroup2) {
        String param1 = randomBoolean() ? "?" : "??";
        String param2 = randomBoolean() ? "?" : "??";
        String param3 = randomBoolean() ? "?" : "??";
        // If at least one single (?) marker slipped into an identifier-only position,
        // the parser must reject the query; lookup join reports a different message.
        if (param1.equals("?") || param2.equals("?") || param3.equals("?")) {
            if (command.contains("lookup join") == false) {
                expectError(
                    LoggerMessageFormat.format(null, "from test | " + command, param1, param2, param3),
                    List.of(paramAsConstant("f1", "f1"), paramAsConstant("f2", "f2"), paramAsConstant("f3", "f3")),
                    "declared as a constant, cannot be used as an identifier"
                );
            } else {
                expectError(
                    LoggerMessageFormat.format(null, "from test | " + command, param1, param2, param3),
                    List.of(paramAsConstant("f1", "f1"), paramAsConstant("f2", "f2"), paramAsConstant("f3", "f3")),
                    "JOIN ON clause must be a comma separated list of fields or a single expression, found"
                );
            }
        }
    }
}
public void testInvalidDoubleParamsNames() {
    assumeTrue("double parameters markers for identifiers", EsqlCapabilities.Cap.DOUBLE_PARAMETER_MARKERS_FOR_IDENTIFIERS.isEnabled());
    // Referencing a name that was never declared should suggest the declared parameter names.
    var declaredParams = List.of(paramAsConstant("n1", "f1"), paramAsConstant("n3", "f2"));
    expectError(
        "from test | where x < ??n1 | eval y = ??n2",
        declaredParams,
        "line 1:39: Unknown query parameter [n2], did you mean any of [n3, n1]?"
    );
    // Characters that are not legal in a parameter name fail at the lexer/parser level.
    expectError("from test | where x < ??@1", List.of(paramAsConstant("@1", "f1")), "line 1:25: extraneous input '@1' expecting <EOF>");
    expectError("from test | where x < ??#1", List.of(paramAsConstant("#1", "f1")), "line 1:25: token recognition error at: '#'");
    var nonAsciiParam = List.of(paramAsConstant("Å", "f1"));
    expectError("from test | where x < ??Å", nonAsciiParam, "line 1:25: token recognition error at: 'Å'");
    expectError("from test | eval x = ??Å", nonAsciiParam, "line 1:24: token recognition error at: 'Å'");
}
public void testInvalidDoubleParamsPositions() {
    assumeTrue("double parameters markers for identifiers", EsqlCapabilities.Cap.DOUBLE_PARAMETER_MARKERS_FOR_IDENTIFIERS.isEnabled());
    // With exactly one unnamed parameter, only position 1 is valid.
    var onePositional = List.of(paramAsConstant(null, "f1"));
    expectError(
        "from test | where x < ??0",
        onePositional,
        "line 1:23: No parameter is defined for position 0, did you mean position 1"
    );
    expectError(
        "from test | where x < ??2",
        onePositional,
        "line 1:23: No parameter is defined for position 2, did you mean position 1"
    );
    // Multiple out-of-range references are all reported, one message per reference.
    expectError(
        "from test | where x < ??0 and y < ??2",
        onePositional,
        "line 1:23: No parameter is defined for position 0, did you mean position 1?; "
            + "line 1:35: No parameter is defined for position 2, did you mean position 1?"
    );
    // With two parameters the suggestion becomes a range instead of a single position.
    expectError(
        "from test | where x < ??0",
        List.of(paramAsConstant(null, "f1"), paramAsConstant(null, "f2")),
        "line 1:23: No parameter is defined for position 0, did you mean any position between 1 and 2?"
    );
}
public void testInvalidDoubleParamsType() {
    assumeTrue("double parameters markers for identifiers", EsqlCapabilities.Cap.DOUBLE_PARAMETER_MARKERS_FOR_IDENTIFIERS.isEnabled());
    // double parameter markers cannot be declared as identifier patterns; the rejection
    // message is identical regardless of which command references the marker.
    String expectedError = "Query parameter [??f1][f1] declared as a pattern, cannot be used as an identifier";
    var patternParams = List.of(paramAsPattern("f1", "f1*"), paramAsPattern("f2", "f2*"), paramAsPattern("f3", "f3*"));
    List<String> commandsUsingDoubleParams = List.of(
        "eval x = ??f1",
        "eval x = ??f1(f1)",
        "where ??f1 == \"a\"",
        "stats x = count(??f1)",
        "sort ??f1",
        "rename ??f1 as ??f2",
        "dissect ??f1 \"%{bar}\"",
        "grok ??f1 \"%{WORD:foo}\"",
        "enrich idx2 ON ??f1 WITH ??f2 = ??f3",
        "keep ??f1",
        "drop ??f1",
        "mv_expand ??f1",
        "lookup join idx on ??f1"
    );
    for (String command : commandsUsingDoubleParams) {
        expectError("from test | " + command, patternParams, expectedError);
    }
}
public void testUnclosedParenthesis() {
    // Unbalanced brackets anywhere in the statement must be rejected as an invalid query.
    List<String> malformedQueries = List.of(
        "row a = )",
        "row ]",
        "from source | eval x = [1,2,3]]",
        "ROW x = 1 | KEEP x )",
        "ROW x = 1 | DROP x )",
        "ROW a = [1, 2] | RENAME a =b)",
        "ROW a = [1, 2] | MV_EXPAND a)",
        "from test | enrich a on b)"
    );
    for (String malformed : malformedQueries) {
        expectError(malformed, "Invalid query");
    }
}
public void testBracketsInIndexNames() {
    // Parentheses in an index pattern are only legal when the whole pattern is quoted:
    // each variant must fail unquoted and round-trip successfully when quoted.
    List<String> bracketedPatterns = List.of(
        "(",
        ")",
        "()",
        "(((",
        ")))",
        "(test",
        "test)",
        "(test)",
        "te()st",
        "concat(foo,bar)",
        "((((()))))",
        "(((abc)))",
        "*()*",
        "*test()*"
    );
    for (String bracketed : bracketedPatterns) {
        expectErrorForBracketsWithoutQuotes(bracketed);
        expectSuccessForBracketsWithinQuotes(bracketed);
    }
    // Spot-check the exact error messages for a few unquoted variants.
    expectError("from test)", "line -1:-1: Invalid query [from test)]");
    expectError("from te()st", "line 1:8: mismatched input '(' expecting {<EOF>, '|', ',', 'metadata'");
    expectError("from test | enrich foo)", "line -1:-1: Invalid query [from test | enrich foo)]");
    expectError("from test | lookup join foo) on bar", "line 1:28: token recognition error at: ')'");
    if (EsqlCapabilities.Cap.LOOKUP_JOIN_ON_BOOLEAN_EXPRESSION.isEnabled()) {
        expectError("from test | lookup join foo) on bar1 > bar2", "line 1:28: token recognition error at: ')'");
    }
}
private void expectErrorForBracketsWithoutQuotes(String pattern) {
    // Every place an index-like name can appear must reject an unquoted bracketed pattern.
    for (String query : List.of(
        "from " + pattern,
        "from *:" + pattern,
        "from remote1:" + pattern + ",remote2:" + pattern,
        "from test | lookup join " + pattern + " on bar",
        "from test | lookup join " + pattern + " on bar1 < bar2",
        "from test | enrich " + pattern
    )) {
        expectThrows(ParsingException.class, () -> processingCommand(query));
    }
}
private void expectSuccessForBracketsWithinQuotes(String indexName) {
    // A quoted pattern accepts brackets verbatim; the parsed pattern must round-trip unchanged.
    UnresolvedRelation from = as(statement("from \"" + indexName + "\""), UnresolvedRelation.class);
    assertThat(from.indexPattern().indexPattern(), is(indexName));
    // Cluster-qualified and multi-remote quoted patterns keep their prefixes.
    from = as(statement("from \"*:" + indexName + "\""), UnresolvedRelation.class);
    assertThat(from.indexPattern().indexPattern(), is("*:" + indexName));
    from = as(statement("from \"remote1:" + indexName + ",remote2:" + indexName + "\""), UnresolvedRelation.class);
    assertThat(from.indexPattern().indexPattern(), is("remote1:" + indexName + ",remote2:" + indexName));
    // ENRICH accepts the quoted name as its policy name.
    Enrich enrich = as(statement("from test | enrich \"" + indexName + "\""), Enrich.class);
    assertThat(enrich.policyName().fold(FoldContext.small()), is(BytesRefs.toBytesRef(indexName)));
    as(enrich.child(), UnresolvedRelation.class);
    if (indexName.contains("*")) {
        // LOOKUP JOIN rejects wildcard lookup indices even when quoted.
        expectThrows(ParsingException.class, () -> processingCommand("from test | lookup join \"" + indexName + "\" on bar"));
        expectThrows(ParsingException.class, () -> processingCommand("from test | lookup join \"" + indexName + "\" on bar1 > bar2"));
    } else {
        LookupJoin lookup = as(statement("from test | lookup join \"" + indexName + "\" on bar"), LookupJoin.class);
        UnresolvedRelation right = as(lookup.right(), UnresolvedRelation.class);
        assertThat(right.indexPattern().indexPattern(), is(indexName));
        if (EsqlCapabilities.Cap.LOOKUP_JOIN_ON_BOOLEAN_EXPRESSION.isEnabled()) {
            lookup = as(statement("from test | lookup join \"" + indexName + "\" on bar1 <= bar2"), LookupJoin.class);
            right = as(lookup.right(), UnresolvedRelation.class);
            assertThat(right.indexPattern().indexPattern(), is(indexName));
        }
    }
}
}
|
StatementParserTests
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/jobmaster/slotpool/DeclarativeSlotPool.java
|
{
"start": 10218,
"end": 10483
}
|
interface ____ {
/** Notifies the listener about no more resource requests in the specified duration. */
void notifyResourceRequestStable();
}
/** No-op {@link ResourceRequestStableListener} implementation. */
|
ResourceRequestStableListener
|
java
|
quarkusio__quarkus
|
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/validation/NormalScopedBeanClassFinalTest.java
|
{
"start": 94,
"end": 344
}
|
class ____ extends AbstractNormalScopedFinalTest {
@Override
protected ArcTestContainer createTestContainer() {
return ArcTestContainer.builder().shouldFail().beanClasses(Unproxyable.class).build();
}
}
|
NormalScopedBeanClassFinalTest
|
java
|
elastic__elasticsearch
|
modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/FailureStoreMetricsWithIncrementalBulkIT.java
|
{
"start": 2932,
"end": 12714
}
|
class ____ extends ESIntegTestCase {
private static final List<String> METRICS = List.of(
FailureStoreMetrics.METRIC_TOTAL,
FailureStoreMetrics.METRIC_FAILURE_STORE,
FailureStoreMetrics.METRIC_REJECTED
);
private static final String DATA_STREAM_NAME = "data-stream-incremental";
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return List.of(DataStreamsPlugin.class, TestTelemetryPlugin.class, MapperExtrasPlugin.class);
}
@Override
protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) {
return Settings.builder()
.put(super.nodeSettings(nodeOrdinal, otherSettings))
.put(IndexingPressure.SPLIT_BULK_LOW_WATERMARK.getKey(), "512B")
.put(IndexingPressure.SPLIT_BULK_LOW_WATERMARK_SIZE.getKey(), "2048B")
.put(IndexingPressure.SPLIT_BULK_HIGH_WATERMARK.getKey(), "2KB")
.put(IndexingPressure.SPLIT_BULK_HIGH_WATERMARK_SIZE.getKey(), "1024B")
.build();
}
public void testShortCircuitFailure() throws Exception {
createDataStreamWithFailureStore();
String coordinatingOnlyNode = internalCluster().startCoordinatingOnlyNode(Settings.EMPTY);
AbstractRefCounted refCounted = AbstractRefCounted.of(() -> {});
IncrementalBulkService incrementalBulkService = internalCluster().getInstance(IncrementalBulkService.class, coordinatingOnlyNode);
try (IncrementalBulkService.Handler handler = incrementalBulkService.newBulkRequest()) {
AtomicBoolean nextRequested = new AtomicBoolean(true);
int successfullyStored = 0;
while (nextRequested.get()) {
nextRequested.set(false);
refCounted.incRef();
handler.addItems(List.of(indexRequest(DATA_STREAM_NAME)), refCounted::decRef, () -> nextRequested.set(true));
successfullyStored++;
}
assertBusy(() -> assertTrue(nextRequested.get()));
var metrics = collectTelemetry();
assertDataStreamMetric(metrics, FailureStoreMetrics.METRIC_TOTAL, DATA_STREAM_NAME, successfullyStored);
assertDataStreamMetric(metrics, FailureStoreMetrics.METRIC_FAILURE_STORE, DATA_STREAM_NAME, 0);
assertDataStreamMetric(metrics, FailureStoreMetrics.METRIC_REJECTED, DATA_STREAM_NAME, 0);
// Introduce artificial pressure that will reject the following requests
String node = findNodeOfPrimaryShard(DATA_STREAM_NAME);
IndexingPressure primaryPressure = internalCluster().getInstance(IndexingPressure.class, node);
long memoryLimit = primaryPressure.stats().getMemoryLimit();
long primaryRejections = primaryPressure.stats().getPrimaryRejections();
try (Releasable ignored = primaryPressure.validateAndMarkPrimaryOperationStarted(10, memoryLimit, 0, false, false)) {
while (primaryPressure.stats().getPrimaryRejections() == primaryRejections) {
while (nextRequested.get()) {
nextRequested.set(false);
refCounted.incRef();
List<DocWriteRequest<?>> requests = new ArrayList<>();
for (int i = 0; i < 20; ++i) {
requests.add(indexRequest(DATA_STREAM_NAME));
}
handler.addItems(requests, refCounted::decRef, () -> nextRequested.set(true));
}
assertBusy(() -> assertTrue(nextRequested.get()));
}
}
while (nextRequested.get()) {
nextRequested.set(false);
refCounted.incRef();
handler.addItems(List.of(indexRequest(DATA_STREAM_NAME)), refCounted::decRef, () -> nextRequested.set(true));
}
assertBusy(() -> assertTrue(nextRequested.get()));
PlainActionFuture<BulkResponse> future = new PlainActionFuture<>();
handler.lastItems(List.of(indexRequest(DATA_STREAM_NAME)), refCounted::decRef, future);
BulkResponse bulkResponse = safeGet(future);
for (int i = 0; i < bulkResponse.getItems().length; ++i) {
// the first requests were successful
boolean hasFailed = i >= successfullyStored;
assertThat(bulkResponse.getItems()[i].isFailed(), is(hasFailed));
assertThat(bulkResponse.getItems()[i].getFailureStoreStatus(), is(IndexDocFailureStoreStatus.NOT_APPLICABLE_OR_UNKNOWN));
}
metrics = collectTelemetry();
assertDataStreamMetric(metrics, FailureStoreMetrics.METRIC_TOTAL, DATA_STREAM_NAME, bulkResponse.getItems().length);
assertDataStreamMetric(
metrics,
FailureStoreMetrics.METRIC_REJECTED,
DATA_STREAM_NAME,
bulkResponse.getItems().length - successfullyStored
);
assertDataStreamMetric(metrics, FailureStoreMetrics.METRIC_FAILURE_STORE, DATA_STREAM_NAME, 0);
}
}
private void createDataStreamWithFailureStore() throws IOException {
TransportPutComposableIndexTemplateAction.Request request = new TransportPutComposableIndexTemplateAction.Request(
"template-incremental"
);
request.indexTemplate(
ComposableIndexTemplate.builder()
.indexPatterns(List.of(DATA_STREAM_NAME + "*"))
.dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate())
.template(Template.builder().mappings(new CompressedXContent("""
{
"dynamic": false,
"properties": {
"@timestamp": {
"type": "date"
},
"count": {
"type": "long"
}
}
}""")).dataStreamOptions(DataStreamTestHelper.createDataStreamOptionsTemplate(true)))
.build()
);
assertAcked(safeGet(client().execute(TransportPutComposableIndexTemplateAction.TYPE, request)));
final var createDataStreamRequest = new CreateDataStreamAction.Request(
TEST_REQUEST_TIMEOUT,
TEST_REQUEST_TIMEOUT,
DATA_STREAM_NAME
);
assertAcked(safeGet(client().execute(CreateDataStreamAction.INSTANCE, createDataStreamRequest)));
}
private static Map<String, List<Measurement>> collectTelemetry() {
Map<String, List<Measurement>> measurements = new HashMap<>();
for (PluginsService pluginsService : internalCluster().getInstances(PluginsService.class)) {
final TestTelemetryPlugin telemetryPlugin = pluginsService.filterPlugins(TestTelemetryPlugin.class).findFirst().orElseThrow();
telemetryPlugin.collect();
for (String metricName : METRICS) {
measurements.put(metricName, telemetryPlugin.getLongCounterMeasurement(metricName));
}
}
return measurements;
}
private void assertDataStreamMetric(Map<String, List<Measurement>> metrics, String metric, String dataStreamName, int expectedValue) {
List<Measurement> measurements = metrics.get(metric);
assertThat(measurements, notNullValue());
long totalValue = measurements.stream()
.filter(m -> m.attributes().get("data_stream").equals(dataStreamName))
.mapToLong(Measurement::getLong)
.sum();
assertThat(totalValue, equalTo((long) expectedValue));
}
private static IndexRequest indexRequest(String dataStreamName) {
String time = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.formatMillis(System.currentTimeMillis());
String value = "1";
return new IndexRequest(dataStreamName).opType(DocWriteRequest.OpType.CREATE)
.source(Strings.format("{\"%s\":\"%s\", \"count\": %s}", DEFAULT_TIMESTAMP_FIELD, time, value), XContentType.JSON);
}
protected static String findNodeOfPrimaryShard(String dataStreamName) {
GetDataStreamAction.Request getDataStreamRequest = new GetDataStreamAction.Request(
TEST_REQUEST_TIMEOUT,
new String[] { dataStreamName }
);
GetDataStreamAction.Response getDataStreamResponse = safeGet(client().execute(GetDataStreamAction.INSTANCE, getDataStreamRequest));
assertThat(getDataStreamResponse.getDataStreams().size(), equalTo(1));
DataStream dataStream = getDataStreamResponse.getDataStreams().getFirst().getDataStream();
assertThat(dataStream.getName(), equalTo(DATA_STREAM_NAME));
assertThat(dataStream.getIndices().size(), equalTo(1));
String backingIndex = dataStream.getIndices().getFirst().getName();
assertThat(backingIndex, backingIndexEqualTo(DATA_STREAM_NAME, 1));
Index index = resolveIndex(backingIndex);
int shardId = 0;
for (String node : internalCluster().getNodeNames()) {
var indicesService = internalCluster().getInstance(IndicesService.class, node);
IndexService indexService = indicesService.indexService(index);
if (indexService != null) {
IndexShard shard = indexService.getShardOrNull(shardId);
if (shard != null && shard.isActive() && shard.routingEntry().primary()) {
return node;
}
}
}
throw new AssertionError("IndexShard instance not found for shard " + new ShardId(index, shardId));
}
}
|
FailureStoreMetricsWithIncrementalBulkIT
|
java
|
spring-projects__spring-boot
|
smoke-test/spring-boot-smoke-test-web-application-type/src/test/java/smoketest/webapplicationtype/WebEnvironmentNoneOverridesWebApplicationTypeTests.java
|
{
"start": 1355,
"end": 1657
}
|
class ____ {
@Autowired
private ApplicationContext context;
@Test
void contextIsPlain() {
assertThat(this.context).isNotInstanceOf(ReactiveWebApplicationContext.class);
assertThat(this.context).isNotInstanceOf(WebApplicationContext.class);
}
}
|
WebEnvironmentNoneOverridesWebApplicationTypeTests
|
java
|
apache__kafka
|
clients/src/test/java/org/apache/kafka/clients/consumer/internals/CounterConsumerRebalanceListener.java
|
{
"start": 1084,
"end": 3087
}
|
class ____ implements ConsumerRebalanceListener {
private final AtomicInteger revokedCounter = new AtomicInteger();
private final AtomicInteger assignedCounter = new AtomicInteger();
private final AtomicInteger lostCounter = new AtomicInteger();
private final Optional<RuntimeException> revokedError;
private final Optional<RuntimeException> assignedError;
private final Optional<RuntimeException> lostError;
public CounterConsumerRebalanceListener() {
this(Optional.empty(), Optional.empty(), Optional.empty());
}
public CounterConsumerRebalanceListener(Optional<RuntimeException> revokedError,
Optional<RuntimeException> assignedError,
Optional<RuntimeException> lostError) {
this.revokedError = revokedError;
this.assignedError = assignedError;
this.lostError = lostError;
}
@Override
public void onPartitionsRevoked(Collection<TopicPartition> partitions) {
try {
if (revokedError.isPresent())
throw revokedError.get();
} finally {
revokedCounter.incrementAndGet();
}
}
@Override
public void onPartitionsAssigned(Collection<TopicPartition> partitions) {
try {
if (assignedError.isPresent())
throw assignedError.get();
} finally {
assignedCounter.incrementAndGet();
}
}
@Override
public void onPartitionsLost(Collection<TopicPartition> partitions) {
try {
if (lostError.isPresent())
throw lostError.get();
} finally {
lostCounter.incrementAndGet();
}
}
public int revokedCount() {
return revokedCounter.get();
}
public int assignedCount() {
return assignedCounter.get();
}
public int lostCount() {
return lostCounter.get();
}
}
|
CounterConsumerRebalanceListener
|
java
|
google__guava
|
android/guava/src/com/google/common/base/Functions.java
|
{
"start": 14854,
"end": 15812
}
|
class ____<
F extends @Nullable Object, T extends @Nullable Object>
implements Function<F, T>, Serializable {
private final Supplier<T> supplier;
private SupplierFunction(Supplier<T> supplier) {
this.supplier = checkNotNull(supplier);
}
@Override
@ParametricNullness
public T apply(@ParametricNullness F input) {
return supplier.get();
}
@Override
public boolean equals(@Nullable Object obj) {
if (obj instanceof SupplierFunction) {
SupplierFunction<?, ?> that = (SupplierFunction<?, ?>) obj;
return this.supplier.equals(that.supplier);
}
return false;
}
@Override
public int hashCode() {
return supplier.hashCode();
}
@Override
public String toString() {
return "Functions.forSupplier(" + supplier + ")";
}
@GwtIncompatible @J2ktIncompatible private static final long serialVersionUID = 0;
}
}
|
SupplierFunction
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RemoteException.java
|
{
"start": 1042,
"end": 1971
}
|
class ____ extends IOException {
/** this value should not be defined in RpcHeader.proto so that protobuf will return a null */
private static final int UNSPECIFIED_ERROR = -1;
/** For java.io.Serializable */
private static final long serialVersionUID = 1L;
private final int errorCode;
private final String className;
/**
* @param className wrapped exception, may be null
* @param msg may be null
*/
public RemoteException(String className, String msg) {
this(className, msg, null);
}
/**
* @param className wrapped exception, may be null
* @param msg may be null
* @param erCode may be null
*/
public RemoteException(String className, String msg, RpcErrorCodeProto erCode) {
super(msg);
this.className = className;
if (erCode != null)
errorCode = erCode.getNumber();
else
errorCode = UNSPECIFIED_ERROR;
}
/**
* @return the
|
RemoteException
|
java
|
apache__logging-log4j2
|
log4j-core/src/main/java/org/apache/logging/log4j/core/async/AsyncLoggerConfig.java
|
{
"start": 14696,
"end": 17897
}
|
class ____<B extends Builder<B>> extends RootLogger.Builder<B> {
@Override
public LoggerConfig build() {
final LevelAndRefs container =
LoggerConfig.getLevelAndRefs(getLevel(), getRefs(), getLevelAndRefs(), getConfig());
return new AsyncLoggerConfig(
LogManager.ROOT_LOGGER_NAME,
container.refs,
getFilter(),
container.level,
isAdditivity(),
getProperties(),
getConfig(),
shouldIncludeLocation(getIncludeLocation()));
}
}
/**
* @deprecated use {@link #createLogger(String, Level, String, AppenderRef[], Property[], Configuration, Filter)}
*/
@Deprecated
public static LoggerConfig createLogger(
final String additivity,
final String levelName,
final String includeLocation,
final AppenderRef[] refs,
final Property[] properties,
final Configuration config,
final Filter filter) {
final List<AppenderRef> appenderRefs = Arrays.asList(refs);
Level level = null;
try {
level = Level.toLevel(levelName, Level.ERROR);
} catch (final Exception ex) {
LOGGER.error("Invalid Log level specified: {}. Defaulting to Error", levelName);
level = Level.ERROR;
}
final boolean additive = Booleans.parseBoolean(additivity, true);
return new AsyncLoggerConfig(
LogManager.ROOT_LOGGER_NAME,
appenderRefs,
filter,
level,
additive,
properties,
config,
shouldIncludeLocation(includeLocation));
}
/**
*
*/
@Deprecated
public static LoggerConfig createLogger(
@PluginAttribute("additivity") final String additivity,
@PluginAttribute("level") final Level level,
@PluginAttribute("includeLocation") final String includeLocation,
@PluginElement("AppenderRef") final AppenderRef[] refs,
@PluginElement("Properties") final Property[] properties,
@PluginConfiguration final Configuration config,
@PluginElement("Filter") final Filter filter) {
final List<AppenderRef> appenderRefs = Arrays.asList(refs);
final Level actualLevel = level == null ? Level.ERROR : level;
final boolean additive = Booleans.parseBoolean(additivity, true);
return new AsyncLoggerConfig(
LogManager.ROOT_LOGGER_NAME,
appenderRefs,
filter,
actualLevel,
additive,
properties,
config,
shouldIncludeLocation(includeLocation));
}
}
}
|
Builder
|
java
|
quarkusio__quarkus
|
extensions/security/spi/src/main/java/io/quarkus/security/spi/ClassSecurityCheckStorageBuildItem.java
|
{
"start": 988,
"end": 1228
}
|
class ____
* @return security check (see runtime Security SPI for respective class)
*/
public Object getSecurityCheck(DotName className) {
return classNameToSecurityCheck.get(className);
}
public static final
|
name
|
java
|
junit-team__junit5
|
junit-jupiter-api/src/main/java/org/junit/jupiter/api/extension/ExtensionContext.java
|
{
"start": 35597,
"end": 37772
}
|
class ____ {
/**
* The default, global namespace which allows access to stored data from
* all extensions.
*/
public static final Namespace GLOBAL = Namespace.create(new Object());
/**
* Create a namespace which restricts access to data to all extensions
* which use the same sequence of {@code parts} for creating a namespace.
*
* <p>The order of the {@code parts} is significant.
*
* <p>Internally the {@code parts} are compared using {@link Object#equals(Object)}.
*/
public static Namespace create(Object... parts) {
Preconditions.notEmpty(parts, "parts array must not be null or empty");
Preconditions.containsNoNullElements(parts, "individual parts must not be null");
return new Namespace(List.of(parts));
}
private final List<Object> parts;
private Namespace(List<Object> parts) {
this.parts = List.copyOf(parts);
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Namespace that = (Namespace) o;
return this.parts.equals(that.parts);
}
@Override
public int hashCode() {
return this.parts.hashCode();
}
/**
* Create a new namespace by appending the supplied {@code parts} to the
* existing sequence of parts in this namespace.
*
* @return new namespace; never {@code null}
* @since 5.8
*/
@API(status = STABLE, since = "5.10")
public Namespace append(Object... parts) {
Preconditions.notEmpty(parts, "parts array must not be null or empty");
Preconditions.containsNoNullElements(parts, "individual parts must not be null");
ArrayList<Object> newParts = new ArrayList<>(this.parts.size() + parts.length);
newParts.addAll(this.parts);
Collections.addAll(newParts, parts);
return new Namespace(newParts);
}
@API(status = INTERNAL, since = "5.13")
public List<Object> getParts() {
return parts;
}
}
/**
* {@code StoreScope} is an enumeration of the different scopes for
* {@link Store} instances.
*
* @since 5.13
* @see #getStore(StoreScope, Namespace)
*/
@API(status = EXPERIMENTAL, since = "6.0")
|
Namespace
|
java
|
spring-projects__spring-framework
|
spring-expression/src/test/java/org/springframework/expression/spel/SpelReproTests.java
|
{
"start": 68479,
"end": 68633
}
|
class ____ {
public String a;
private D(String s) {
a = s;
}
@Override
public String toString() {
return "D(" + a + ")";
}
}
static
|
D
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/sql/mysql/select/MySqlSelectTest_42_with_cte.java
|
{
"start": 924,
"end": 4563
}
|
class ____ extends MysqlTest {
public void test_0() throws Exception {
String sql = "WITH\n" +
" cte1 AS (SELECT a, b FROM table1),\n" +
" cte2 AS (SELECT c, d FROM table2)\n" +
"SELECT b, d FROM cte1 JOIN cte2\n" +
"WHERE cte1.a = cte2.c;";
List<SQLStatement> statementList = SQLUtils.parseStatements(sql, JdbcConstants.MYSQL, true);
SQLStatement stmt = statementList.get(0);
assertEquals(1, statementList.size());
SchemaStatVisitor visitor = SQLUtils.createSchemaStatVisitor(JdbcConstants.MYSQL);
stmt.accept(visitor);
System.out.println(stmt);
System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
System.out.println("coditions : " + visitor.getConditions());
System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(2, visitor.getTables().size());
assertEquals(4, visitor.getColumns().size());
assertEquals(2, visitor.getConditions().size());
assertEquals(0, visitor.getOrderByColumns().size());
assertTrue(visitor.containsTable("table1"));
assertTrue(visitor.containsTable("table2"));
assertTrue(visitor.containsColumn("table1", "a"));
assertTrue(visitor.containsColumn("table1", "b"));
assertTrue(visitor.containsColumn("table2", "c"));
assertTrue(visitor.containsColumn("table2", "d"));
{
String output = SQLUtils.toMySqlString(stmt);
assertEquals("WITH cte1 AS (\n" +
"\t\tSELECT a, b\n" +
"\t\tFROM table1\n" +
"\t),\n" +
"\tcte2 AS (\n" +
"\t\tSELECT c, d\n" +
"\t\tFROM table2\n" +
"\t)\n" +
"SELECT b, d\n" +
"FROM cte1\n" +
"\tJOIN cte2\n" +
"WHERE cte1.a = cte2.c;", //
output);
}
{
String output = SQLUtils.toMySqlString(stmt, SQLUtils.DEFAULT_LCASE_FORMAT_OPTION);
assertEquals("with cte1 as (\n" +
"\t\tselect a, b\n" +
"\t\tfrom table1\n" +
"\t),\n" +
"\tcte2 as (\n" +
"\t\tselect c, d\n" +
"\t\tfrom table2\n" +
"\t)\n" +
"select b, d\n" +
"from cte1\n" +
"\tjoin cte2\n" +
"where cte1.a = cte2.c;", //
output);
}
{
String output = SQLUtils.toMySqlString(stmt, new SQLUtils.FormatOption(true, true, true));
assertEquals("WITH cte1 AS (\n" +
"\t\tSELECT a, b\n" +
"\t\tFROM table1\n" +
"\t),\n" +
"\tcte2 AS (\n" +
"\t\tSELECT c, d\n" +
"\t\tFROM table2\n" +
"\t)\n" +
"SELECT b, d\n" +
"FROM cte1\n" +
"\tJOIN cte2\n" +
"WHERE cte1.a = cte2.c;", //
output);
}
}
}
|
MySqlSelectTest_42_with_cte
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/function/scalar/string/BinaryComparisonCaseInsensitiveFunction.java
|
{
"start": 1168,
"end": 3921
}
|
class ____ extends CaseInsensitiveScalarFunction {
private final Expression left, right;
protected BinaryComparisonCaseInsensitiveFunction(Source source, Expression left, Expression right, boolean caseInsensitive) {
super(source, asList(left, right), caseInsensitive);
this.left = left;
this.right = right;
}
@Override
protected TypeResolution resolveType() {
if (childrenResolved() == false) {
return new TypeResolution("Unresolved children");
}
TypeResolution sourceResolution = isStringAndExact(left, sourceText(), FIRST);
if (sourceResolution.unresolved()) {
return sourceResolution;
}
return isStringAndExact(right, sourceText(), SECOND);
}
public Expression left() {
return left;
}
public Expression right() {
return right;
}
@Override
public DataType dataType() {
return DataTypes.BOOLEAN;
}
@Override
public boolean foldable() {
return left.foldable() && right.foldable();
}
@Override
public ScriptTemplate asScript() {
ScriptTemplate leftScript = asScript(left);
ScriptTemplate rightScript = asScript(right);
return asScriptFrom(leftScript, rightScript);
}
protected ScriptTemplate asScriptFrom(ScriptTemplate leftScript, ScriptTemplate rightScript) {
return new ScriptTemplate(
format(
Locale.ROOT,
formatTemplate("%s.%s(%s,%s,%s)"),
Scripts.classPackageAsPrefix(getClass()),
scriptMethodName(),
leftScript.template(),
rightScript.template(),
"{}"
),
paramsBuilder().script(leftScript.params()).script(rightScript.params()).variable(isCaseInsensitive()).build(),
dataType()
);
}
protected String scriptMethodName() {
String simpleName = getClass().getSimpleName();
return Character.toLowerCase(simpleName.charAt(0)) + simpleName.substring(1);
}
@Override
public int hashCode() {
return Objects.hash(left, right, isCaseInsensitive());
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
BinaryComparisonCaseInsensitiveFunction other = (BinaryComparisonCaseInsensitiveFunction) obj;
return Objects.equals(left, other.left)
&& Objects.equals(right, other.right)
&& Objects.equals(isCaseInsensitive(), other.isCaseInsensitive());
}
}
|
BinaryComparisonCaseInsensitiveFunction
|
java
|
apache__camel
|
core/camel-core-model/src/main/java/org/apache/camel/model/tokenizer/LangChain4jTokenizerDefinition.java
|
{
"start": 1558,
"end": 4132
}
|
class ____ extends TokenizerImplementationDefinition {
@XmlAttribute(required = true)
@Metadata(javaType = "org.apache.camel.model.tokenizer.TokenizerType", required = true,
enums = "OPEN_AI,AZURE,QWEN")
private String tokenizerType;
@XmlAttribute(required = true)
@Metadata(javaType = "java.lang.Integer", required = true)
private String maxTokens;
@XmlAttribute(required = true)
@Metadata(javaType = "java.lang.Integer", required = true)
private String maxOverlap;
@XmlAttribute(required = true)
@Metadata(javaType = "java.lang.String", required = false)
private String modelName;
public LangChain4jTokenizerDefinition() {
}
public LangChain4jTokenizerDefinition(LangChain4jTokenizerDefinition source) {
super(source);
this.maxTokens = source.maxTokens;
this.maxOverlap = source.maxOverlap;
this.tokenizerType = source.tokenizerType;
this.modelName = source.modelName;
}
/**
* The maximum number of tokens on each segment
*/
public String getMaxTokens() {
return maxTokens;
}
/**
* Sets the maximum number of tokens on each segment
*/
public void setMaxTokens(String maxTokens) {
this.maxTokens = maxTokens;
}
/**
* Gets the maximum number of tokens that can overlap in each segment
*/
public String getMaxOverlap() {
return maxOverlap;
}
/**
* Sets the maximum number of tokens that can overlap in each segment
*/
public void setMaxOverlap(String maxOverlap) {
this.maxOverlap = maxOverlap;
}
/**
* Gets the tokenizer type
*/
public String getTokenizerType() {
return tokenizerType;
}
/**
* Sets the tokenizer type
*/
public void setTokenizerType(String tokenizerType) {
this.tokenizerType = tokenizerType;
}
/**
* Gets the model name
*
* @return
*/
public String getModelName() {
return modelName;
}
/**
* Sets the model name
*
* @param modelName
*/
public void setModelName(String modelName) {
this.modelName = modelName;
}
@Override
public LangChain4jTokenizerDefinition copyDefinition() {
throw new UnsupportedOperationException("Must be implemented in the concrete classes");
}
protected static String toName(String name) {
return "langChain4j" + StringHelper.capitalize(name);
}
@XmlTransient
public
|
LangChain4jTokenizerDefinition
|
java
|
elastic__elasticsearch
|
x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ServiceAccountAuthenticatorTests.java
|
{
"start": 1615,
"end": 8230
}
|
class ____ extends AbstractAuthenticatorTests {
public void testRecordingSuccessfulAuthenticationMetrics() {
final TestTelemetryPlugin telemetryPlugin = new TestTelemetryPlugin();
final long initialNanoTime = randomLongBetween(0, 100);
final TestNanoTimeSupplier nanoTimeSupplier = new TestNanoTimeSupplier(initialNanoTime);
final ServiceAccountService serviceAccountService = mock(ServiceAccountService.class);
final String nodeName = randomAlphaOfLengthBetween(3, 8);
final ServiceAccountAuthenticator serviceAccountAuthenticator = new ServiceAccountAuthenticator(
serviceAccountService,
nodeName,
telemetryPlugin.getTelemetryProvider(Settings.EMPTY).getMeterRegistry(),
nanoTimeSupplier
);
final ServiceAccountToken serviceAccountToken = randomServiceAccountToken();
final Authenticator.Context context = mockServiceAccountAuthenticatorContext(serviceAccountToken);
final long executionTimeInNanos = randomLongBetween(0, 500);
doAnswer(invocation -> {
nanoTimeSupplier.advanceTime(executionTimeInNanos);
final ActionListener<Authentication> listener = invocation.getArgument(2);
Authentication authentication = Authentication.newServiceAccountAuthentication(
new User(serviceAccountToken.getAccountId().asPrincipal()),
nodeName,
Map.of()
);
listener.onResponse(authentication);
return Void.TYPE;
}).when(serviceAccountService).authenticateToken(same(serviceAccountToken), same(nodeName), anyActionListener());
final PlainActionFuture<AuthenticationResult<Authentication>> future = new PlainActionFuture<>();
serviceAccountAuthenticator.authenticate(context, future);
var authResult = future.actionGet();
assertThat(authResult.isAuthenticated(), equalTo(true));
// verify we recorded success metric
assertSingleSuccessAuthMetric(
telemetryPlugin,
SecurityMetricType.AUTHC_SERVICE_ACCOUNT,
Map.ofEntries(
Map.entry(ServiceAccountAuthenticator.ATTRIBUTE_SERVICE_ACCOUNT_ID, serviceAccountToken.getAccountId().asPrincipal())
)
);
// verify that there were no failures recorded
assertZeroFailedAuthMetrics(telemetryPlugin, SecurityMetricType.AUTHC_SERVICE_ACCOUNT);
// verify we recorded authentication time
assertAuthenticationTimeMetric(
telemetryPlugin,
SecurityMetricType.AUTHC_SERVICE_ACCOUNT,
executionTimeInNanos,
Map.ofEntries(
Map.entry(ServiceAccountAuthenticator.ATTRIBUTE_SERVICE_ACCOUNT_ID, serviceAccountToken.getAccountId().asPrincipal())
)
);
}
public void testRecordingFailedAuthenticationMetrics() {
final TestTelemetryPlugin telemetryPlugin = new TestTelemetryPlugin();
final long initialNanoTime = randomLongBetween(0, 100);
final TestNanoTimeSupplier nanoTimeSupplier = new TestNanoTimeSupplier(initialNanoTime);
final ServiceAccountService serviceAccountService = mock(ServiceAccountService.class);
final String nodeName = randomAlphaOfLengthBetween(3, 8);
final ServiceAccountAuthenticator serviceAccountAuthenticator = new ServiceAccountAuthenticator(
serviceAccountService,
nodeName,
telemetryPlugin.getTelemetryProvider(Settings.EMPTY).getMeterRegistry(),
nanoTimeSupplier
);
final ServiceAccountToken serviceAccountToken = randomServiceAccountToken();
final Authenticator.Context context = mockServiceAccountAuthenticatorContext(serviceAccountToken);
var failureError = new ElasticsearchSecurityException("failed to authenticate test service account", RestStatus.UNAUTHORIZED);
when(context.getRequest().exceptionProcessingRequest(same(failureError), any())).thenReturn(failureError);
final long executionTimeInNanos = randomLongBetween(0, 500);
doAnswer(invocation -> {
nanoTimeSupplier.advanceTime(executionTimeInNanos);
final ActionListener<Authentication> listener = invocation.getArgument(2);
listener.onFailure(failureError);
return Void.TYPE;
}).when(serviceAccountService).authenticateToken(same(serviceAccountToken), same(nodeName), anyActionListener());
final PlainActionFuture<AuthenticationResult<Authentication>> future = new PlainActionFuture<>();
serviceAccountAuthenticator.authenticate(context, future);
var e = expectThrows(ElasticsearchSecurityException.class, future::actionGet);
assertThat(e, sameInstance(failureError));
// verify we recorded failure metric
assertSingleFailedAuthMetric(
telemetryPlugin,
SecurityMetricType.AUTHC_SERVICE_ACCOUNT,
Map.ofEntries(
Map.entry(ServiceAccountAuthenticator.ATTRIBUTE_SERVICE_ACCOUNT_ID, serviceAccountToken.getAccountId().asPrincipal())
)
);
// verify that there were no successes recorded
assertZeroSuccessAuthMetrics(telemetryPlugin, SecurityMetricType.AUTHC_SERVICE_ACCOUNT);
// verify we recorded authentication time
assertAuthenticationTimeMetric(
telemetryPlugin,
SecurityMetricType.AUTHC_SERVICE_ACCOUNT,
executionTimeInNanos,
Map.ofEntries(
Map.entry(ServiceAccountAuthenticator.ATTRIBUTE_SERVICE_ACCOUNT_ID, serviceAccountToken.getAccountId().asPrincipal())
)
);
}
private static ServiceAccountToken randomServiceAccountToken() {
return ServiceAccountToken.newToken(
new ServiceAccountId(randomAlphaOfLengthBetween(3, 8), randomAlphaOfLengthBetween(3, 8)),
randomAlphaOfLengthBetween(3, 8)
);
}
private Authenticator.Context mockServiceAccountAuthenticatorContext(ServiceAccountToken token) {
final Authenticator.Context context = mock(Authenticator.Context.class);
when(context.getMostRecentAuthenticationToken()).thenReturn(token);
when(context.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY));
final AuthenticationService.AuditableRequest auditableRequest = mock(AuthenticationService.AuditableRequest.class);
when(context.getRequest()).thenReturn(auditableRequest);
return context;
}
}
|
ServiceAccountAuthenticatorTests
|
java
|
apache__camel
|
components/camel-aws/camel-aws-xray/src/main/java/org/apache/camel/component/aws/xray/decorators/messaging/AmqpSegmentDecorator.java
|
{
"start": 877,
"end": 1029
}
|
class ____ extends AbstractMessagingSegmentDecorator {
@Override
public String getComponent() {
return "amqp";
}
}
|
AmqpSegmentDecorator
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/jsontype/TestTypeNames.java
|
{
"start": 1062,
"end": 1130
}
|
class ____ extends Base1616 { }
@JsonTypeName("B")
static
|
A1616
|
java
|
apache__maven
|
impl/maven-core/src/main/java/org/apache/maven/classrealm/ClassRealmRequest.java
|
{
"start": 1704,
"end": 2461
}
|
class ____ or {@code null} if using the default parent.
*/
ClassLoader getParent();
/**
* @deprecated Use {@link #getParentImports()} instead.
* @return imports
*/
@Deprecated
List<String> getImports();
/**
* Gets the packages/types to import from the parent realm.
*
* @return The modifiable list of packages/types to import from the parent realm, never {@code null}.
*/
List<String> getParentImports();
/**
* Gets the packages/types to import from foreign realms.
*
* @return The modifiable map of packages/types to import from foreign realms, never {@code null}.
*/
Map<String, ClassLoader> getForeignImports();
/**
* Gets the constituents for the
|
realm
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/sql/mysql/param/MySqlParameterizedOutputVisitorTest_3.java
|
{
"start": 787,
"end": 1440
}
|
class ____ extends com.alibaba.druid.bvt.sql.mysql.param.MySQLParameterizedTest {
protected void setUp() throws Exception {
System.setProperty("fastsql.parameterized.shardingSupport", "false");
}
protected void tearDown() throws Exception {
System.clearProperty("fastsql.parameterized.shardingSupport");
}
public void test_0() throws Exception {
String sql = "delete from alerts where not (exists (select metric1_.id from metrics metric1_ where id=alerts.metric_id))";
assertSame(ParameterizedOutputVisitorUtils.parameterize(sql, JdbcConstants.MYSQL), sql);
}
}
|
MySqlParameterizedOutputVisitorTest_3
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionUtils.java
|
{
"start": 670,
"end": 1464
}
|
class ____ {
/**
* Allows to directly call
* {@link TransportMasterNodeAction#masterOperation(org.elasticsearch.tasks.Task,MasterNodeRequest, ClusterState, ActionListener)}
* which is a protected method.
*/
public static <Request extends MasterNodeRequest<Request>, Response extends ActionResponse> void runMasterOperation(
TransportMasterNodeAction<Request, Response> masterNodeAction,
Request request,
ClusterState clusterState,
ActionListener<Response> actionListener
) throws Exception {
assert masterNodeAction.checkBlock(request, clusterState) == null;
// TODO: pass through task here?
masterNodeAction.masterOperation(null, request, clusterState, actionListener);
}
}
|
TransportMasterNodeActionUtils
|
java
|
playframework__playframework
|
cache/play-ehcache/src/main/java/play/cache/ehcache/EhCacheComponents.java
|
{
"start": 693,
"end": 898
}
|
class ____ extends BuiltInComponentsFromContext implements EhCacheComponents {
*
* public MyComponents(ApplicationLoader.Context context) {
* super(context);
* }
*
* // A service
|
MyComponents
|
java
|
lettuce-io__lettuce-core
|
src/test/java/io/lettuce/test/condition/EnabledOnCommand.java
|
{
"start": 195,
"end": 316
}
|
class ____ test method is only <em>enabled</em>if the
* specified command is available.
*
* <p/>
* When applied at the
|
or
|
java
|
junit-team__junit5
|
platform-tests/src/test/java/org/junit/platform/console/subpackage/FailingTestCase.java
|
{
"start": 460,
"end": 561
}
|
class ____ {
@Test
void first() {
fail();
}
@Test
void second() {
fail();
}
}
|
FailingTestCase
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/aggregate/ValueAggregatorJob.java
|
{
"start": 3993,
"end": 8459
}
|
class ____ {

  /**
   * Create a {@link JobControl} containing a single aggregate-based
   * map/reduce job built from the given arguments.
   *
   * @param args the arguments used for job creation; generic hadoop
   *             arguments are accepted
   * @param descriptors aggregator descriptors to configure, or null to
   *                    leave the configuration untouched
   * @return a JobControl with the single controlled job added
   * @throws IOException if the job cannot be created
   */
  public static JobControl createValueAggregatorJobs(String args[],
    Class<? extends ValueAggregatorDescriptor>[] descriptors)
    throws IOException {
    JobControl theControl = new JobControl("ValueAggregatorJobs");
    ArrayList<ControlledJob> dependingJobs = new ArrayList<ControlledJob>();
    Configuration conf = new Configuration();
    if (descriptors != null) {
      conf = setAggregatorDescriptors(descriptors);
    }
    Job job = createValueAggregatorJob(conf, args);
    // No dependencies: the empty list marks the job as immediately runnable.
    ControlledJob cjob = new ControlledJob(job, dependingJobs);
    theControl.addJob(cjob);
    return theControl;
  }

  /**
   * Create a {@link JobControl} containing a single aggregate-based
   * map/reduce job, with no extra aggregator descriptors.
   *
   * @param args the arguments used for job creation
   * @return a JobControl with the single controlled job added
   * @throws IOException if the job cannot be created
   */
  public static JobControl createValueAggregatorJobs(String args[])
    throws IOException {
    return createValueAggregatorJobs(args, null);
  }

  /**
   * Create an Aggregate based map/reduce job.
   *
   * @param conf The configuration for job
   * @param args the arguments used for job creation. Generic hadoop
   * arguments are accepted.
   * @return a Job object ready for submission.
   *
   * @throws IOException
   * @see GenericOptionsParser
   */
  public static Job createValueAggregatorJob(Configuration conf, String args[])
    throws IOException {
    GenericOptionsParser genericParser
      = new GenericOptionsParser(conf, args);
    args = genericParser.getRemainingArgs();
    if (args.length < 2) {
      // Input and output directories are mandatory; everything else is optional.
      System.out.println("usage: inputDirs outDir "
          + "[numOfReducer [textinputformat|seq [specfile [jobName]]]]");
      GenericOptionsParser.printGenericCommandUsage(System.out);
      System.exit(2);
    }
    String inputDir = args[0];
    String outputDir = args[1];
    int numOfReducers = 1;
    if (args.length > 2) {
      numOfReducers = Integer.parseInt(args[2]);
    }
    // Default to SequenceFileInputFormat unless "textinputformat" was requested.
    final Class<? extends InputFormat> theInputFormat =
        (args.length > 3 && "textinputformat".equalsIgnoreCase(args[3]))
            ? TextInputFormat.class
            : SequenceFileInputFormat.class;
    Path specFile = null;
    if (args.length > 4) {
      specFile = new Path(args[4]);
    }
    String jobName = "";
    if (args.length > 5) {
      jobName = args[5];
    }
    if (specFile != null) {
      conf.addResource(specFile);
    }
    String userJarFile = conf.get(ValueAggregatorJobBase.USER_JAR);
    if (userJarFile != null) {
      // A user-supplied jar takes precedence over the framework jar.
      conf.set(MRJobConfig.JAR, userJarFile);
    }
    Job theJob = Job.getInstance(conf);
    if (userJarFile == null) {
      theJob.setJarByClass(ValueAggregator.class);
    }
    theJob.setJobName("ValueAggregatorJob: " + jobName);
    FileInputFormat.addInputPaths(theJob, inputDir);
    theJob.setInputFormatClass(theInputFormat);
    theJob.setMapperClass(ValueAggregatorMapper.class);
    FileOutputFormat.setOutputPath(theJob, new Path(outputDir));
    theJob.setOutputFormatClass(TextOutputFormat.class);
    theJob.setMapOutputKeyClass(Text.class);
    theJob.setMapOutputValueClass(Text.class);
    theJob.setOutputKeyClass(Text.class);
    theJob.setOutputValueClass(Text.class);
    theJob.setReducerClass(ValueAggregatorReducer.class);
    theJob.setCombinerClass(ValueAggregatorCombiner.class);
    theJob.setNumReduceTasks(numOfReducers);
    return theJob;
  }

  /**
   * Create an Aggregate based map/reduce job configured with the given
   * aggregator descriptors.
   *
   * @param args the arguments used for job creation
   * @param descriptors aggregator descriptors to configure; must not be null
   * @return a Job object ready for submission
   * @throws IOException if the job cannot be created
   */
  public static Job createValueAggregatorJob(String args[],
    Class<? extends ValueAggregatorDescriptor>[] descriptors)
    throws IOException {
    return createValueAggregatorJob(
      setAggregatorDescriptors(descriptors), args);
  }

  /**
   * Build a fresh {@link Configuration} describing the given aggregator
   * descriptors so that the framework can instantiate them.
   *
   * @param descriptors descriptor classes to register
   * @return a Configuration carrying the descriptor count and entries
   */
  public static Configuration setAggregatorDescriptors(
      Class<? extends ValueAggregatorDescriptor>[] descriptors) {
    Configuration conf = new Configuration();
    conf.setInt(ValueAggregatorJobBase.DESCRIPTOR_NUM, descriptors.length);
    // Register each descriptor under a numbered key as "UserDefined,<class>".
    for (int i = 0; i < descriptors.length; i++) {
      conf.set(ValueAggregatorJobBase.DESCRIPTOR + "." + i,
          "UserDefined," + descriptors[i].getName());
    }
    return conf;
  }

  /**
   * create and run an Aggregate based map/reduce job.
   *
   * @param args the arguments used for job creation
   * @throws IOException
   */
  public static void main(String args[])
    throws IOException, InterruptedException, ClassNotFoundException {
    Job job = ValueAggregatorJob.createValueAggregatorJob(
        new Configuration(), args);
    int ret = job.waitForCompletion(true) ? 0 : 1;
    System.exit(ret);
  }
}
|
ValueAggregatorJob
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/basic/EmptyStringTest.java
|
{
"start": 964,
"end": 2170
}
|
class ____ {
	private Integer emptyId = null;
	private Integer nullId = null;

	@BeforeClassTemplate
	public void initData(EntityManagerFactoryScope scope) {
		// Revision 1: persist one entity with an empty string and one with null.
		scope.inTransaction( em -> {
			final StrTestEntity withEmptyString = new StrTestEntity( "" );
			final StrTestEntity withNullString = new StrTestEntity( null );
			em.persist( withEmptyString );
			em.persist( withNullString );
			emptyId = withEmptyString.getId();
			nullId = withNullString.getId();
		} );
		// Should not generate revision after NULL to "" modification and vice versa on Oracle.
		scope.inTransaction( em -> {
			final StrTestEntity withEmptyString = em.find( StrTestEntity.class, emptyId );
			withEmptyString.setStr( null );
			em.merge( withEmptyString );
			final StrTestEntity withNullString = em.find( StrTestEntity.class, nullId );
			withNullString.setStr( "" );
			em.merge( withNullString );
		} );
	}

	@Test
	public void testRevisionsCounts(EntityManagerFactoryScope scope) {
		// Both entities must only carry the initial revision: the ""<->null swap is a no-op.
		scope.inEntityManager( em -> {
			final var auditReader = AuditReaderFactory.get( em );
			final var expectedRevisions = Arrays.asList( 1 );
			assertEquals( expectedRevisions, auditReader.getRevisions( StrTestEntity.class, emptyId ) );
			assertEquals( expectedRevisions, auditReader.getRevisions( StrTestEntity.class, nullId ) );
		} );
	}
}
|
EmptyStringTest
|
java
|
apache__kafka
|
streams/src/main/java/org/apache/kafka/streams/processor/api/WrappedFixedKeyProcessorSupplier.java
|
{
"start": 1225,
"end": 1339
}
|
/**
 * Marker interface for a {@link FixedKeyProcessorSupplier} that wraps another supplier;
 * it declares no members of its own.
 * NOTE(review): the wrapping semantics live in the implementors — confirm against callers.
 */
interface ____<KIn, VIn, VOut> extends FixedKeyProcessorSupplier<KIn, VIn, VOut> {
}
|
WrappedFixedKeyProcessorSupplier
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/streams/AnalyticsRequestCallback.java
|
{
"start": 1096,
"end": 2081
}
|
class ____ implements RequestCallback {
  /** Sink for all stream-statistics updates; assigned once at construction, never null. */
  private final S3AInputStreamStatistics statistics;

  /**
   * Create a new callback instance.
   * @param statistics the statistics to update; must not be null
   * @throws NullPointerException if {@code statistics} is null
   */
  public AnalyticsRequestCallback(S3AInputStreamStatistics statistics) {
    // Fail fast here rather than with an NPE on the first callback.
    this.statistics = java.util.Objects.requireNonNull(statistics, "statistics");
  }

  /** Record that a GET request was initiated. */
  @Override
  public void onGetRequest() {
    statistics.getRequestInitiated();
  }

  /** Record that a HEAD request was initiated. */
  @Override
  public void onHeadRequest() {
    statistics.headRequestInitiated();
  }

  /**
   * Record the bytes scheduled for prefetch.
   * @param start first byte of the block (inclusive)
   * @param end last byte of the block (inclusive, hence the +1 in the count)
   */
  @Override
  public void onBlockPrefetch(long start, long end) {
    statistics.bytesPrefetched(end - start + 1);
  }

  /** Record that footer parsing failed. */
  @Override
  public void footerParsingFailed() {
    statistics.footerParsingFailed();
  }

  /**
   * Record the start of a vectored read operation.
   * @param numIncomingRanges number of ranges requested by the caller
   * @param numCombinedRanges number of ranges after combining
   */
  @Override
  public void onReadVectored(int numIncomingRanges, int numCombinedRanges) {
    statistics.readVectoredOperationStarted(numIncomingRanges, numCombinedRanges);
  }

  /** Record a stream-read cache hit. */
  @Override
  public void onCacheHit() {
    statistics.streamReadCacheHit();
  }
}
|
AnalyticsRequestCallback
|
java
|
apache__avro
|
lang/java/tools/src/test/compiler/output/NoSettersTest.java
|
{
"start": 6217,
"end": 13238
}
|
// NOTE(review): Avro code-generator output — regenerate instead of hand-editing the logic.
class ____ extends org.apache.avro.specific.SpecificRecordBuilderBase<NoSettersTest>
    implements org.apache.avro.data.RecordBuilder<NoSettersTest> {
    private java.lang.CharSequence name;
    private java.lang.Integer favorite_number;
    /** Creates a new Builder */
    private Builder() {
      super(SCHEMA$, MODEL$);
    }
    /**
     * Creates a Builder by copying an existing Builder.
     * @param other The existing Builder to copy.
     */
    private Builder(avro.examples.baseball.NoSettersTest.Builder other) {
      super(other);
      if (isValidValue(fields()[0], other.name)) {
        this.name = data().deepCopy(fields()[0].schema(), other.name);
        // Preserve the source builder's "explicitly set" flag.
        fieldSetFlags()[0] = other.fieldSetFlags()[0];
      }
      if (isValidValue(fields()[1], other.favorite_number)) {
        this.favorite_number = data().deepCopy(fields()[1].schema(), other.favorite_number);
        fieldSetFlags()[1] = other.fieldSetFlags()[1];
      }
    }
    /**
     * Creates a Builder by copying an existing NoSettersTest instance
     * @param other The existing instance to copy.
     */
    private Builder(avro.examples.baseball.NoSettersTest other) {
      super(SCHEMA$, MODEL$);
      if (isValidValue(fields()[0], other.name)) {
        this.name = data().deepCopy(fields()[0].schema(), other.name);
        // Values copied from a record instance always count as explicitly set.
        fieldSetFlags()[0] = true;
      }
      if (isValidValue(fields()[1], other.favorite_number)) {
        this.favorite_number = data().deepCopy(fields()[1].schema(), other.favorite_number);
        fieldSetFlags()[1] = true;
      }
    }
    /**
     * Gets the value of the 'name' field.
     * @return The value.
     */
    public java.lang.CharSequence getName() {
      return name;
    }
    /**
     * Sets the value of the 'name' field.
     * @param value The value of 'name'.
     * @return This builder.
     */
    public avro.examples.baseball.NoSettersTest.Builder setName(java.lang.CharSequence value) {
      validate(fields()[0], value);
      this.name = value;
      fieldSetFlags()[0] = true;
      return this;
    }
    /**
     * Checks whether the 'name' field has been set.
     * @return True if the 'name' field has been set, false otherwise.
     */
    public boolean hasName() {
      return fieldSetFlags()[0];
    }
    /**
     * Clears the value of the 'name' field.
     * @return This builder.
     */
    public avro.examples.baseball.NoSettersTest.Builder clearName() {
      name = null;
      fieldSetFlags()[0] = false;
      return this;
    }
    /**
     * Gets the value of the 'favorite_number' field.
     * @return The value.
     */
    public java.lang.Integer getFavoriteNumber() {
      return favorite_number;
    }
    /**
     * Sets the value of the 'favorite_number' field.
     * @param value The value of 'favorite_number'.
     * @return This builder.
     */
    public avro.examples.baseball.NoSettersTest.Builder setFavoriteNumber(java.lang.Integer value) {
      validate(fields()[1], value);
      this.favorite_number = value;
      fieldSetFlags()[1] = true;
      return this;
    }
    /**
     * Checks whether the 'favorite_number' field has been set.
     * @return True if the 'favorite_number' field has been set, false otherwise.
     */
    public boolean hasFavoriteNumber() {
      return fieldSetFlags()[1];
    }
    /**
     * Clears the value of the 'favorite_number' field.
     * @return This builder.
     */
    public avro.examples.baseball.NoSettersTest.Builder clearFavoriteNumber() {
      favorite_number = null;
      fieldSetFlags()[1] = false;
      return this;
    }
    @Override
    @SuppressWarnings("unchecked")
    public NoSettersTest build() {
      try {
        NoSettersTest record = new NoSettersTest();
        // Fields not explicitly set fall back to their schema defaults.
        record.name = fieldSetFlags()[0] ? this.name : (java.lang.CharSequence) defaultValue(fields()[0]);
        record.favorite_number = fieldSetFlags()[1] ? this.favorite_number : (java.lang.Integer) defaultValue(fields()[1]);
        return record;
      } catch (org.apache.avro.AvroMissingFieldException e) {
        throw e;
      } catch (java.lang.Exception e) {
        throw new org.apache.avro.AvroRuntimeException(e);
      }
    }
  }
@SuppressWarnings("unchecked")
private static final org.apache.avro.io.DatumWriter<NoSettersTest>
WRITER$ = (org.apache.avro.io.DatumWriter<NoSettersTest>)MODEL$.createDatumWriter(SCHEMA$);
  /** Externalizable support: serializes this record with the Avro datum writer. */
  @Override public void writeExternal(java.io.ObjectOutput out)
    throws java.io.IOException {
    WRITER$.write(this, SpecificData.getEncoder(out));
  }
@SuppressWarnings("unchecked")
private static final org.apache.avro.io.DatumReader<NoSettersTest>
READER$ = (org.apache.avro.io.DatumReader<NoSettersTest>)MODEL$.createDatumReader(SCHEMA$);
  /** Externalizable support: populates this record with the Avro datum reader. */
  @Override public void readExternal(java.io.ObjectInput in)
    throws java.io.IOException {
    READER$.read(this, SpecificData.getDecoder(in));
  }
  // Signals that customEncode/customDecode replace generic reflection-based (de)serialization.
  @Override protected boolean hasCustomCoders() { return true; }
  /** Hand-rolled encoder: writes 'name', then the union branch index and value for 'favorite_number'. */
  @Override public void customEncode(org.apache.avro.io.Encoder out)
    throws java.io.IOException
  {
    out.writeString(this.name);
    if (this.favorite_number == null) {
      // Union branch 1 is the null alternative.
      out.writeIndex(1);
      out.writeNull();
    } else {
      // Union branch 0 is the int alternative.
      out.writeIndex(0);
      out.writeInt(this.favorite_number);
    }
  }
  /** Hand-rolled decoder mirroring {@code customEncode}; handles schema-resolution field reordering. */
  @Override public void customDecode(org.apache.avro.io.ResolvingDecoder in)
    throws java.io.IOException
  {
    org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff();
    if (fieldOrder == null) {
      // Fast path: writer and reader schemas agree, read fields in declared order.
      this.name = in.readString(this.name instanceof Utf8 ? (Utf8)this.name : null);
      if (in.readIndex() != 0) {
        in.readNull();
        this.favorite_number = null;
      } else {
        this.favorite_number = in.readInt();
      }
    } else {
      // Slow path: read fields in the order dictated by schema resolution.
      for (int i = 0; i < 2; i++) {
        switch (fieldOrder[i].pos()) {
        case 0:
          this.name = in.readString(this.name instanceof Utf8 ? (Utf8)this.name : null);
          break;
        case 1:
          if (in.readIndex() != 0) {
            in.readNull();
            this.favorite_number = null;
          } else {
            this.favorite_number = in.readInt();
          }
          break;
        default:
          throw new java.io.IOException("Corrupt ResolvingDecoder.");
        }
      }
    }
  }
  @Override
  public int hashCode() {
    // Standard 31-based combination over both fields; a null field contributes 0.
    int result = 1;
    result = 31 * result + (this.name == null ? 0 : this.name.hashCode());
    result = 31 * result + (this.favorite_number == null ? 0 : this.favorite_number.hashCode());
    return result;
  }
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (!(o instanceof NoSettersTest)) {
      return false;
    }
    NoSettersTest other = (NoSettersTest) o;
    // Compare 'name' by character content so different CharSequence
    // implementations (String vs Utf8) compare equal.
    if (Utf8.compareSequences(this.name, other.name) != 0) {
      return false;
    }
    if (!java.util.Objects.equals(this.favorite_number, other.favorite_number)) {
      return false;
    }
    return true;
  }
}
|
Builder
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesLongsTests.java
|
{
"start": 714,
"end": 3774
}
|
class ____ extends ESTestCase {
    public void testLongs() throws IOException {
        // Random document matrix: each row holds one document's values with
        // length 0, 1, or 2..100 so missing, single- and multi-valued fields
        // are all exercised.
        long[][] values = new long[between(3, 10)][];
        for (int d = 0; d < values.length; d++) {
            values[d] = new long[randomBoolean() ? randomBoolean() ? 0 : 1 : between(2, 100)];
            for (int i = 0; i < values[d].length; i++) {
                values[d][i] = randomLong();
            }
        }
        LongDocValuesField longField = wrap(values);
        Longs longs = (Longs) longField.toScriptDocValues();
        for (int round = 0; round < 10; round++) {
            int d = between(0, values.length - 1);
            longField.setNextDocId(d);
            if (values[d].length > 0) {
                // Document has values: every accessor must surface the first one.
                assertEquals(values[d][0], longs.getValue());
                assertEquals(values[d][0], (long) longs.get(0));
                assertEquals(values[d][0], longField.get(Long.MIN_VALUE));
                assertEquals(values[d][0], longField.get(0, Long.MIN_VALUE));
                // Out-of-range positions fail with a descriptive message.
                Exception e = expectThrows(IndexOutOfBoundsException.class, () -> { long l = longs.get(longs.size()); });
                assertEquals("A document doesn't have a value for a field at position [" + longs.size() + "]!", e.getMessage());
            } else {
                // Missing field: direct access must fail and point users at size().
                Exception e = expectThrows(IllegalStateException.class, longs::getValue);
                assertEquals(
                    "A document doesn't have a value for a field! "
                        + "Use doc[<field>].size()==0 to check if a document is missing a field!",
                    e.getMessage()
                );
                e = expectThrows(IllegalStateException.class, () -> longs.get(0));
                assertEquals(
                    "A document doesn't have a value for a field! "
                        + "Use doc[<field>].size()==0 to check if a document is missing a field!",
                    e.getMessage()
                );
            }
            assertEquals(values[d].length, longField.size());
            for (int i = 0; i < values[d].length; i++) {
                assertEquals(values[d][i], longs.get(i).longValue());
                assertEquals(values[d][i], longField.get(i, Long.MIN_VALUE));
            }
            // Script doc values are read-only views; mutation must be rejected.
            Exception e = expectThrows(UnsupportedOperationException.class, () -> longs.add(100L));
            assertEquals("doc values are unmodifiable", e.getMessage());
        }
    }
    // Wraps the in-memory matrix in a minimal SortedNumericLongValues stub.
    private LongDocValuesField wrap(long[][] values) {
        return new LongDocValuesField(new SortedNumericLongValues() {
            long[] current;
            int i;
            @Override
            public boolean advanceExact(int doc) {
                i = 0;
                current = values[doc];
                return current.length > 0;
            }
            @Override
            public int docValueCount() {
                return current.length;
            }
            @Override
            public long nextValue() {
                return current[i++];
            }
        }, "test");
    }
}
|
ScriptDocValuesLongsTests
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/metrics/TestNativeAzureFileSystemMetricsSystem.java
|
{
"start": 1245,
"end": 3783
}
|
class ____ {
  private static final String WASB_FILES_CREATED = "wasb_files_created";
  // Reads the most recently published files-created counter (0 if absent).
  private static int getFilesCreated(AzureBlobStorageTestAccount testAccount) {
    return testAccount.getLatestMetricValue(WASB_FILES_CREATED, 0).intValue();
  }
  /**
   * Tests that when we have multiple file systems created/destroyed
   * metrics from each are published correctly.
   * @throws Exception on a failure
   */
  @Test
  public void testMetricsAcrossFileSystems()
      throws Exception {
    AzureBlobStorageTestAccount a1, a2, a3;
    a1 = AzureBlobStorageTestAccount.createMock();
    assertFilesCreated(a1, "a1", 0);
    a2 = AzureBlobStorageTestAccount.createMock();
    assertFilesCreated(a2, "a2", 0);
    a1.getFileSystem().create(new Path("/foo")).close();
    a1.getFileSystem().create(new Path("/bar")).close();
    a2.getFileSystem().create(new Path("/baz")).close();
    // Counters stay at 0 until the owning file system is closed.
    assertFilesCreated(a1, "a1", 0);
    assertFilesCreated(a2, "a2", 0);
    a1.closeFileSystem(); // Causes the file system to close, which publishes metrics
    a2.closeFileSystem();
    // Each account sees only its own creations: two for a1, one for a2.
    assertFilesCreated(a1, "a1", 2);
    assertFilesCreated(a2, "a2", 1);
    a3 = AzureBlobStorageTestAccount.createMock();
    assertFilesCreated(a3, "a3", 0);
    a3.closeFileSystem();
    assertFilesCreated(a3, "a3", 0);
  }
  /**
   * Assert that a specific number of files were created.
   * @param account account to examine
   * @param name account name (for exception text)
   * @param expected expected value
   */
  private void assertFilesCreated(AzureBlobStorageTestAccount account,
      String name, int expected) {
    assertEquals(expected, getFilesCreated(account),
        "Files created in account " + name);
  }
  // Generated metrics source names must share the fixed prefix yet stay unique.
  @Test
  public void testMetricsSourceNames() {
    String name1 = NativeAzureFileSystem.newMetricsSourceName();
    String name2 = NativeAzureFileSystem.newMetricsSourceName();
    assertTrue(name1.startsWith("AzureFileSystemMetrics"));
    assertTrue(name2.startsWith("AzureFileSystemMetrics"));
    assertTrue(!name1.equals(name2));
  }
  // With the skip-metrics property set, closing publishes no file-creation count.
  @Test
  public void testSkipMetricsCollection() throws Exception {
    AzureBlobStorageTestAccount a;
    a = AzureBlobStorageTestAccount.createMock();
    a.getFileSystem().getConf().setBoolean(
        NativeAzureFileSystem.SKIP_AZURE_METRICS_PROPERTY_NAME, true);
    a.getFileSystem().create(new Path("/foo")).close();
    a.closeFileSystem(); // Causes the file system to close, which publishes metrics
    assertFilesCreated(a, "a", 0);
  }
}
|
TestNativeAzureFileSystemMetricsSystem
|
java
|
spring-projects__spring-framework
|
spring-messaging/src/test/java/org/springframework/messaging/rsocket/service/RSocketExchangeBeanRegistrationAotProcessorTests.java
|
{
"start": 3035,
"end": 3097
}
|
// Negative-case fixture: carries no exchange annotations.
// NOTE(review): presumably verifies that plain interfaces trigger no AOT bean registration — confirm against the test.
interface ____ {
    void notExchange();
}
|
NonAnnotatedInterface
|
java
|
quarkusio__quarkus
|
extensions/hibernate-validator/deployment/src/test/java/io/quarkus/hibernate/validator/test/ConstraintValidatorLocalesTest.java
|
{
"start": 1112,
"end": 1328
}
|
class ____ {
    // Value must start with 'A'; the violation message is resolved from the message bundle.
    @Pattern(regexp = "A.*", message = "{pattern.message}")
    private String name;

    public MyBean(String name) {
        this.name = name;
    }
}
}
|
MyBean
|
java
|
junit-team__junit5
|
jupiter-tests/src/test/java/org/junit/jupiter/engine/extension/ExtensionRegistrationViaParametersAndFieldsTests.java
|
{
"start": 30019,
"end": 30209
}
|
// Parameter extension specialized for BeforeAllParameter; all behavior is inherited from BaseParameterExtension.
class ____ extends BaseParameterExtension<BeforeAllParameter> {
}
}
@Target(ElementType.PARAMETER)
@Retention(RetentionPolicy.RUNTIME)
@ExtendWith(AfterAllParameter.Extension.class)
@
|
Extension
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/ComponentProcessorTest.java
|
{
"start": 27084,
"end": 27810
}
|
interface ____ {",
" B b();",
"}");
CompilerTests.daggerCompiler(aFile, bFile, aComponentFile, bComponentFile)
.withProcessingOptions(compilerMode.processorOptions())
.compile(
subject -> {
subject.hasErrorCount(0);
subject.generatedSource(goldenFileRule.goldenSource("test/DaggerBComponent"));
});
}
@Test
public void componentWithNullableDependency() throws Exception {
Source bFile =
CompilerTests.javaSource(
"test.B",
"package test;",
"",
"import javax.inject.Inject;",
"import javax.inject.Provider;",
"",
"final
|
BComponent
|
java
|
elastic__elasticsearch
|
server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/VotingConfigurationIT.java
|
{
"start": 1533,
"end": 6931
}
|
class ____ extends ESIntegTestCase {
    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        // Mock transport is needed to inject send behaviors in the election test below.
        return Collections.singletonList(MockTransportService.TestPlugin.class);
    }
    @Override
    protected boolean addMockHttpTransport() {
        return false; // enable HTTP
    }
    /**
     * Excluding the current master via the voting-config-exclusions REST API must make it
     * abdicate; clearing the exclusions afterwards must leave none in cluster state.
     */
    public void testAbdicateAfterVotingConfigExclusionAdded() throws IOException {
        internalCluster().setBootstrapMasterNodeIndex(0);
        internalCluster().startNodes(2);
        final String originalMaster = internalCluster().getMasterName();
        final var restClient = getRestClient();
        logger.info("--> excluding master node {}", originalMaster);
        final var excludeRequest = new Request("POST", "/_cluster/voting_config_exclusions");
        excludeRequest.addParameter("node_names", originalMaster);
        assertEmptyResponse(restClient.performRequest(excludeRequest));
        clusterAdmin().prepareHealth(TEST_REQUEST_TIMEOUT).setWaitForEvents(Priority.LANGUID).get();
        // The excluded node may no longer be master.
        assertNotEquals(originalMaster, internalCluster().getMasterName());
        final var clearRequest = new Request("DELETE", "/_cluster/voting_config_exclusions");
        clearRequest.addParameter("wait_for_removal", "false");
        assertEmptyResponse(restClient.performRequest(clearRequest));
        assertEquals(
            internalCluster().getInstance(ClusterService.class).state().metadata().coordinationMetadata().getVotingConfigExclusions(),
            empty()
        );
    }
    // Asserts the response is plain text with a completely empty body.
    private void assertEmptyResponse(Response response) throws IOException {
        assertEquals("text/plain; charset=UTF-8", response.getHeader("content-type"));
        assertEquals(0, response.getEntity().getContentLength());
        assertEquals(0, response.getEntity().getContent().readAllBytes().length);
    }
    /**
     * Verifies a node outside the committed voting configuration can still win an election
     * (and then joins the configuration) when all configured nodes are prevented from winning.
     */
    public void testElectsNodeNotInVotingConfiguration() throws Exception {
        internalCluster().setBootstrapMasterNodeIndex(0);
        final List<String> nodeNames = internalCluster().startNodes(4);
        // a 4-node cluster settles on a 3-node configuration; we then prevent the nodes in the configuration from winning an election
        // by failing at the pre-voting stage, so that the extra node must be elected instead when the master shuts down. This extra node
        // should then add itself into the voting configuration.
        assertFalse(
            internalCluster().client()
                .admin()
                .cluster()
                .prepareHealth(TEST_REQUEST_TIMEOUT)
                .setWaitForNodes("4")
                .setWaitForEvents(Priority.LANGUID)
                .get()
                .isTimedOut()
        );
        String excludedNodeName = null;
        final ClusterState clusterState = internalCluster().client()
            .admin()
            .cluster()
            .prepareState(TEST_REQUEST_TIMEOUT)
            .clear()
            .setNodes(true)
            .setMetadata(true)
            .get()
            .getState();
        final Set<String> votingConfiguration = clusterState.getLastCommittedConfiguration().getNodeIds();
        assertThat(votingConfiguration, hasSize(3));
        assertThat(clusterState.nodes().getSize(), equalTo(4));
        assertThat(votingConfiguration, hasItem(clusterState.nodes().getMasterNodeId()));
        // Find the single node left out of the 3-node voting configuration.
        for (DiscoveryNode discoveryNode : clusterState.nodes()) {
            if (votingConfiguration.contains(discoveryNode.getId()) == false) {
                assertThat(excludedNodeName, nullValue());
                excludedNodeName = discoveryNode.getName();
            }
        }
        // Make every configured node fail pre-voting so only the excluded node can win.
        for (final String sender : nodeNames) {
            if (sender.equals(excludedNodeName)) {
                continue;
            }
            final var senderTransportService = MockTransportService.getInstance(sender);
            for (final String receiver : nodeNames) {
                senderTransportService.addSendBehavior(
                    internalCluster().getInstance(TransportService.class, receiver),
                    (connection, requestId, action, request, options) -> {
                        if (action.equals(StatefulPreVoteCollector.REQUEST_PRE_VOTE_ACTION_NAME)) {
                            throw new ElasticsearchException("rejected");
                        }
                        connection.sendRequest(requestId, action, request, options);
                    }
                );
            }
        }
        internalCluster().stopCurrentMasterNode();
        assertFalse(
            internalCluster().client()
                .admin()
                .cluster()
                .prepareHealth(TEST_REQUEST_TIMEOUT)
                .setWaitForNodes("3")
                .setWaitForEvents(Priority.LANGUID)
                .get()
                .isTimedOut()
        );
        final ClusterState newClusterState = internalCluster().client()
            .admin()
            .cluster()
            .prepareState(TEST_REQUEST_TIMEOUT)
            .clear()
            .setNodes(true)
            .setMetadata(true)
            .get()
            .getState();
        // The previously excluded node must now be master and part of the configuration.
        assertThat(newClusterState.nodes().getMasterNode().getName(), equalTo(excludedNodeName));
        assertThat(newClusterState.getLastCommittedConfiguration().getNodeIds(), hasItem(newClusterState.nodes().getMasterNodeId()));
    }
}
|
VotingConfigurationIT
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/TestExceptionCheckerTest.java
|
{
"start": 1380,
"end": 2017
}
|
class ____ {
@Test(expected = IOException.class, timeout = 0L)
public void test() throws Exception {
Path p = Paths.get("NOSUCH");
Files.readAllBytes(p);
Files.readAllBytes(p);
}
}
""")
.addOutputLines(
"out/ExceptionTest.java",
"""
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertThrows;
import java.io.IOException;
import java.nio.file.*;
import org.junit.Test;
|
ExceptionTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/boot/models/annotations/internal/OverriddenSQLOrdersAnnotation.java
|
{
"start": 786,
"end": 1876
}
|
/**
 * In-memory representation of {@code DialectOverride.SQLOrders}, constructible from the
 * three annotation sources Hibernate models supports: dynamic (XML), JDK reflection,
 * and a Jandex attribute map.
 */
class ____
		implements DialectOverride.SQLOrders, RepeatableContainer<DialectOverride.SQLOrder> {
	private DialectOverride.SQLOrder[] value;
	/**
	 * Used in creating dynamic annotation instances (e.g. from XML)
	 */
	public OverriddenSQLOrdersAnnotation(ModelsContext modelContext) {
		// Intentionally empty: dynamic instances are populated later via value(...).
	}
	/**
	 * Used in creating annotation instances from JDK variant
	 */
	public OverriddenSQLOrdersAnnotation(SQLOrders annotation, ModelsContext modelContext) {
		this.value = extractJdkValue( annotation, DIALECT_OVERRIDE_SQL_ORDERS, "value", modelContext );
	}
	/**
	 * Used in creating annotation instances from Jandex variant
	 */
	public OverriddenSQLOrdersAnnotation(Map<String, Object> attributeValues, ModelsContext modelContext) {
		this.value = (DialectOverride.SQLOrder[]) attributeValues.get( "value" );
	}
	@Override
	public Class<? extends Annotation> annotationType() {
		return SQLOrders.class;
	}
	@Override
	public DialectOverride.SQLOrder[] value() {
		return value;
	}
	// Mutator used when building dynamic instances.
	public void value(DialectOverride.SQLOrder[] value) {
		this.value = value;
	}
}
|
OverriddenSQLOrdersAnnotation
|
java
|
apache__spark
|
common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExecutorDiskUtils.java
|
{
"start": 923,
"end": 1794
}
|
class ____ {
/**
* Hashes a filename into the corresponding local directory, in a manner consistent with
* Spark's DiskBlockManager.getFile().
*/
public static String getFilePath(String[] localDirs, int subDirsPerLocalDir, String filename) {
int hash = JavaUtils.nonNegativeHash(filename);
String localDir = localDirs[hash % localDirs.length];
int subDirId = (hash / localDirs.length) % subDirsPerLocalDir;
final String notNormalizedPath =
localDir + File.separator + String.format("%02x", subDirId) + File.separator + filename;
// Interning the normalized path as according to measurements, in some scenarios such
// duplicate strings may waste a lot of memory (~ 10% of the heap).
// Unfortunately, we cannot just call the normalization code that java.io.File
// uses, since it is in the package-private
|
ExecutorDiskUtils
|
java
|
reactor__reactor-core
|
reactor-core/src/test/java/reactor/util/context/Context4Test.java
|
{
"start": 1071,
"end": 13548
}
|
class ____ {
Context4 c = new Context4(1, "A", 2, "B", 3, "C", 4, "D");
@Test
public void replaceKey1NewContext() throws Exception {
Context put = c.put(1, "foo");
assertThat(put)
.isInstanceOf(Context4.class)
.isNotSameAs(c);
assertThat(put.stream().map(Map.Entry::getKey))
.containsExactly(1, 2, 3, 4);
assertThat(put.stream().map(Map.Entry::getValue))
.containsExactly("foo", "B", "C", "D");
}
@Test
public void replaceKey2NewContext() {
Context put = c.put(2, "foo");
assertThat(put)
.isInstanceOf(Context4.class)
.isNotSameAs(c);
assertThat(put.stream().map(Map.Entry::getKey))
.containsExactly(1, 2, 3, 4);
assertThat(put.stream().map(Map.Entry::getValue))
.containsExactly("A", "foo", "C", "D");
}
@Test
public void replaceKey3NewContext() {
Context put = c.put(3, "foo");
assertThat(put)
.isInstanceOf(Context4.class)
.isNotSameAs(c);
assertThat(put.stream().map(Map.Entry::getKey))
.containsExactly(1, 2, 3, 4);
assertThat(put.stream().map(Map.Entry::getValue))
.containsExactly("A", "B", "foo", "D");
}
@Test
public void replaceKey4NewContext() {
Context put = c.put(4, "foo");
assertThat(put)
.isInstanceOf(Context4.class)
.isNotSameAs(c);
assertThat(put.stream().map(Map.Entry::getKey))
.containsExactly(1, 2, 3, 4);
assertThat(put.stream().map(Map.Entry::getValue))
.containsExactly("A", "B", "C", "foo");
}
@Test
public void putDifferentKeyContext5() throws Exception {
Context put = c.put(5, "Abis");
assertThat(put)
.isInstanceOf(Context5.class);
assertThat(put.stream().map(Map.Entry::getKey))
.containsExactly(1, 2, 3, 4, 5);
assertThat(put.stream().map(Map.Entry::getValue))
.containsExactly("A", "B", "C", "D", "Abis");
}
@Test
public void hasKey() throws Exception {
assertThat(c.hasKey(1)).as("hasKey(1)").isTrue();
assertThat(c.hasKey(2)).as("hasKey(2)").isTrue();
assertThat(c.hasKey(3)).as("hasKey(3)").isTrue();
assertThat(c.hasKey(4)).as("hasKey(4)").isTrue();
assertThat(c.hasKey(5)).as("hasKey(5)").isFalse();
}
@Test
public void removeKeys() {
assertThat(c.delete(1))
.as("delete(1)")
.isInstanceOf(Context3.class)
.has(keyValue(2, "B"))
.has(keyValue(3, "C"))
.has(keyValue(4, "D"))
.doesNotHave(key(1));
assertThat(c.delete(2))
.as("delete(2)")
.isInstanceOf(Context3.class)
.has(keyValue(1, "A"))
.has(keyValue(3, "C"))
.has(keyValue(4, "D"))
.doesNotHave(key(2));
assertThat(c.delete(3))
.as("delete(3)")
.isInstanceOf(Context3.class)
.has(keyValue(1, "A"))
.has(keyValue(2, "B"))
.has(keyValue(4, "D"))
.doesNotHave(key(3));
assertThat(c.delete(4))
.as("delete(4)")
.isInstanceOf(Context3.class)
.has(keyValue(1, "A"))
.has(keyValue(2, "B"))
.has(keyValue(3, "C"))
.doesNotHave(key(4));
assertThat(c.delete(5)).isSameAs(c);
}
@Test
public void get() {
assertThat((String) c.get(1)).isEqualTo("A");
assertThat((String) c.get(2)).isEqualTo("B");
assertThat((String) c.get(3)).isEqualTo("C");
assertThat((String) c.get(4)).isEqualTo("D");
}
@Test
public void getUnknown() throws Exception {
assertThatExceptionOfType(NoSuchElementException.class)
.isThrownBy(() -> c.get(5))
.withMessage("Context does not contain key: 5");
}
@Test
public void getUnknownWithDefault() throws Exception {
assertThat(c.getOrDefault("peeka", "boo")).isEqualTo("boo");
}
@Test
public void getUnknownWithDefaultNull() throws Exception {
Object def = null;
assertThat(c.getOrDefault("peeka", def)).isNull();
}
@Test
public void stream() throws Exception {
assertThat(c.stream().collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)))
.hasSize(4)
.containsOnlyKeys(1, 2, 3, 4)
.containsValues("A", "B", "C", "D");
}
@Test
void forEach() {
Map<Object, Object> items = new HashMap<>();
c.forEach(items::put);
assertThat(items)
.hasSize(4)
.containsEntry(1, "A")
.containsEntry(2, "B")
.containsEntry(3, "C")
.containsEntry(4, "D");
}
@Test
void forEachThrows() {
Map<Object, Object> items = new HashMap<>();
BiConsumer<Object, Object> action = (key, value) -> {
if (key.equals(2)) {
throw new RuntimeException("Boom!");
}
items.put(key, value);
};
assertThatExceptionOfType(RuntimeException.class)
.isThrownBy(() -> c.forEach(action))
.withMessage("Boom!");
assertThat(items)
.hasSize(1)
.containsOnlyKeys(1)
.containsValues("A");
}
@Test
public void string() throws Exception {
assertThat(c.toString()).isEqualTo("Context4{1=A, 2=B, 3=C, 4=D}");
}
@Test
public void ofApi() {
assertThat(Context.of("test", 12, "value", true, 123, 456L, true, false))
.isInstanceOf(Context4.class)
.hasToString("Context4{test=12, value=true, 123=456, true=false}");
}
@Test
public void putAllOf() {
Context m = Context.of("A", 1, "B", 2, "C", 3);
Context put = c.putAll(m.readOnly());
assertThat(put).isInstanceOf(ContextN.class);
assertThat(put.stream().map(Map.Entry::getKey))
.containsExactlyInAnyOrder(1, 2, 3, 4, "A", "B", "C");
}
@Test
public void putAllReplaces() {
Context m = Context.of(c.key1, "replaced", "A", 1);
Context put = c.putAll(m.readOnly());
assertThat(put).isInstanceOf(Context5.class)
.hasToString("Context5{1=replaced, 2=B, 3=C, 4=D, A=1}");
}
@Test
public void putAllOfEmpty() {
Context m = Context.empty();
Context put = c.putAll(m.readOnly());
assertThat(put).isSameAs(c);
}
@Test
public void putNonNullWithNull() {
Context put = c.putNonNull("putNonNull", null);
assertThat(put).isSameAs(c);
}
@Test
public void putNonNullWithValue() {
Context put = c.putNonNull("putNonNull", "value");
assertThat(put.getOrEmpty("putNonNull")).contains("value");
}
@Test
public void size() {
assertThat(c.size()).isEqualTo(4);
}
@Test
public void checkDuplicateKeysZeroOne() {
assertThatCode(Context4::checkKeys).as("zero").doesNotThrowAnyException();
assertThatCode(() -> Context4.checkKeys("one")).as("one").doesNotThrowAnyException();
}
@Test
public void checkNullKeysOne() {
assertThatNullPointerException()
.isThrownBy(() -> Context4.checkKeys((Object) null))
.withMessage("key1");
}
@Test
public void checkDuplicateKeysTwo() {
assertThatIllegalArgumentException()
.isThrownBy(() -> Context4.checkKeys(1, 1))
.withMessage("Key #1 (1) is duplicated");
}
@Test
public void checkNullKeysTwo() {
assertThatNullPointerException().isThrownBy(() -> Context4.checkKeys("one", null))
.withMessage("key2");
}
@Test
public void checkDuplicateKeysThree() {
assertThatIllegalArgumentException()
.isThrownBy(() -> Context4.checkKeys(1, 1, 3))
.withMessage("Key #1 (1) is duplicated");
assertThatIllegalArgumentException()
.isThrownBy(() -> Context4.checkKeys(1, 2, 1))
.withMessage("Key #1 (1) is duplicated");
assertThatIllegalArgumentException()
.isThrownBy(() -> Context4.checkKeys(1, 2, 2))
.withMessage("Key #2 (2) is duplicated");
}
@Test
public void checkNullKeysThree() {
assertThatNullPointerException()
.isThrownBy(() -> Context4.checkKeys("one", "two", null))
.withMessage("key3");
}
@Test
public void checkDuplicateKeysFour() {
assertThatIllegalArgumentException()
.isThrownBy(() -> Context4.checkKeys(1, 1, 3, 4))
.withMessage("Key #1 (1) is duplicated");
assertThatIllegalArgumentException()
.isThrownBy(() -> Context4.checkKeys(1, 2, 1, 4))
.withMessage("Key #1 (1) is duplicated");
assertThatIllegalArgumentException()
.isThrownBy(() -> Context4.checkKeys(1, 2, 3, 1))
.withMessage("Key #1 (1) is duplicated");
assertThatIllegalArgumentException()
.isThrownBy(() -> Context4.checkKeys(1, 2, 2, 4))
.withMessage("Key #2 (2) is duplicated");
assertThatIllegalArgumentException()
.isThrownBy(() -> Context4.checkKeys(1, 2, 3, 2))
.withMessage("Key #2 (2) is duplicated");
assertThatIllegalArgumentException()
.isThrownBy(() -> Context4.checkKeys(1, 2, 3, 3))
.withMessage("Key #3 (3) is duplicated");
}
@Test
public void checkNullKeysFour() {
assertThatNullPointerException()
.isThrownBy(() -> Context4.checkKeys("one", "two", "three", null))
.withMessage("key4");
}
@Test
public void checkDuplicateKeysFive() {
assertThatIllegalArgumentException()
.isThrownBy(() -> Context4.checkKeys(1, 1, 3, 4, 5))
.withMessage("Key #1 (1) is duplicated");
assertThatIllegalArgumentException()
.isThrownBy(() -> Context4.checkKeys(1, 2, 1, 4, 5))
.withMessage("Key #1 (1) is duplicated");
assertThatIllegalArgumentException()
.isThrownBy(() -> Context4.checkKeys(1, 2, 3, 1, 5))
.withMessage("Key #1 (1) is duplicated");
assertThatIllegalArgumentException()
.isThrownBy(() -> Context4.checkKeys(1, 2, 3, 4, 1))
.withMessage("Key #1 (1) is duplicated");
assertThatIllegalArgumentException()
.isThrownBy(() -> Context4.checkKeys(1, 2, 2, 4, 5))
.withMessage("Key #2 (2) is duplicated");
assertThatIllegalArgumentException()
.isThrownBy(() -> Context4.checkKeys(1, 2, 3, 2, 5))
.withMessage("Key #2 (2) is duplicated");
assertThatIllegalArgumentException()
.isThrownBy(() -> Context4.checkKeys(1, 2, 3, 4, 2))
.withMessage("Key #2 (2) is duplicated");
assertThatIllegalArgumentException()
.isThrownBy(() -> Context4.checkKeys(1, 2, 3, 3, 5))
.withMessage("Key #3 (3) is duplicated");
assertThatIllegalArgumentException()
.isThrownBy(() -> Context4.checkKeys(1, 2, 3, 4, 3))
.withMessage("Key #3 (3) is duplicated");
assertThatIllegalArgumentException()
.isThrownBy(() -> Context4.checkKeys(1, 2, 3, 4, 4))
.withMessage("Key #4 (4) is duplicated");
}
@Test
public void checkNullKeysFive() {
assertThatNullPointerException()
.isThrownBy(() -> Context4.checkKeys("one", "two", "three", "four", null))
.withMessage("key5");
}
@Test
public void putAllSelfIntoEmpty() {
CoreContext initial = new Context0();
Context result = ((CoreContext) c).putAllInto(initial);
assertThat(result).isNotSameAs(initial)
.isNotSameAs(c);
assertThat(result.stream()).containsExactlyElementsOf(c.stream().collect(Collectors.toList()));
}
@Test
public void putAllSelfIntoContextN() {
CoreContext initial = new ContextN(1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6);
Context4 self = new Context4("A", 1, "B", 2, "C", 3, "D", 4);
Context result = self.putAllInto(initial);
assertThat(result).isNotSameAs(initial)
.isNotSameAs(c);
assertThat(result.stream().map(String::valueOf))
.containsExactly("1=1", "2=2", "3=3", "4=4", "5=5", "6=6", "A=1", "B=2", "C=3", "D=4");
}
@Test
public void unsafePutAllIntoShouldReplace() {
ContextN ctx = new ContextN(Collections.emptyMap());
ctx.accept(1, "VALUE1");
ctx.accept(2, "VALUE2");
ctx.accept(3, "VALUE3");
ctx.accept(4, "VALUE4");
ctx.accept("extra", "value");
Context4 self = new Context4(1, "REPLACED1", 2, "REPLACED2",
3, "REPLACED3", 4, "REPLACED4");
self.unsafePutAllInto(ctx);
assertThat(ctx)
.containsEntry(1, "REPLACED1")
.containsEntry(2, "REPLACED2")
.containsEntry(3, "REPLACED3")
.containsEntry(4, "REPLACED4")
.containsEntry("extra", "value")
.hasSize(5);
}
@Test
void putAllMap() {
Map<Object, Object> map = new HashMap<>();
map.put("A", 1);
map.put("B", 2);
map.put("C", 3);
Context put = c.putAllMap(map);
assertThat(put).isInstanceOf(ContextN.class)
.hasToString("ContextN{1=A, 2=B, 3=C, 4=D, A=1, B=2, C=3}");
}
@Test
void putAllMapEmpty() {
Context put = c.putAllMap(Collections.emptyMap());
assertThat(put).isSameAs(c);
}
@Test
void putAllMapNullKey() {
assertThatExceptionOfType(NullPointerException.class)
.isThrownBy(() -> c.putAllMap(Collections.singletonMap(null, "oops")));
}
@Test
void putAllMapNullValue() {
assertThatExceptionOfType(NullPointerException.class)
.isThrownBy(() -> c.putAllMap(Collections.singletonMap("A", null)));
}
@Test
void putAllMapReplaces() {
Map<Object, Object> map = new HashMap<>();
map.put(c.key1, "replaced");
map.put("A", 1);
Context put = c.putAllMap(map);
assertThat(put).isInstanceOf(Context5.class)
.hasToString("Context5{1=replaced, 2=B, 3=C, 4=D, A=1}");
}
}
|
Context4Test
|
java
|
spring-projects__spring-framework
|
spring-expression/src/test/java/org/springframework/expression/spel/SpelReproTests.java
|
{
"start": 67706,
"end": 68099
}
|
class ____ implements BeanResolver {
@Override
public Object resolve(EvaluationContext context, String beanName) throws AccessException {
return switch (beanName) {
case "foo" -> "custard";
case "foo.bar" -> "trouble";
case "&foo" -> "foo factory";
case "goo" -> throw new AccessException("DONT ASK ME ABOUT GOO");
default -> null;
};
}
}
static
|
MyBeanResolver
|
java
|
reactor__reactor-core
|
reactor-test/src/main/java/reactor/test/StepVerifier.java
|
{
"start": 37467,
"end": 40466
}
|
interface ____<T> extends Step<T> {
/**
* Provide a {@link Predicate} that will turn this StepVerifier's subscribers into
* {@link reactor.core.Fuseable.ConditionalSubscriber} and drive the {@link reactor.core.Fuseable.ConditionalSubscriber#tryOnNext(Object)}
* behavior of these subscribers. Set to {@code null} to deactivate (the default).
*
* @param tryOnNextPredicate the {@link Predicate} to use for conditional tryOnNext path
* @return the verifier for final {@link #verify()} call
*/
default FirstStep<T> enableConditionalSupport(@Nullable Predicate<? super T> tryOnNextPredicate) {
if (tryOnNextPredicate != null) {
throw new UnsupportedOperationException("This implementation of StepVerifier doesn't support ConditionalSubscriber mode");
}
return this;
}
/**
* Expect the source {@link Publisher} to run with Reactor Fusion flow
* optimization. It will be requesting {@link Fuseable#ANY} fusion mode.
*
* @return this builder
*
* @see Fuseable
*/
Step<T> expectFusion();
/**
* Expect the source {@link Publisher} to run the requested Reactor Fusion mode
* from any of these modes :
* {@link Fuseable#NONE}, {@link Fuseable#SYNC}, {@link Fuseable#ASYNC},
* {@link Fuseable#ANY}, {@link Fuseable#THREAD_BARRIER}.
*
* @param requested the requested and expected fusion mode
*
* @return this builder
*
* @see Fuseable
*/
Step<T> expectFusion(int requested);
/**
* Expect the source {@link Publisher} to run with Reactor Fusion flow
* optimization.
* Expect the source {@link Publisher} to run the requested Reactor Fusion mode
* from any of these modes :
* {@link Fuseable#NONE}, {@link Fuseable#SYNC}, {@link Fuseable#ASYNC},
* {@link Fuseable#ANY}, {@link Fuseable#THREAD_BARRIER}.
*
* @param requested the requested fusion mode
* @param expected the expected fusion mode
*
* @return this builder
*
* @see Fuseable
*/
Step<T> expectFusion(int requested, int expected);
/**
* Expect the source {@link Publisher} to NOT run with Reactor Fusion flow
* optimization. It will check if publisher is {@link Fuseable} or
* subscription is a {@link reactor.core.Fuseable.QueueSubscription}.
*
* @return this builder
*
* @see Fuseable
*/
Step<T> expectNoFusionSupport();
/**
* Expect a {@link Subscription}.
* Effectively behave as the default implicit {@link Subscription} expectation.
*
* @return this builder
*
* @see Subscriber#onSubscribe(Subscription)
*/
Step<T> expectSubscription();
/**
* Expect a {@link Subscription} and evaluate with the given predicate.
*
* @param predicate the predicate to test on the received {@link Subscription}
*
* @return this builder
*
* @see Subscriber#onSubscribe(Subscription)
*/
Step<T> expectSubscriptionMatches(Predicate<? super Subscription> predicate);
}
/**
* Exposes post-verification state assertions.
*/
|
FirstStep
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestCombineSequenceFileInputFormat.java
|
{
"start": 4062,
"end": 5699
}
|
class ____ {
private final int start;
private final int end;
Range(int start, int end) {
this.start = start;
this.end = end;
}
@Override
public String toString() {
return "(" + start + ", " + end + ")";
}
}
private static Range[] createRanges(int length, int numFiles, Random random) {
// generate a number of files with various lengths
Range[] ranges = new Range[numFiles];
for (int i = 0; i < numFiles; i++) {
int start = i == 0 ? 0 : ranges[i-1].end;
int end = i == numFiles - 1 ?
length :
(length/numFiles)*(2*i + 1)/2 + random.nextInt(length/numFiles) + 1;
ranges[i] = new Range(start, end);
}
return ranges;
}
private static void createFiles(int length, int numFiles, Random random)
throws IOException {
Range[] ranges = createRanges(length, numFiles, random);
for (int i = 0; i < numFiles; i++) {
Path file = new Path(workDir, "test_" + i + ".seq");
// create a file with length entries
@SuppressWarnings("deprecation")
SequenceFile.Writer writer =
SequenceFile.createWriter(localFs, conf, file,
IntWritable.class, BytesWritable.class);
Range range = ranges[i];
try {
for (int j = range.start; j < range.end; j++) {
IntWritable key = new IntWritable(j);
byte[] data = new byte[random.nextInt(10)];
random.nextBytes(data);
BytesWritable value = new BytesWritable(data);
writer.append(key, value);
}
} finally {
writer.close();
}
}
}
}
|
Range
|
java
|
google__error-prone
|
check_api/src/test/java/com/google/errorprone/util/FindIdentifiersTest.java
|
{
"start": 3376,
"end": 3919
}
|
class ____ {
private void doIt() {
String s1 = "";
String s2 = "";
if (true)
// BUG: Diagnostic contains: [s1, s2]
String.format(s1 + s2);
String s3 = "";
}
}
""")
.doTest();
}
@Test
public void findAllIdentsLocalsOuterScope() {
CompilationTestHelper.newInstance(PrintIdents.class, getClass())
.addSourceLines(
"Test.java",
"""
|
Test
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/test/java/org/apache/camel/builder/endpoint/RawUrlWireTapDslTest.java
|
{
"start": 1039,
"end": 2442
}
|
class ____ extends BaseEndpointDslTest {
@Test
public void testFlow() throws Exception {
MockEndpoint m1 = getMockEndpoint("mock:m1");
m1.expectedMessageCount(1);
MockEndpoint m2 = getMockEndpoint("mock:m2");
m2.expectedMessageCount(1);
MockEndpoint m3 = getMockEndpoint("mock:m3");
m3.expectedMessageCount(1);
template.requestBodyAndHeader("direct:a", "Hello World", "size", 1, String.class);
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new EndpointRouteBuilder() {
public void configure() {
from(direct("a"))
.recipientList(endpoints(mock("m1"), direct("b")))
.routingSlip(endpoints(mock("m2"), direct("c")))
.wireTap(seda("d").size("${header.size}")).dynamicUri(true)
.enrich(direct("e"))
.toD(mock("${header.next}"));
from(direct("b")).to(log("endpoint.b"));
from(direct("c")).to(log("endpoint.c"));
from(seda("d?size=1")).to(log("endpoint.d"));
from(direct("e"))
.setBody(constant("body"))
.setHeader("next", constant("m3"));
}
};
}
}
|
RawUrlWireTapDslTest
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/FieldCanBeFinalTest.java
|
{
"start": 9526,
"end": 9915
}
|
class ____ {
@VisibleForTesting public int x;
Test() {
x = 42;
}
}
""")
.doTest();
}
@Test
public void protectedField() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import com.google.common.annotations.VisibleForTesting;
|
Test
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/litemode/AnnotatedConfigClassesWithoutAtConfigurationTests.java
|
{
"start": 2147,
"end": 3077
}
|
class ____ {
static final AtomicInteger enigmaCallCount = new AtomicInteger();
@Bean
String enigma() {
return "enigma #" + enigmaCallCount.incrementAndGet();
}
@Bean
LifecycleBean lifecycleBean() {
// The following call to enigma() literally invokes the local
// enigma() method, not a CGLIB proxied version, since these methods
// are essentially factory bean methods.
LifecycleBean bean = new LifecycleBean(enigma());
assertThat(bean.isInitialized()).isFalse();
return bean;
}
}
@Autowired
private String enigma;
@Autowired
private LifecycleBean lifecycleBean;
@Test
void testSPR_9051() {
assertThat(enigma).isNotNull();
assertThat(lifecycleBean).isNotNull();
assertThat(lifecycleBean.isInitialized()).isTrue();
Set<String> names = Set.of(enigma, lifecycleBean.getName());
assertThat(names).containsExactlyInAnyOrder("enigma #1", "enigma #2");
}
}
|
AnnotatedFactoryBeans
|
java
|
mockito__mockito
|
mockito-core/src/test/java/org/mockitousage/junitrunner/DeepStubbingWithJUnitRunnerTest.java
|
{
"start": 750,
"end": 862
}
|
class ____ {
void someMethod(Root root) {
root.getFoo().getBar();
}
}
|
SomeClass
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/where/hbm/EagerToManyWhereUseClassWhereTest.java
|
{
"start": 910,
"end": 5305
}
|
class ____ {
@AfterEach
void dropTestData(SessionFactoryScope factoryScope) {
factoryScope.dropData();
}
@Test
@JiraKey( "HHH-13011" )
public void testAssociatedWhereClause(SessionFactoryScope factoryScope) {
var product = new Product();
var flowers = new Category();
flowers.setId( 1 );
flowers.setName( "flowers" );
flowers.setDescription( "FLOWERS" );
product.getCategoriesOneToMany().add( flowers );
product.getCategoriesWithDescOneToMany().add( flowers );
product.getCategoriesManyToMany().add( flowers );
product.getCategoriesWithDescManyToMany().add( flowers );
product.getCategoriesWithDescIdLt4ManyToMany().add( flowers );
var vegetables = new Category();
vegetables.setId( 2 );
vegetables.setName( "vegetables" );
vegetables.setDescription( "VEGETABLES" );
product.getCategoriesOneToMany().add( vegetables );
product.getCategoriesWithDescOneToMany().add( vegetables );
product.getCategoriesManyToMany().add( vegetables );
product.getCategoriesWithDescManyToMany().add( vegetables );
product.getCategoriesWithDescIdLt4ManyToMany().add( vegetables );
var dogs = new Category();
dogs.setId( 3 );
dogs.setName( "dogs" );
dogs.setDescription( null );
product.getCategoriesOneToMany().add( dogs );
product.getCategoriesWithDescOneToMany().add( dogs );
product.getCategoriesManyToMany().add( dogs );
product.getCategoriesWithDescManyToMany().add( dogs );
product.getCategoriesWithDescIdLt4ManyToMany().add( dogs );
var building = new Category();
building.setId( 4 );
building.setName( "building" );
building.setDescription( "BUILDING" );
product.getCategoriesOneToMany().add( building );
product.getCategoriesWithDescOneToMany().add( building );
product.getCategoriesManyToMany().add( building );
product.getCategoriesWithDescManyToMany().add( building );
product.getCategoriesWithDescIdLt4ManyToMany().add( building );
factoryScope.inTransaction( (session) -> {
session.persist( flowers );
session.persist( vegetables );
session.persist( dogs );
session.persist( building );
session.persist( product );
} );
factoryScope.inTransaction( (session) -> {
var p = session.find( Product.class, product.getId() );
assertNotNull( p );
assertEquals( 4, p.getCategoriesOneToMany().size() );
checkIds( p.getCategoriesOneToMany(), new Integer[] { 1, 2, 3, 4 } );
assertEquals( 3, p.getCategoriesWithDescOneToMany().size() );
checkIds( p.getCategoriesWithDescOneToMany(), new Integer[] { 1, 2, 4 } );
assertEquals( 4, p.getCategoriesManyToMany().size() );
checkIds( p.getCategoriesManyToMany(), new Integer[] { 1, 2, 3, 4 } );
assertEquals( 3, p.getCategoriesWithDescManyToMany().size() );
checkIds( p.getCategoriesWithDescManyToMany(), new Integer[] { 1, 2, 4 } );
assertEquals( 2, p.getCategoriesWithDescIdLt4ManyToMany().size() );
checkIds( p.getCategoriesWithDescIdLt4ManyToMany(), new Integer[] { 1, 2 } );
} );
factoryScope.inTransaction( (session) -> {
var c = session.find( Category.class, flowers.getId() );
assertNotNull( c );
c.setInactive( 1 );
} );
factoryScope.inTransaction( (session) -> {
var c = session.find( Category.class, flowers.getId() );
assertNull( c );
} );
factoryScope.inTransaction( (session) -> {
var p = session.find( Product.class, product.getId() );
assertNotNull( p );
assertEquals( 3, p.getCategoriesOneToMany().size() );
checkIds( p.getCategoriesOneToMany(), new Integer[] { 2, 3, 4 } );
assertEquals( 2, p.getCategoriesWithDescOneToMany().size() );
checkIds( p.getCategoriesWithDescOneToMany(), new Integer[] { 2, 4 } );
assertEquals( 3, p.getCategoriesManyToMany().size() );
checkIds( p.getCategoriesManyToMany(), new Integer[] { 2, 3, 4 } );
assertEquals( 2, p.getCategoriesWithDescManyToMany().size() );
checkIds( p.getCategoriesWithDescManyToMany(), new Integer[] { 2, 4 } );
assertEquals( 1, p.getCategoriesWithDescIdLt4ManyToMany().size() );
checkIds( p.getCategoriesWithDescIdLt4ManyToMany(), new Integer[] { 2 } );
} );
}
private void checkIds(Set<Category> categories, Integer[] expectedIds) {
final Set<Integer> expectedIdSet = new HashSet<>( Arrays.asList( expectedIds ) );
for ( Category category : categories ) {
expectedIdSet.remove( category.getId() );
}
assertTrue( expectedIdSet.isEmpty() );
}
}
|
EagerToManyWhereUseClassWhereTest
|
java
|
apache__maven
|
api/maven-api-core/src/main/java/org/apache/maven/api/Constants.java
|
{
"start": 937,
"end": 5811
}
|
class ____ {
/**
* Maven home.
*
* @since 3.0.0
*/
@Config(readOnly = true, source = Config.Source.SYSTEM_PROPERTIES)
public static final String MAVEN_HOME = "maven.home";
/**
* Maven version.
*
* @since 3.0.0
*/
@Config(readOnly = true, source = Config.Source.SYSTEM_PROPERTIES)
public static final String MAVEN_VERSION = "maven.version";
/**
* Maven major version: contains the major segment of this Maven version.
*
* @since 4.0.0
*/
@Config(readOnly = true, source = Config.Source.SYSTEM_PROPERTIES)
public static final String MAVEN_VERSION_MAJOR = "maven.version.major";
/**
* Maven minor version: contains the minor segment of this Maven version.
*
* @since 4.0.0
*/
@Config(readOnly = true, source = Config.Source.SYSTEM_PROPERTIES)
public static final String MAVEN_VERSION_MINOR = "maven.version.minor";
/**
* Maven patch version: contains the patch segment of this Maven version.
*
* @since 4.0.0
*/
@Config(readOnly = true, source = Config.Source.SYSTEM_PROPERTIES)
public static final String MAVEN_VERSION_PATCH = "maven.version.patch";
/**
* Maven snapshot: contains "true" if this Maven is a snapshot version.
*
* @since 4.0.0
*/
@Config(readOnly = true, source = Config.Source.SYSTEM_PROPERTIES)
public static final String MAVEN_VERSION_SNAPSHOT = "maven.version.snapshot";
/**
* Maven build version: a human-readable string containing this Maven version, buildnumber, and time of its build.
*
* @since 3.0.0
*/
@Config(readOnly = true, source = Config.Source.SYSTEM_PROPERTIES)
public static final String MAVEN_BUILD_VERSION = "maven.build.version";
/**
* Maven installation configuration directory.
*
* @since 4.0.0
*/
@Config(defaultValue = "${maven.home}/conf")
public static final String MAVEN_INSTALLATION_CONF = "maven.installation.conf";
/**
* Maven user configuration directory.
*
* @since 4.0.0
*/
@Config(defaultValue = "${user.home}/.m2")
public static final String MAVEN_USER_CONF = "maven.user.conf";
/**
* Maven project configuration directory.
*
* @since 4.0.0
*/
@Config(defaultValue = "${session.rootDirectory}/.mvn")
public static final String MAVEN_PROJECT_CONF = "maven.project.conf";
/**
* Maven local repository.
*
* @since 3.0.0
*/
@Config(defaultValue = "${maven.user.conf}/repository")
public static final String MAVEN_REPO_LOCAL = "maven.repo.local";
/**
* Maven central repository URL.
* The property will have the value of the <code>MAVEN_REPO_CENTRAL</code>
* environment variable if it is defined.
*
* @since 4.0.0
*/
@Config(defaultValue = "https://repo.maven.apache.org/maven2")
public static final String MAVEN_REPO_CENTRAL = "maven.repo.central";
/**
* Maven installation settings.
*
* @since 4.0.0
*/
@Config(defaultValue = "${maven.installation.conf}/settings.xml")
public static final String MAVEN_INSTALLATION_SETTINGS = "maven.installation.settings";
/**
* Maven user settings.
*
* @since 4.0.0
*/
@Config(defaultValue = "${maven.user.conf}/settings.xml")
public static final String MAVEN_USER_SETTINGS = "maven.user.settings";
/**
* Maven project settings.
*
* @since 4.0.0
*/
@Config(defaultValue = "${maven.project.conf}/settings.xml")
public static final String MAVEN_PROJECT_SETTINGS = "maven.project.settings";
/**
* Maven installation extensions.
*
* @since 4.0.0
*/
@Config(defaultValue = "${maven.installation.conf}/extensions.xml")
public static final String MAVEN_INSTALLATION_EXTENSIONS = "maven.installation.extensions";
/**
* Maven user extensions.
*
* @since 4.0.0
*/
@Config(defaultValue = "${maven.user.conf}/extensions.xml")
public static final String MAVEN_USER_EXTENSIONS = "maven.user.extensions";
/**
* Maven project extensions.
*
* @since 4.0.0
*/
@Config(defaultValue = "${maven.project.conf}/extensions.xml")
public static final String MAVEN_PROJECT_EXTENSIONS = "maven.project.extensions";
/**
* Maven installation toolchains.
*
* @since 4.0.0
*/
@Config(defaultValue = "${maven.installation.conf}/toolchains.xml")
public static final String MAVEN_INSTALLATION_TOOLCHAINS = "maven.installation.toolchains";
/**
* Maven user toolchains.
*
* @since 4.0.0
*/
@Config(defaultValue = "${maven.user.conf}/toolchains.xml")
public static final String MAVEN_USER_TOOLCHAINS = "maven.user.toolchains";
/**
* Extensions
|
Constants
|
java
|
elastic__elasticsearch
|
x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/SequenceSpecTests.java
|
{
"start": 4377,
"end": 6122
}
|
class ____ extends SequenceCriterion {
private final int ordinal;
private boolean unused = true;
TestCriterion(final int ordinal) {
super(
ordinal,
new BoxedQueryRequest(
() -> SearchSourceBuilder.searchSource()
// set a non-negative size
.size(10)
.query(matchAllQuery())
// pass the ordinal through terminate after
.terminateAfter(ordinal),
"timestamp",
emptyList(),
emptySet()
),
keyExtractors,
tsExtractor,
tbExtractor,
implicitTbExtractor,
false,
false
);
this.ordinal = ordinal;
}
int ordinal() {
return ordinal;
}
@Override
public int hashCode() {
return ordinal;
}
public boolean use() {
boolean u = unused;
unused = false;
return u;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
SequenceSpecTests.TestCriterion other = (SequenceSpecTests.TestCriterion) obj;
return ordinal == other.ordinal;
}
@Override
public String toString() {
return format(null, "[{}] -> {}", ordinal, events.get(ordinal).values());
}
}
static
|
TestCriterion
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/KubernetesHPAEndpointBuilderFactory.java
|
{
"start": 23045,
"end": 33237
}
|
interface ____
extends
EndpointProducerBuilder {
default AdvancedKubernetesHPAEndpointProducerBuilder advanced() {
return (AdvancedKubernetesHPAEndpointProducerBuilder) this;
}
/**
* The Kubernetes API Version to use.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param apiVersion the value to set
* @return the dsl builder
*/
default KubernetesHPAEndpointProducerBuilder apiVersion(String apiVersion) {
doSetProperty("apiVersion", apiVersion);
return this;
}
/**
* The dns domain, used for ServiceCall EIP.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param dnsDomain the value to set
* @return the dsl builder
*/
default KubernetesHPAEndpointProducerBuilder dnsDomain(String dnsDomain) {
doSetProperty("dnsDomain", dnsDomain);
return this;
}
/**
* Default KubernetesClient to use if provided.
*
* The option is a:
* <code>io.fabric8.kubernetes.client.KubernetesClient</code> type.
*
* Group: common
*
* @param kubernetesClient the value to set
* @return the dsl builder
*/
default KubernetesHPAEndpointProducerBuilder kubernetesClient(io.fabric8.kubernetes.client.KubernetesClient kubernetesClient) {
doSetProperty("kubernetesClient", kubernetesClient);
return this;
}
/**
* Default KubernetesClient to use if provided.
*
* The option will be converted to a
* <code>io.fabric8.kubernetes.client.KubernetesClient</code> type.
*
* Group: common
*
* @param kubernetesClient the value to set
* @return the dsl builder
*/
default KubernetesHPAEndpointProducerBuilder kubernetesClient(String kubernetesClient) {
doSetProperty("kubernetesClient", kubernetesClient);
return this;
}
/**
* The namespace.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param namespace the value to set
* @return the dsl builder
*/
default KubernetesHPAEndpointProducerBuilder namespace(String namespace) {
doSetProperty("namespace", namespace);
return this;
}
/**
* The port name, used for ServiceCall EIP.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param portName the value to set
* @return the dsl builder
*/
default KubernetesHPAEndpointProducerBuilder portName(String portName) {
doSetProperty("portName", portName);
return this;
}
/**
* The port protocol, used for ServiceCall EIP.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: tcp
* Group: common
*
* @param portProtocol the value to set
* @return the dsl builder
*/
default KubernetesHPAEndpointProducerBuilder portProtocol(String portProtocol) {
doSetProperty("portProtocol", portProtocol);
return this;
}
/**
* Producer operation to do on Kubernetes.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param operation the value to set
* @return the dsl builder
*/
default KubernetesHPAEndpointProducerBuilder operation(String operation) {
doSetProperty("operation", operation);
return this;
}
/**
* The CA Cert Data.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param caCertData the value to set
* @return the dsl builder
*/
default KubernetesHPAEndpointProducerBuilder caCertData(String caCertData) {
doSetProperty("caCertData", caCertData);
return this;
}
/**
* The CA Cert File.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param caCertFile the value to set
* @return the dsl builder
*/
default KubernetesHPAEndpointProducerBuilder caCertFile(String caCertFile) {
doSetProperty("caCertFile", caCertFile);
return this;
}
/**
* The Client Cert Data.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param clientCertData the value to set
* @return the dsl builder
*/
default KubernetesHPAEndpointProducerBuilder clientCertData(String clientCertData) {
doSetProperty("clientCertData", clientCertData);
return this;
}
/**
* The Client Cert File.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param clientCertFile the value to set
* @return the dsl builder
*/
default KubernetesHPAEndpointProducerBuilder clientCertFile(String clientCertFile) {
doSetProperty("clientCertFile", clientCertFile);
return this;
}
/**
* The Key Algorithm used by the client.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param clientKeyAlgo the value to set
* @return the dsl builder
*/
default KubernetesHPAEndpointProducerBuilder clientKeyAlgo(String clientKeyAlgo) {
doSetProperty("clientKeyAlgo", clientKeyAlgo);
return this;
}
/**
* The Client Key data.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param clientKeyData the value to set
* @return the dsl builder
*/
default KubernetesHPAEndpointProducerBuilder clientKeyData(String clientKeyData) {
doSetProperty("clientKeyData", clientKeyData);
return this;
}
/**
* The Client Key file.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param clientKeyFile the value to set
* @return the dsl builder
*/
default KubernetesHPAEndpointProducerBuilder clientKeyFile(String clientKeyFile) {
doSetProperty("clientKeyFile", clientKeyFile);
return this;
}
/**
* The Client Key Passphrase.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param clientKeyPassphrase the value to set
* @return the dsl builder
*/
default KubernetesHPAEndpointProducerBuilder clientKeyPassphrase(String clientKeyPassphrase) {
doSetProperty("clientKeyPassphrase", clientKeyPassphrase);
return this;
}
/**
* The Auth Token.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param oauthToken the value to set
* @return the dsl builder
*/
default KubernetesHPAEndpointProducerBuilder oauthToken(String oauthToken) {
doSetProperty("oauthToken", oauthToken);
return this;
}
/**
* Password to connect to Kubernetes.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param password the value to set
* @return the dsl builder
*/
default KubernetesHPAEndpointProducerBuilder password(String password) {
doSetProperty("password", password);
return this;
}
/**
* Define if the certs we used are trusted anyway or not.
*
* The option is a: <code>java.lang.Boolean</code> type.
*
* Default: false
* Group: security
*
* @param trustCerts the value to set
* @return the dsl builder
*/
default KubernetesHPAEndpointProducerBuilder trustCerts(Boolean trustCerts) {
doSetProperty("trustCerts", trustCerts);
return this;
}
/**
* Define if the certs we used are trusted anyway or not.
*
* The option will be converted to a <code>java.lang.Boolean</code>
* type.
*
* Default: false
* Group: security
*
* @param trustCerts the value to set
* @return the dsl builder
*/
default KubernetesHPAEndpointProducerBuilder trustCerts(String trustCerts) {
doSetProperty("trustCerts", trustCerts);
return this;
}
/**
* Username to connect to Kubernetes.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param username the value to set
* @return the dsl builder
*/
default KubernetesHPAEndpointProducerBuilder username(String username) {
doSetProperty("username", username);
return this;
}
}
/**
* Advanced builder for endpoint producers for the Kubernetes HPA component.
*/
public
|
KubernetesHPAEndpointProducerBuilder
|
java
|
spring-projects__spring-boot
|
module/spring-boot-graphql/src/test/java/org/springframework/boot/graphql/autoconfigure/GraphQlAutoConfigurationTests.java
|
{
"start": 14031,
"end": 14322
}
|
class ____ implements GraphQlSourceBuilderCustomizer {
public boolean applied;
@Override
public void customize(GraphQlSource.SchemaResourceBuilder builder) {
this.applied = true;
}
}
}
@Configuration(proxyBeanMethods = false)
static
|
CustomGraphQlSourceBuilderCustomizer
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/hint/CapitalizeQueryHintsShuttle.java
|
{
"start": 1195,
"end": 3668
}
|
class ____ extends QueryHintsRelShuttle {
@Override
protected RelNode doVisit(RelNode node) {
Hintable hNode = (Hintable) node;
AtomicBoolean changed = new AtomicBoolean(false);
List<RelHint> hintsWithCapitalJoinHints =
hNode.getHints().stream()
.map(
hint -> {
String capitalHintName = hint.hintName.toUpperCase(Locale.ROOT);
if (!FlinkHints.isQueryHint(capitalHintName)
|| hint.hintName.equals(capitalHintName)) {
return hint;
}
changed.set(true);
if (JoinStrategy.isJoinStrategy(capitalHintName)) {
if (JoinStrategy.isLookupHint(hint.hintName)) {
return RelHint.builder(capitalHintName)
.hintOptions(hint.kvOptions)
.inheritPath(hint.inheritPath)
.build();
}
return RelHint.builder(capitalHintName)
.hintOptions(hint.listOptions)
.inheritPath(hint.inheritPath)
.build();
} else if (StateTtlHint.isStateTtlHint(hint.hintName)) {
return RelHint.builder(capitalHintName)
.hintOptions(hint.kvOptions)
.inheritPath(hint.inheritPath)
.build();
}
throw new IllegalStateException(
"Unknown hint: " + hint.hintName);
})
.collect(Collectors.toList());
if (changed.get()) {
return super.visit(hNode.withHints(hintsWithCapitalJoinHints));
} else {
return super.visit(node);
}
}
}
|
CapitalizeQueryHintsShuttle
|
java
|
apache__camel
|
components/camel-kubernetes/src/main/java/org/apache/camel/component/kubernetes/config_maps/KubernetesConfigMapsConsumer.java
|
{
"start": 1762,
"end": 3190
}
|
class ____ extends DefaultConsumer {
private static final Logger LOG = LoggerFactory.getLogger(KubernetesConfigMapsConsumer.class);
private final Processor processor;
private ExecutorService executor;
private ConfigMapsConsumerTask configMapWatcher;
public KubernetesConfigMapsConsumer(AbstractKubernetesEndpoint endpoint, Processor processor) {
super(endpoint, processor);
this.processor = processor;
}
@Override
public AbstractKubernetesEndpoint getEndpoint() {
return (AbstractKubernetesEndpoint) super.getEndpoint();
}
@Override
protected void doStart() throws Exception {
super.doStart();
executor = getEndpoint().createExecutor(this);
configMapWatcher = new ConfigMapsConsumerTask();
executor.submit(configMapWatcher);
}
@Override
protected void doStop() throws Exception {
super.doStop();
LOG.debug("Stopping Kubernetes ConfigMap Consumer");
if (executor != null) {
KubernetesHelper.close(configMapWatcher, configMapWatcher::getWatch);
if (getEndpoint() != null && getEndpoint().getCamelContext() != null) {
getEndpoint().getCamelContext().getExecutorServiceManager().shutdownNow(executor);
} else {
executor.shutdownNow();
}
}
executor = null;
}
|
KubernetesConfigMapsConsumer
|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/internal/operators/observable/ObservableFlatMap.java
|
{
"start": 2136,
"end": 15617
}
|
class ____<T, U> extends AtomicInteger implements Disposable, Observer<T> {
private static final long serialVersionUID = -2117620485640801370L;
final Observer<? super U> downstream;
final Function<? super T, ? extends ObservableSource<? extends U>> mapper;
final boolean delayErrors;
final int maxConcurrency;
final int bufferSize;
volatile SimplePlainQueue<U> queue;
volatile boolean done;
final AtomicThrowable errors = new AtomicThrowable();
volatile boolean disposed;
final AtomicReference<InnerObserver<?, ?>[]> observers;
static final InnerObserver<?, ?>[] EMPTY = new InnerObserver<?, ?>[0];
static final InnerObserver<?, ?>[] CANCELLED = new InnerObserver<?, ?>[0];
Disposable upstream;
long uniqueId;
int lastIndex;
Queue<ObservableSource<? extends U>> sources;
int wip;
MergeObserver(Observer<? super U> actual, Function<? super T, ? extends ObservableSource<? extends U>> mapper,
boolean delayErrors, int maxConcurrency, int bufferSize) {
this.downstream = actual;
this.mapper = mapper;
this.delayErrors = delayErrors;
this.maxConcurrency = maxConcurrency;
this.bufferSize = bufferSize;
if (maxConcurrency != Integer.MAX_VALUE) {
sources = new ArrayDeque<>(maxConcurrency);
}
this.observers = new AtomicReference<>(EMPTY);
}
@Override
public void onSubscribe(Disposable d) {
if (DisposableHelper.validate(this.upstream, d)) {
this.upstream = d;
downstream.onSubscribe(this);
}
}
@Override
public void onNext(T t) {
// safeguard against misbehaving sources
if (done) {
return;
}
ObservableSource<? extends U> p;
try {
p = Objects.requireNonNull(mapper.apply(t), "The mapper returned a null ObservableSource");
} catch (Throwable e) {
Exceptions.throwIfFatal(e);
upstream.dispose();
onError(e);
return;
}
if (maxConcurrency != Integer.MAX_VALUE) {
synchronized (this) {
if (wip == maxConcurrency) {
sources.offer(p);
return;
}
wip++;
}
}
subscribeInner(p);
}
@SuppressWarnings("unchecked")
void subscribeInner(ObservableSource<? extends U> p) {
for (;;) {
if (p instanceof Supplier) {
if (tryEmitScalar(((Supplier<? extends U>)p)) && maxConcurrency != Integer.MAX_VALUE) {
boolean empty = false;
synchronized (this) {
p = sources.poll();
if (p == null) {
wip--;
empty = true;
}
}
if (empty) {
drain();
break;
}
} else {
break;
}
} else {
InnerObserver<T, U> inner = new InnerObserver<>(this, uniqueId++);
if (addInner(inner)) {
p.subscribe(inner);
}
break;
}
}
}
boolean addInner(InnerObserver<T, U> inner) {
for (;;) {
InnerObserver<?, ?>[] a = observers.get();
if (a == CANCELLED) {
inner.dispose();
return false;
}
int n = a.length;
InnerObserver<?, ?>[] b = new InnerObserver[n + 1];
System.arraycopy(a, 0, b, 0, n);
b[n] = inner;
if (observers.compareAndSet(a, b)) {
return true;
}
}
}
void removeInner(InnerObserver<T, U> inner) {
for (;;) {
InnerObserver<?, ?>[] a = observers.get();
int n = a.length;
int j = -1;
for (int i = 0; i < n; i++) {
if (a[i] == inner) {
j = i;
break;
}
}
if (j < 0) {
return;
}
InnerObserver<?, ?>[] b;
if (n == 1) {
b = EMPTY;
} else {
b = new InnerObserver<?, ?>[n - 1];
System.arraycopy(a, 0, b, 0, j);
System.arraycopy(a, j + 1, b, j, n - j - 1);
}
if (observers.compareAndSet(a, b)) {
return;
}
}
}
boolean tryEmitScalar(Supplier<? extends U> value) {
U u;
try {
u = value.get();
} catch (Throwable ex) {
Exceptions.throwIfFatal(ex);
errors.tryAddThrowableOrReport(ex);
drain();
return true;
}
if (u == null) {
return true;
}
if (get() == 0 && compareAndSet(0, 1)) {
downstream.onNext(u);
if (decrementAndGet() == 0) {
return true;
}
} else {
SimplePlainQueue<U> q = queue;
if (q == null) {
if (maxConcurrency == Integer.MAX_VALUE) {
q = new SpscLinkedArrayQueue<>(bufferSize);
} else {
q = new SpscArrayQueue<>(maxConcurrency);
}
queue = q;
}
q.offer(u);
if (getAndIncrement() != 0) {
return false;
}
}
drainLoop();
return true;
}
void tryEmit(U value, InnerObserver<T, U> inner) {
if (get() == 0 && compareAndSet(0, 1)) {
downstream.onNext(value);
if (decrementAndGet() == 0) {
return;
}
} else {
SimpleQueue<U> q = inner.queue;
if (q == null) {
q = new SpscLinkedArrayQueue<>(bufferSize);
inner.queue = q;
}
q.offer(value);
if (getAndIncrement() != 0) {
return;
}
}
drainLoop();
}
@Override
public void onError(Throwable t) {
if (done) {
RxJavaPlugins.onError(t);
return;
}
if (errors.tryAddThrowableOrReport(t)) {
done = true;
drain();
}
}
@Override
public void onComplete() {
if (done) {
return;
}
done = true;
drain();
}
@Override
public void dispose() {
disposed = true;
if (disposeAll()) {
errors.tryTerminateAndReport();
}
}
@Override
public boolean isDisposed() {
return disposed;
}
void drain() {
if (getAndIncrement() == 0) {
drainLoop();
}
}
void drainLoop() {
final Observer<? super U> child = this.downstream;
int missed = 1;
for (;;) {
if (checkTerminate()) {
return;
}
int innerCompleted = 0;
SimplePlainQueue<U> svq = queue;
if (svq != null) {
for (;;) {
if (checkTerminate()) {
return;
}
U o = svq.poll();
if (o == null) {
break;
}
child.onNext(o);
innerCompleted++;
}
}
if (innerCompleted != 0) {
if (maxConcurrency != Integer.MAX_VALUE) {
subscribeMore(innerCompleted);
innerCompleted = 0;
}
continue;
}
boolean d = done;
svq = queue;
InnerObserver<?, ?>[] inner = observers.get();
int n = inner.length;
int nSources = 0;
if (maxConcurrency != Integer.MAX_VALUE) {
synchronized (this) {
nSources = sources.size();
}
}
if (d && (svq == null || svq.isEmpty()) && n == 0 && nSources == 0) {
errors.tryTerminateConsumer(downstream);
return;
}
if (n != 0) {
int j = Math.min(n - 1, lastIndex);
sourceLoop:
for (int i = 0; i < n; i++) {
if (checkTerminate()) {
return;
}
@SuppressWarnings("unchecked")
InnerObserver<T, U> is = (InnerObserver<T, U>)inner[j];
SimpleQueue<U> q = is.queue;
if (q != null) {
for (;;) {
U o;
try {
o = q.poll();
} catch (Throwable ex) {
Exceptions.throwIfFatal(ex);
is.dispose();
errors.tryAddThrowableOrReport(ex);
if (checkTerminate()) {
return;
}
removeInner(is);
innerCompleted++;
j++;
if (j == n) {
j = 0;
}
continue sourceLoop;
}
if (o == null) {
break;
}
child.onNext(o);
if (checkTerminate()) {
return;
}
}
}
boolean innerDone = is.done;
SimpleQueue<U> innerQueue = is.queue;
if (innerDone && (innerQueue == null || innerQueue.isEmpty())) {
removeInner(is);
innerCompleted++;
}
j++;
if (j == n) {
j = 0;
}
}
lastIndex = j;
}
if (innerCompleted != 0) {
if (maxConcurrency != Integer.MAX_VALUE) {
subscribeMore(innerCompleted);
innerCompleted = 0;
}
continue;
}
missed = addAndGet(-missed);
if (missed == 0) {
break;
}
}
}
void subscribeMore(int innerCompleted) {
while (innerCompleted-- != 0) {
ObservableSource<? extends U> p;
synchronized (this) {
p = sources.poll();
if (p == null) {
wip--;
continue;
}
}
subscribeInner(p);
}
}
boolean checkTerminate() {
if (disposed) {
return true;
}
Throwable e = errors.get();
if (!delayErrors && (e != null)) {
disposeAll();
errors.tryTerminateConsumer(downstream);
return true;
}
return false;
}
boolean disposeAll() {
upstream.dispose();
InnerObserver<?, ?>[] a = observers.getAndSet(CANCELLED);
if (a != CANCELLED) {
for (InnerObserver<?, ?> inner : a) {
inner.dispose();
}
return true;
}
return false;
}
}
static final
|
MergeObserver
|
java
|
quarkusio__quarkus
|
core/deployment/src/main/java/io/quarkus/deployment/builditem/RunTimeConfigurationDefaultBuildItem.java
|
{
"start": 232,
"end": 1235
}
|
class ____ extends MultiBuildItem {
private final String key;
private final String value;
/**
* Construct a new instance.
*
* @param key the configuration key (must not be {@code null} or empty)
* @param value the configuration value (must not be {@code null})
*/
public RunTimeConfigurationDefaultBuildItem(final String key, final String value) {
Assert.checkNotNullParam("key", key);
Assert.checkNotEmptyParam("key", key);
Assert.checkNotNullParam("value for key " + key, value);
this.key = key;
this.value = value;
}
/**
* Get the configuration key.
*
* @return the configuration key (not {@code null} or empty)
*/
public String getKey() {
return key;
}
/**
* Get the configuration value.
*
* @return the configuration value (must not be {@code null})
*/
public String getValue() {
return value;
}
}
|
RunTimeConfigurationDefaultBuildItem
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/router/TestRouterMultiRack.java
|
{
"start": 1693,
"end": 1747
}
|
class ____ clusters having multiple racks.
*/
public
|
with
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastBytesRefByTimestampGroupingAggregatorFunction.java
|
{
"start": 1181,
"end": 10893
}
|
class ____ implements GroupingAggregatorFunction {
private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
new IntermediateStateDesc("timestamps", ElementType.LONG),
new IntermediateStateDesc("values", ElementType.BYTES_REF),
new IntermediateStateDesc("hasValues", ElementType.BOOLEAN) );
private final AllLastBytesRefByTimestampAggregator.GroupingState state;
private final List<Integer> channels;
private final DriverContext driverContext;
public AllLastBytesRefByTimestampGroupingAggregatorFunction(List<Integer> channels,
AllLastBytesRefByTimestampAggregator.GroupingState state, DriverContext driverContext) {
this.channels = channels;
this.state = state;
this.driverContext = driverContext;
}
public static AllLastBytesRefByTimestampGroupingAggregatorFunction create(List<Integer> channels,
DriverContext driverContext) {
return new AllLastBytesRefByTimestampGroupingAggregatorFunction(channels, AllLastBytesRefByTimestampAggregator.initGrouping(driverContext), driverContext);
}
public static List<IntermediateStateDesc> intermediateStateDesc() {
return INTERMEDIATE_STATE_DESC;
}
@Override
public int intermediateBlockCount() {
return INTERMEDIATE_STATE_DESC.size();
}
@Override
public GroupingAggregatorFunction.AddInput prepareProcessRawInputPage(SeenGroupIds seenGroupIds,
Page page) {
BytesRefBlock valueBlock = page.getBlock(channels.get(0));
LongBlock timestampBlock = page.getBlock(channels.get(1));
maybeEnableGroupIdTracking(seenGroupIds, valueBlock, timestampBlock);
return new GroupingAggregatorFunction.AddInput() {
@Override
public void add(int positionOffset, IntArrayBlock groupIds) {
addRawInput(positionOffset, groupIds, valueBlock, timestampBlock);
}
@Override
public void add(int positionOffset, IntBigArrayBlock groupIds) {
addRawInput(positionOffset, groupIds, valueBlock, timestampBlock);
}
@Override
public void add(int positionOffset, IntVector groupIds) {
addRawInput(positionOffset, groupIds, valueBlock, timestampBlock);
}
@Override
public void close() {
}
};
}
private void addRawInput(int positionOffset, IntArrayBlock groups, BytesRefBlock valueBlock,
LongBlock timestampBlock) {
for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
if (groups.isNull(groupPosition)) {
continue;
}
int valuesPosition = groupPosition + positionOffset;
int groupStart = groups.getFirstValueIndex(groupPosition);
int groupEnd = groupStart + groups.getValueCount(groupPosition);
for (int g = groupStart; g < groupEnd; g++) {
int groupId = groups.getInt(g);
AllLastBytesRefByTimestampAggregator.combine(state, groupId, valuesPosition, valueBlock, timestampBlock);
}
}
}
@Override
public void addIntermediateInput(int positionOffset, IntArrayBlock groups, Page page) {
state.enableGroupIdTracking(new SeenGroupIds.Empty());
assert channels.size() == intermediateBlockCount();
Block timestampsUncast = page.getBlock(channels.get(0));
if (timestampsUncast.areAllValuesNull()) {
return;
}
LongBlock timestamps = (LongBlock) timestampsUncast;
Block valuesUncast = page.getBlock(channels.get(1));
if (valuesUncast.areAllValuesNull()) {
return;
}
BytesRefBlock values = (BytesRefBlock) valuesUncast;
Block hasValuesUncast = page.getBlock(channels.get(2));
if (hasValuesUncast.areAllValuesNull()) {
return;
}
BooleanBlock hasValues = (BooleanBlock) hasValuesUncast;
assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == hasValues.getPositionCount();
BytesRef valuesScratch = new BytesRef();
for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
if (groups.isNull(groupPosition)) {
continue;
}
int groupStart = groups.getFirstValueIndex(groupPosition);
int groupEnd = groupStart + groups.getValueCount(groupPosition);
for (int g = groupStart; g < groupEnd; g++) {
int groupId = groups.getInt(g);
int valuesPosition = groupPosition + positionOffset;
AllLastBytesRefByTimestampAggregator.combineIntermediate(state, groupId, timestamps, values, hasValues, valuesPosition);
}
}
}
private void addRawInput(int positionOffset, IntBigArrayBlock groups, BytesRefBlock valueBlock,
LongBlock timestampBlock) {
for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
if (groups.isNull(groupPosition)) {
continue;
}
int valuesPosition = groupPosition + positionOffset;
int groupStart = groups.getFirstValueIndex(groupPosition);
int groupEnd = groupStart + groups.getValueCount(groupPosition);
for (int g = groupStart; g < groupEnd; g++) {
int groupId = groups.getInt(g);
AllLastBytesRefByTimestampAggregator.combine(state, groupId, valuesPosition, valueBlock, timestampBlock);
}
}
}
@Override
public void addIntermediateInput(int positionOffset, IntBigArrayBlock groups, Page page) {
state.enableGroupIdTracking(new SeenGroupIds.Empty());
assert channels.size() == intermediateBlockCount();
Block timestampsUncast = page.getBlock(channels.get(0));
if (timestampsUncast.areAllValuesNull()) {
return;
}
LongBlock timestamps = (LongBlock) timestampsUncast;
Block valuesUncast = page.getBlock(channels.get(1));
if (valuesUncast.areAllValuesNull()) {
return;
}
BytesRefBlock values = (BytesRefBlock) valuesUncast;
Block hasValuesUncast = page.getBlock(channels.get(2));
if (hasValuesUncast.areAllValuesNull()) {
return;
}
BooleanBlock hasValues = (BooleanBlock) hasValuesUncast;
assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == hasValues.getPositionCount();
BytesRef valuesScratch = new BytesRef();
for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
if (groups.isNull(groupPosition)) {
continue;
}
int groupStart = groups.getFirstValueIndex(groupPosition);
int groupEnd = groupStart + groups.getValueCount(groupPosition);
for (int g = groupStart; g < groupEnd; g++) {
int groupId = groups.getInt(g);
int valuesPosition = groupPosition + positionOffset;
AllLastBytesRefByTimestampAggregator.combineIntermediate(state, groupId, timestamps, values, hasValues, valuesPosition);
}
}
}
private void addRawInput(int positionOffset, IntVector groups, BytesRefBlock valueBlock,
LongBlock timestampBlock) {
for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
int valuesPosition = groupPosition + positionOffset;
int groupId = groups.getInt(groupPosition);
AllLastBytesRefByTimestampAggregator.combine(state, groupId, valuesPosition, valueBlock, timestampBlock);
}
}
@Override
public void addIntermediateInput(int positionOffset, IntVector groups, Page page) {
state.enableGroupIdTracking(new SeenGroupIds.Empty());
assert channels.size() == intermediateBlockCount();
Block timestampsUncast = page.getBlock(channels.get(0));
if (timestampsUncast.areAllValuesNull()) {
return;
}
LongBlock timestamps = (LongBlock) timestampsUncast;
Block valuesUncast = page.getBlock(channels.get(1));
if (valuesUncast.areAllValuesNull()) {
return;
}
BytesRefBlock values = (BytesRefBlock) valuesUncast;
Block hasValuesUncast = page.getBlock(channels.get(2));
if (hasValuesUncast.areAllValuesNull()) {
return;
}
BooleanBlock hasValues = (BooleanBlock) hasValuesUncast;
assert timestamps.getPositionCount() == values.getPositionCount() && timestamps.getPositionCount() == hasValues.getPositionCount();
BytesRef valuesScratch = new BytesRef();
for (int groupPosition = 0; groupPosition < groups.getPositionCount(); groupPosition++) {
int groupId = groups.getInt(groupPosition);
int valuesPosition = groupPosition + positionOffset;
AllLastBytesRefByTimestampAggregator.combineIntermediate(state, groupId, timestamps, values, hasValues, valuesPosition);
}
}
private void maybeEnableGroupIdTracking(SeenGroupIds seenGroupIds, BytesRefBlock valueBlock,
LongBlock timestampBlock) {
if (valueBlock.mayHaveNulls()) {
state.enableGroupIdTracking(seenGroupIds);
}
if (timestampBlock.mayHaveNulls()) {
state.enableGroupIdTracking(seenGroupIds);
}
}
@Override
public void selectedMayContainUnseenGroups(SeenGroupIds seenGroupIds) {
state.enableGroupIdTracking(seenGroupIds);
}
@Override
public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) {
state.toIntermediate(blocks, offset, selected, driverContext);
}
@Override
public void evaluateFinal(Block[] blocks, int offset, IntVector selected,
GroupingAggregatorEvaluationContext ctx) {
blocks[offset] = AllLastBytesRefByTimestampAggregator.evaluateFinal(state, selected, ctx);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getClass().getSimpleName()).append("[");
sb.append("channels=").append(channels);
sb.append("]");
return sb.toString();
}
@Override
public void close() {
state.close();
}
}
|
AllLastBytesRefByTimestampGroupingAggregatorFunction
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/dialect/unit/locktimeout/PostgreSQLLockTimeoutTest.java
|
{
"start": 636,
"end": 3081
}
|
class ____ {
private final Dialect dialect = new PostgreSQLDialect();
@Test
public void testLockTimeoutNoAliasNoTimeout() {
assertEquals(
" for share",
dialect.getForUpdateString( new LockOptions( LockMode.PESSIMISTIC_READ ) )
);
assertEquals(
" for no key update",
dialect.getForUpdateString( new LockOptions( LockMode.PESSIMISTIC_WRITE ) )
);
}
@Test
public void testLockTimeoutNoAliasNoWait() {
assertEquals(
" for share nowait",
dialect.getForUpdateString( new LockOptions( LockMode.PESSIMISTIC_READ )
.setTimeout( NO_WAIT ) )
);
assertEquals(
" for no key update nowait",
dialect.getForUpdateString( new LockOptions( LockMode.PESSIMISTIC_WRITE )
.setTimeout( NO_WAIT ) )
);
}
@Test
public void testLockTimeoutNoAliasSkipLocked() {
assertEquals(
" for share skip locked",
dialect.getForUpdateString( new LockOptions( LockMode.PESSIMISTIC_READ )
.setTimeout( SKIP_LOCKED ) )
);
assertEquals(
" for no key update skip locked",
dialect.getForUpdateString( new LockOptions( LockMode.PESSIMISTIC_WRITE )
.setTimeout( SKIP_LOCKED ) )
);
}
@Test
public void testLockTimeoutAliasNoTimeout() {
String alias = "a";
assertEquals(
" for share of a",
dialect.getForUpdateString(
alias,
new LockOptions( LockMode.PESSIMISTIC_READ )
)
);
assertEquals(
" for no key update of a",
dialect.getForUpdateString(
alias,
new LockOptions( LockMode.PESSIMISTIC_WRITE )
)
);
}
@Test
public void testLockTimeoutAliasNoWait() {
String alias = "a";
assertEquals(
" for share of a nowait",
dialect.getForUpdateString(
alias,
new LockOptions( LockMode.PESSIMISTIC_READ ).setTimeout( NO_WAIT )
)
);
assertEquals(
" for no key update of a nowait",
dialect.getForUpdateString(
alias,
new LockOptions( LockMode.PESSIMISTIC_WRITE ).setTimeout( NO_WAIT )
)
);
}
@Test
public void testLockTimeoutAliasSkipLocked() {
String alias = "a";
assertEquals(
" for share of a skip locked",
dialect.getForUpdateString(
alias,
new LockOptions( LockMode.PESSIMISTIC_READ ).setTimeout( SKIP_LOCKED )
)
);
assertEquals(
" for no key update of a skip locked",
dialect.getForUpdateString(
alias,
new LockOptions( LockMode.PESSIMISTIC_WRITE ).setTimeout( SKIP_LOCKED )
)
);
}
}
|
PostgreSQLLockTimeoutTest
|
java
|
apache__camel
|
components/camel-mybatis/src/generated/java/org/apache/camel/component/mybatis/MyBatisBeanEndpointUriFactory.java
|
{
"start": 517,
"end": 2359
}
|
class ____ extends org.apache.camel.support.component.EndpointUriFactorySupport implements EndpointUriFactory {
private static final String BASE = ":beanName:methodName";
private static final Set<String> PROPERTY_NAMES;
private static final Set<String> SECRET_PROPERTY_NAMES;
private static final Map<String, String> MULTI_VALUE_PREFIXES;
static {
Set<String> props = new HashSet<>(6);
props.add("beanName");
props.add("executorType");
props.add("inputHeader");
props.add("lazyStartProducer");
props.add("methodName");
props.add("outputHeader");
PROPERTY_NAMES = Collections.unmodifiableSet(props);
SECRET_PROPERTY_NAMES = Collections.emptySet();
MULTI_VALUE_PREFIXES = Collections.emptyMap();
}
@Override
public boolean isEnabled(String scheme) {
return "mybatis-bean".equals(scheme);
}
@Override
public String buildUri(String scheme, Map<String, Object> properties, boolean encode) throws URISyntaxException {
String syntax = scheme + BASE;
String uri = syntax;
Map<String, Object> copy = new HashMap<>(properties);
uri = buildPathParameter(syntax, uri, "beanName", null, true, copy);
uri = buildPathParameter(syntax, uri, "methodName", null, true, copy);
uri = buildQueryParameters(uri, copy, encode);
return uri;
}
@Override
public Set<String> propertyNames() {
return PROPERTY_NAMES;
}
@Override
public Set<String> secretPropertyNames() {
return SECRET_PROPERTY_NAMES;
}
@Override
public Map<String, String> multiValuePrefixes() {
return MULTI_VALUE_PREFIXES;
}
@Override
public boolean isLenientProperties() {
return false;
}
}
|
MyBatisBeanEndpointUriFactory
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/embeddables/generics/GenericEmbeddedIdentifierTest.java
|
{
"start": 10879,
"end": 11006
}
|
class ____<O, E> extends GenericObject<EmbeddableKey<O, E>> {
}
@Entity( name = "UserAccessReport" )
public static
|
AccessReport
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/embeddable/EmbeddableQuerySelectTest.java
|
{
"start": 5903,
"end": 5968
}
|
enum ____ {
FOO,
BAR,
}
@Embeddable
public static
|
AccountType
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/main/java/org/springframework/boot/diagnostics/analyzer/NoSuchMethodFailureAnalyzer.java
|
{
"start": 5883,
"end": 6634
}
|
class ____ loaded from the following location:");
writer.println();
writer.printf(" %s%n", callerDescriptor.getTypeHierarchy().get(0).getLocation());
}
else {
writer.printf("The calling method's class, %s, was loaded from the following location:%n",
callerDescriptor.getClassName());
writer.println();
writer.printf(" %s%n", callerDescriptor.getCandidateLocations().get(0));
}
writer.println();
writer.printf("The called method's class, %s, is available from the following locations:%n",
calledDescriptor.getClassName());
writer.println();
for (URL candidate : calledDescriptor.getCandidateLocations()) {
writer.printf(" %s%n", candidate);
}
writer.println();
writer.println("The called method's
|
was
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/ServletAnnotationControllerHandlerMethodTests.java
|
{
"start": 118102,
"end": 118192
}
|
class ____ {
MySpecialArg(String value) {
}
}
@Controller
private static
|
MySpecialArg
|
java
|
google__auto
|
value/src/test/java/com/google/auto/value/processor/AutoBuilderCompilationTest.java
|
{
"start": 20710,
"end": 21028
}
|
interface ____");
}
@Test
public void noMatchingConstructor() {
JavaFileObject javaFileObject =
JavaFileObjects.forSourceLines(
"foo.bar.Baz",
"package foo.bar;",
"",
"import com.google.auto.value.AutoBuilder;",
"",
"public
|
Builder
|
java
|
elastic__elasticsearch
|
x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/GeoShapeBoundsAggregator.java
|
{
"start": 1265,
"end": 6044
}
|
class ____ extends MetricsAggregator {
private final GeoShapeValuesSource valuesSource;
private final boolean wrapLongitude;
private DoubleArray tops;
private DoubleArray bottoms;
private DoubleArray posLefts;
private DoubleArray posRights;
private DoubleArray negLefts;
private DoubleArray negRights;
public GeoShapeBoundsAggregator(
String name,
AggregationContext context,
Aggregator parent,
ValuesSourceConfig valuesSourceConfig,
boolean wrapLongitude,
Map<String, Object> metadata
) throws IOException {
super(name, context, parent, metadata);
assert valuesSourceConfig.hasValues();
this.valuesSource = (GeoShapeValuesSource) valuesSourceConfig.getValuesSource();
this.wrapLongitude = wrapLongitude;
tops = bigArrays().newDoubleArray(1, false);
tops.fill(0, tops.size(), Double.NEGATIVE_INFINITY);
bottoms = bigArrays().newDoubleArray(1, false);
bottoms.fill(0, bottoms.size(), Double.POSITIVE_INFINITY);
posLefts = bigArrays().newDoubleArray(1, false);
posLefts.fill(0, posLefts.size(), Double.POSITIVE_INFINITY);
posRights = bigArrays().newDoubleArray(1, false);
posRights.fill(0, posRights.size(), Double.NEGATIVE_INFINITY);
negLefts = bigArrays().newDoubleArray(1, false);
negLefts.fill(0, negLefts.size(), Double.POSITIVE_INFINITY);
negRights = bigArrays().newDoubleArray(1, false);
negRights.fill(0, negRights.size(), Double.NEGATIVE_INFINITY);
}
@Override
public LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCtx, LeafBucketCollector sub) {
final GeoShapeValues values = valuesSource.shapeValues(aggCtx.getLeafReaderContext());
return new LeafBucketCollectorBase(sub, values) {
@Override
public void collect(int doc, long bucket) throws IOException {
if (values.advanceExact(doc)) {
maybeResize(bucket);
final GeoShapeValues.GeoShapeValue value = values.value();
final GeoShapeValues.BoundingBox bounds = value.boundingBox();
tops.set(bucket, Math.max(tops.get(bucket), bounds.top));
bottoms.set(bucket, Math.min(bottoms.get(bucket), bounds.bottom));
posLefts.set(bucket, Math.min(posLefts.get(bucket), bounds.posLeft));
posRights.set(bucket, Math.max(posRights.get(bucket), bounds.posRight));
negLefts.set(bucket, Math.min(negLefts.get(bucket), bounds.negLeft));
negRights.set(bucket, Math.max(negRights.get(bucket), bounds.negRight));
}
}
private void maybeResize(long bucket) {
if (bucket >= tops.size()) {
final long from = tops.size();
tops = bigArrays().grow(tops, bucket + 1);
tops.fill(from, tops.size(), Double.NEGATIVE_INFINITY);
bottoms = bigArrays().resize(bottoms, tops.size());
bottoms.fill(from, bottoms.size(), Double.POSITIVE_INFINITY);
posLefts = bigArrays().resize(posLefts, tops.size());
posLefts.fill(from, posLefts.size(), Double.POSITIVE_INFINITY);
posRights = bigArrays().resize(posRights, tops.size());
posRights.fill(from, posRights.size(), Double.NEGATIVE_INFINITY);
negLefts = bigArrays().resize(negLefts, tops.size());
negLefts.fill(from, negLefts.size(), Double.POSITIVE_INFINITY);
negRights = bigArrays().resize(negRights, tops.size());
negRights.fill(from, negRights.size(), Double.NEGATIVE_INFINITY);
}
}
};
}
@Override
public InternalAggregation buildAggregation(long owningBucketOrdinal) {
double top = tops.get(owningBucketOrdinal);
double bottom = bottoms.get(owningBucketOrdinal);
double posLeft = posLefts.get(owningBucketOrdinal);
double posRight = posRights.get(owningBucketOrdinal);
double negLeft = negLefts.get(owningBucketOrdinal);
double negRight = negRights.get(owningBucketOrdinal);
return new InternalGeoBounds(name, top, bottom, posLeft, posRight, negLeft, negRight, wrapLongitude, metadata());
}
@Override
public InternalAggregation buildEmptyAggregation() {
return InternalGeoBounds.empty(name, wrapLongitude, metadata());
}
@Override
public void doClose() {
Releasables.close(tops, bottoms, posLefts, posRights, negLefts, negRights);
}
}
|
GeoShapeBoundsAggregator
|
java
|
spring-projects__spring-data-jpa
|
spring-data-jpa/src/main/java/org/springframework/data/jpa/repository/query/QueryParameterSetterFactory.java
|
{
"start": 8840,
"end": 10590
}
|
class ____ extends QueryParameterSetterFactory {
private final JpaParameters parameters;
private final boolean preferNamedParameters;
/**
* @param parameters must not be {@literal null}.
* @param preferNamedParameters whether to use named parameters.
*/
BasicQueryParameterSetterFactory(JpaParameters parameters, boolean preferNamedParameters) {
Assert.notNull(parameters, "JpaParameters must not be null");
this.parameters = parameters;
this.preferNamedParameters = preferNamedParameters;
}
@Override
public @Nullable QueryParameterSetter create(ParameterBinding binding, ParametrizedQuery query) {
Assert.notNull(binding, "Binding must not be null");
if (!(binding.getOrigin() instanceof MethodInvocationArgument mia)) {
return null;
}
BindingIdentifier identifier = mia.identifier();
JpaParameter parameter;
if (preferNamedParameters && identifier.hasName()) {
parameter = findParameterForBinding(parameters, identifier.getName());
} else if (identifier.hasPosition()) {
parameter = findParameterForBinding(parameters, identifier.getPosition() - 1);
} else {
// this can happen when a query uses parameters in ORDER BY and the COUNT query just needs to drop a binding.
parameter = null;
}
return parameter == null //
? QueryParameterSetter.NOOP //
: createSetter(values -> getValue(values, parameter), binding, parameter);
}
protected @Nullable Object getValue(JpaParametersParameterAccessor accessor, Parameter parameter) {
return accessor.getValue(parameter);
}
}
/**
* @author Jens Schauder
* @author Oliver Gierke
* @author Mark Paluch
* @see QueryParameterSetterFactory
*/
private static
|
BasicQueryParameterSetterFactory
|
java
|
google__auto
|
value/src/test/java/com/google/auto/value/processor/AutoBuilderCompilationTest.java
|
{
"start": 36069,
"end": 36633
}
|
interface ____<E> {",
" abstract Builder<E> param(E param);",
" abstract Baz<E> build();",
" }",
"}");
Compilation compilation =
javac().withProcessors(new AutoBuilderProcessor()).compile(javaFileObject);
assertThat(compilation).failed();
assertThat(compilation)
.hadErrorContaining(
"[AutoBuilderTypeParams] Builder type parameters <E> must match type parameters <T> of"
+ " Baz(T param)")
.inFile(javaFileObject)
.onLineContaining("
|
Builder
|
java
|
grpc__grpc-java
|
binder/src/androidTest/java/io/grpc/binder/internal/BinderClientTransportTest.java
|
{
"start": 5920,
"end": 21988
}
|
class ____ {
final BinderClientTransportFactory.Builder factoryBuilder =
new BinderClientTransportFactory.Builder()
.setSourceContext(appContext)
.setScheduledExecutorPool(executorServicePool)
.setOffloadExecutorPool(offloadServicePool);
@CanIgnoreReturnValue
public BinderClientTransportBuilder setSecurityPolicy(SecurityPolicy securityPolicy) {
factoryBuilder.setSecurityPolicy(securityPolicy);
return this;
}
@CanIgnoreReturnValue
public BinderClientTransportBuilder setBinderDecorator(
OneWayBinderProxy.Decorator binderDecorator) {
factoryBuilder.setBinderDecorator(binderDecorator);
return this;
}
@CanIgnoreReturnValue
public BinderClientTransportBuilder setReadyTimeoutMillis(int timeoutMillis) {
factoryBuilder.setReadyTimeoutMillis(timeoutMillis);
return this;
}
@CanIgnoreReturnValue
public BinderClientTransportBuilder setPreAuthorizeServer(boolean preAuthorizeServer) {
factoryBuilder.setPreAuthorizeServers(preAuthorizeServer);
return this;
}
public BinderClientTransport build() {
return factoryBuilder
.buildClientTransportFactory()
.newClientTransport(serverAddress, new ClientTransportOptions(), null);
}
}
@After
public void tearDown() throws Exception {
blockingSecurityPolicy.provideNextCheckAuthorizationResult(Status.ABORTED);
transport.shutdownNow(Status.OK);
HostServices.awaitServiceShutdown();
shutdownAndTerminate(executorServicePool.getObject());
shutdownAndTerminate(offloadServicePool.getObject());
}
private static void shutdownAndTerminate(ExecutorService executorService)
throws InterruptedException {
executorService.shutdownNow();
if (!executorService.awaitTermination(TIMEOUT_SECONDS, SECONDS)) {
throw new AssertionError("executor failed to terminate promptly");
}
}
@Test
public void testShutdownBeforeStreamStart_b153326034() throws Exception {
transport = new BinderClientTransportBuilder().build();
startAndAwaitReady(transport, transportListener);
ClientStream stream =
transport.newStream(methodDesc, new Metadata(), CallOptions.DEFAULT, tracers);
transport.shutdownNow(Status.UNKNOWN.withDescription("reasons"));
// This shouldn't throw an exception.
stream.start(streamListener);
}
@Test
public void testRequestWhileStreamIsWaitingOnCall_b154088869() throws Exception {
transport = new BinderClientTransportBuilder().build();
startAndAwaitReady(transport, transportListener);
ClientStream stream =
transport.newStream(streamingMethodDesc, new Metadata(), CallOptions.DEFAULT, tracers);
stream.start(streamListener);
stream.writeMessage(marshaller.stream(Empty.getDefaultInstance()));
stream.halfClose();
stream.request(3);
streamListener.readAndDiscardMessages(2);
// Without the fix, this loops forever.
stream.request(2);
}
@Test
public void testTransactionForDiscardedCall_b155244043() throws Exception {
transport = new BinderClientTransportBuilder().build();
startAndAwaitReady(transport, transportListener);
ClientStream stream =
transport.newStream(streamingMethodDesc, new Metadata(), CallOptions.DEFAULT, tracers);
stream.start(streamListener);
stream.writeMessage(marshaller.stream(Empty.getDefaultInstance()));
assertThat(transport.getOngoingCalls()).hasSize(1);
int callId = transport.getOngoingCalls().keySet().iterator().next();
stream.cancel(Status.UNKNOWN);
// Send a transaction to the no-longer present call ID. It should be silently ignored.
Parcel p = Parcel.obtain();
transport.handleTransaction(callId, p);
p.recycle();
}
@Test
public void testBadTransactionStreamThroughput_b163053382() throws Exception {
transport = new BinderClientTransportBuilder().build();
startAndAwaitReady(transport, transportListener);
ClientStream stream =
transport.newStream(streamingMethodDesc, new Metadata(), CallOptions.DEFAULT, tracers);
stream.start(streamListener);
stream.writeMessage(marshaller.stream(Empty.getDefaultInstance()));
stream.halfClose();
stream.request(1000);
// We should eventually see all messages despite receiving no more transactions from the server.
streamListener.readAndDiscardMessages(100);
}
@Test
public void testMessageProducerClosedAfterStream_b169313545() throws Exception {
transport = new BinderClientTransportBuilder().build();
startAndAwaitReady(transport, transportListener);
ClientStream stream =
transport.newStream(methodDesc, new Metadata(), CallOptions.DEFAULT, tracers);
stream.start(streamListener);
stream.writeMessage(marshaller.stream(Empty.getDefaultInstance()));
stream.halfClose();
stream.request(2);
// Wait until we receive the first message.
streamListener.awaitMessages();
// Now cancel the stream, forcing it to close.
stream.cancel(Status.CANCELLED);
// The message producer shouldn't throw an exception if we drain it now.
streamListener.drainMessages();
}
@Test
public void testNewStreamBeforeTransportReadyFails() throws Exception {
// Use a special SecurityPolicy that lets us act before the transport is setup/ready.
transport =
new BinderClientTransportBuilder().setSecurityPolicy(blockingSecurityPolicy).build();
transport.start(transportListener).run();
ClientStream stream =
transport.newStream(streamingMethodDesc, new Metadata(), CallOptions.DEFAULT, tracers);
stream.start(streamListener);
assertThat(streamListener.awaitClose().getCode()).isEqualTo(Code.INTERNAL);
// Unblock the SETUP_TRANSPORT handshake and make sure it becomes ready in the usual way.
blockingSecurityPolicy.provideNextCheckAuthorizationResult(Status.OK);
transportListener.awaitReady();
}
@Test
public void testTxnFailureDuringSetup() throws Exception {
BlockingBinderDecorator<ThrowingOneWayBinderProxy> decorator = new BlockingBinderDecorator<>();
transport = new BinderClientTransportBuilder().setBinderDecorator(decorator).build();
transport.start(transportListener).run();
ThrowingOneWayBinderProxy endpointBinder =
new ThrowingOneWayBinderProxy(decorator.takeNextRequest());
DeadObjectException doe = new DeadObjectException("ouch");
endpointBinder.setRemoteException(doe);
decorator.putNextResult(endpointBinder);
Status shutdownStatus = transportListener.awaitShutdown();
assertThat(shutdownStatus.getCode()).isEqualTo(Code.UNAVAILABLE);
assertThat(shutdownStatus.getCause()).isInstanceOf(RemoteException.class);
transportListener.awaitTermination();
ClientStream stream =
transport.newStream(streamingMethodDesc, new Metadata(), CallOptions.DEFAULT, tracers);
stream.start(streamListener);
Status streamStatus = streamListener.awaitClose();
assertThat(streamStatus.getCode()).isEqualTo(Code.UNAVAILABLE);
assertThat(streamStatus.getCause()).isSameInstanceAs(doe);
}
@Test
public void testTxnFailurePostSetup() throws Exception {
BlockingBinderDecorator<ThrowingOneWayBinderProxy> decorator = new BlockingBinderDecorator<>();
transport = new BinderClientTransportBuilder().setBinderDecorator(decorator).build();
transport.start(transportListener).run();
ThrowingOneWayBinderProxy endpointBinder =
new ThrowingOneWayBinderProxy(decorator.takeNextRequest());
decorator.putNextResult(endpointBinder);
ThrowingOneWayBinderProxy serverBinder =
new ThrowingOneWayBinderProxy(decorator.takeNextRequest());
DeadObjectException doe = new DeadObjectException("ouch");
serverBinder.setRemoteException(doe);
decorator.putNextResult(serverBinder);
transportListener.awaitReady();
ClientStream stream =
transport.newStream(streamingMethodDesc, new Metadata(), CallOptions.DEFAULT, tracers);
stream.start(streamListener);
stream.writeMessage(marshaller.stream(Empty.getDefaultInstance()));
stream.halfClose();
stream.request(1);
Status streamStatus = streamListener.awaitClose();
assertThat(streamStatus.getCode()).isEqualTo(Code.UNAVAILABLE);
assertThat(streamStatus.getCause()).isSameInstanceAs(doe);
}
@Test
public void testServerBinderDeadOnArrival() throws Exception {
BlockingBinderDecorator<OneWayBinderProxy> decorator = new BlockingBinderDecorator<>();
transport = new BinderClientTransportBuilder().setBinderDecorator(decorator).build();
transport.start(transportListener).run();
decorator.putNextResult(decorator.takeNextRequest()); // Server's "Endpoint" Binder.
OneWayBinderProxy unusedServerBinder = decorator.takeNextRequest();
decorator.putNextResult(
OneWayBinderProxy.wrap(new FakeDeadBinder(), offloadServicePool.getObject()));
Status clientStatus = transportListener.awaitShutdown();
assertThat(clientStatus.getCode()).isEqualTo(Code.UNAVAILABLE);
assertThat(clientStatus.getDescription()).contains("Failed to observe outgoing binder");
}
@Test
public void testBlackHoleEndpointConnectTimeout() throws Exception {
BlockingBinderDecorator<BlackHoleOneWayBinderProxy> decorator = new BlockingBinderDecorator<>();
transport =
new BinderClientTransportBuilder()
.setBinderDecorator(decorator)
.setReadyTimeoutMillis(1_234)
.build();
transport.start(transportListener).run();
BlackHoleOneWayBinderProxy endpointBinder =
new BlackHoleOneWayBinderProxy(decorator.takeNextRequest());
endpointBinder.dropAllTransactions(true);
decorator.putNextResult(endpointBinder);
Status transportStatus = transportListener.awaitShutdown();
assertThat(transportStatus.getCode()).isEqualTo(Code.DEADLINE_EXCEEDED);
assertThat(transportStatus.getDescription()).contains("1234");
transportListener.awaitTermination();
}
@Test
public void testBlackHoleSecurityPolicyAuthTimeout() throws Exception {
SettableAsyncSecurityPolicy securityPolicy = new SettableAsyncSecurityPolicy();
transport =
new BinderClientTransportBuilder()
.setSecurityPolicy(securityPolicy)
.setPreAuthorizeServer(false)
.setReadyTimeoutMillis(1_234)
.build();
transport.start(transportListener).run();
// Take the next authRequest but don't respond to it, in order to trigger the ready timeout.
AuthRequest authRequest = securityPolicy.takeNextAuthRequest(TIMEOUT_SECONDS, SECONDS);
Status transportStatus = transportListener.awaitShutdown();
assertThat(transportStatus.getCode()).isEqualTo(Code.DEADLINE_EXCEEDED);
assertThat(transportStatus.getDescription()).contains("1234");
transportListener.awaitTermination();
// If the transport gave up waiting on auth, it should cancel its request.
assertThat(authRequest.isCancelled()).isTrue();
}
@Test
public void testBlackHoleSecurityPolicyPreAuthTimeout() throws Exception {
SettableAsyncSecurityPolicy securityPolicy = new SettableAsyncSecurityPolicy();
transport =
new BinderClientTransportBuilder()
.setSecurityPolicy(securityPolicy)
.setPreAuthorizeServer(true)
.setReadyTimeoutMillis(1_234)
.build();
transport.start(transportListener).run();
// Take the next authRequest but don't respond to it, in order to trigger the ready timeout.
AuthRequest preAuthRequest = securityPolicy.takeNextAuthRequest(TIMEOUT_SECONDS, SECONDS);
Status transportStatus = transportListener.awaitShutdown();
assertThat(transportStatus.getCode()).isEqualTo(Code.DEADLINE_EXCEEDED);
assertThat(transportStatus.getDescription()).contains("1234");
transportListener.awaitTermination();
// If the transport gave up waiting on auth, it should cancel its request.
assertThat(preAuthRequest.isCancelled()).isTrue();
}
@Test
public void testAsyncSecurityPolicyAuthFailure() throws Exception {
SettableAsyncSecurityPolicy securityPolicy = new SettableAsyncSecurityPolicy();
transport =
new BinderClientTransportBuilder()
.setPreAuthorizeServer(false)
.setSecurityPolicy(securityPolicy)
.build();
RuntimeException exception = new NullPointerException();
transport.start(transportListener).run();
securityPolicy.takeNextAuthRequest(TIMEOUT_SECONDS, SECONDS).setResult(exception);
Status transportStatus = transportListener.awaitShutdown();
assertThat(transportStatus.getCode()).isEqualTo(Code.INTERNAL);
assertThat(transportStatus.getCause()).isEqualTo(exception);
transportListener.awaitTermination();
}
@Test
public void testAsyncSecurityPolicyPreAuthFailure() throws Exception {
SettableAsyncSecurityPolicy securityPolicy = new SettableAsyncSecurityPolicy();
transport =
new BinderClientTransportBuilder()
.setPreAuthorizeServer(true)
.setSecurityPolicy(securityPolicy)
.build();
RuntimeException exception = new NullPointerException();
transport.start(transportListener).run();
securityPolicy.takeNextAuthRequest(TIMEOUT_SECONDS, SECONDS).setResult(exception);
Status transportStatus = transportListener.awaitShutdown();
assertThat(transportStatus.getCode()).isEqualTo(Code.INTERNAL);
assertThat(transportStatus.getCause()).isEqualTo(exception);
transportListener.awaitTermination();
}
@Test
public void testAsyncSecurityPolicyAuthSuccess() throws Exception {
SettableAsyncSecurityPolicy securityPolicy = new SettableAsyncSecurityPolicy();
transport =
new BinderClientTransportBuilder()
.setPreAuthorizeServer(false)
.setSecurityPolicy(securityPolicy)
.build();
transport.start(transportListener).run();
securityPolicy
.takeNextAuthRequest(TIMEOUT_SECONDS, SECONDS)
.setResult(Status.PERMISSION_DENIED.withDescription("xyzzy"));
Status transportStatus = transportListener.awaitShutdown();
assertThat(transportStatus.getCode()).isEqualTo(Code.PERMISSION_DENIED);
assertThat(transportStatus.getDescription()).contains("xyzzy");
transportListener.awaitTermination();
}
@Test
public void testAsyncSecurityPolicyPreAuthSuccess() throws Exception {
SettableAsyncSecurityPolicy securityPolicy = new SettableAsyncSecurityPolicy();
transport =
new BinderClientTransportBuilder()
.setPreAuthorizeServer(true)
.setSecurityPolicy(securityPolicy)
.build();
transport.start(transportListener).run();
securityPolicy
.takeNextAuthRequest(TIMEOUT_SECONDS, SECONDS)
.setResult(Status.PERMISSION_DENIED.withDescription("xyzzy"));
Status transportStatus = transportListener.awaitShutdown();
assertThat(transportStatus.getCode()).isEqualTo(Code.PERMISSION_DENIED);
assertThat(transportStatus.getDescription()).contains("xyzzy");
transportListener.awaitTermination();
}
@Test
public void testAsyncSecurityPolicyCancelledUponExternalTermination() throws Exception {
SettableAsyncSecurityPolicy securityPolicy = new SettableAsyncSecurityPolicy();
transport = new BinderClientTransportBuilder().setSecurityPolicy(securityPolicy).build();
transport.start(transportListener).run();
AuthRequest authRequest = securityPolicy.takeNextAuthRequest(TIMEOUT_SECONDS, SECONDS);
transport.shutdownNow(Status.UNAVAILABLE); // 'authRequest' remains unanswered!
transportListener.awaitShutdown();
transportListener.awaitTermination();
assertThat(authRequest.isCancelled()).isTrue();
}
private static void startAndAwaitReady(
BinderClientTransport transport, TestTransportListener transportListener) throws Exception {
transport.start(transportListener).run();
transportListener.awaitReady();
}
private static final
|
BinderClientTransportBuilder
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/refaster/UClassIdentTest.java
|
{
"start": 2676,
"end": 3284
}
|
class ____
inliner.addImport("package.subpackage.Foo.Bar");
// Will import "package.Foo"
assertInlines("Foo.Bar", UClassIdent.create("package.Foo.Bar"));
assertInlines("Bar", UClassIdent.create("package.subpackage.Foo.Bar"));
// Will not import "anotherPackage.Foo" due to conflicts
assertInlines("anotherPackage.Foo.Bar", UClassIdent.create("anotherPackage.Foo.Bar"));
new EqualsTester()
.addEqualityGroup(
inliner.getImportsToAdd(),
ImmutableSet.of("package.Exception", "package.subpackage.Foo.Bar", "package.Foo"))
.testEquals();
}
}
|
names
|
java
|
spring-projects__spring-boot
|
module/spring-boot-actuator/src/test/java/org/springframework/boot/actuate/info/GitInfoContributorTests.java
|
{
"start": 1357,
"end": 3823
}
|
class ____ {
@Test
@SuppressWarnings("unchecked")
void coerceDate() {
Properties properties = new Properties();
properties.put("branch", "master");
properties.put("commit.time", "2016-03-04T14:36:33+0100");
GitInfoContributor contributor = new GitInfoContributor(new GitProperties(properties));
Map<String, Object> content = contributor.generateContent();
assertThat(content.get("commit")).isInstanceOf(Map.class);
Map<String, Object> commit = (Map<String, Object>) content.get("commit");
Object commitTime = commit.get("time");
assertThat(commitTime).isInstanceOf(Instant.class);
assertThat(((Instant) commitTime).toEpochMilli()).isEqualTo(1457098593000L);
}
@Test
@SuppressWarnings("unchecked")
void shortenCommitId() {
Properties properties = new Properties();
properties.put("branch", "master");
properties.put("commit.id", "8e29a0b0d423d2665c6ee5171947c101a5c15681");
GitInfoContributor contributor = new GitInfoContributor(new GitProperties(properties));
Map<String, Object> content = contributor.generateContent();
assertThat(content.get("commit")).isInstanceOf(Map.class);
Map<String, Object> commit = (Map<String, Object>) content.get("commit");
assertThat(commit).containsEntry("id", "8e29a0b");
}
@Test
@SuppressWarnings("unchecked")
void withGitIdAndAbbrev() {
// gh-11892
Properties properties = new Properties();
properties.put("branch", "master");
properties.put("commit.id", "1b3cec34f7ca0a021244452f2cae07a80497a7c7");
properties.put("commit.id.abbrev", "1b3cec3");
GitInfoContributor contributor = new GitInfoContributor(new GitProperties(properties), Mode.FULL);
Map<String, Object> content = contributor.generateContent();
Map<String, Object> commit = (Map<String, Object>) content.get("commit");
assertThat(commit).isNotNull();
assertThat(commit.get("id")).isInstanceOf(Map.class);
Map<String, Object> id = (Map<String, Object>) commit.get("id");
assertThat(id).containsEntry("full", "1b3cec34f7ca0a021244452f2cae07a80497a7c7");
assertThat(id).containsEntry("abbrev", "1b3cec3");
}
@Test
void shouldRegisterHints() {
RuntimeHints runtimeHints = new RuntimeHints();
new GitInfoContributorRuntimeHints().registerHints(runtimeHints, getClass().getClassLoader());
assertThat(RuntimeHintsPredicates.reflection()
.onType(GitProperties.class)
.withMemberCategories(MemberCategory.INVOKE_DECLARED_CONSTRUCTORS)).accepts(runtimeHints);
}
}
|
GitInfoContributorTests
|
java
|
quarkusio__quarkus
|
extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/tracing/cdi/AddingSpanAttributesInterceptor.java
|
{
"start": 2035,
"end": 3077
}
|
class ____ implements ParameterAttributeNamesExtractor {
@Override
public String[] extract(final Method method, final Parameter[] parameters) {
String[] attributeNames = new String[parameters.length];
for (int i = 0; i < parameters.length; i++) {
attributeNames[i] = attributeName(parameters[i]);
}
return attributeNames;
}
private static String attributeName(Parameter parameter) {
String value = null;
SpanAttribute spanAttribute = parameter.getDeclaredAnnotation(SpanAttribute.class);
if (spanAttribute != null) {
value = spanAttribute.value();
} else {
return null;
}
if (!value.isEmpty()) {
return value;
} else if (parameter.isNamePresent()) {
return parameter.getName();
} else {
return null;
}
}
}
}
|
WithSpanParameterAttributeNamesExtractor
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/refaster/testdata/template/AutoboxingTemplate.java
|
{
"start": 1016,
"end": 1236
}
|
class ____ {
@BeforeTemplate
public <E> List<E> singletonList(E e) {
return Collections.singletonList(e);
}
@AfterTemplate
public <E> List<E> asList(E e) {
return Arrays.asList(e);
}
}
|
AutoboxingTemplate
|
java
|
junit-team__junit5
|
jupiter-tests/src/test/java/org/junit/jupiter/engine/support/OpenTest4JAndJUnit4AwareThrowableCollectorTests.java
|
{
"start": 2029,
"end": 3017
}
|
class ____.junit.internal.AssumptionViolatedException: " +
"only supporting org.opentest4j.TestAbortedException for aborted execution.");
// @formatter:on
});
}
@Test
void simulateHamcrestNotInTheClasspath(LogRecordListener listener) throws Throwable {
TestClassLoader classLoader = new TestClassLoader(false, true);
doWithCustomClassLoader(classLoader, () -> {
// Ensure that our custom ClassLoader actually throws a NoClassDefFoundError
// when attempting to load the AssumptionViolatedException class.
assertThrows(NoClassDefFoundError.class,
() -> ReflectionUtils.tryToLoadClass(AssumptionViolatedException.class.getName()).get());
Class<?> clazz = classLoader.loadClass(OpenTest4JAndJUnit4AwareThrowableCollector.class.getName());
assertNotNull(ReflectionUtils.newInstance(clazz));
// @formatter:off
assertThat(listener.stream(Level.FINE).map(LogRecord::getMessage).findFirst().orElse("<not found>"))
.isEqualTo(
"Failed to load
|
org
|
java
|
elastic__elasticsearch
|
x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MLModelDeploymentFullClusterRestartIT.java
|
{
"start": 1426,
"end": 11774
}
|
class ____ extends AbstractXpackFullClusterRestartTestCase {
// See PyTorchModelIT for how this model was created
static final String BASE_64_ENCODED_MODEL =
"UEsDBAAACAgAAAAAAAAAAAAAAAAAAAAAAAAUAA4Ac2ltcGxlbW9kZWwvZGF0YS5wa2xGQgoAWlpaWlpaWlpaWoACY19fdG9yY2hfXwp"
+ "TdXBlclNpbXBsZQpxACmBfShYCAAAAHRyYWluaW5ncQGIdWJxAi5QSwcIXOpBBDQAAAA0AAAAUEsDBBQACAgIAAAAAAAAAAAAAAAAAA"
+ "AAAAAdAEEAc2ltcGxlbW9kZWwvY29kZS9fX3RvcmNoX18ucHlGQj0AWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaW"
+ "lpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWnWOMWvDMBCF9/yKI5MMrnHTQsHgjt2aJdlCEIp9SgWSTpykFvfXV1htaYds0nfv473Jqhjh"
+ "kAPywbhgUbzSnC02wwZAyqBYOUzIUUoY4XRe6SVr/Q8lVsYbf4UBLkS2kBk1aOIPxbOIaPVQtEQ8vUnZ/WlrSxTA+JCTNHMc4Ig+Ele"
+ "s+Jod+iR3N/jDDf74wxu4e/5+DmtE9mUyhdgFNq7bZ3ekehbruC6aTxS/c1rom6Z698WrEfIYxcn4JGTftLA7tzCnJeD41IJVC+U07k"
+ "umUHw3E47Vqh+xnULeFisYLx064mV8UTZibWFMmX0p23wBUEsHCE0EGH3yAAAAlwEAAFBLAwQUAAgICAAAAAAAAAAAAAAAAAAAAAAAJ"
+ "wA5AHNpbXBsZW1vZGVsL2NvZGUvX190b3JjaF9fLnB5LmRlYnVnX3BrbEZCNQBaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpa"
+ "WlpaWlpaWlpaWlpaWlpaWlpaWlpaWrWST0+DMBiHW6bOod/BGS94kKpo2Mwyox5x3pbgiXSAFtdR/nQu3IwHiZ9oX88CaeGu9tL0efq"
+ "+v8P7fmiGA1wgTgoIcECZQqe6vmYD6G4hAJOcB1E8NazTm+ELyzY4C3Q0z8MsRwF+j4JlQUPEEo5wjH0WB9hCNFqgpOCExZY5QnnEw7"
+ "ME+0v8GuaIs8wnKI7RigVrKkBzm0lh2OdjkeHllG28f066vK6SfEypF60S+vuYt4gjj2fYr/uPrSvRv356TepfJ9iWJRN0OaELQSZN3"
+ "FRPNbcP1PTSntMr0x0HzLZQjPYIEo3UaFeiISRKH0Mil+BE/dyT1m7tCBLwVO1MX4DK3bbuTlXuy8r71j5Aoho66udAoseOnrdVzx28"
+ "UFW6ROuO/lT6QKKyo79VU54emj9QSwcInsUTEDMBAAAFAwAAUEsDBAAACAgAAAAAAAAAAAAAAAAAAAAAAAAZAAYAc2ltcGxlbW9kZWw"
+ "vY29uc3RhbnRzLnBrbEZCAgBaWoACKS5QSwcIbS8JVwQAAAAEAAAAUEsDBAAACAgAAAAAAAAAAAAAAAAAAAAAAAATADsAc2ltcGxlbW"
+ "9kZWwvdmVyc2lvbkZCNwBaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaMwpQSwcI0"
+ "Z5nVQIAAAACAAAAUEsBAgAAAAAICAAAAAAAAFzqQQQ0AAAANAAAABQAAAAAAAAAAAAAAAAAAAAAAHNpbXBsZW1vZGVsL2RhdGEucGts"
+ "UEsBAgAAFAAICAgAAAAAAE0EGH3yAAAAlwEAAB0AAAAAAAAAAAAAAAAAhAAAAHNpbXBsZW1vZGVsL2NvZGUvX190b3JjaF9fLnB5UEs"
+ "BAgAAFAAICAgAAAAAAJ7FExAzAQAABQMAACcAAAAAAAAAAAAAAAAAAgIAAHNpbXBsZW1vZGVsL2NvZGUvX190b3JjaF9fLnB5LmRlYn"
+ "VnX3BrbFBLAQIAAAAACAgAAAAAAABtLwlXBAAAAAQAAAAZAAAAAAAAAAAAAAAAAMMDAABzaW1wbGVtb2RlbC9jb25zdGFudHMucGtsU"
+ "EsBAgAAAAAICAAAAAAAANGeZ1UCAAAAAgAAABMAAAAAAAAAAAAAAAAAFAQAAHNpbXBsZW1vZGVsL3ZlcnNpb25QSwYGLAAAAAAAAAAe"
+ "Ay0AAAAAAAAAAAAFAAAAAAAAAAUAAAAAAAAAagEAAAAAAACSBAAAAAAAAFBLBgcAAAAA/AUAAAAAAAABAAAAUEsFBgAAAAAFAAUAagE"
+ "AAJIEAAAAAA==";
static final long RAW_MODEL_SIZE; // size of the model before base64 encoding
static {
RAW_MODEL_SIZE = Base64.getDecoder().decode(BASE_64_ENCODED_MODEL).length;
}
public MLModelDeploymentFullClusterRestartIT(@Name("cluster") FullClusterRestartUpgradeStatus upgradeStatus) {
super(upgradeStatus);
}
@Before
public void setLogging() throws IOException {
Request loggingSettings = new Request("PUT", "_cluster/settings");
loggingSettings.setJsonEntity("""
{"persistent" : {
"logger.org.elasticsearch.xpack.ml.inference.assignment" : "TRACE",
"logger.org.elasticsearch.xpack.ml.process.assignment.planning" : "TRACE",
"logger.org.elasticsearch.xpack.ml.inference.deployment" : "TRACE",
"logger.org.elasticsearch.xpack.ml.process.logging" : "TRACE"
}}""");
client().performRequest(loggingSettings);
}
@Override
protected Settings restClientSettings() {
String token = "Basic " + Base64.getEncoder().encodeToString("test_user:x-pack-test-password".getBytes(StandardCharsets.UTF_8));
return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build();
}
public void testDeploymentSurvivesRestart() throws Exception {
String modelId = "trained-model-full-cluster-restart";
if (isRunningAgainstOldCluster()) {
createTrainedModel(modelId);
putModelDefinition(modelId);
putVocabulary(List.of("these", "are", "my", "words"), modelId);
startDeployment(modelId);
assertInfer(modelId);
} else {
ensureHealth(".ml-inference-*,.ml-config*", (request -> {
request.addParameter("wait_for_status", "yellow");
request.addParameter("timeout", "70s");
}));
waitForDeploymentStarted(modelId);
assertBusy(() -> {
try {
assertInfer(modelId);
} catch (ResponseException e) {
// assertBusy only loops on AssertionErrors, so we have
// to convert failure status exceptions to these
throw new AssertionError("Inference failed", e);
}
}, 90, TimeUnit.SECONDS);
stopDeployment(modelId);
}
}
@SuppressWarnings("unchecked")
private void waitForDeploymentStarted(String modelId) throws Exception {
assertBusy(() -> {
var response = getTrainedModelStats(modelId);
Map<String, Object> map = entityAsMap(response);
List<Map<String, Object>> stats = (List<Map<String, Object>>) map.get("trained_model_stats");
assertThat(stats, hasSize(1));
var stat = stats.get(0);
assertThat(
stat.toString(),
XContentMapValues.extractValue("deployment_stats.allocation_status.state", stat),
equalTo("fully_allocated")
);
assertThat(stat.toString(), XContentMapValues.extractValue("deployment_stats.state", stat), equalTo("started"));
}, 120, TimeUnit.SECONDS);
}
private void assertInfer(String modelId) throws IOException {
Response inference = infer("my words", modelId);
assertThat(EntityUtils.toString(inference.getEntity()), equalTo("{\"inference_results\":[{\"predicted_value\":[[1.0,1.0]]}]}"));
}
private void putModelDefinition(String modelId) throws IOException {
Request request = new Request("PUT", "_ml/trained_models/" + modelId + "/definition/0");
request.setJsonEntity(Strings.format("""
{"total_definition_length":%s,"definition": "%s","total_parts": 1}""", RAW_MODEL_SIZE, BASE_64_ENCODED_MODEL));
client().performRequest(request);
}
private void putVocabulary(List<String> vocabulary, String modelId) throws IOException {
List<String> vocabularyWithPad = new ArrayList<>();
vocabularyWithPad.add("[PAD]");
vocabularyWithPad.add("[UNK]");
vocabularyWithPad.addAll(vocabulary);
String quotedWords = vocabularyWithPad.stream().map(s -> "\"" + s + "\"").collect(Collectors.joining(","));
Request request = new Request("PUT", "_ml/trained_models/" + modelId + "/vocabulary");
request.setJsonEntity(Strings.format("""
{ "vocabulary": [%s] }
""", quotedWords));
client().performRequest(request);
}
private void createTrainedModel(String modelId) throws IOException {
Request request = new Request("PUT", "/_ml/trained_models/" + modelId);
request.setJsonEntity("""
{
"description": "simple model for testing",
"model_type": "pytorch",
"inference_config": {
"pass_through": {
"tokenization": {
"bert": {
"with_special_tokens": false
}
}
}
}
}""");
client().performRequest(request);
}
private Response startDeployment(String modelId) throws IOException {
return startDeployment(modelId, AllocationStatus.State.STARTED.toString());
}
private Response startDeployment(String modelId, String waitForState) throws IOException {
String inferenceThreadParamName = "threads_per_allocation";
String modelThreadParamName = "number_of_allocations";
String compatibleHeader = null;
if (isRunningAgainstOldCluster()) {
compatibleHeader = compatibleMediaType(XContentType.VND_JSON, RestApiVersion.V_8);
inferenceThreadParamName = "inference_threads";
modelThreadParamName = "model_threads";
}
Request request = new Request(
"POST",
"/_ml/trained_models/"
+ modelId
+ "/deployment/_start?timeout=40s&wait_for="
+ waitForState
+ "&"
+ inferenceThreadParamName
+ "=1&"
+ modelThreadParamName
+ "=1"
);
if (compatibleHeader != null) {
request.setOptions(request.getOptions().toBuilder().addHeader("Accept", compatibleHeader).build());
}
request.setOptions(request.getOptions().toBuilder().setWarningsHandler(PERMISSIVE).build());
var response = client().performRequest(request);
assertOK(response);
return response;
}
private void stopDeployment(String modelId) throws IOException {
String endpoint = "/_ml/trained_models/" + modelId + "/deployment/_stop";
Request request = new Request("POST", endpoint);
client().performRequest(request);
}
private Response getTrainedModelStats(String modelId) throws IOException {
Request request = new Request("GET", "/_ml/trained_models/" + modelId + "/_stats");
var response = client().performRequest(request);
assertOK(response);
return response;
}
private Response infer(String input, String modelId) throws IOException {
Request request = new Request("POST", "/_ml/trained_models/" + modelId + "/_infer");
request.setJsonEntity(Strings.format("""
{ "docs": [{"input":"%s"}] }
""", input));
var response = client().performRequest(request);
assertOK(response);
return response;
}
}
|
MLModelDeploymentFullClusterRestartIT
|
java
|
netty__netty
|
codec-compression/src/test/java/io/netty/handler/codec/compression/ByteBufChecksumTest.java
|
{
"start": 1123,
"end": 3405
}
|
class ____ {
private static final byte[] BYTE_ARRAY = new byte[1024];
@BeforeAll
public static void setUp() {
new Random().nextBytes(BYTE_ARRAY);
}
@Test
public void testHeapByteBufUpdate() {
testUpdate(Unpooled.wrappedBuffer(BYTE_ARRAY));
}
@Test
public void testDirectByteBufUpdate() {
ByteBuf buf = Unpooled.directBuffer(BYTE_ARRAY.length);
buf.writeBytes(BYTE_ARRAY);
testUpdate(buf);
}
private static void testUpdate(ByteBuf buf) {
try {
// all variations of xxHash32: slow and naive, optimised, wrapped optimised;
// the last two should be literally identical, but it's best to guard against
// an accidental regression in ByteBufChecksum#wrapChecksum(Checksum)
testUpdate(xxHash32(DEFAULT_SEED), ByteBufChecksum.wrapChecksum(xxHash32(DEFAULT_SEED)), buf);
testUpdate(xxHash32(DEFAULT_SEED), new Lz4XXHash32(DEFAULT_SEED), buf);
testUpdate(xxHash32(DEFAULT_SEED), ByteBufChecksum.wrapChecksum(new Lz4XXHash32(DEFAULT_SEED)), buf);
// CRC32 and Adler32, special-cased to use ReflectiveByteBufChecksum
testUpdate(new CRC32(), ByteBufChecksum.wrapChecksum(new CRC32()), buf);
testUpdate(new Adler32(), ByteBufChecksum.wrapChecksum(new Adler32()), buf);
} finally {
buf.release();
}
}
private static void testUpdate(Checksum checksum, ByteBufChecksum wrapped, ByteBuf buf) {
testUpdate(checksum, wrapped, buf, 0, BYTE_ARRAY.length);
testUpdate(checksum, wrapped, buf, 0, BYTE_ARRAY.length - 1);
testUpdate(checksum, wrapped, buf, 1, BYTE_ARRAY.length - 1);
testUpdate(checksum, wrapped, buf, 1, BYTE_ARRAY.length - 2);
}
private static void testUpdate(Checksum checksum, ByteBufChecksum wrapped, ByteBuf buf, int off, int len) {
checksum.reset();
wrapped.reset();
checksum.update(BYTE_ARRAY, off, len);
wrapped.update(buf, off, len);
assertEquals(checksum.getValue(), wrapped.getValue());
}
private static Checksum xxHash32(int seed) {
return XXHashFactory.fastestInstance().newStreamingHash32(seed).asChecksum();
}
}
|
ByteBufChecksumTest
|
java
|
apache__flink
|
flink-state-backends/flink-statebackend-forst/src/main/java/org/apache/flink/state/forst/ContextKey.java
|
{
"start": 1262,
"end": 3838
}
|
class ____<K, N> {
private final RecordContext<K> recordContext;
@Nullable private Object userKey;
@Nullable private final N namespace;
public ContextKey(RecordContext<K> recordContext, @Nullable N namespace) {
this(recordContext, namespace, null);
}
public ContextKey(RecordContext<K> recordContext, @Nullable N namespace, Object userKey) {
this.recordContext = recordContext;
this.namespace = namespace;
this.userKey = userKey;
}
public K getRawKey() {
return recordContext.getKey();
}
public int getKeyGroup() {
return recordContext.getKeyGroup();
}
public N getNamespace() {
return namespace;
}
public Object getUserKey() {
return userKey;
}
public void setUserKey(Object userKey) {
this.userKey = userKey;
resetExtra();
}
public void resetExtra() {
recordContext.setExtra(null);
}
/**
* Get the serialized key. If the cached serialized key within {@code RecordContext#payload} is
* null, the provided serialization function will be called, and the serialization result will
* be cached by {@code RecordContext#payload}.
*
* @param serializeKeyFunc the provided serialization function for this contextKey.
* @return the serialized bytes.
*/
public byte[] getOrCreateSerializedKey(
FunctionWithException<ContextKey<K, N>, byte[], IOException> serializeKeyFunc)
throws IOException {
byte[] serializedKey = (byte[]) recordContext.getExtra();
if (serializedKey != null) {
return serializedKey;
}
synchronized (recordContext) {
serializedKey = (byte[]) recordContext.getExtra();
if (serializedKey == null) {
serializedKey = serializeKeyFunc.apply(this);
recordContext.setExtra(serializedKey);
}
}
return serializedKey;
}
@Override
public int hashCode() {
return Objects.hash(recordContext);
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
ContextKey<?, ?> that = (ContextKey<?, ?>) o;
return Objects.equals(recordContext, that.recordContext);
}
@Override
public String toString() {
return "ContextKey{recordCtx:" + recordContext.toString() + ", userKey:" + userKey + "}";
}
}
|
ContextKey
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/test/support/ModifiedClassPathClassLoader.java
|
{
"start": 2430,
"end": 9068
}
|
class ____ extends URLClassLoader {
private static final Map<Class<?>, ModifiedClassPathClassLoader> cache = new ConcurrentReferenceHashMap<>();
private static final Pattern INTELLIJ_CLASSPATH_JAR_PATTERN = Pattern.compile(".*classpath(\\d+)?\\.jar");
private static final int MAX_RESOLUTION_ATTEMPTS = 5;
private final ClassLoader junitLoader;
ModifiedClassPathClassLoader(URL[] urls, ClassLoader parent, ClassLoader junitLoader) {
super(urls, parent);
this.junitLoader = junitLoader;
}
@Override
public Class<?> loadClass(String name) throws ClassNotFoundException {
if (name.startsWith("org.junit") || name.startsWith("org.hamcrest")
|| name.startsWith("io.netty.internal.tcnative")) {
return Class.forName(name, false, this.junitLoader);
}
return super.loadClass(name);
}
static ModifiedClassPathClassLoader get(Class<?> testClass) {
return cache.computeIfAbsent(testClass, ModifiedClassPathClassLoader::compute);
}
private static ModifiedClassPathClassLoader compute(Class<?> testClass) {
ClassLoader classLoader = testClass.getClassLoader();
MergedAnnotations annotations = MergedAnnotations.from(testClass,
MergedAnnotations.SearchStrategy.TYPE_HIERARCHY);
if (annotations.isPresent(ForkedClassPath.class) && (annotations.isPresent(ClassPathOverrides.class)
|| annotations.isPresent(ClassPathExclusions.class))) {
throw new IllegalStateException("@ForkedClassPath is redundant in combination with either "
+ "@ClassPathOverrides or @ClassPathExclusions");
}
return new ModifiedClassPathClassLoader(processUrls(extractUrls(classLoader), annotations),
classLoader.getParent(), classLoader);
}
private static URL[] extractUrls(ClassLoader classLoader) {
List<URL> extractedUrls = new ArrayList<>();
doExtractUrls(classLoader).forEach((URL url) -> {
if (isManifestOnlyJar(url)) {
extractedUrls.addAll(extractUrlsFromManifestClassPath(url));
}
else {
extractedUrls.add(url);
}
});
return extractedUrls.toArray(new URL[0]);
}
private static Stream<URL> doExtractUrls(ClassLoader classLoader) {
if (classLoader instanceof URLClassLoader urlClassLoader) {
return Stream.of(urlClassLoader.getURLs());
}
return Stream.of(ManagementFactory.getRuntimeMXBean().getClassPath().split(File.pathSeparator))
.map(ModifiedClassPathClassLoader::toURL);
}
private static URL toURL(String entry) {
try {
return new File(entry).toURI().toURL();
}
catch (Exception ex) {
throw new IllegalArgumentException(ex);
}
}
private static boolean isManifestOnlyJar(URL url) {
return isShortenedIntelliJJar(url);
}
private static boolean isShortenedIntelliJJar(URL url) {
String urlPath = url.getPath();
boolean isCandidate = INTELLIJ_CLASSPATH_JAR_PATTERN.matcher(urlPath).matches();
if (isCandidate) {
try {
Attributes attributes = getManifestMainAttributesFromUrl(url);
String createdBy = attributes.getValue("Created-By");
return createdBy != null && createdBy.contains("IntelliJ");
}
catch (Exception ex) {
}
}
return false;
}
private static List<URL> extractUrlsFromManifestClassPath(URL booterJar) {
List<URL> urls = new ArrayList<>();
try {
for (String entry : getClassPath(booterJar)) {
urls.add(new URL(entry));
}
}
catch (Exception ex) {
throw new RuntimeException(ex);
}
return urls;
}
private static String[] getClassPath(URL booterJar) throws Exception {
Attributes attributes = getManifestMainAttributesFromUrl(booterJar);
return StringUtils.delimitedListToStringArray(attributes.getValue(Attributes.Name.CLASS_PATH), " ");
}
private static Attributes getManifestMainAttributesFromUrl(URL url) throws Exception {
try (JarFile jarFile = new JarFile(new File(url.toURI()))) {
return jarFile.getManifest().getMainAttributes();
}
}
private static URL[] processUrls(URL[] urls, MergedAnnotations annotations) {
ClassPathEntryFilter filter = new ClassPathEntryFilter(annotations.get(ClassPathExclusions.class));
List<URL> additionalUrls = getAdditionalUrls(annotations.get(ClassPathOverrides.class));
List<URL> processedUrls = new ArrayList<>(additionalUrls);
for (URL url : urls) {
if (!filter.isExcluded(url)) {
processedUrls.add(url);
}
}
return processedUrls.toArray(new URL[0]);
}
private static List<URL> getAdditionalUrls(MergedAnnotation<ClassPathOverrides> annotation) {
if (!annotation.isPresent()) {
return Collections.emptyList();
}
return resolveCoordinates(annotation.getStringArray(MergedAnnotation.VALUE));
}
private static List<URL> resolveCoordinates(String[] coordinates) {
Exception latestFailure = null;
DefaultServiceLocator serviceLocator = MavenRepositorySystemUtils.newServiceLocator();
serviceLocator.addService(RepositoryConnectorFactory.class, BasicRepositoryConnectorFactory.class);
serviceLocator.addService(TransporterFactory.class, HttpTransporterFactory.class);
RepositorySystem repositorySystem = serviceLocator.getService(RepositorySystem.class);
DefaultRepositorySystemSession session = MavenRepositorySystemUtils.newSession();
LocalRepository localRepository = new LocalRepository(System.getProperty("user.home") + "/.m2/repository");
RemoteRepository remoteRepository = new RemoteRepository.Builder("central", "default",
"https://repo.maven.apache.org/maven2")
.build();
session.setLocalRepositoryManager(repositorySystem.newLocalRepositoryManager(session, localRepository));
for (int i = 0; i < MAX_RESOLUTION_ATTEMPTS; i++) {
CollectRequest collectRequest = new CollectRequest(null, Arrays.asList(remoteRepository));
collectRequest.setDependencies(createDependencies(coordinates));
DependencyRequest dependencyRequest = new DependencyRequest(collectRequest, null);
try {
DependencyResult result = repositorySystem.resolveDependencies(session, dependencyRequest);
List<URL> resolvedArtifacts = new ArrayList<>();
for (ArtifactResult artifact : result.getArtifactResults()) {
resolvedArtifacts.add(artifact.getArtifact().getFile().toURI().toURL());
}
return resolvedArtifacts;
}
catch (Exception ex) {
latestFailure = ex;
}
}
throw new IllegalStateException("Resolution failed after " + MAX_RESOLUTION_ATTEMPTS + " attempts",
latestFailure);
}
private static List<Dependency> createDependencies(String[] allCoordinates) {
List<Dependency> dependencies = new ArrayList<>();
for (String coordinate : allCoordinates) {
dependencies.add(new Dependency(new DefaultArtifact(coordinate), null));
}
return dependencies;
}
/**
* Filter for
|
ModifiedClassPathClassLoader
|
java
|
quarkusio__quarkus
|
independent-projects/tools/registry-client/src/main/java/io/quarkus/registry/config/RegistriesConfigLocator.java
|
{
"start": 604,
"end": 10053
}
|
class ____ {
public static final String CONFIG_RELATIVE_PATH = ".quarkus/config.yaml";
public static final String CONFIG_FILE_PATH_PROPERTY = "quarkus.tools.config";
static final String QUARKUS_REGISTRIES = "QUARKUS_REGISTRIES";
static final String QUARKUS_REGISTRY_ENV_VAR_PREFIX = "QUARKUS_REGISTRY_";
static final String RECOMMEND_STREAMS_FROM_ = "RECOMMEND_STREAMS_FROM_";
/**
* Locate the registry client configuration file and deserialize it.
*
* The method will look for the file in the following locations in this order:
* <ol>
* <li>if <code>quarkus.config.root</code> system property is set, its value will be
* used as the location of the configuration file</li>
* <li>current user directory (which usually would be the project dir)</li>
* <li><code>.quarkus/config.yaml</code> in the user home directory
* </ol>
*
* If the configuration file can't be located (it is optional),
* an empty configuration will be returned to the caller.
*
* @return registry client configuration, never null
*/
public static RegistriesConfig resolveConfig() {
final RegistriesConfig config = initFromEnvironmentOrNull(System.getenv());
if (config != null) {
return config;
}
final Path configYaml = locateConfigYaml();
if (configYaml == null) {
return new RegistriesConfigImpl.Builder().build().setSource(ConfigSource.DEFAULT);
}
return load(configYaml);
}
/**
* Deserializes a given configuration file.
*
* @param configYaml configuration file
* @return deserialized registry client configuration
*/
public static RegistriesConfig load(Path configYaml) {
try {
RegistriesConfigImpl.Builder config = RegistriesConfigMapperHelper.deserialize(configYaml,
RegistriesConfigImpl.Builder.class);
if (config == null) { // empty file
config = new RegistriesConfigImpl.Builder();
}
return config.setSource(new ConfigSource.FileConfigSource(configYaml)).build();
} catch (IOException e) {
throw new IllegalStateException("Failed to parse config file " + configYaml, e);
}
}
/**
* Deserializes registry client configuration from an input stream.
*
* @param configYaml input stream
* @return deserialized registry client configuration
*/
public static RegistriesConfig load(InputStream configYaml) {
try {
RegistriesConfigImpl.Builder instance = RegistriesConfigMapperHelper.deserializeYaml(configYaml,
RegistriesConfigImpl.Builder.class);
return instance == null ? null : instance.build();
} catch (IOException e) {
throw new IllegalStateException("Failed to parse config file " + configYaml, e);
}
}
/**
* Deserializes registry client configuration from a reader.
*
* @param configYaml reader
* @return deserialized registry client configuration
*/
public static RegistriesConfig load(Reader configYaml) {
try {
RegistriesConfigImpl.Builder instance = RegistriesConfigMapperHelper.deserializeYaml(configYaml,
RegistriesConfigImpl.Builder.class);
return instance == null ? null : instance.build();
} catch (IOException e) {
throw new IllegalStateException("Failed to parse config file " + configYaml, e);
}
}
/**
* Returns the registry client configuration file or null, if the file could not be found.
*
* @return the registry client configuration file or null, if the file could not be found
*/
public static Path locateConfigYaml() {
return locateConfigYaml(null);
}
/**
* Returns the registry client configuration file or null if the file could not be found.
*
* @param configYaml Path to a pre-specified config file (e.g. a command line argument)
* @return the registry client configuration file or null if the file could not be found.
*/
public static Path locateConfigYaml(Path configYaml) {
if (configYaml == null) {
final String prop = System.getProperty(CONFIG_FILE_PATH_PROPERTY);
if (prop != null) {
configYaml = Paths.get(prop);
if (!Files.exists(configYaml)) {
throw new IllegalStateException("Quarkus extension registry configuration file " + configYaml
+ " specified by the system property " + CONFIG_FILE_PATH_PROPERTY + " does not exist");
}
return configYaml;
}
configYaml = Paths.get("").normalize().toAbsolutePath().resolve(CONFIG_RELATIVE_PATH);
if (Files.exists(configYaml)) {
return configYaml;
}
configYaml = getDefaultConfigYamlLocation();
}
return Files.exists(configYaml) ? configYaml : null;
}
/**
* Returns the default location of the registry client configuration file.
*
* @return the default location of the registry client configuration file
*/
static Path getDefaultConfigYamlLocation() {
return Paths.get(System.getProperty("user.home")).resolve(CONFIG_RELATIVE_PATH);
}
/**
* Returns the default registry client configuration which should be used in case
* no configuration file was found in the user's environment.
*
* @return default registry client configuration
*/
public static RegistryConfig getDefaultRegistry() {
return RegistryConfigImpl.getDefaultRegistry();
}
/**
* @param map A Map containing environment variables, e.g. {@link System#getenv()}
* @return A RegistriesConfig object initialized from environment variables.
*/
static RegistriesConfig initFromEnvironmentOrNull(Map<String, String> map) {
final String envRegistries = map.get(QUARKUS_REGISTRIES);
if (envRegistries == null || envRegistries.isBlank()) {
return null;
}
RegistriesConfigImpl.Builder registriesConfigBuilder = new RegistriesConfigImpl.Builder();
for (String registryId : envRegistries.split(",")) {
final RegistryConfigImpl.Builder builder = new RegistryConfigImpl.Builder()
.setId(registryId);
final String envvarPrefix = getEnvVarPrefix(registryId);
for (Map.Entry<String, String> var : map.entrySet()) {
final String envvarName = var.getKey();
if (!envvarName.startsWith(envvarPrefix)) {
continue;
}
if (isEnvVarOption(envvarName, envvarPrefix, "UPDATE_POLICY")) {
builder.setUpdatePolicy(var.getValue());
} else if (isEnvVarOption(envvarName, envvarPrefix, "REPO_URL")) {
builder.setMaven(RegistryMavenConfig.builder()
.setRepository(RegistryMavenRepoConfig.builder()
.setUrl(var.getValue())
.build())
.build());
} else if (isEnvVarOption(envvarName, envvarPrefix, "OFFERING")) {
builder.setExtra(Constants.OFFERING, var.getValue());
} else if (isEnvVarOption(envvarName, envvarPrefix, RECOMMEND_STREAMS_FROM_)) {
// the format for recommend-streams-from is
// QUARKUS_REGISTRY_<REGISTRY_ID>_RECOMMEND_STREAMS_FROM_<PLATFORM_KEY>=<STREAM_ID>
int i = envvarPrefix.length() + RECOMMEND_STREAMS_FROM_.length();
final StringBuilder platformKey = new StringBuilder(envvarName.length() - i);
while (i < envvarName.length()) {
final char ch = envvarName.charAt(i++);
platformKey.append(ch == '_' ? '.' : Character.toLowerCase(ch));
}
builder.computeExtraIfAbsent(Constants.RECOMMEND_STREAMS_FROM, RegistriesConfigLocator::newMap)
.put(platformKey.toString(), var.getValue());
}
}
registriesConfigBuilder.addRegistry(builder.build());
}
return registriesConfigBuilder
.build()
.setSource(ConfigSource.ENV);
}
private static boolean isEnvVarOption(String varName, String registryPrefix, String optionName) {
return varName.regionMatches(registryPrefix.length(), optionName, 0, optionName.length());
}
private static String getEnvVarPrefix(String registryId) {
final StringBuilder buf = new StringBuilder(QUARKUS_REGISTRY_ENV_VAR_PREFIX.length() + registryId.length() + 1)
.append(QUARKUS_REGISTRY_ENV_VAR_PREFIX);
for (int i = 0; i < registryId.length(); ++i) {
final char c = registryId.charAt(i);
if (c == '.') {
buf.append('_');
} else {
buf.append(Character.toUpperCase(c));
}
}
return buf.append('_').toString();
}
private static Map<String, String> newMap(String key) {
return new HashMap<>(1);
}
}
|
RegistriesConfigLocator
|
java
|
quarkusio__quarkus
|
extensions/panache/hibernate-orm-panache/runtime/src/main/java/io/quarkus/hibernate/orm/panache/PanacheEntity.java
|
{
"start": 338,
"end": 868
}
|
class ____ gain the ID field and auto-generated accessors
* to all their public fields (unless annotated with {@link Transient}), as well as all
* the useful methods from {@link PanacheEntityBase}.
* </p>
* <p>
* If you want a custom ID type or strategy, you can directly extend {@link PanacheEntityBase}
* instead, and write your own ID field. You will still get auto-generated accessors and
* all the useful methods.
* </p>
*
* @author Stéphane Épardaud
* @see PanacheEntityBase
*/
@MappedSuperclass
public abstract
|
they
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/health/metadata/HealthMetadataSerializationTests.java
|
{
"start": 1048,
"end": 7355
}
|
class ____ extends SimpleDiffableWireSerializationTestCase<ClusterState.Custom> {
@Override
protected ClusterState.Custom makeTestChanges(ClusterState.Custom testInstance) {
if (randomBoolean()) {
return testInstance;
}
return mutate((HealthMetadata) testInstance);
}
@Override
protected Writeable.Reader<Diff<ClusterState.Custom>> diffReader() {
return HealthMetadata::readDiffFrom;
}
@Override
protected Writeable.Reader<ClusterState.Custom> instanceReader() {
return HealthMetadata::new;
}
@Override
protected NamedWriteableRegistry getNamedWriteableRegistry() {
return new NamedWriteableRegistry(
List.of(new NamedWriteableRegistry.Entry(ClusterState.Custom.class, HealthMetadata.TYPE, HealthMetadata::new))
);
}
@Override
protected ClusterState.Custom createTestInstance() {
return randomHealthMetadata();
}
@Override
protected ClusterState.Custom mutateInstance(ClusterState.Custom instance) {
return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929
}
private static HealthMetadata randomHealthMetadata() {
return new HealthMetadata(randomDiskMetadata(), randomShardLimitsMetadata());
}
private static HealthMetadata.ShardLimits randomShardLimitsMetadata() {
return randomBoolean() ? randomShardLimitsMetadataNonNull() : null;
}
private static HealthMetadata.ShardLimits randomShardLimitsMetadataNonNull() {
return new HealthMetadata.ShardLimits(
randomIntBetween(1, 10000),
randomIntBetween(1, 10000),
randomIntBetween(1, 10000),
randomIntBetween(1, 10000)
);
}
private static HealthMetadata.Disk randomDiskMetadata() {
return new HealthMetadata.Disk(
randomRelativeByteSizeValue(),
ByteSizeValue.ofGb(randomIntBetween(10, 999)),
randomRelativeByteSizeValue(),
ByteSizeValue.ofGb(randomIntBetween(10, 999)),
randomRelativeByteSizeValue(),
ByteSizeValue.ofGb(randomIntBetween(10, 999))
);
}
private static RelativeByteSizeValue randomRelativeByteSizeValue() {
if (randomBoolean()) {
return new RelativeByteSizeValue(ByteSizeValue.ofGb(randomIntBetween(10, 999)));
} else {
return new RelativeByteSizeValue(new RatioValue(randomDouble()));
}
}
static HealthMetadata.Disk mutate(HealthMetadata.Disk base) {
RelativeByteSizeValue highWatermark = base.highWatermark();
ByteSizeValue highWatermarkMaxHeadRoom = base.highMaxHeadroom();
RelativeByteSizeValue floodStageWatermark = base.floodStageWatermark();
ByteSizeValue floodStageWatermarkMaxHeadRoom = base.floodStageMaxHeadroom();
RelativeByteSizeValue floodStageWatermarkFrozen = base.frozenFloodStageWatermark();
ByteSizeValue floodStageWatermarkFrozenMaxHeadRoom = base.frozenFloodStageMaxHeadroom();
switch (randomInt(5)) {
case 0 -> highWatermark = randomValueOtherThan(highWatermark, HealthMetadataSerializationTests::randomRelativeByteSizeValue);
case 1 -> highWatermarkMaxHeadRoom = randomValueOtherThan(
highWatermarkMaxHeadRoom,
() -> ByteSizeValue.ofGb(randomIntBetween(10, 999))
);
case 2 -> floodStageWatermark = randomValueOtherThan(
floodStageWatermark,
HealthMetadataSerializationTests::randomRelativeByteSizeValue
);
case 3 -> floodStageWatermarkMaxHeadRoom = randomValueOtherThan(
floodStageWatermarkMaxHeadRoom,
() -> ByteSizeValue.ofGb(randomIntBetween(10, 999))
);
case 4 -> floodStageWatermarkFrozen = randomValueOtherThan(
floodStageWatermarkFrozen,
HealthMetadataSerializationTests::randomRelativeByteSizeValue
);
case 5 -> floodStageWatermarkFrozenMaxHeadRoom = randomValueOtherThan(
floodStageWatermarkFrozenMaxHeadRoom,
() -> ByteSizeValue.ofGb(randomIntBetween(10, 999))
);
}
return new HealthMetadata.Disk(
highWatermark,
highWatermarkMaxHeadRoom,
floodStageWatermark,
floodStageWatermarkMaxHeadRoom,
floodStageWatermarkFrozen,
floodStageWatermarkFrozenMaxHeadRoom
);
}
static HealthMetadata.ShardLimits mutate(HealthMetadata.ShardLimits base) {
if (base == null) {
return randomShardLimitsMetadataNonNull();
}
int maxShardsPerNode = base.maxShardsPerNode();
int maxShardsPerNodeFrozen = base.maxShardsPerNodeFrozen();
int shardCapacityUnhealthyThresholdYellow = base.shardCapacityUnhealthyThresholdYellow();
int shardCapacityUnhealthyThresholdRed = base.shardCapacityUnhealthyThresholdRed();
switch (randomInt(3)) {
case 0 -> maxShardsPerNode = randomValueOtherThan(maxShardsPerNode, () -> randomIntBetween(1, 10000));
case 1 -> maxShardsPerNodeFrozen = randomValueOtherThan(maxShardsPerNodeFrozen, () -> randomIntBetween(1, 10000));
case 2 -> shardCapacityUnhealthyThresholdYellow = randomValueOtherThan(
shardCapacityUnhealthyThresholdYellow,
() -> randomIntBetween(1, 10000)
);
case 3 -> shardCapacityUnhealthyThresholdRed = randomValueOtherThan(
shardCapacityUnhealthyThresholdRed,
() -> randomIntBetween(1, 10000)
);
}
return new HealthMetadata.ShardLimits(
maxShardsPerNode,
maxShardsPerNodeFrozen,
shardCapacityUnhealthyThresholdYellow,
shardCapacityUnhealthyThresholdRed
);
}
private HealthMetadata mutate(HealthMetadata base) {
return new HealthMetadata(mutate(base.getDiskMetadata()), mutate(base.getShardLimitsMetadata()));
}
public void testChunking() {
AbstractChunkedSerializingTestCase.assertChunkCount(createTestInstance(), ignored -> 1);
}
}
|
HealthMetadataSerializationTests
|
java
|
quarkusio__quarkus
|
extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/currentrequest/CurrentRequestDisabledTest.java
|
{
"start": 341,
"end": 1046
}
|
class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addAsResource(new StringAsset(
"quarkus.arc.exclude-types=io.quarkus.vertx.http.runtime.CurrentRequestProducer"),
"application.properties")
.addAsResource(new StringAsset(
"Hello {inject:vertxRequest.getParam('name')}!"),
"templates/request.txt"))
.setExpectedException(TemplateException.class);
@Test
public void testCurrentRequest() {
fail();
}
}
|
CurrentRequestDisabledTest
|
java
|
quarkusio__quarkus
|
core/deployment/src/main/java/io/quarkus/deployment/dev/DisableInstrumentationForIndexPredicateBuildItem.java
|
{
"start": 320,
"end": 794
}
|
class ____ extends MultiBuildItem
implements Supplier<Predicate<Index>> {
private final Predicate<Index> predicate;
public DisableInstrumentationForIndexPredicateBuildItem(Predicate<Index> predicate) {
this.predicate = predicate;
}
public Predicate<Index> getPredicate() {
return predicate;
}
@Override
public Predicate<Index> get() {
return getPredicate();
}
}
|
DisableInstrumentationForIndexPredicateBuildItem
|
java
|
quarkusio__quarkus
|
extensions/quartz/runtime/src/main/java/io/quarkus/quartz/runtime/QuarkusQuartzConnectionPoolProvider.java
|
{
"start": 380,
"end": 2095
}
|
class ____ implements PoolingConnectionProvider {
private AgroalDataSource dataSource;
private static String dataSourceName;
public QuarkusQuartzConnectionPoolProvider() {
final ArcContainer container = Arc.container();
final InstanceHandle<AgroalDataSource> instanceHandle;
final boolean useDefaultDataSource = "QUARKUS_QUARTZ_DEFAULT_DATASOURCE".equals(dataSourceName);
if (useDefaultDataSource) {
instanceHandle = container.instance(AgroalDataSource.class);
} else {
instanceHandle = container.instance(AgroalDataSource.class, new DataSourceLiteral(dataSourceName));
}
if (instanceHandle.isAvailable()) {
this.dataSource = instanceHandle.get();
} else {
String message = String.format(
"JDBC Store configured but '%s' datasource is missing. You can configure your datasource by following the guide available at: https://quarkus.io/guides/datasource",
useDefaultDataSource ? "default" : dataSourceName);
throw new IllegalStateException(message);
}
}
@Override
public DataSource getDataSource() {
return dataSource;
}
@Override
public Connection getConnection() throws SQLException {
return dataSource.getConnection();
}
@Override
public void shutdown() {
// Do nothing as the connection will be closed inside the Agroal extension
}
@Override
public void initialize() {
}
static void setDataSourceName(String dataSourceName) {
QuarkusQuartzConnectionPoolProvider.dataSourceName = dataSourceName;
}
}
|
QuarkusQuartzConnectionPoolProvider
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsFilter.java
|
{
"start": 1224,
"end": 2039
}
|
class ____ implements MetricsPlugin {
/**
* Whether to accept the name
* @param name to filter on
* @return true to accept; false otherwise.
*/
public abstract boolean accepts(String name);
/**
* Whether to accept the tag
* @param tag to filter on
* @return true to accept; false otherwise
*/
public abstract boolean accepts(MetricsTag tag);
/**
* Whether to accept the tags
* @param tags to filter on
* @return true to accept; false otherwise
*/
public abstract boolean accepts(Iterable<MetricsTag> tags);
/**
* Whether to accept the record
* @param record to filter on
* @return true to accept; false otherwise.
*/
public boolean accepts(MetricsRecord record) {
return accepts(record.name()) && accepts(record.tags());
}
}
|
MetricsFilter
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/annotations/PropertyRef.java
|
{
"start": 988,
"end": 1085
}
|
class ____ {
* String first;
* String last;
* }
* @Entity
*
|
Name
|
java
|
apache__commons-lang
|
src/test/java/org/apache/commons/lang3/ValidateTest.java
|
{
"start": 46155,
"end": 47189
}
|
class ____ {
@Test
void shouldNotThrowExceptionForNonEmptyString() {
Validate.notEmpty("Hi", "MSG");
}
@Test
void shouldReturnTheSameInstance() {
assertSame("Hi", Validate.notEmpty("Hi", "MSG"));
}
@Test
void shouldThrowIllegalArgumentExceptionWithGivenMessageForEmptyString() {
final IllegalArgumentException ex = assertIllegalArgumentException(() -> Validate.notEmpty("", "MSG"));
assertEquals("MSG", ex.getMessage());
}
@Test
void shouldThrowNullPointerExceptionWithGivenMessageForNullCharSequence() {
final NullPointerException ex = assertNullPointerException(() -> Validate.notEmpty((CharSequence) null, "MSG"));
assertEquals("MSG", ex.getMessage());
}
}
@Nested
final
|
WithMessage
|
java
|
google__dagger
|
javatests/dagger/hilt/android/InjectionTest.java
|
{
"start": 10209,
"end": 10610
}
|
class ____
extends Hilt_InjectionTest_TestBroadcastReceiverWithBaseImplementingOnReceive {
@Inject @ApplicationLevel String appBinding;
Intent baseLastIntent = null;
@Override
public void onReceive(Context context, Intent intent) {
super.onReceive(context, intent);
baseLastIntent = intent;
}
}
abstract static
|
TestBroadcastReceiverWithBaseImplementingOnReceive
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/web/server/OAuth2ClientSpecTests.java
|
{
"start": 15987,
"end": 17055
}
|
class ____ {
ReactiveAuthenticationManager manager = mock(ReactiveAuthenticationManager.class);
ServerAuthenticationConverter authenticationConverter = mock(ServerAuthenticationConverter.class);
ServerAuthorizationRequestRepository<OAuth2AuthorizationRequest> authorizationRequestRepository = mock(
ServerAuthorizationRequestRepository.class);
ServerOAuth2AuthorizationRequestResolver resolver = mock(ServerOAuth2AuthorizationRequestResolver.class);
ServerRequestCache requestCache = mock(ServerRequestCache.class);
@Bean
SecurityWebFilterChain springSecurityFilter(ServerHttpSecurity http) {
// @formatter:off
http
.oauth2Client((client) -> client
.authenticationConverter(this.authenticationConverter)
.authenticationManager(this.manager)
.authorizationRequestRepository(this.authorizationRequestRepository)
.authorizationRequestResolver(this.resolver))
.requestCache((c) -> c.requestCache(this.requestCache));
// @formatter:on
return http.build();
}
}
@Configuration
static
|
OAuth2ClientCustomConfig
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/sql/mysql/alter/MySqlAlterViewTest_0.java
|
{
"start": 906,
"end": 1740
}
|
class ____ extends TestCase {
public void test_alter_first() throws Exception {
String sql = "alter definer = 'ivan'@'%' view my_view3 as select count(*) from t3;";
MySqlStatementParser parser = new MySqlStatementParser(sql);
SQLStatement stmt = parser.parseStatementList().get(0);
parser.match(Token.EOF);
assertEquals("ALTER DEFINER = 'ivan'@'%'\n" +
"\tVIEW my_view3\n" +
"AS\n" +
"SELECT count(*)\n" +
"FROM t3;", SQLUtils.toMySqlString(stmt));
assertEquals("alter definer = 'ivan'@'%'\n" +
"\tview my_view3\n" +
"as\n" +
"select count(*)\n" +
"from t3;", SQLUtils.toMySqlString(stmt, SQLUtils.DEFAULT_LCASE_FORMAT_OPTION));
}
}
|
MySqlAlterViewTest_0
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/ModuleValidationTest.java
|
{
"start": 12284,
"end": 13198
}
|
class ____ {}");
CompilerTests.daggerCompiler(badModule, module)
.compile(
subject -> {
subject.hasErrorCount(2);
subject.hasErrorContaining("test.BadModule has errors")
.onSource(module)
.onLine(5);
subject.hasErrorContaining(
"@Binds methods must have exactly one parameter, whose type is "
+ "assignable to the return type")
.onSource(badModule)
.onLine(8);
});
}
@Test
public void scopeOnModule() {
Source badModule =
CompilerTests.javaSource(
"test.BadModule",
"package test;",
"",
"import dagger.Module;",
"import javax.inject.Singleton;",
"",
"@Singleton",
"@Module",
"
|
IncludesBadModule
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.