language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/records/AuxServiceRecords.java | {
"start": 1250,
"end": 1602
} | class ____ {
private List<AuxServiceRecord> services = new ArrayList<>();
public AuxServiceRecords serviceList(AuxServiceRecord... serviceList) {
for (AuxServiceRecord service : serviceList) {
this.services.add(service);
}
return this;
}
public List<AuxServiceRecord> getServices() {
return services;
}
}
| AuxServiceRecords |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/core/parameters/PathParamExtractor.java | {
"start": 288,
"end": 1119
} | class ____ implements ParameterExtractor {
private final int index;
private final boolean encoded;
private final boolean single;
public PathParamExtractor(int index, boolean encoded, boolean single) {
this.index = index;
this.encoded = encoded;
this.single = single;
}
@Override
public Object extractParameter(ResteasyReactiveRequestContext context) {
String pathParam = context.getPathParam(index, true);
if (single) {
return encoded ? pathParam : Encode.decodePath(pathParam);
} else {
return encoded
? List.of(pathParam.split("/"))
: Arrays.stream(pathParam.split("/")).map(Encode::decodePath)
.collect(Collectors.toList());
}
}
}
| PathParamExtractor |
java | junit-team__junit5 | documentation/src/test/java/example/registration/DocumentationDemo.java | {
"start": 689,
"end": 1095
} | class ____ {
//end::user_guide[]
@Nullable
//tag::user_guide[]
static Path lookUpDocsDir() {
// return path to docs dir
// end::user_guide[]
return null;
// tag::user_guide[]
}
@RegisterExtension
DocumentationExtension docs = DocumentationExtension.forPath(lookUpDocsDir());
@Test
void generateDocumentation() {
// use this.docs ...
}
}
//end::user_guide[]
@NullMarked
| DocumentationDemo |
java | spring-projects__spring-boot | core/spring-boot-test/src/test/java/org/springframework/boot/test/context/AnnotationsPropertySourceTests.java | {
"start": 11536,
"end": 11656
} | interface ____ {
}
@PropertiesFromMultipleMetaAnnotationsAnnotation
static | PropertiesFromSingleMetaAnnotationAnnotation |
java | spring-projects__spring-security | config/src/main/java/org/springframework/security/config/annotation/web/configurers/HeadersConfigurer.java | {
"start": 32103,
"end": 33019
} | class ____ {
private ContentSecurityPolicyHeaderWriter writer;
private ContentSecurityPolicyConfig() {
}
/**
* Sets the security policy directive(s) to be used in the response header.
* @param policyDirectives the security policy directive(s)
* @return the {@link ContentSecurityPolicyConfig} for additional configuration
* @throws IllegalArgumentException if policyDirectives is null or empty
*/
public ContentSecurityPolicyConfig policyDirectives(String policyDirectives) {
this.writer.setPolicyDirectives(policyDirectives);
return this;
}
/**
* Enables (includes) the Content-Security-Policy-Report-Only header in the
* response.
* @return the {@link ContentSecurityPolicyConfig} for additional configuration
*/
public ContentSecurityPolicyConfig reportOnly() {
this.writer.setReportOnly(true);
return this;
}
}
public final | ContentSecurityPolicyConfig |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/annotations/GenericGenerator.java | {
"start": 3428,
"end": 3878
} | class ____ {@link Generator}, or, more commonly,
* {@link org.hibernate.id.IdentifierGenerator}.
* </ul>
*
* @deprecated use {@link #type()} for typesafety
*/
@Deprecated(since="6.2", forRemoval = true)
String strategy() default "native";
/**
* Parameters to be passed to {@link org.hibernate.id.IdentifierGenerator#configure}
* when the identifier generator is instantiated.
*/
Parameter[] parameters() default {};
}
| implementing |
java | spring-projects__spring-framework | spring-aop/src/main/java/org/springframework/aop/config/AspectComponentDefinition.java | {
"start": 1206,
"end": 1954
} | class ____ extends CompositeComponentDefinition {
private final BeanDefinition[] beanDefinitions;
private final BeanReference[] beanReferences;
public AspectComponentDefinition(String aspectName, BeanDefinition @Nullable [] beanDefinitions,
BeanReference @Nullable [] beanReferences, @Nullable Object source) {
super(aspectName, source);
this.beanDefinitions = (beanDefinitions != null ? beanDefinitions : new BeanDefinition[0]);
this.beanReferences = (beanReferences != null ? beanReferences : new BeanReference[0]);
}
@Override
public BeanDefinition[] getBeanDefinitions() {
return this.beanDefinitions;
}
@Override
public BeanReference[] getBeanReferences() {
return this.beanReferences;
}
}
| AspectComponentDefinition |
java | apache__maven | impl/maven-core/src/main/java/org/apache/maven/plugin/PluginConfigurationException.java | {
"start": 1177,
"end": 3033
} | class ____ extends Exception {
private PluginDescriptor pluginDescriptor;
private String originalMessage;
public PluginConfigurationException(PluginDescriptor pluginDescriptor, String originalMessage) {
super(originalMessage);
this.pluginDescriptor = pluginDescriptor;
this.originalMessage = originalMessage;
}
public PluginConfigurationException(PluginDescriptor pluginDescriptor, String originalMessage, Throwable cause) {
super(originalMessage, cause);
this.pluginDescriptor = pluginDescriptor;
this.originalMessage = originalMessage;
}
/**
* Ctor left for binary compatibility.
*
* @deprecated Use {@link #PluginConfigurationException(PluginDescriptor, String, Throwable)}
*/
@Deprecated
public PluginConfigurationException(
PluginDescriptor pluginDescriptor, String originalMessage, ExpressionEvaluationException cause) {
this(pluginDescriptor, originalMessage, (Throwable) cause);
}
/**
* Ctor left for binary compatibility.
*
* @deprecated Use {@link #PluginConfigurationException(PluginDescriptor, String, Throwable)}
*/
@Deprecated
public PluginConfigurationException(
PluginDescriptor pluginDescriptor, String originalMessage, ComponentConfigurationException cause) {
this(pluginDescriptor, originalMessage, (Throwable) cause);
}
/**
* Ctor left for binary compatibility.
*
* @deprecated Use {@link #PluginConfigurationException(PluginDescriptor, String, Throwable)}
*/
@Deprecated
public PluginConfigurationException(
PluginDescriptor pluginDescriptor, String originalMessage, ComponentLookupException cause) {
this(pluginDescriptor, originalMessage, (Throwable) cause);
}
}
| PluginConfigurationException |
java | google__guice | core/test/com/google/inject/BinderTestSuite.java | {
"start": 21948,
"end": 22231
} | class ____ extends Injectable {
@Inject
public void inject(
AWithProvidedBy aWithProvidedBy, Provider<AWithProvidedBy> aWithProvidedByProvider) {
this.value = aWithProvidedBy;
this.provider = aWithProvidedByProvider;
}
}
static | InjectsAWithProvidedBy |
java | quarkusio__quarkus | extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/RbacConfig.java | {
"start": 2404,
"end": 3326
} | interface ____ {
/**
* Name of the RoleBinding resource to be generated. If not provided, it will use the application name plus the role
* ref name.
*/
Optional<String> name();
/**
* Labels to add into the RoleBinding resource.
*/
@ConfigDocMapKey("label-name")
Map<String, String> labels();
/**
* The name of the Role resource to use by the RoleRef element in the generated Role Binding resource.
* By default, it's "view" role name.
*/
Optional<String> roleName();
/**
* If the Role sets in the `role-name` property is cluster wide or not.
*/
Optional<Boolean> clusterWide();
/**
* List of subjects elements to use in the generated RoleBinding resource.
*/
Map<String, SubjectConfig> subjects();
}
| RoleBindingConfig |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/injection/InjectorTests.java | {
"start": 4179,
"end": 4646
} | interface ____ {}
record Service2(Service1 service1) {}
record Service3(Service2 service2) implements Service1 {}
assertThrows(IllegalStateException.class, () -> {
MethodHandles.lookup();
Injector injector = Injector.create();
injector.addClasses(List.of(Service2.class, Service3.class)).inject(List.of());
});
}
// Common injectable things
public record Service1() {}
public | Service1 |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/EntryPointAssertions_setExtractBareNamePropertyMethods_Test.java | {
"start": 1099,
"end": 2417
} | class ____ extends EntryPointAssertionsBaseTest {
private static final boolean DEFAULT_EXTRACTING_BARE_NAME_PROPERTY_METHODS = Introspection.canExtractBareNamePropertyMethods();
@AfterEach
void afterEachTest() {
// reset to the default value to avoid side effects on the other tests
Introspection.setExtractBareNamePropertyMethods(DEFAULT_EXTRACTING_BARE_NAME_PROPERTY_METHODS);
}
@ParameterizedTest
@MethodSource("setAllowExtractingBareNamePropertyMethodsFunctions")
void should_set_allowComparingPrivateFields_value(Consumer<Boolean> setAllowExtractingBareNamePropertyMethodsFunction) {
// GIVEN
boolean extractBareNamePropertyMethods = !DEFAULT_EXTRACTING_BARE_NAME_PROPERTY_METHODS;
// WHEN
setAllowExtractingBareNamePropertyMethodsFunction.accept(extractBareNamePropertyMethods);
// THEN
then(Introspection.canExtractBareNamePropertyMethods()).isEqualTo(extractBareNamePropertyMethods);
}
private static Stream<Consumer<Boolean>> setAllowExtractingBareNamePropertyMethodsFunctions() {
return Stream.of(Assertions::setExtractBareNamePropertyMethods,
BDDAssertions::setExtractBareNamePropertyMethods,
withAssertions::setExtractBareNamePropertyMethods);
}
}
| EntryPointAssertions_setExtractBareNamePropertyMethods_Test |
java | quarkusio__quarkus | integration-tests/main/src/test/java/io/quarkus/it/main/MyComponentTest.java | {
"start": 381,
"end": 620
} | class ____ {
@Inject
@ConfigProperty // name and default value are nonbinding
String myProperty;
@Test
public void testProperty() {
assertEquals("foo", myProperty);
}
@Singleton
static | MyComponentTest |
java | apache__flink | flink-clients/src/main/java/org/apache/flink/client/program/rest/retry/WaitStrategy.java | {
"start": 1056,
"end": 1315
} | interface ____ {
/**
* Returns the time to wait until the next attempt. Attempts start at {@code 0}.
*
* @param attempt The number of the last attempt.
* @return Waiting time in ms.
*/
long sleepTime(long attempt);
}
| WaitStrategy |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/testdata/BanSerializableReadNegativeCases.java | {
"start": 1139,
"end": 5053
} | class ____ implements Serializable, Externalizable {
public final String hi = "hi";
public Integer testField;
// mostly a smoke test
public static void noCrimesHere() {
System.out.println(new BanSerializableReadNegativeCases().hi);
}
/**
* The checker has a special allowlist that allows classes to define methods called readObject --
* Java accepts these as an override to the default serialization behaviour. While we want to
* allow such methods to be defined, we don't want to allow these methods to be called.
*
* <p>this version has the checks suppressed
*
* @throws IOException
* @throws ClassNotFoundException
*/
@SuppressWarnings("BanSerializableRead")
public static final void directCall() throws IOException, ClassNotFoundException {
PipedInputStream in = new PipedInputStream();
PipedOutputStream out = new PipedOutputStream(in);
ObjectOutputStream serializer = new ObjectOutputStream(out);
ObjectInputStream deserializer = new ObjectInputStream(in);
BanSerializableReadPositiveCases self = new BanSerializableReadPositiveCases();
self.readObject(deserializer);
}
/**
* Says 'hi' by piping the string value unsafely through Object I/O stream. This one has the check
* suppressed, though
*
* @throws IOException
* @throws ClassNotFoundException
*/
@SuppressWarnings("BanSerializableRead")
public static void sayHi() throws IOException, ClassNotFoundException {
PipedInputStream in = new PipedInputStream();
PipedOutputStream out = new PipedOutputStream(in);
ObjectOutputStream serializer = new ObjectOutputStream(out);
ObjectInputStream deserializer = new ObjectInputStream(in);
serializer.writeObject(new BanSerializableReadPositiveCases());
serializer.close();
BanSerializableReadPositiveCases crime =
(BanSerializableReadPositiveCases) deserializer.readObject();
System.out.println(crime.hi);
}
// These test the more esoteric annotations
// code has gone through a security review
@SuppressWarnings("BanSerializableRead")
public static void directCall2() throws IOException, ClassNotFoundException {
PipedInputStream in = new PipedInputStream();
PipedOutputStream out = new PipedOutputStream(in);
ObjectOutputStream serializer = new ObjectOutputStream(out);
ObjectInputStream deserializer = new ObjectInputStream(in);
BanSerializableReadPositiveCases self = new BanSerializableReadPositiveCases();
self.readObject(deserializer);
}
// code is well-tested legacy
@SuppressWarnings("BanSerializableRead")
public static final void directCall3() throws IOException, ClassNotFoundException {
PipedInputStream in = new PipedInputStream();
PipedOutputStream out = new PipedOutputStream(in);
ObjectOutputStream serializer = new ObjectOutputStream(out);
ObjectInputStream deserializer = new ObjectInputStream(in);
BanSerializableReadPositiveCases self = new BanSerializableReadPositiveCases();
self.readObject(deserializer);
}
// code is for Android
@SuppressWarnings("BanSerializableRead")
public static final void directCall4() throws IOException, ClassNotFoundException {
PipedInputStream in = new PipedInputStream();
PipedOutputStream out = new PipedOutputStream(in);
ObjectOutputStream serializer = new ObjectOutputStream(out);
ObjectInputStream deserializer = new ObjectInputStream(in);
BanSerializableReadPositiveCases self = new BanSerializableReadPositiveCases();
self.readObject(deserializer);
}
// calls to readObject should themselves be excluded in a readObject method
void readObject(ObjectInputStream ois) throws IOException, ClassNotFoundException {
BanSerializableReadNegativeCases c = new BanSerializableReadNegativeCases();
c.readObject(ois);
ois.defaultReadObject();
}
private static | BanSerializableReadNegativeCases |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/interceptor/InterceptorTransactionEventTest.java | {
"start": 543,
"end": 1305
} | class ____ {
@Test
public void testTransactionEvents(SessionFactoryScope factoryScope) {
LoggingInterceptor interceptor = new LoggingInterceptor();
factoryScope.inTransaction(
(sf) -> sf.withOptions().interceptor( interceptor ).openSession(),
(s) -> {
// Do nothing, open and closing the transaction is enough
}
);
Assertions.assertTrue( interceptor.isAfterTransactionBeginCalled(), "afterTransactionBeginCalled not called" );
Assertions.assertTrue( interceptor.isAfterTransactionCompletionCalled(),
"afterTransactionCompletionCalled not called" );
Assertions.assertTrue( interceptor.isBeforeTransactionCompletionCalled(),
"beforeTransactionCompletionCalled not called" );
}
private static | InterceptorTransactionEventTest |
java | mockito__mockito | mockito-core/src/test/java/org/mockitousage/annotation/WrongSetOfAnnotationsTest.java | {
"start": 1762,
"end": 4463
} | class ____ {
List<?> list;
}
@Test
public void should_not_allow_Mock_and_InjectMocks() {
assertThatThrownBy(
() -> {
MockitoAnnotations.openMocks(
new Object() {
@InjectMocks @Mock List<?> mock;
});
})
.isInstanceOf(MockitoException.class)
.hasMessage(
"This combination of annotations is not permitted on a single field:\n"
+ "@Mock and @InjectMocks");
}
@Test
public void should_not_allow_Captor_and_Mock() {
assertThatThrownBy(
() -> {
MockitoAnnotations.openMocks(
new Object() {
@Mock @Captor ArgumentCaptor<?> captor;
});
})
.isInstanceOf(MockitoException.class)
.hasMessageContainingAll(
"You cannot have more than one Mockito annotation on a field!",
"The field 'captor' has multiple Mockito annotations.",
"For info how to use annotations see examples in javadoc for MockitoAnnotations class.");
}
@Test
public void should_not_allow_Captor_and_Spy() {
assertThatThrownBy(
() -> {
MockitoAnnotations.openMocks(
new Object() {
@Spy @Captor ArgumentCaptor<?> captor;
});
})
.isInstanceOf(MockitoException.class)
.hasMessage(
"This combination of annotations is not permitted on a single field:\n"
+ "@Spy and @Captor");
}
@Test
public void should_not_allow_Captor_and_InjectMocks() {
assertThatThrownBy(
() -> {
MockitoAnnotations.openMocks(
new Object() {
@InjectMocks @Captor ArgumentCaptor<?> captor;
});
})
.isInstanceOf(MockitoException.class)
.hasMessage(
"This combination of annotations is not permitted on a single field:\n"
+ "@Captor and @InjectMocks");
}
}
| WithDependency |
java | apache__maven | its/core-it-support/core-it-plugins/maven-it-plugin-extension-provider/src/main/java/org/apache/maven/plugin/coreit/UpdateSingletonMojo.java | {
"start": 1514,
"end": 2791
} | class ____ extends AbstractMojo {
/**
*/
@Parameter(defaultValue = "provider")
private String key;
/**
*/
@Parameter(defaultValue = "passed")
private String value;
/**
*/
@Parameter
private File propertiesFile;
/**
*/
@Component
private StatefulSingleton singleton;
/**
* Runs this mojo.
*
* @throws MojoExecutionException If the output file could not be created.
*/
public void execute() throws MojoExecutionException {
getLog().info("[MAVEN-CORE-IT-LOG] Singleton Instance: " + System.identityHashCode(singleton));
getLog().info("[MAVEN-CORE-IT-LOG] Singleton Class Loader: "
+ singleton.getClass().getClassLoader());
getLog().info("[MAVEN-CORE-IT-LOG] Setting property " + key + " = " + value);
singleton.setProperty(key, value);
if (propertiesFile != null) {
getLog().info("[MAVEN-CORE-IT-LOG] Saving properties to " + propertiesFile);
try {
singleton.saveProperties(propertiesFile);
} catch (IOException e) {
throw new MojoExecutionException("Failed to save properties to " + propertiesFile, e);
}
}
}
}
| UpdateSingletonMojo |
java | apache__logging-log4j2 | log4j-appserver/src/main/java/org/apache/logging/log4j/appserver/tomcat/TomcatLogger.java | {
"start": 1491,
"end": 2317
} | class ____ well as the
* log4j-api and log4j-core jars must be added to Tomcat's boot classpath. This is most easily accomplished by
* placing these jars in a directory and then adding the contents of that directory to the CLASSPATH
* environment variable in setenv.sh in Tomcat's bin directory.
*
* The Log4j configuration file must also be present on the classpath. This implementation will use the
* first file it finds with one of the following file names: log4j2-tomcat.xml, log4j2-tomcat.json,
* log4j2-tomcat.yaml, log4j2-tomcat.yml, log4j2-tomcat.properties. Again, this can be accomplished by adding
* this file to a directory and then adding that directory to the CLASSPATH environment variable in setenv.sh.
*
* @since 2.10.0
*/
@ServiceProvider(value = Log.class, resolution = Resolution.OPTIONAL)
public | as |
java | quarkusio__quarkus | extensions/websockets-next/deployment/src/test/java/io/quarkus/websockets/next/test/security/HttpUpgradePermissionsAllowedAnnotationTest.java | {
"start": 1239,
"end": 4873
} | class ____ extends SecurityTestBase {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(Endpoint.class, WSClient.class, TestIdentityProvider.class, TestIdentityController.class,
AdminEndpoint.class, InclusiveEndpoint.class, MetaAnnotationEndpoint.class,
StringEndpointReadPermissionMetaAnnotation.class));
@TestHTTPResource("admin-end")
URI adminEndpointUri;
@TestHTTPResource("meta-annotation")
URI metaAnnotationEndpointUri;
@TestHTTPResource("inclusive-end")
URI inclusiveEndpointUri;
@Test
public void testInsufficientRights() {
try (WSClient client = new WSClient(vertx)) {
CompletionException ce = assertThrows(CompletionException.class,
() -> client.connect(basicAuth("user", "user"), adminEndpointUri));
Throwable root = ExceptionUtil.getRootCause(ce);
assertInstanceOf(UpgradeRejectedException.class, root);
assertTrue(root.getMessage().contains("403"));
}
try (WSClient client = new WSClient(vertx)) {
client.connect(basicAuth("admin", "admin"), adminEndpointUri);
client.waitForMessages(1);
assertEquals("ready", client.getMessages().get(0).toString());
client.sendAndAwait("hello");
client.waitForMessages(2);
assertEquals("hello", client.getMessages().get(1).toString());
}
}
@Test
public void testMetaAnnotation() {
try (WSClient client = new WSClient(vertx)) {
CompletionException ce = assertThrows(CompletionException.class,
() -> client.connect(basicAuth("user", "user"), metaAnnotationEndpointUri));
Throwable root = ExceptionUtil.getRootCause(ce);
assertInstanceOf(UpgradeRejectedException.class, root);
assertTrue(root.getMessage().contains("403"));
}
try (WSClient client = new WSClient(vertx)) {
client.connect(basicAuth("admin", "admin"), metaAnnotationEndpointUri);
client.waitForMessages(1);
assertEquals("ready", client.getMessages().get(0).toString());
client.sendAndAwait("hello");
client.waitForMessages(2);
assertEquals("hello", client.getMessages().get(1).toString());
}
}
@Test
public void testInclusivePermissions() {
Stream.of("admin", "user").forEach(name -> {
try (WSClient client = new WSClient(vertx)) {
CompletionException ce = assertThrows(CompletionException.class,
() -> client.connect(basicAuth(name, name), inclusiveEndpointUri));
Throwable root = ExceptionUtil.getRootCause(ce);
assertInstanceOf(UpgradeRejectedException.class, root);
assertTrue(root.getMessage().contains("403"));
}
});
try (WSClient client = new WSClient(vertx)) {
client.connect(basicAuth("almighty", "almighty"), inclusiveEndpointUri);
client.waitForMessages(1);
assertEquals("ready", client.getMessages().get(0).toString());
client.sendAndAwait("hello");
client.waitForMessages(2);
assertEquals("hello", client.getMessages().get(1).toString());
}
}
@PermissionsAllowed(value = { "perm1", "perm2" }, inclusive = true)
@WebSocket(path = "/inclusive-end")
public static | HttpUpgradePermissionsAllowedAnnotationTest |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainRefCountingTests.java | {
"start": 5434,
"end": 5764
} | class ____ extends TestAsyncActionFilter implements MappedActionFilter {
private TestAsyncMappedActionFilter(ThreadPool threadPool) {
super(threadPool);
}
@Override
public String actionName() {
return TYPE.name();
}
}
public static | TestAsyncMappedActionFilter |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/bug/Issue993.java | {
"start": 630,
"end": 893
} | class ____ {
@JSONField(name="student_name",ordinal = 0)
public String name;
@JSONField(name="student_age",ordinal = 1)
public int age;
@JSONField(name="student_grade",ordinal = 2)
public String grade;
}
}
| Student |
java | elastic__elasticsearch | libs/geo/src/test/java/org/elasticsearch/geometry/RectangleTests.java | {
"start": 857,
"end": 3192
} | class ____ extends BaseGeometryTestCase<Rectangle> {
@Override
protected Rectangle createTestInstance(boolean hasAlt) {
assumeFalse("3rd dimension is not supported yet", hasAlt);
return GeometryTestUtils.randomRectangle();
}
public void testBasicSerialization() throws IOException, ParseException {
GeometryValidator validator = GeographyValidator.instance(true);
assertEquals("BBOX (10.0, 20.0, 40.0, 30.0)", WellKnownText.toWKT(new Rectangle(10, 20, 40, 30)));
assertEquals(new Rectangle(10, 20, 40, 30), WellKnownText.fromWKT(validator, true, "BBOX (10.0, 20.0, 40.0, 30.0)"));
assertEquals("BBOX EMPTY", WellKnownText.toWKT(Rectangle.EMPTY));
assertEquals(Rectangle.EMPTY, WellKnownText.fromWKT(validator, true, "BBOX EMPTY)"));
}
public void testInitValidation() {
GeometryValidator validator = GeographyValidator.instance(true);
IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> validator.validate(new Rectangle(2, 3, 100, 1)));
assertEquals("invalid latitude 100.0; must be between -90.0 and 90.0", ex.getMessage());
ex = expectThrows(IllegalArgumentException.class, () -> validator.validate(new Rectangle(200, 3, 2, 1)));
assertEquals("invalid longitude 200.0; must be between -180.0 and 180.0", ex.getMessage());
ex = expectThrows(IllegalArgumentException.class, () -> validator.validate(new Rectangle(2, 3, 1, 2)));
assertEquals("max y cannot be less than min y", ex.getMessage());
ex = expectThrows(IllegalArgumentException.class, () -> validator.validate(new Rectangle(2, 3, 2, 1, 5, Double.NaN)));
assertEquals("only one z value is specified", ex.getMessage());
ex = expectThrows(
IllegalArgumentException.class,
() -> StandardValidator.instance(false).validate(new Rectangle(50, 10, 40, 30, 20, 60))
);
assertEquals("found Z value [20.0] but [ignore_z_value] parameter is [false]", ex.getMessage());
StandardValidator.instance(true).validate(new Rectangle(50, 10, 40, 30, 20, 60));
}
@Override
protected Rectangle mutateInstance(Rectangle instance) {
return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929
}
}
| RectangleTests |
java | elastic__elasticsearch | x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/optimizer/OptimizerRulesTests.java | {
"start": 6724,
"end": 61000
} | class ____ extends Expression {
private final int id;
public DummyBooleanExpression(Source source, int id) {
super(source, Collections.emptyList());
this.id = id;
}
@Override
protected NodeInfo<? extends Expression> info() {
return NodeInfo.create(this, DummyBooleanExpression::new, id);
}
@Override
public Expression replaceChildren(List<Expression> newChildren) {
throw new UnsupportedOperationException("this type of node doesn't have any children");
}
@Override
public Nullability nullable() {
return Nullability.FALSE;
}
@Override
public DataType dataType() {
return BOOLEAN;
}
@Override
public int hashCode() {
int h = getClass().hashCode();
h = 31 * h + id;
return h;
}
@Override
public boolean equals(Object obj) {
if (obj == null || getClass() != obj.getClass()) {
return false;
}
return id == ((DummyBooleanExpression) obj).id;
}
}
private static Literal L(Object value) {
return of(value);
}
private static FieldAttribute getFieldAttribute() {
return TestUtils.getFieldAttribute("a");
}
//
// Constant folding
//
public void testConstantFolding() {
Expression exp = new Add(EMPTY, TWO, THREE);
assertTrue(exp.foldable());
Expression result = new ConstantFolding().rule(exp);
assertTrue(result instanceof Literal);
assertEquals(5, ((Literal) result).value());
// check now with an alias
result = new ConstantFolding().rule(new Alias(EMPTY, "a", exp));
assertEquals("a", Expressions.name(result));
assertEquals(Alias.class, result.getClass());
}
public void testConstantFoldingBinaryComparison() {
assertEquals(FALSE, new ConstantFolding().rule(greaterThanOf(TWO, THREE)).canonical());
assertEquals(FALSE, new ConstantFolding().rule(greaterThanOrEqualOf(TWO, THREE)).canonical());
assertEquals(FALSE, new ConstantFolding().rule(equalsOf(TWO, THREE)).canonical());
assertEquals(FALSE, new ConstantFolding().rule(nullEqualsOf(TWO, THREE)).canonical());
assertEquals(FALSE, new ConstantFolding().rule(nullEqualsOf(TWO, NULL)).canonical());
assertEquals(TRUE, new ConstantFolding().rule(notEqualsOf(TWO, THREE)).canonical());
assertEquals(TRUE, new ConstantFolding().rule(lessThanOrEqualOf(TWO, THREE)).canonical());
assertEquals(TRUE, new ConstantFolding().rule(lessThanOf(TWO, THREE)).canonical());
}
public void testConstantFoldingBinaryLogic() {
assertEquals(FALSE, new ConstantFolding().rule(new And(EMPTY, greaterThanOf(TWO, THREE), TRUE)).canonical());
assertEquals(TRUE, new ConstantFolding().rule(new Or(EMPTY, greaterThanOrEqualOf(TWO, THREE), TRUE)).canonical());
}
public void testConstantFoldingBinaryLogic_WithNullHandling() {
assertEquals(Nullability.TRUE, new ConstantFolding().rule(new And(EMPTY, NULL, TRUE)).canonical().nullable());
assertEquals(Nullability.TRUE, new ConstantFolding().rule(new And(EMPTY, TRUE, NULL)).canonical().nullable());
assertEquals(FALSE, new ConstantFolding().rule(new And(EMPTY, NULL, FALSE)).canonical());
assertEquals(FALSE, new ConstantFolding().rule(new And(EMPTY, FALSE, NULL)).canonical());
assertEquals(Nullability.TRUE, new ConstantFolding().rule(new And(EMPTY, NULL, NULL)).canonical().nullable());
assertEquals(TRUE, new ConstantFolding().rule(new Or(EMPTY, NULL, TRUE)).canonical());
assertEquals(TRUE, new ConstantFolding().rule(new Or(EMPTY, TRUE, NULL)).canonical());
assertEquals(Nullability.TRUE, new ConstantFolding().rule(new Or(EMPTY, NULL, FALSE)).canonical().nullable());
assertEquals(Nullability.TRUE, new ConstantFolding().rule(new Or(EMPTY, FALSE, NULL)).canonical().nullable());
assertEquals(Nullability.TRUE, new ConstantFolding().rule(new Or(EMPTY, NULL, NULL)).canonical().nullable());
}
public void testConstantFoldingRange() {
assertEquals(true, new ConstantFolding().rule(rangeOf(FIVE, FIVE, true, L(10), false)).fold());
assertEquals(false, new ConstantFolding().rule(rangeOf(FIVE, FIVE, false, L(10), false)).fold());
}
public void testConstantNot() {
assertEquals(FALSE, new ConstantFolding().rule(new Not(EMPTY, TRUE)));
assertEquals(TRUE, new ConstantFolding().rule(new Not(EMPTY, FALSE)));
}
public void testConstantFoldingLikes() {
assertEquals(TRUE, new ConstantFolding().rule(new Like(EMPTY, of("test_emp"), new LikePattern("test%", (char) 0))).canonical());
assertEquals(TRUE, new ConstantFolding().rule(new WildcardLike(EMPTY, of("test_emp"), new WildcardPattern("test*"))).canonical());
assertEquals(TRUE, new ConstantFolding().rule(new RLike(EMPTY, of("test_emp"), new RLikePattern("test.emp"))).canonical());
}
public void testArithmeticFolding() {
assertEquals(10, foldOperator(new Add(EMPTY, L(7), THREE)));
assertEquals(4, foldOperator(new Sub(EMPTY, L(7), THREE)));
assertEquals(21, foldOperator(new Mul(EMPTY, L(7), THREE)));
assertEquals(2, foldOperator(new Div(EMPTY, L(7), THREE)));
assertEquals(1, foldOperator(new Mod(EMPTY, L(7), THREE)));
}
private static Object foldOperator(BinaryOperator<?, ?, ?, ?> b) {
return ((Literal) new ConstantFolding().rule(b)).value();
}
//
// Logical simplifications
//
public void testLiteralsOnTheRight() {
Alias a = new Alias(EMPTY, "a", L(10));
Expression result = new LiteralsOnTheRight().rule(equalsOf(FIVE, a));
assertTrue(result instanceof Equals);
Equals eq = (Equals) result;
assertEquals(a, eq.left());
assertEquals(FIVE, eq.right());
a = new Alias(EMPTY, "a", L(10));
result = new LiteralsOnTheRight().rule(nullEqualsOf(FIVE, a));
assertTrue(result instanceof NullEquals);
NullEquals nullEquals = (NullEquals) result;
assertEquals(a, nullEquals.left());
assertEquals(FIVE, nullEquals.right());
}
public void testBoolSimplifyOr() {
BooleanSimplification simplification = new BooleanSimplification();
assertEquals(TRUE, simplification.rule(new Or(EMPTY, TRUE, TRUE)));
assertEquals(TRUE, simplification.rule(new Or(EMPTY, TRUE, DUMMY_EXPRESSION)));
assertEquals(TRUE, simplification.rule(new Or(EMPTY, DUMMY_EXPRESSION, TRUE)));
assertEquals(FALSE, simplification.rule(new Or(EMPTY, FALSE, FALSE)));
assertEquals(DUMMY_EXPRESSION, simplification.rule(new Or(EMPTY, FALSE, DUMMY_EXPRESSION)));
assertEquals(DUMMY_EXPRESSION, simplification.rule(new Or(EMPTY, DUMMY_EXPRESSION, FALSE)));
}
public void testBoolSimplifyAnd() {
BooleanSimplification simplification = new BooleanSimplification();
assertEquals(TRUE, simplification.rule(new And(EMPTY, TRUE, TRUE)));
assertEquals(DUMMY_EXPRESSION, simplification.rule(new And(EMPTY, TRUE, DUMMY_EXPRESSION)));
assertEquals(DUMMY_EXPRESSION, simplification.rule(new And(EMPTY, DUMMY_EXPRESSION, TRUE)));
assertEquals(FALSE, simplification.rule(new And(EMPTY, FALSE, FALSE)));
assertEquals(FALSE, simplification.rule(new And(EMPTY, FALSE, DUMMY_EXPRESSION)));
assertEquals(FALSE, simplification.rule(new And(EMPTY, DUMMY_EXPRESSION, FALSE)));
}
// (a AND b) OR (a AND c) -> a AND (b OR c): the common factor is pulled out.
public void testBoolCommonFactorExtraction() {
    Expression a1 = new DummyBooleanExpression(EMPTY, 1);
    Expression a2 = new DummyBooleanExpression(EMPTY, 1); // equal to a1 — the common factor
    Expression b = new DummyBooleanExpression(EMPTY, 2);
    Expression c = new DummyBooleanExpression(EMPTY, 3);

    Or input = new Or(EMPTY, new And(EMPTY, a1, b), new And(EMPTY, a2, c));
    And expected = new And(EMPTY, a1, new Or(EMPTY, b, c));
    assertEquals(expected, new BooleanSimplification().rule(input));
}

// Comparisons whose two operands fold to the same value collapse to a boolean literal.
public void testBinaryComparisonSimplification() {
    BinaryComparisonSimplification rule = new BinaryComparisonSimplification();

    assertEquals(TRUE, rule.rule(equalsOf(FIVE, FIVE)));
    assertEquals(TRUE, rule.rule(nullEqualsOf(FIVE, FIVE)));
    assertEquals(TRUE, rule.rule(nullEqualsOf(NULL, NULL)));
    assertEquals(FALSE, rule.rule(notEqualsOf(FIVE, FIVE)));
    assertEquals(TRUE, rule.rule(greaterThanOrEqualOf(FIVE, FIVE)));
    assertEquals(TRUE, rule.rule(lessThanOrEqualOf(FIVE, FIVE)));
    assertEquals(FALSE, rule.rule(greaterThanOf(FIVE, FIVE)));
    assertEquals(FALSE, rule.rule(lessThanOf(FIVE, FIVE)));
}
// a <=> NULL (on either side) is rewritten to IS_NULL(a), keeping the original source text.
public void testNullEqualsWithNullLiteralBecomesIsNull() {
    LiteralsOnTheRight swapLiterals = new LiteralsOnTheRight();
    BinaryComparisonSimplification simplify = new BinaryComparisonSimplification();
    FieldAttribute field = getFieldAttribute();
    Source source = new Source(1, 10, "IS_NULL(a)");

    // NULL literal on the right
    Expression result = simplify.rule((BinaryComparison) swapLiterals.rule(new NullEquals(source, field, NULL, randomZone())));
    assertEquals(IsNull.class, result.getClass());
    assertEquals(source, ((IsNull) result).source());

    // NULL literal on the left
    result = simplify.rule((BinaryComparison) swapLiterals.rule(new NullEquals(source, NULL, field, randomZone())));
    assertEquals(IsNull.class, result.getClass());
    assertEquals(source, ((IsNull) result).source());
}

// expr == TRUE -> expr; expr == FALSE -> NOT(expr), for boolean-producing expressions.
public void testBoolEqualsSimplificationOnExpressions() {
    BooleanFunctionEqualsElimination rule = new BooleanFunctionEqualsElimination();
    Expression expr = new GreaterThan(EMPTY, getFieldAttribute(), L(0), null);

    assertEquals(expr, rule.rule(new Equals(EMPTY, expr, TRUE)));
    assertEquals(new Not(EMPTY, expr), rule.rule(new Equals(EMPTY, expr, FALSE)));
}
// Comparisons of plain fields (and NULL) against boolean literals are left untouched.
public void testBoolEqualsSimplificationOnFields() {
    BooleanFunctionEqualsElimination rule = new BooleanFunctionEqualsElimination();
    FieldAttribute field = getFieldAttribute();

    List<? extends BinaryComparison> comparisons = asList(
        new Equals(EMPTY, field, TRUE),
        new Equals(EMPTY, field, FALSE),
        notEqualsOf(field, TRUE),
        notEqualsOf(field, FALSE),
        new Equals(EMPTY, NULL, TRUE),
        new Equals(EMPTY, NULL, FALSE),
        notEqualsOf(NULL, TRUE),
        notEqualsOf(NULL, FALSE)
    );

    for (BinaryComparison comparison : comparisons) {
        assertEquals(comparison, rule.rule(comparison));
    }
}
//
// Range optimization
//

// 6 < a <= 5 -> FALSE
public void testFoldExcludingRangeToFalse() {
    Range range = rangeOf(getFieldAttribute(), SIX, false, FIVE, true);
    assertTrue(range.foldable());
    assertEquals(Boolean.FALSE, range.fold());
}

// 6 < a <= 5.5 -> FALSE (bounds of different numeric types are still comparable)
public void testFoldExcludingRangeWithDifferentTypesToFalse() {
    Range range = rangeOf(getFieldAttribute(), SIX, false, L(5.5d), true);
    assertTrue(range.foldable());
    assertEquals(Boolean.FALSE, range.fold());
}
// Conjunction

// Bounds of incompatible types cannot be combined.
public void testCombineBinaryComparisonsNotComparable() {
    FieldAttribute field = getFieldAttribute();
    And and = new And(EMPTY, lessThanOrEqualOf(field, SIX), lessThanOf(field, FALSE));
    Expression result = new CombineBinaryComparisons().rule(and);
    assertEquals(result, and);
}

// a <= 6 AND a < 5 -> a < 5
public void testCombineBinaryComparisonsUpper() {
    FieldAttribute field = getFieldAttribute();
    And and = new And(EMPTY, lessThanOrEqualOf(field, SIX), lessThanOf(field, FIVE));
    Expression result = new CombineBinaryComparisons().rule(and);
    assertEquals(LessThan.class, result.getClass());
    assertEquals(FIVE, ((LessThan) result).right());
}

// 6 <= a AND 5 < a -> 6 <= a
public void testCombineBinaryComparisonsLower() {
    FieldAttribute field = getFieldAttribute();
    And and = new And(EMPTY, greaterThanOrEqualOf(field, SIX), greaterThanOf(field, FIVE));
    Expression result = new CombineBinaryComparisons().rule(and);
    assertEquals(GreaterThanOrEqual.class, result.getClass());
    assertEquals(SIX, ((GreaterThanOrEqual) result).right());
}

// 5 <= a AND 5 < a -> 5 < a (the strict bound wins on equal limits)
public void testCombineBinaryComparisonsInclude() {
    FieldAttribute field = getFieldAttribute();
    And and = new And(EMPTY, greaterThanOrEqualOf(field, FIVE), greaterThanOf(field, FIVE));
    Expression result = new CombineBinaryComparisons().rule(and);
    assertEquals(GreaterThan.class, result.getClass());
    assertEquals(FIVE, ((GreaterThan) result).right());
}
// 2 < a AND (2 <= a < 3) -> 2 < a < 3
public void testCombineBinaryComparisonsAndRangeLower() {
    FieldAttribute field = getFieldAttribute();
    And and = new And(EMPTY, greaterThanOf(field, TWO), rangeOf(field, TWO, true, THREE, false));
    Expression result = new CombineBinaryComparisons().rule(and);
    assertEquals(Range.class, result.getClass());
    Range combined = (Range) result;
    assertEquals(TWO, combined.lower());
    assertFalse(combined.includeLower());
    assertEquals(THREE, combined.upper());
    assertFalse(combined.includeUpper());
}

// a < 4 AND (1 < a < 3) -> 1 < a < 3
public void testCombineBinaryComparisonsAndRangeUpper() {
    FieldAttribute field = getFieldAttribute();
    And and = new And(EMPTY, rangeOf(field, ONE, false, THREE, false), lessThanOf(field, FOUR));
    Expression result = new CombineBinaryComparisons().rule(and);
    assertEquals(Range.class, result.getClass());
    Range combined = (Range) result;
    assertEquals(ONE, combined.lower());
    assertFalse(combined.includeLower());
    assertEquals(THREE, combined.upper());
    assertFalse(combined.includeUpper());
}

// a <= 2 AND (1 < a < 3) -> 1 < a <= 2
public void testCombineBinaryComparisonsAndRangeUpperEqual() {
    FieldAttribute field = getFieldAttribute();
    And and = new And(EMPTY, lessThanOrEqualOf(field, TWO), rangeOf(field, ONE, false, THREE, false));
    Expression result = new CombineBinaryComparisons().rule(and);
    assertEquals(Range.class, result.getClass());
    Range combined = (Range) result;
    assertEquals(ONE, combined.lower());
    assertFalse(combined.includeLower());
    assertEquals(TWO, combined.upper());
    assertTrue(combined.includeUpper());
}
// 3 <= a AND 4 < a AND a <= 7 AND a < 6 -> 4 < a < 6
public void testCombineMultipleBinaryComparisons() {
    FieldAttribute field = getFieldAttribute();
    // nested as: gte AND (gt AND (lt AND lte))
    And and = new And(
        EMPTY,
        greaterThanOrEqualOf(field, THREE),
        new And(EMPTY, greaterThanOf(field, FOUR), new And(EMPTY, lessThanOf(field, SIX), lessThanOrEqualOf(field, L(7))))
    );
    Expression result = new CombineBinaryComparisons().rule(and);
    assertEquals(Range.class, result.getClass());
    Range combined = (Range) result;
    assertEquals(FOUR, combined.lower());
    assertFalse(combined.includeLower());
    assertEquals(SIX, combined.upper());
    assertFalse(combined.includeUpper());
}
// 3 <= a AND TRUE AND 4 < a AND a != 5 AND a <= 7 -> 4 < a <= 7 AND a != 5 AND TRUE
public void testCombineMixedMultipleBinaryComparisons() {
    FieldAttribute fa = getFieldAttribute();
    GreaterThanOrEqual gte = greaterThanOrEqualOf(fa, THREE);
    GreaterThan gt = greaterThanOf(fa, FOUR);
    LessThanOrEqual lte = lessThanOrEqualOf(fa, L(7));
    Expression ne = new Not(EMPTY, equalsOf(fa, FIVE));
    CombineBinaryComparisons rule = new CombineBinaryComparisons();
    // TRUE AND a != 5 AND 4 < a <= 7
    Expression exp = rule.rule(new And(EMPTY, gte, new And(EMPTY, TRUE, new And(EMPTY, gt, new And(EMPTY, ne, lte)))));
    // only the four bound comparisons collapse into a single Range; the other conjuncts survive
    assertEquals(And.class, exp.getClass());
    And and = ((And) exp);
    // NOTE(review): only the right-hand Range is asserted here; the remaining subtree (TRUE, a != 5) is not verified
    assertEquals(Range.class, and.right().getClass());
    Range r = (Range) and.right();
    assertEquals(FOUR, r.lower());
    assertFalse(r.includeLower());
    assertEquals(L(7), r.upper());
    assertTrue(r.includeUpper());
}
// 1 <= a AND a < 5 -> 1 <= a < 5
public void testCombineComparisonsIntoRange() {
    FieldAttribute field = getFieldAttribute();
    And and = new And(EMPTY, greaterThanOrEqualOf(field, ONE), lessThanOf(field, FIVE));
    Expression result = new CombineBinaryComparisons().rule(and);
    assertEquals(Range.class, result.getClass());
    Range range = (Range) result;
    assertEquals(ONE, range.lower());
    assertTrue(range.includeLower());
    assertEquals(FIVE, range.upper());
    assertFalse(range.includeUpper());
}
// 1 < a AND a < 3 AND 2 < b AND b < 4 AND c < 4 -> (1 < a < 3) AND (2 < b < 4) AND c < 4
public void testCombineMultipleComparisonsIntoRange() {
    FieldAttribute fa = TestUtils.getFieldAttribute("a");
    FieldAttribute fb = TestUtils.getFieldAttribute("b");
    FieldAttribute fc = TestUtils.getFieldAttribute("c");
    // a single zone so per-field comparisons are mergeable into Ranges
    ZoneId zoneId = randomZone();
    GreaterThan agt1 = new GreaterThan(EMPTY, fa, ONE, zoneId);
    LessThan alt3 = new LessThan(EMPTY, fa, THREE, zoneId);
    GreaterThan bgt2 = new GreaterThan(EMPTY, fb, TWO, zoneId);
    LessThan blt4 = new LessThan(EMPTY, fb, FOUR, zoneId);
    LessThan clt4 = new LessThan(EMPTY, fc, FOUR, zoneId);
    Expression inputAnd = Predicates.combineAnd(asList(agt1, alt3, bgt2, blt4, clt4));
    CombineBinaryComparisons rule = new CombineBinaryComparisons();
    Expression outputAnd = rule.rule((And) inputAnd);
    Range agt1lt3 = new Range(EMPTY, fa, ONE, false, THREE, false, zoneId);
    Range bgt2lt4 = new Range(EMPTY, fb, TWO, false, FOUR, false, zoneId);
    // The actual outcome is (c < 4) AND (1 < a < 3) AND (2 < b < 4), due to the way the Expression types are combined in the Optimizer
    Expression expectedAnd = Predicates.combineAnd(asList(clt4, agt1lt3, bgt2lt4));
    // semanticEquals (rather than equals) tolerates that conjunct reordering
    assertTrue(outputAnd.semanticEquals(expectedAnd));
}
// (2 < a < 3) AND (1 < a < 4) -> (2 < a < 3)
public void testCombineBinaryComparisonsConjunctionOfIncludedRange() {
    FieldAttribute field = getFieldAttribute();
    Range narrow = rangeOf(field, TWO, false, THREE, false);
    Range wide = rangeOf(field, ONE, false, FOUR, false);
    Expression result = new CombineBinaryComparisons().rule(new And(EMPTY, narrow, wide));
    assertEquals(narrow, result);
}

// (2 < a < 3) AND a < 2 -> 2 < a < 2 (an empty range, which folds to FALSE)
public void testCombineBinaryComparisonsConjunctionOfNonOverlappingBoundaries() {
    FieldAttribute field = getFieldAttribute();
    Range upper = rangeOf(field, TWO, false, THREE, false);
    Range lower = rangeOf(field, ONE, false, TWO, false);
    Expression result = new CombineBinaryComparisons().rule(new And(EMPTY, upper, lower));
    assertEquals(Range.class, result.getClass());
    Range combined = (Range) result;
    assertEquals(TWO, combined.lower());
    assertFalse(combined.includeLower());
    assertEquals(TWO, combined.upper());
    assertFalse(combined.includeUpper());
    assertEquals(Boolean.FALSE, combined.fold());
}

// (2 < a < 3) AND (2 < a <= 3) -> 2 < a < 3
public void testCombineBinaryComparisonsConjunctionOfUpperEqualsOverlappingBoundaries() {
    FieldAttribute field = getFieldAttribute();
    Range exclusive = rangeOf(field, TWO, false, THREE, false);
    Range inclusive = rangeOf(field, TWO, false, THREE, true);
    Expression result = new CombineBinaryComparisons().rule(new And(EMPTY, exclusive, inclusive));
    assertEquals(exclusive, result);
}

// (2 < a < 3) AND (1 < a < 3) -> 2 < a < 3
public void testCombineBinaryComparisonsConjunctionOverlappingUpperBoundary() {
    FieldAttribute field = getFieldAttribute();
    Range narrow = rangeOf(field, TWO, false, THREE, false);
    Range wide = rangeOf(field, ONE, false, THREE, false);
    Expression result = new CombineBinaryComparisons().rule(new And(EMPTY, wide, narrow));
    assertEquals(narrow, result);
}

// (2 < a <= 3) AND (1 < a < 3) -> 2 < a < 3
public void testCombineBinaryComparisonsConjunctionWithDifferentUpperLimitInclusion() {
    FieldAttribute field = getFieldAttribute();
    Range wide = rangeOf(field, ONE, false, THREE, false);
    Range inclusive = rangeOf(field, TWO, false, THREE, true);
    Expression result = new CombineBinaryComparisons().rule(new And(EMPTY, wide, inclusive));
    assertEquals(Range.class, result.getClass());
    Range combined = (Range) result;
    assertEquals(TWO, combined.lower());
    assertFalse(combined.includeLower());
    assertEquals(THREE, combined.upper());
    assertFalse(combined.includeUpper());
}

// (0 < a <= 1) AND (0 <= a < 2) -> 0 < a <= 1
public void testRangesOverlappingConjunctionNoLowerBoundary() {
    FieldAttribute field = getFieldAttribute();
    Range narrow = rangeOf(field, L(0), false, ONE, true);
    Range wide = rangeOf(field, L(0), true, TWO, false);
    Expression result = new CombineBinaryComparisons().rule(new And(EMPTY, narrow, wide));
    assertEquals(narrow, result);
}
// a != 2 AND 3 < a < 5 -> 3 < a < 5 (the excluded value lies outside the range)
public void testCombineBinaryComparisonsConjunction_Neq2AndRangeGt3Lt5() {
    FieldAttribute field = getFieldAttribute();
    And and = new And(EMPTY, rangeOf(field, THREE, false, FIVE, false), notEqualsOf(field, TWO));
    Expression result = new CombineBinaryComparisons().rule(and);
    assertEquals(Range.class, result.getClass());
    Range range = (Range) result;
    assertEquals(THREE, range.lower());
    assertFalse(range.includeLower());
    assertEquals(FIVE, range.upper());
    assertFalse(range.includeUpper());
}

// a != 2 AND 0 < a < 1 -> 0 < a < 1
public void testCombineBinaryComparisonsConjunction_Neq2AndRangeGt0Lt1() {
    FieldAttribute field = getFieldAttribute();
    And and = new And(EMPTY, notEqualsOf(field, TWO), rangeOf(field, L(0), false, ONE, false));
    Expression result = new CombineBinaryComparisons().rule(and);
    assertEquals(Range.class, result.getClass());
    Range range = (Range) result;
    assertEquals(L(0), range.lower());
    assertFalse(range.includeLower());
    assertEquals(ONE, range.upper());
    assertFalse(range.includeUpper());
}

// a != 2 AND 2 <= a < 3 -> 2 < a < 3 (the lower bound turns exclusive)
public void testCombineBinaryComparisonsConjunction_Neq2AndRangeGte2Lt3() {
    FieldAttribute field = getFieldAttribute();
    And and = new And(EMPTY, notEqualsOf(field, TWO), rangeOf(field, TWO, true, THREE, false));
    Expression result = new CombineBinaryComparisons().rule(and);
    assertEquals(Range.class, result.getClass());
    Range range = (Range) result;
    assertEquals(TWO, range.lower());
    assertFalse(range.includeLower());
    assertEquals(THREE, range.upper());
    assertFalse(range.includeUpper());
}

// a != 3 AND 2 < a <= 3 -> 2 < a < 3 (the upper bound turns exclusive)
public void testCombineBinaryComparisonsConjunction_Neq3AndRangeGt2Lte3() {
    FieldAttribute field = getFieldAttribute();
    And and = new And(EMPTY, notEqualsOf(field, THREE), rangeOf(field, TWO, false, THREE, true));
    Expression result = new CombineBinaryComparisons().rule(and);
    assertEquals(Range.class, result.getClass());
    Range range = (Range) result;
    assertEquals(TWO, range.lower());
    assertFalse(range.includeLower());
    assertEquals(THREE, range.upper());
    assertFalse(range.includeUpper());
}

// a != 2 AND 1 < a < 3 -> no optimization (2 sits strictly inside the range)
public void testCombineBinaryComparisonsConjunction_Neq2AndRangeGt1Lt3() {
    FieldAttribute field = getFieldAttribute();
    And and = new And(EMPTY, notEqualsOf(field, TWO), rangeOf(field, ONE, false, THREE, false));
    Expression result = new CombineBinaryComparisons().rule(and);
    assertEquals(And.class, result.getClass()); // can't optimize
}
// a != 2 AND a > 3 -> a > 3
public void testCombineBinaryComparisonsConjunction_Neq2AndGt3() {
    FieldAttribute field = getFieldAttribute();
    GreaterThan gt3 = greaterThanOf(field, THREE);
    Expression result = new CombineBinaryComparisons().rule(new And(EMPTY, notEqualsOf(field, TWO), gt3));
    assertEquals(gt3, result);
}

// a != 2 AND a >= 2 -> a > 2
public void testCombineBinaryComparisonsConjunction_Neq2AndGte2() {
    FieldAttribute field = getFieldAttribute();
    And and = new And(EMPTY, notEqualsOf(field, TWO), greaterThanOrEqualOf(field, TWO));
    Expression result = new CombineBinaryComparisons().rule(and);
    assertEquals(GreaterThan.class, result.getClass());
    assertEquals(TWO, ((GreaterThan) result).right());
}

// a != 2 AND a >= 1 -> no optimization (2 is still inside the accepted interval)
public void testCombineBinaryComparisonsConjunction_Neq2AndGte1() {
    FieldAttribute field = getFieldAttribute();
    And and = new And(EMPTY, notEqualsOf(field, TWO), greaterThanOrEqualOf(field, ONE));
    Expression result = new CombineBinaryComparisons().rule(and);
    assertEquals(And.class, result.getClass()); // can't optimize
}

// a != 2 AND a <= 3 -> no optimization
public void testCombineBinaryComparisonsConjunction_Neq2AndLte3() {
    FieldAttribute field = getFieldAttribute();
    And and = new And(EMPTY, notEqualsOf(field, TWO), lessThanOrEqualOf(field, THREE));
    Expression result = new CombineBinaryComparisons().rule(and);
    assertEquals(and, result); // can't optimize
}

// a != 2 AND a <= 2 -> a < 2
public void testCombineBinaryComparisonsConjunction_Neq2AndLte2() {
    FieldAttribute field = getFieldAttribute();
    And and = new And(EMPTY, notEqualsOf(field, TWO), lessThanOrEqualOf(field, TWO));
    Expression result = new CombineBinaryComparisons().rule(and);
    assertEquals(LessThan.class, result.getClass());
    assertEquals(TWO, ((LessThan) result).right());
}

// a != 2 AND a <= 1 -> a <= 1
public void testCombineBinaryComparisonsConjunction_Neq2AndLte1() {
    FieldAttribute field = getFieldAttribute();
    LessThanOrEqual lte1 = lessThanOrEqualOf(field, ONE);
    Expression result = new CombineBinaryComparisons().rule(new And(EMPTY, notEqualsOf(field, TWO), lte1));
    assertEquals(lte1, result);
}
// Disjunction

// Bounds of incompatible types cannot be combined.
public void testCombineBinaryComparisonsDisjunctionNotComparable() {
    FieldAttribute field = getFieldAttribute();
    Or or = new Or(EMPTY, greaterThanOf(field, ONE), greaterThanOf(field, FALSE));
    Expression result = new CombineBinaryComparisons().rule(or);
    assertEquals(result, or);
}

// 2 < a OR 1 < a OR 3 < a -> 1 < a
public void testCombineBinaryComparisonsDisjunctionLowerBound() {
    FieldAttribute field = getFieldAttribute();
    Or or = new Or(EMPTY, greaterThanOf(field, ONE), new Or(EMPTY, greaterThanOf(field, TWO), greaterThanOf(field, THREE)));
    Expression result = new CombineBinaryComparisons().rule(or);
    assertEquals(GreaterThan.class, result.getClass());
    assertEquals(ONE, ((GreaterThan) result).right());
}

// 2 < a OR 1 < a OR 3 <= a -> 1 < a
public void testCombineBinaryComparisonsDisjunctionIncludeLowerBounds() {
    FieldAttribute field = getFieldAttribute();
    Or or = new Or(EMPTY, new Or(EMPTY, greaterThanOf(field, ONE), greaterThanOf(field, TWO)), greaterThanOrEqualOf(field, THREE));
    Expression result = new CombineBinaryComparisons().rule(or);
    assertEquals(GreaterThan.class, result.getClass());
    assertEquals(ONE, ((GreaterThan) result).right());
}

// a < 1 OR a < 2 OR a < 3 -> a < 3
public void testCombineBinaryComparisonsDisjunctionUpperBound() {
    FieldAttribute field = getFieldAttribute();
    Or or = new Or(EMPTY, new Or(EMPTY, lessThanOf(field, ONE), lessThanOf(field, TWO)), lessThanOf(field, THREE));
    Expression result = new CombineBinaryComparisons().rule(or);
    assertEquals(LessThan.class, result.getClass());
    assertEquals(THREE, ((LessThan) result).right());
}

// a < 2 OR a <= 2 OR a < 1 -> a <= 2
public void testCombineBinaryComparisonsDisjunctionIncludeUpperBounds() {
    FieldAttribute field = getFieldAttribute();
    Or or = new Or(EMPTY, lessThanOf(field, TWO), new Or(EMPTY, lessThanOrEqualOf(field, TWO), lessThanOf(field, ONE)));
    Expression result = new CombineBinaryComparisons().rule(or);
    assertEquals(LessThanOrEqual.class, result.getClass());
    assertEquals(TWO, ((LessThanOrEqual) result).right());
}
// a < 2 OR 3 < a OR a < 1 OR 4 < a -> a < 2 OR 3 < a
public void testCombineBinaryComparisonsDisjunctionOfLowerAndUpperBounds() {
    FieldAttribute field = getFieldAttribute();
    Or or = new Or(
        EMPTY,
        new Or(EMPTY, lessThanOf(field, TWO), greaterThanOf(field, THREE)),
        new Or(EMPTY, lessThanOf(field, ONE), greaterThanOf(field, FOUR))
    );
    Expression result = new CombineBinaryComparisons().rule(or);
    assertEquals(Or.class, result.getClass());
    Or combined = (Or) result;
    // widest upper bound on the left, widest lower bound on the right
    assertEquals(LessThan.class, combined.left().getClass());
    assertEquals(TWO, ((LessThan) combined.left()).right());
    assertEquals(GreaterThan.class, combined.right().getClass());
    assertEquals(THREE, ((GreaterThan) combined.right()).right());
}
// (2 < a < 3) OR (1 < a < FALSE) -> no change: the boolean upper bound is not comparable
public void testCombineBinaryComparisonsDisjunctionOfIncludedRangeNotComparable() {
    FieldAttribute fa = getFieldAttribute();
    Range r1 = rangeOf(fa, TWO, false, THREE, false);
    Range r2 = rangeOf(fa, ONE, false, FALSE, false);
    Or or = new Or(EMPTY, r1, r2);
    CombineBinaryComparisons rule = new CombineBinaryComparisons();
    Expression exp = rule.rule(or);
    assertEquals(or, exp);
}
// (2 < a < 3) OR (1 < a < 4) -> (1 < a < 4)
public void testCombineBinaryComparisonsDisjunctionOfIncludedRange() {
    FieldAttribute field = getFieldAttribute();
    Or or = new Or(EMPTY, rangeOf(field, TWO, false, THREE, false), rangeOf(field, ONE, false, FOUR, false));
    Expression result = new CombineBinaryComparisons().rule(or);
    assertEquals(Range.class, result.getClass());
    Range range = (Range) result;
    assertEquals(ONE, range.lower());
    assertFalse(range.includeLower());
    assertEquals(FOUR, range.upper());
    assertFalse(range.includeUpper());
}

// (2 < a < 3) OR (1 < a < 2) -> no change (disjoint ranges cannot be merged)
public void testCombineBinaryComparisonsDisjunctionOfNonOverlappingBoundaries() {
    FieldAttribute field = getFieldAttribute();
    Or or = new Or(EMPTY, rangeOf(field, TWO, false, THREE, false), rangeOf(field, ONE, false, TWO, false));
    Expression result = new CombineBinaryComparisons().rule(or);
    assertEquals(or, result);
}

// (2 < a < 3) OR (2 < a <= 3) -> 2 < a <= 3
public void testCombineBinaryComparisonsDisjunctionOfUpperEqualsOverlappingBoundaries() {
    FieldAttribute field = getFieldAttribute();
    Range inclusive = rangeOf(field, TWO, false, THREE, true);
    Or or = new Or(EMPTY, rangeOf(field, TWO, false, THREE, false), inclusive);
    Expression result = new CombineBinaryComparisons().rule(or);
    assertEquals(inclusive, result);
}

// (2 < a < 3) OR (1 < a < 3) -> 1 < a < 3
public void testCombineBinaryComparisonsOverlappingUpperBoundary() {
    FieldAttribute field = getFieldAttribute();
    Range wide = rangeOf(field, ONE, false, THREE, false);
    Or or = new Or(EMPTY, wide, rangeOf(field, TWO, false, THREE, false));
    Expression result = new CombineBinaryComparisons().rule(or);
    assertEquals(wide, result);
}

// (2 < a <= 3) OR (1 < a < 3) -> no change (the <= prevents the ranges from being combined)
public void testCombineBinaryComparisonsWithDifferentUpperLimitInclusion() {
    FieldAttribute field = getFieldAttribute();
    Or or = new Or(EMPTY, rangeOf(field, ONE, false, THREE, false), rangeOf(field, TWO, false, THREE, true));
    Expression result = new CombineBinaryComparisons().rule(or);
    assertEquals(or, result);
}
// (a = 1 AND b = 3 AND c = 4) OR (a = 2 AND b = 3 AND c = 4) -> (b = 3 AND c = 4) AND (a = 1 OR a = 2)
// NOTE: the method name carries a typo ("Substraction"); kept as-is for test-history continuity.
public void testBooleanSimplificationCommonExpressionSubstraction() {
    FieldAttribute fa = TestUtils.getFieldAttribute("a");
    FieldAttribute fb = TestUtils.getFieldAttribute("b");
    FieldAttribute fc = TestUtils.getFieldAttribute("c");

    Expression aEq1 = equalsOf(fa, ONE);
    Expression aEq2 = equalsOf(fa, TWO);
    And common = new And(EMPTY, equalsOf(fb, THREE), equalsOf(fc, FOUR));

    Or or = new Or(EMPTY, new And(EMPTY, aEq1, common), new And(EMPTY, aEq2, common));
    Expression result = new BooleanSimplification().rule(or);
    assertEquals(new And(EMPTY, common, new Or(EMPTY, aEq1, aEq2)), result);
}
// (0 < a <= 1) OR (0 < a < 2) -> 0 < a < 2
public void testRangesOverlappingNoLowerBoundary() {
    FieldAttribute field = getFieldAttribute();
    Range wide = rangeOf(field, L(0), false, TWO, false);
    Or or = new Or(EMPTY, rangeOf(field, L(0), false, ONE, true), wide);
    Expression result = new CombineBinaryComparisons().rule(or);
    assertEquals(wide, result);
}
// Mixed-field conjunctions: a bound on one field must never absorb a not-equals
// on a *different* field, whatever the types involved.
public void testBinaryComparisonAndOutOfRangeNotEqualsDifferentFields() {
    FieldAttribute doubleOne = fieldAttribute("double", DOUBLE);
    FieldAttribute doubleTwo = fieldAttribute("double2", DOUBLE);
    FieldAttribute intOne = fieldAttribute("int", INTEGER);
    // NOTE(review): "datetime" is declared with an INTEGER type yet compared against a
    // timestamp string below — presumably DATETIME was intended; confirm.
    FieldAttribute datetimeOne = fieldAttribute("datetime", INTEGER);
    FieldAttribute keywordOne = fieldAttribute("keyword", KEYWORD);
    FieldAttribute keywordTwo = fieldAttribute("keyword2", KEYWORD);
    List<And> testCases = asList(
        // double > 10 AND integer != -10
        new And(EMPTY, greaterThanOf(doubleOne, L(10)), notEqualsOf(intOne, L(-10))),
        // keyword > '5' AND keyword2 != '48'
        new And(EMPTY, greaterThanOf(keywordOne, L("5")), notEqualsOf(keywordTwo, L("48"))),
        // keyword != '2021' AND datetime <= '2020-12-04T17:48:22.954240Z'
        new And(EMPTY, notEqualsOf(keywordOne, L("2021")), lessThanOrEqualOf(datetimeOne, L("2020-12-04T17:48:22.954240Z"))),
        // double > 10.1 AND double2 != -10.1
        new And(EMPTY, greaterThanOf(doubleOne, L(10.1d)), notEqualsOf(doubleTwo, L(-10.1d)))
    );
    for (And and : testCases) {
        CombineBinaryComparisons rule = new CombineBinaryComparisons();
        // each conjunction must pass through unchanged
        Expression exp = rule.rule(and);
        assertEquals("Rule should not have transformed [" + and.nodeString() + "]", and, exp);
    }
}
// Equals & NullEquals

// 1 <= a < 10 AND a == 1 -> a == 1
public void testEliminateRangeByEqualsInInterval() {
    FieldAttribute field = getFieldAttribute();
    Equals eq = equalsOf(field, ONE);
    Expression result = new PropagateEquals().rule(new And(EMPTY, eq, rangeOf(field, ONE, true, L(10), false)));
    assertEquals(eq, result);
}

// 1 <= a < 10 AND a <=> 1 -> a <=> 1
public void testEliminateRangeByNullEqualsInInterval() {
    FieldAttribute field = getFieldAttribute();
    NullEquals eq = nullEqualsOf(field, ONE);
    Expression result = new PropagateEquals().rule(new And(EMPTY, eq, rangeOf(field, ONE, true, L(10), false)));
    assertEquals(eq, result);
}
// The following tests should work only to simplify filters and
// not if the expressions are part of a projection
// See: https://github.com/elastic/elasticsearch/issues/35859

// a == 1 AND a == 2 -> FALSE
public void testDualEqualsConjunction() {
    FieldAttribute field = getFieldAttribute();
    Expression result = new PropagateEquals().rule(new And(EMPTY, equalsOf(field, ONE), equalsOf(field, TWO)));
    assertEquals(FALSE, result);
}

// a <=> 1 AND a <=> 2 -> FALSE
public void testDualNullEqualsConjunction() {
    FieldAttribute field = getFieldAttribute();
    Expression result = new PropagateEquals().rule(new And(EMPTY, nullEqualsOf(field, ONE), nullEqualsOf(field, TWO)));
    assertEquals(FALSE, result);
}

// 1 < a < 10 AND a == 10 -> FALSE
public void testEliminateRangeByEqualsOutsideInterval() {
    FieldAttribute field = getFieldAttribute();
    And and = new And(EMPTY, equalsOf(field, L(10)), rangeOf(field, ONE, false, L(10), false));
    assertEquals(FALSE, new PropagateEquals().rule(and));
}

// 1 < a < 10 AND a <=> 10 -> FALSE
public void testEliminateRangeByNullEqualsOutsideInterval() {
    FieldAttribute field = getFieldAttribute();
    And and = new And(EMPTY, nullEqualsOf(field, L(10)), rangeOf(field, ONE, false, L(10), false));
    assertEquals(FALSE, new PropagateEquals().rule(and));
}
// a != 3 AND a = 3 -> FALSE
public void testPropagateEquals_VarNeq3AndVarEq3() {
    FieldAttribute field = getFieldAttribute();
    Expression result = new PropagateEquals().rule(new And(EMPTY, notEqualsOf(field, THREE), equalsOf(field, THREE)));
    assertEquals(FALSE, result);
}

// a != 4 AND a = 3 -> a = 3
public void testPropagateEquals_VarNeq4AndVarEq3() {
    FieldAttribute field = getFieldAttribute();
    Equals eq3 = equalsOf(field, THREE);
    Expression result = new PropagateEquals().rule(new And(EMPTY, notEqualsOf(field, FOUR), eq3));
    assertEquals(Equals.class, result.getClass());
    assertEquals(eq3, result);
}

// a = 2 AND a < 2 -> FALSE
public void testPropagateEquals_VarEq2AndVarLt2() {
    FieldAttribute field = getFieldAttribute();
    Expression result = new PropagateEquals().rule(new And(EMPTY, equalsOf(field, TWO), lessThanOf(field, TWO)));
    assertEquals(FALSE, result);
}

// a = 2 AND a <= 2 -> a = 2
public void testPropagateEquals_VarEq2AndVarLte2() {
    FieldAttribute field = getFieldAttribute();
    Equals eq = equalsOf(field, TWO);
    Expression result = new PropagateEquals().rule(new And(EMPTY, eq, lessThanOrEqualOf(field, TWO)));
    assertEquals(eq, result);
}

// a = 2 AND a <= 1 -> FALSE
public void testPropagateEquals_VarEq2AndVarLte1() {
    FieldAttribute field = getFieldAttribute();
    Expression result = new PropagateEquals().rule(new And(EMPTY, equalsOf(field, TWO), lessThanOrEqualOf(field, ONE)));
    assertEquals(FALSE, result);
}

// a = 2 AND a > 2 -> FALSE
public void testPropagateEquals_VarEq2AndVarGt2() {
    FieldAttribute field = getFieldAttribute();
    Expression result = new PropagateEquals().rule(new And(EMPTY, equalsOf(field, TWO), greaterThanOf(field, TWO)));
    assertEquals(FALSE, result);
}

// a = 2 AND a >= 2 -> a = 2
public void testPropagateEquals_VarEq2AndVarGte2() {
    FieldAttribute field = getFieldAttribute();
    Equals eq = equalsOf(field, TWO);
    Expression result = new PropagateEquals().rule(new And(EMPTY, eq, greaterThanOrEqualOf(field, TWO)));
    assertEquals(eq, result);
}
// a = 2 AND a > 3 -> FALSE
// NOTE(review): the method name says "Lt3" but the predicate under test is GreaterThan(3);
// the name looks like a copy-paste slip — the assertion itself matches the comment above.
public void testPropagateEquals_VarEq2AndVarLt3() {
    FieldAttribute fa = getFieldAttribute();
    Equals eq = equalsOf(fa, TWO);
    GreaterThan gt = greaterThanOf(fa, THREE);
    PropagateEquals rule = new PropagateEquals();
    Expression exp = rule.rule(new And(EMPTY, eq, gt));
    assertEquals(FALSE, exp);
}
// a = 2 AND a < 3 AND a > 1 AND a != 4 -> a = 2
public void testPropagateEquals_VarEq2AndVarLt3AndVarGt1AndVarNeq4() {
    FieldAttribute field = getFieldAttribute();
    Equals eq = equalsOf(field, TWO);
    Expression conjunction = Predicates.combineAnd(
        asList(eq, lessThanOf(field, THREE), greaterThanOf(field, ONE), notEqualsOf(field, FOUR))
    );
    assertEquals(eq, new PropagateEquals().rule((And) conjunction));
}

// a = 2 AND 1 < a < 3 AND a > 0 AND a != 4 -> a = 2
public void testPropagateEquals_VarEq2AndVarRangeGt1Lt3AndVarGt0AndVarNeq4() {
    FieldAttribute field = getFieldAttribute();
    Equals eq = equalsOf(field, TWO);
    Expression conjunction = Predicates.combineAnd(
        asList(eq, rangeOf(field, ONE, false, THREE, false), greaterThanOf(field, L(0)), notEqualsOf(field, FOUR))
    );
    assertEquals(eq, new PropagateEquals().rule((And) conjunction));
}
// a = 2 OR a > 1 -> a > 1
public void testPropagateEquals_VarEq2OrVarGt1() {
FieldAttribute fa = getFieldAttribute();
Equals eq = equalsOf(fa, TWO);
GreaterThan gt = greaterThanOf(fa, ONE);
PropagateEquals rule = new PropagateEquals();
Expression exp = rule.rule(new Or(EMPTY, eq, gt));
assertEquals(gt, exp);
}
// a = 2 OR a > 2 -> a >= 2
public void testPropagateEquals_VarEq2OrVarGte2() {
FieldAttribute fa = getFieldAttribute();
Equals eq = equalsOf(fa, TWO);
GreaterThan gt = greaterThanOf(fa, TWO);
PropagateEquals rule = new PropagateEquals();
Expression exp = rule.rule(new Or(EMPTY, eq, gt));
assertEquals(GreaterThanOrEqual.class, exp.getClass());
GreaterThanOrEqual gte = (GreaterThanOrEqual) exp;
assertEquals(TWO, gte.right());
}
// a = 2 OR a < 3 -> a < 3
public void testPropagateEquals_VarEq2OrVarLt3() {
FieldAttribute fa = getFieldAttribute();
Equals eq = equalsOf(fa, TWO);
LessThan lt = lessThanOf(fa, THREE);
PropagateEquals rule = new PropagateEquals();
Expression exp = rule.rule(new Or(EMPTY, eq, lt));
assertEquals(lt, exp);
}
// a = 3 OR a < 3 -> a <= 3
public void testPropagateEquals_VarEq3OrVarLt3() {
FieldAttribute fa = getFieldAttribute();
Equals eq = equalsOf(fa, THREE);
LessThan lt = lessThanOf(fa, THREE);
PropagateEquals rule = new PropagateEquals();
Expression exp = rule.rule(new Or(EMPTY, eq, lt));
assertEquals(LessThanOrEqual.class, exp.getClass());
LessThanOrEqual lte = (LessThanOrEqual) exp;
assertEquals(THREE, lte.right());
}
// a = 2 OR 1 < a < 3 -> 1 < a < 3
public void testPropagateEquals_VarEq2OrVarRangeGt1Lt3() {
FieldAttribute fa = getFieldAttribute();
Equals eq = equalsOf(fa, TWO);
Range range = rangeOf(fa, ONE, false, THREE, false);
PropagateEquals rule = new PropagateEquals();
Expression exp = rule.rule(new Or(EMPTY, eq, range));
assertEquals(range, exp);
}
// a = 2 OR 2 < a < 3 -> 2 <= a < 3
public void testPropagateEquals_VarEq2OrVarRangeGt2Lt3() {
FieldAttribute fa = getFieldAttribute();
Equals eq = equalsOf(fa, TWO);
Range range = rangeOf(fa, TWO, false, THREE, false);
PropagateEquals rule = new PropagateEquals();
Expression exp = rule.rule(new Or(EMPTY, eq, range));
assertEquals(Range.class, exp.getClass());
Range r = (Range) exp;
assertEquals(TWO, r.lower());
assertTrue(r.includeLower());
assertEquals(THREE, r.upper());
assertFalse(r.includeUpper());
}
// a = 3 OR 2 < a < 3 -> 2 < a <= 3
public void testPropagateEquals_VarEq3OrVarRangeGt2Lt3() {
FieldAttribute fa = getFieldAttribute();
Equals eq = equalsOf(fa, THREE);
Range range = rangeOf(fa, TWO, false, THREE, false);
PropagateEquals rule = new PropagateEquals();
Expression exp = rule.rule(new Or(EMPTY, eq, range));
assertEquals(Range.class, exp.getClass());
Range r = (Range) exp;
assertEquals(TWO, r.lower());
assertFalse(r.includeLower());
assertEquals(THREE, r.upper());
assertTrue(r.includeUpper());
}
// a = 2 OR a != 2 -> TRUE
public void testPropagateEquals_VarEq2OrVarNeq2() {
FieldAttribute fa = getFieldAttribute();
Equals eq = equalsOf(fa, TWO);
NotEquals neq = notEqualsOf(fa, TWO);
PropagateEquals rule = new PropagateEquals();
Expression exp = rule.rule(new Or(EMPTY, eq, neq));
assertEquals(TRUE, exp);
}
// a = 2 OR a != 5 -> a != 5
public void testPropagateEquals_VarEq2OrVarNeq5() {
FieldAttribute fa = getFieldAttribute();
Equals eq = equalsOf(fa, TWO);
NotEquals neq = notEqualsOf(fa, FIVE);
PropagateEquals rule = new PropagateEquals();
Expression exp = rule.rule(new Or(EMPTY, eq, neq));
assertEquals(NotEquals.class, exp.getClass());
NotEquals ne = (NotEquals) exp;
assertEquals(FIVE, ne.right());
}
// a = 2 OR 3 < a < 4 OR a > 2 OR a!= 2 -> TRUE
public void testPropagateEquals_VarEq2OrVarRangeGt3Lt4OrVarGt2OrVarNe2() {
FieldAttribute fa = getFieldAttribute();
Equals eq = equalsOf(fa, TWO);
Range range = rangeOf(fa, THREE, false, FOUR, false);
GreaterThan gt = greaterThanOf(fa, TWO);
NotEquals neq = notEqualsOf(fa, TWO);
PropagateEquals rule = new PropagateEquals();
Expression exp = rule.rule((Or) Predicates.combineOr(asList(eq, range, neq, gt)));
assertEquals(TRUE, exp);
}
// a == 1 AND a == 2 -> nop for date/time fields
public void testPropagateEquals_ignoreDateTimeFields() {
FieldAttribute fa = TestUtils.getFieldAttribute("a", DataTypes.DATETIME);
Equals eq1 = equalsOf(fa, ONE);
Equals eq2 = equalsOf(fa, TWO);
And and = new And(EMPTY, eq1, eq2);
PropagateEquals rule = new PropagateEquals();
Expression exp = rule.rule(and);
assertEquals(and, exp);
}
//
// Like / Regex
//
public void testMatchAllLikeToExist() throws Exception {
for (String s : asList("%", "%%", "%%%")) {
LikePattern pattern = new LikePattern(s, (char) 0);
FieldAttribute fa = getFieldAttribute();
Like l = new Like(EMPTY, fa, pattern);
Expression e = new ReplaceRegexMatch().rule(l);
assertEquals(IsNotNull.class, e.getClass());
IsNotNull inn = (IsNotNull) e;
assertEquals(fa, inn.field());
}
}
public void testMatchAllWildcardLikeToExist() throws Exception {
for (String s : asList("*", "**", "***")) {
WildcardPattern pattern = new WildcardPattern(s);
FieldAttribute fa = getFieldAttribute();
WildcardLike l = new WildcardLike(EMPTY, fa, pattern);
Expression e = new ReplaceRegexMatch().rule(l);
assertEquals(IsNotNull.class, e.getClass());
IsNotNull inn = (IsNotNull) e;
assertEquals(fa, inn.field());
}
}
public void testMatchAllRLikeToExist() throws Exception {
RLikePattern pattern = new RLikePattern(".*");
FieldAttribute fa = getFieldAttribute();
RLike l = new RLike(EMPTY, fa, pattern);
Expression e = new ReplaceRegexMatch().rule(l);
assertEquals(IsNotNull.class, e.getClass());
IsNotNull inn = (IsNotNull) e;
assertEquals(fa, inn.field());
}
public void testExactMatchLike() throws Exception {
for (String s : asList("ab", "ab0%", "ab0_c")) {
LikePattern pattern = new LikePattern(s, '0');
FieldAttribute fa = getFieldAttribute();
Like l = new Like(EMPTY, fa, pattern);
Expression e = new ReplaceRegexMatch().rule(l);
assertEquals(Equals.class, e.getClass());
Equals eq = (Equals) e;
assertEquals(fa, eq.left());
assertEquals(s.replace("0", StringUtils.EMPTY), eq.right().fold());
}
}
public void testExactMatchWildcardLike() throws Exception {
String s = "ab";
WildcardPattern pattern = new WildcardPattern(s);
FieldAttribute fa = getFieldAttribute();
WildcardLike l = new WildcardLike(EMPTY, fa, pattern);
Expression e = new ReplaceRegexMatch().rule(l);
assertEquals(Equals.class, e.getClass());
Equals eq = (Equals) e;
assertEquals(fa, eq.left());
assertEquals(s, eq.right().fold());
}
public void testExactMatchRLike() throws Exception {
RLikePattern pattern = new RLikePattern("abc");
FieldAttribute fa = getFieldAttribute();
RLike l = new RLike(EMPTY, fa, pattern);
Expression e = new ReplaceRegexMatch().rule(l);
assertEquals(Equals.class, e.getClass());
Equals eq = (Equals) e;
assertEquals(fa, eq.left());
assertEquals("abc", eq.right().fold());
}
//
// CombineDisjunction in Equals
//
// CombineDisjunctionsToIn with shouldValidateIn as true
private final | DummyBooleanExpression |
java | apache__avro | lang/java/avro/src/test/java/org/apache/avro/TestReadingWritingDataInEvolvedSchemas.java | {
"start": 1834,
"end": 5383
} | class ____ {
private static final String RECORD_A = "RecordA";
private static final String FIELD_A = "fieldA";
private static final char LATIN_SMALL_LETTER_O_WITH_DIARESIS = '\u00F6';
private static final Schema DOUBLE_RECORD = SchemaBuilder.record(RECORD_A) //
.fields() //
.name(FIELD_A).type().doubleType().noDefault() //
.endRecord();
private static final Schema FLOAT_RECORD = SchemaBuilder.record(RECORD_A) //
.fields() //
.name(FIELD_A).type().floatType().noDefault() //
.endRecord();
private static final Schema LONG_RECORD = SchemaBuilder.record(RECORD_A) //
.fields() //
.name(FIELD_A).type().longType().noDefault() //
.endRecord();
private static final Schema INT_RECORD = SchemaBuilder.record(RECORD_A) //
.fields() //
.name(FIELD_A).type().intType().noDefault() //
.endRecord();
private static final Schema UNION_INT_LONG_FLOAT_DOUBLE_RECORD = SchemaBuilder.record(RECORD_A) //
.fields() //
.name(FIELD_A).type().unionOf().doubleType().and().floatType().and().longType().and().intType().endUnion()
.noDefault() //
.endRecord();
private static final Schema STRING_RECORD = SchemaBuilder.record(RECORD_A) //
.fields() //
.name(FIELD_A).type().stringType().noDefault() //
.endRecord();
private static final Schema BYTES_RECORD = SchemaBuilder.record(RECORD_A) //
.fields() //
.name(FIELD_A).type().bytesType().noDefault() //
.endRecord();
private static final Schema UNION_STRING_BYTES_RECORD = SchemaBuilder.record(RECORD_A) //
.fields() //
.name(FIELD_A).type().unionOf().stringType().and().bytesType().endUnion().noDefault() //
.endRecord();
private static final Schema ENUM_AB = SchemaBuilder.enumeration("Enum1").symbols("A", "B");
private static final Schema ENUM_AB_RECORD = SchemaBuilder.record(RECORD_A) //
.fields() //
.name(FIELD_A).type(ENUM_AB).noDefault() //
.endRecord();
private static final Schema ENUM_ABC = SchemaBuilder.enumeration("Enum1").symbols("A", "B", "C");
private static final Schema ENUM_ABC_RECORD = SchemaBuilder.record(RECORD_A) //
.fields() //
.name(FIELD_A).type(ENUM_ABC).noDefault() //
.endRecord();
private static final Schema UNION_INT_RECORD = SchemaBuilder.record(RECORD_A) //
.fields() //
.name(FIELD_A).type().unionOf().intType().endUnion().noDefault() //
.endRecord();
private static final Schema UNION_LONG_RECORD = SchemaBuilder.record(RECORD_A) //
.fields() //
.name(FIELD_A).type().unionOf().longType().endUnion().noDefault() //
.endRecord();
private static final Schema UNION_FLOAT_RECORD = SchemaBuilder.record(RECORD_A) //
.fields() //
.name(FIELD_A).type().unionOf().floatType().endUnion().noDefault() //
.endRecord();
private static final Schema UNION_DOUBLE_RECORD = SchemaBuilder.record(RECORD_A) //
.fields() //
.name(FIELD_A).type().unionOf().doubleType().endUnion().noDefault() //
.endRecord();
private static final Schema UNION_LONG_FLOAT_RECORD = SchemaBuilder.record(RECORD_A) //
.fields() //
.name(FIELD_A).type().unionOf().floatType().and().longType().endUnion().noDefault() //
.endRecord();
private static final Schema UNION_FLOAT_DOUBLE_RECORD = SchemaBuilder.record(RECORD_A) //
.fields() //
.name(FIELD_A).type().unionOf().floatType().and().doubleType().endUnion().noDefault() //
.endRecord();
| TestReadingWritingDataInEvolvedSchemas |
java | apache__hadoop | hadoop-cloud-storage-project/hadoop-tos/src/main/java/org/apache/hadoop/fs/tosfs/commit/SuccessData.java | {
"start": 1410,
"end": 4870
} | class ____ implements Serializer {
private static final JsonCodec<SuccessData> CODEC = new JsonCodec<>(SuccessData.class);
private String name;
private boolean success = true;
private long timestamp;
private String date;
private String hostname;
private String committer;
private String description;
private String jobId;
// Filenames in the commit.
private final List<String> filenames = new ArrayList<>();
// Diagnostics information.
private final Map<String, String> diagnostics = new HashMap<>();
// No-arg constructor for json serializer, Don't use.
public SuccessData() {
}
public SuccessData(Builder builder) {
this.name = builder.name;
this.success = builder.success;
this.timestamp = builder.timestamp;
this.date = builder.date;
this.hostname = builder.hostname;
this.committer = builder.committer;
this.description = builder.description;
this.jobId = builder.jobId;
this.filenames.addAll(builder.filenames);
}
public String name() {
return name;
}
public boolean success() {
return success;
}
public long timestamp() {
return timestamp;
}
public String date() {
return date;
}
public String hostname() {
return hostname;
}
public String committer() {
return committer;
}
public String description() {
return description;
}
public String jobId() {
return jobId;
}
public Map<String, String> diagnostics() {
return diagnostics;
}
public List<String> filenames() {
return filenames;
}
public void recordJobFailure(Throwable thrown) {
this.success = false;
String stacktrace = Throwables.getStackTraceAsString(thrown);
addDiagnosticInfo("exception", thrown.toString());
addDiagnosticInfo("stacktrace", stacktrace);
}
public void addDiagnosticInfo(String key, String value) {
diagnostics.put(key, value);
}
@Override
public String toString() {
return MoreObjects.toStringHelper(this)
.add("name", name)
.add("success", success)
.add("timestamp", timestamp)
.add("date", date)
.add("hostname", hostname)
.add("committer", committer)
.add("description", description)
.add("jobId", jobId)
.add("filenames", StringUtils.join(",", filenames))
.toString();
}
public static Builder builder() {
return new Builder();
}
@Override
public byte[] serialize() throws IOException {
return CODEC.toBytes(this);
}
public static SuccessData deserialize(byte[] data) throws IOException {
return CODEC.fromBytes(data);
}
@Override
public int hashCode() {
return Objects.hash(name, success, timestamp, date, hostname, committer, description, jobId,
filenames);
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
} else if (!(o instanceof SuccessData)) {
return false;
}
SuccessData that = (SuccessData) o;
return Objects.equals(name, that.name)
&& Objects.equals(success, that.success)
&& Objects.equals(timestamp, that.timestamp)
&& Objects.equals(date, that.date)
&& Objects.equals(hostname, that.hostname)
&& Objects.equals(committer, that.committer)
&& Objects.equals(description, that.description)
&& Objects.equals(jobId, that.jobId)
&& Objects.equals(filenames, that.filenames);
}
public static | SuccessData |
java | apache__flink | flink-connectors/flink-connector-files/src/test/java/org/apache/flink/connector/file/src/FileSourceHeavyThroughputTest.java | {
"start": 5250,
"end": 5763
} | class ____ extends SimpleStreamFormat<byte[]> {
private static final long serialVersionUID = 1L;
@Override
public Reader<byte[]> createReader(Configuration config, FSDataInputStream stream)
throws IOException {
return new ArrayReader(stream);
}
@Override
public TypeInformation<byte[]> getProducedType() {
return PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO;
}
}
private static final | ArrayReaderFormat |
java | google__dagger | javatests/dagger/functional/producers/subcomponent/UsesProducerModuleSubcomponents.java | {
"start": 2228,
"end": 2322
} | interface ____ {}
@ProducerModule(includes = ProducerModuleWithSubcomponents.class)
| FromChild |
java | lettuce-io__lettuce-core | src/test/java/io/lettuce/core/json/RedisJsonIntegrationTests.java | {
"start": 1405,
"end": 25557
} | class ____ {
private static final String BIKES_INVENTORY = "bikes:inventory";
private static final String BIKE_COLORS_V1 = "..mountain_bikes[1].colors";
private static final String BIKE_COLORS_V2 = "$..mountain_bikes[1].colors";
private static final String MOUNTAIN_BIKES_V1 = "..mountain_bikes";
private static final String MOUNTAIN_BIKES_V2 = "$..mountain_bikes";
protected static RedisClient client;
protected static RedisCommands<String, String> redis;
public RedisJsonIntegrationTests() {
RedisURI redisURI = RedisURI.Builder.redis("127.0.0.1").withPort(16379).build();
client = RedisClient.create(redisURI);
redis = client.connect().sync();
}
@BeforeEach
public void prepare() throws IOException {
redis.flushall();
Path path = Paths.get("src/test/resources/bike-inventory.json");
String read = String.join("", Files.readAllLines(path));
JsonValue value = redis.getJsonParser().createJsonValue(read);
redis.jsonSet("bikes:inventory", JsonPath.ROOT_PATH, value);
}
@AfterAll
static void teardown() {
if (client != null) {
client.shutdown();
}
}
@ParameterizedTest(name = "With {0} as path")
@ValueSource(strings = { MOUNTAIN_BIKES_V1, MOUNTAIN_BIKES_V2 })
void jsonArrappend(String path) {
JsonParser parser = redis.getJsonParser();
JsonPath myPath = JsonPath.of(path);
JsonValue element = parser.createJsonValue("\"{id:bike6}\"");
List<Long> appendedElements = redis.jsonArrappend(BIKES_INVENTORY, myPath, element);
assertThat(appendedElements).hasSize(1);
assertThat(appendedElements.get(0)).isEqualTo(4);
}
@ParameterizedTest(name = "With {0} as path")
@ValueSource(strings = { MOUNTAIN_BIKES_V1, MOUNTAIN_BIKES_V2 })
void jsonArrappendStringOverload(String path) {
JsonPath myPath = JsonPath.of(path);
List<Long> appendedElements = redis.jsonArrappend(BIKES_INVENTORY, myPath, "\"{id:bike7}\"");
assertThat(appendedElements).hasSize(1);
assertThat(appendedElements.get(0)).isEqualTo(4);
// Verify appended content
List<JsonValue> value = redis.jsonGet(BIKES_INVENTORY, myPath);
assertThat(value).hasSize(1);
if (path.startsWith("$")) {
JsonArray matches = value.get(0).asJsonArray();
assertThat(matches.size()).isEqualTo(1);
JsonArray arr = matches.get(0).asJsonArray();
String last = arr.get(arr.size() - 1).toString();
assertThat(last).isEqualTo("\"{id:bike7}\"");
} else {
JsonArray arr = value.get(0).asJsonArray();
String last = arr.get(arr.size() - 1).toString();
assertThat(last).isEqualTo("\"{id:bike7}\"");
}
}
@ParameterizedTest(name = "With {0} as path")
@ValueSource(strings = { BIKE_COLORS_V1, BIKE_COLORS_V2 })
void jsonArrindex(String path) {
JsonParser parser = redis.getJsonParser();
JsonPath myPath = JsonPath.of(path);
JsonValue element = parser.createJsonValue("\"white\"");
List<Long> arrayIndex = redis.jsonArrindex(BIKES_INVENTORY, myPath, element);
assertThat(arrayIndex).isNotNull();
assertThat(arrayIndex).hasSize(1);
assertThat(arrayIndex.get(0).longValue()).isEqualTo(1L);
}
@ParameterizedTest(name = "With {0} as path")
@ValueSource(strings = { BIKE_COLORS_V1, BIKE_COLORS_V2 })
void jsonArrinsert(String path) {
JsonParser parser = redis.getJsonParser();
JsonPath myPath = JsonPath.of(path);
JsonValue element = parser.createJsonValue("\"ultramarine\"");
List<Long> arrayIndex = redis.jsonArrinsert(BIKES_INVENTORY, myPath, 1, element);
assertThat(arrayIndex).isNotNull();
assertThat(arrayIndex).hasSize(1);
assertThat(arrayIndex.get(0).longValue()).isEqualTo(3L);
}
@ParameterizedTest(name = "With {0} as path")
@ValueSource(strings = { MOUNTAIN_BIKES_V1, MOUNTAIN_BIKES_V2 })
void jsonArrlen(String path) {
JsonPath myPath = JsonPath.of(path);
List<Long> poppedJson = redis.jsonArrlen(BIKES_INVENTORY, myPath);
assertThat(poppedJson).hasSize(1);
assertThat(poppedJson.get(0).longValue()).isEqualTo(3);
}
@Test
void jsonArrLenAsyncAndReactive() throws ExecutionException, InterruptedException {
RedisAsyncCommands<String, String> asyncCommands = client.connect().async();
RedisReactiveCommands<String, String> reactiveCommands = client.connect().reactive();
JsonPath myPath = JsonPath.of(MOUNTAIN_BIKES_V1);
List<Long> poppedJson = asyncCommands.jsonArrlen(BIKES_INVENTORY, myPath).get();
assertThat(poppedJson).hasSize(1);
assertThat(poppedJson.get(0).longValue()).isEqualTo(3);
StepVerifier.create(reactiveCommands.jsonArrlen(BIKES_INVENTORY, myPath)).consumeNextWith(actual -> {
assertThat(actual).isEqualTo(3);
}).verifyComplete();
}
@ParameterizedTest(name = "With {0} as path")
@ValueSource(strings = { MOUNTAIN_BIKES_V1, MOUNTAIN_BIKES_V2 })
void jsonArrpop(String path) {
JsonPath myPath = JsonPath.of(path);
List<JsonValue> poppedJson = redis.jsonArrpop(BIKES_INVENTORY, myPath);
assertThat(poppedJson).hasSize(1);
assertThat(poppedJson.get(0).toString()).contains(
"{\"id\":\"bike:3\",\"model\":\"Weywot\",\"description\":\"This bike gives kids aged six years and old");
}
@Test
public void jsonArrpopEmptyArray() {
JsonValue value = redis.getJsonParser().createJsonValue("[\"one\"]");
redis.jsonSet("myKey", JsonPath.ROOT_PATH, value);
List<JsonValue> result = redis.jsonArrpop("myKey");
assertThat(result.toString()).isEqualTo("[\"one\"]");
assertThat(redis.jsonGet("myKey").get(0).toString()).isEqualTo("[]");
}
@Test
public void jsonArrpopWithRootPathAndIndex() {
JsonValue value = redis.getJsonParser().createJsonValue("[\"one\",\"two\",\"three\"]");
redis.jsonSet("myKey", JsonPath.ROOT_PATH, value);
List<JsonValue> result = redis.jsonArrpop("myKey", JsonPath.ROOT_PATH, 1);
assertThat(result.toString()).isEqualTo("[\"two\"]");
assertThat(redis.jsonGet("myKey").get(0).toString()).isEqualTo("[\"one\",\"three\"]");
}
@ParameterizedTest(name = "With {0} as path")
@ValueSource(strings = { BIKE_COLORS_V1, BIKE_COLORS_V2 })
void jsonArrtrim(String path) {
JsonPath myPath = JsonPath.of(path);
JsonRangeArgs range = JsonRangeArgs.Builder.start(1).stop(2);
List<Long> arrayIndex = redis.jsonArrtrim(BIKES_INVENTORY, myPath, range);
assertThat(arrayIndex).isNotNull();
assertThat(arrayIndex).hasSize(1);
assertThat(arrayIndex.get(0).longValue()).isEqualTo(1L);
}
@ParameterizedTest(name = "With {0} as path")
@ValueSource(strings = { BIKE_COLORS_V1, BIKE_COLORS_V2 })
void jsonClear(String path) {
JsonPath myPath = JsonPath.of(path);
Long result = redis.jsonClear(BIKES_INVENTORY, myPath);
assertThat(result).isNotNull();
assertThat(result).isEqualTo(1L);
}
@ParameterizedTest(name = "With {0} as path")
@ValueSource(strings = { "..mountain_bikes[0:2].model", "$..mountain_bikes[0:2].model" })
void jsonGet(String path) {
JsonPath myPath = JsonPath.of(path);
// Verify codec parsing
List<JsonValue> value = redis.jsonGet(BIKES_INVENTORY, myPath);
assertThat(value).hasSize(1);
if (path.startsWith("$")) {
assertThat(value.get(0).toString()).isEqualTo("[\"Phoebe\",\"Quaoar\"]");
// Verify array parsing
assertThat(value.get(0).isJsonArray()).isTrue();
assertThat(value.get(0).asJsonArray().size()).isEqualTo(2);
assertThat(value.get(0).asJsonArray().asList().get(0).toString()).isEqualTo("\"Phoebe\"");
assertThat(value.get(0).asJsonArray().asList().get(1).toString()).isEqualTo("\"Quaoar\"");
// Verify String parsing
assertThat(value.get(0).asJsonArray().asList().get(0).isString()).isTrue();
assertThat(value.get(0).asJsonArray().asList().get(0).asString()).isEqualTo("Phoebe");
assertThat(value.get(0).asJsonArray().asList().get(1).isString()).isTrue();
assertThat(value.get(0).asJsonArray().asList().get(1).isNull()).isFalse();
assertThat(value.get(0).asJsonArray().asList().get(1).asString()).isEqualTo("Quaoar");
} else {
assertThat(value.get(0).toString()).isEqualTo("\"Phoebe\"");
// Verify array parsing
assertThat(value.get(0).isString()).isTrue();
assertThat(value.get(0).asString()).isEqualTo("Phoebe");
}
}
@Test
void jsonGetNull() {
JsonPath myPath = JsonPath.of("$..inventory.owner");
// Verify codec parsing
List<JsonValue> value = redis.jsonGet(BIKES_INVENTORY, myPath);
assertThat(value).hasSize(1);
assertThat(value.get(0).toString()).isEqualTo("[null]");
// Verify array parsing
assertThat(value.get(0).isJsonArray()).isTrue();
assertThat(value.get(0).asJsonArray().size()).isEqualTo(1);
assertThat(value.get(0).asJsonArray().asList().get(0).toString()).isEqualTo("null");
assertThat(value.get(0).asJsonArray().asList().get(0).isNull()).isTrue();
}
@ParameterizedTest(name = "With {0} as path")
@ValueSource(strings = { MOUNTAIN_BIKES_V1 + "[1]", MOUNTAIN_BIKES_V2 + "[1]" })
void jsonMerge(String path) {
JsonParser parser = redis.getJsonParser();
JsonPath myPath = JsonPath.of(path);
JsonValue element = parser.createJsonValue("\"ultramarine\"");
String result = redis.jsonMerge(BIKES_INVENTORY, myPath, element);
assertThat(result).isNotNull();
assertThat(result).isEqualTo("OK");
}
@ParameterizedTest(name = "With {0} as path")
@ValueSource(strings = { "..model", "$..model" })
void jsonMGet(String path) {
JsonPath myPath = JsonPath.of(path);
List<JsonValue> value = redis.jsonMGet(myPath, BIKES_INVENTORY);
assertThat(value).hasSize(1);
if (path.startsWith("$")) {
assertThat(value.get(0).toString()).isEqualTo("[\"Phoebe\",\"Quaoar\",\"Weywot\"]");
} else {
assertThat(value.get(0).toString()).isEqualTo("\"Phoebe\"");
}
}
@ParameterizedTest(name = "With {0} as path")
@ValueSource(strings = { MOUNTAIN_BIKES_V1 + "[1]", MOUNTAIN_BIKES_V2 + "[1]" })
void jsonMset(String path) {
JsonParser parser = redis.getJsonParser();
JsonPath myPath = JsonPath.of(path);
JsonObject bikeRecord = parser.createJsonObject();
JsonObject bikeSpecs = parser.createJsonObject();
JsonArray bikeColors = parser.createJsonArray();
bikeSpecs.put("material", parser.createJsonValue("\"composite\""));
bikeSpecs.put("weight", parser.createJsonValue("11"));
bikeColors.add(parser.createJsonValue("\"yellow\""));
bikeColors.add(parser.createJsonValue("\"orange\""));
bikeRecord.put("id", parser.createJsonValue("\"bike:43\""));
bikeRecord.put("model", parser.createJsonValue("\"DesertFox\""));
bikeRecord.put("description", parser.createJsonValue("\"The DesertFox is a versatile bike for all terrains\""));
bikeRecord.put("price", parser.createJsonValue("\"1299\""));
bikeRecord.put("specs", bikeSpecs);
bikeRecord.put("colors", bikeColors);
JsonMsetArgs<String, String> args1 = new JsonMsetArgs<>(BIKES_INVENTORY, myPath, bikeRecord);
bikeRecord = parser.createJsonObject();
bikeSpecs = parser.createJsonObject();
bikeColors = parser.createJsonArray();
bikeSpecs.put("material", parser.createJsonValue("\"wood\""));
bikeSpecs.put("weight", parser.createJsonValue("19"));
bikeColors.add(parser.createJsonValue("\"walnut\""));
bikeColors.add(parser.createJsonValue("\"chestnut\""));
bikeRecord.put("id", parser.createJsonValue("\"bike:13\""));
bikeRecord.put("model", parser.createJsonValue("\"Woody\""));
bikeRecord.put("description", parser.createJsonValue("\"The Woody is an environmentally-friendly wooden bike\""));
bikeRecord.put("price", parser.createJsonValue("\"1112\""));
bikeRecord.put("specs", bikeSpecs);
bikeRecord.put("colors", bikeColors);
JsonMsetArgs<String, String> args2 = new JsonMsetArgs<>(BIKES_INVENTORY, myPath, bikeRecord);
List<JsonMsetArgs<String, String>> args = Arrays.asList(args1, args2);
String result = redis.jsonMSet(args);
assertThat(result).isNotNull();
assertThat(result).isEqualTo("OK");
JsonValue value = redis.jsonGet(BIKES_INVENTORY, JsonPath.ROOT_PATH).get(0);
assertThat(value).isNotNull();
assertThat(value.isJsonArray()).isTrue();
assertThat(value.asJsonArray().size()).isEqualTo(1);
assertThat(value.asJsonArray().asList().get(0).toString()).contains(
"{\"id\":\"bike:13\",\"model\":\"Woody\",\"description\":\"The Woody is an environmentally-friendly wooden bike\"");
}
@Test
void jsonMsetCrossslot() {
JsonParser parser = redis.getJsonParser();
JsonPath myPath = JsonPath.of(BIKES_INVENTORY);
JsonObject bikeRecord = parser.createJsonObject();
JsonObject bikeSpecs = parser.createJsonObject();
JsonArray bikeColors = parser.createJsonArray();
bikeSpecs.put("material", parser.createJsonValue("\"composite\""));
bikeSpecs.put("weight", parser.createJsonValue("11"));
bikeColors.add(parser.createJsonValue("\"yellow\""));
bikeColors.add(parser.createJsonValue("\"orange\""));
bikeRecord.put("id", parser.createJsonValue("\"bike:43\""));
bikeRecord.put("model", parser.createJsonValue("\"DesertFox\""));
bikeRecord.put("description", parser.createJsonValue("\"The DesertFox is a versatile bike for all terrains\""));
bikeRecord.put("price", parser.createJsonValue("\"1299\""));
bikeRecord.put("specs", bikeSpecs);
bikeRecord.put("colors", bikeColors);
JsonMsetArgs<String, String> args1 = new JsonMsetArgs<>(BIKES_INVENTORY, myPath, bikeRecord);
bikeRecord = parser.createJsonObject();
bikeSpecs = parser.createJsonObject();
bikeColors = parser.createJsonArray();
bikeSpecs.put("material", parser.createJsonValue("\"wood\""));
bikeSpecs.put("weight", parser.createJsonValue("19"));
bikeColors.add(parser.createJsonValue("\"walnut\""));
bikeColors.add(parser.createJsonValue("\"chestnut\""));
bikeRecord.put("id", parser.createJsonValue("\"bike:13\""));
bikeRecord.put("model", parser.createJsonValue("\"Woody\""));
bikeRecord.put("description", parser.createJsonValue("\"The Woody is an environmentally-friendly wooden bike\""));
bikeRecord.put("price", parser.createJsonValue("\"1112\""));
bikeRecord.put("specs", bikeSpecs);
bikeRecord.put("colors", bikeColors);
JsonMsetArgs<String, String> args2 = new JsonMsetArgs<>("bikes:service", JsonPath.ROOT_PATH, bikeRecord);
List<JsonMsetArgs<String, String>> args = Arrays.asList(args1, args2);
String result = redis.jsonMSet(args);
assertThat(result).isNotNull();
assertThat(result).isEqualTo("OK");
JsonValue value = redis.jsonGet("bikes:service", JsonPath.ROOT_PATH).get(0);
assertThat(value).isNotNull();
assertThat(value.isJsonArray()).isTrue();
assertThat(value.asJsonArray().size()).isEqualTo(1);
assertThat(value.asJsonArray().asList().get(0).toString()).contains(
"{\"id\":\"bike:13\",\"model\":\"Woody\",\"description\":\"The Woody is an environmentally-friendly wooden bike\"");
}
@ParameterizedTest(name = "With {0} as path")
@ValueSource(strings = { "$..mountain_bikes[0:1].price", "..mountain_bikes[0:1].price" })
void jsonNumincrby(String path) {
JsonPath myPath = JsonPath.of(path);
List<Number> value = redis.jsonNumincrby(BIKES_INVENTORY, myPath, 5L);
assertThat(value).hasSize(1);
assertThat(value.get(0).longValue()).isEqualTo(1933L);
}
@ParameterizedTest(name = "With {0} as path")
@ValueSource(strings = { MOUNTAIN_BIKES_V1 + "[1]", MOUNTAIN_BIKES_V2 + "[1]" })
void jsonObjkeys(String path) {
JsonPath myPath = JsonPath.of(path);
List<String> result = redis.jsonObjkeys(BIKES_INVENTORY, myPath);
assertThat(result).isNotNull();
assertThat(result).hasSize(6);
assertThat(result).contains("id", "model", "description", "price", "specs", "colors");
}
@ParameterizedTest(name = "With {0} as path")
@ValueSource(strings = { MOUNTAIN_BIKES_V1 + "[1]", MOUNTAIN_BIKES_V2 + "[1]" })
void jsonObjlen(String path) {
JsonPath myPath = JsonPath.of(path);
List<Long> result = redis.jsonObjlen(BIKES_INVENTORY, myPath);
assertThat(result).isNotNull();
assertThat(result).hasSize(1);
assertThat(result.get(0)).isEqualTo(6L);
}
@ParameterizedTest(name = "With {0} as path")
@ValueSource(strings = { MOUNTAIN_BIKES_V1, MOUNTAIN_BIKES_V2 })
void jsonSet(String path) {
JsonPath myPath = JsonPath.of(path);
JsonParser parser = redis.getJsonParser();
JsonObject bikeRecord = parser.createJsonObject();
JsonObject bikeSpecs = parser.createJsonObject();
JsonArray bikeColors = parser.createJsonArray();
bikeSpecs.put("material", parser.createJsonValue("null"));
bikeSpecs.put("weight", parser.createJsonValue("11"));
bikeColors.add(parser.createJsonValue("\"yellow\""));
bikeColors.add(parser.createJsonValue("\"orange\""));
bikeRecord.put("id", parser.createJsonValue("\"bike:43\""));
bikeRecord.put("model", parser.createJsonValue("\"DesertFox\""));
bikeRecord.put("description", parser.createJsonValue("\"The DesertFox is a versatile bike for all terrains\""));
bikeRecord.put("price", parser.createJsonValue("\"1299\""));
bikeRecord.put("specs", bikeSpecs);
bikeRecord.put("colors", bikeColors);
String result = redis.jsonSet(BIKES_INVENTORY, myPath, bikeRecord);
assertThat(result).isEqualTo("OK");
}
@ParameterizedTest(name = "With {0} as path")
@ValueSource(strings = { MOUNTAIN_BIKES_V1, MOUNTAIN_BIKES_V2 })
void jsonSetStringAndGet(String path) {
JsonPath myPath = JsonPath.of(path);
// Set using String overload, then get and verify
String payload = "{\"id\":\"bike:99\",\"model\":\"Stringy\"}";
String res = redis.jsonSet(BIKES_INVENTORY, myPath, payload);
assertThat(res).isEqualTo("OK");
List<JsonValue> got = redis.jsonGet(BIKES_INVENTORY, myPath);
assertThat(got).hasSize(1);
// For $-prefixed paths, RedisJSON returns arrays of matches
if (path.startsWith("$")) {
assertThat(got.get(0).isJsonArray()).isTrue();
assertThat(got.get(0).asJsonArray().size()).isEqualTo(1);
assertThat(got.get(0).asJsonArray().get(0).isJsonObject()).isTrue();
assertThat(got.get(0).asJsonArray().get(0).asJsonObject().get("id").asString()).isEqualTo("bike:99");
assertThat(got.get(0).asJsonArray().get(0).asJsonObject().get("model").asString()).isEqualTo("Stringy");
} else {
assertThat(got.get(0).isJsonObject()).isTrue();
assertThat(got.get(0).asJsonObject().get("id").asString()).isEqualTo("bike:99");
assertThat(got.get(0).asJsonObject().get("model").asString()).isEqualTo("Stringy");
}
}
@ParameterizedTest(name = "With {0} as path")
@ValueSource(strings = { "..mountain_bikes[1].colors[1]", "$..mountain_bikes[1].colors[1]" })
void jsonStrappend(String path) {
JsonParser parser = redis.getJsonParser();
JsonPath myPath = JsonPath.of(path);
JsonValue element = parser.createJsonValue("\"-light\"");
List<Long> result = redis.jsonStrappend(BIKES_INVENTORY, myPath, element);
assertThat(result).isNotNull();
assertThat(result).hasSize(1);
assertThat(result.get(0)).isEqualTo(11L);
}
@ParameterizedTest(name = "With {0} as path")
@ValueSource(strings = { BIKE_COLORS_V1 + "[1]", BIKE_COLORS_V2 + "[1]" })
void jsonStrlen(String path) {
JsonPath myPath = JsonPath.of(path);
List<Long> result = redis.jsonStrlen(BIKES_INVENTORY, myPath);
assertThat(result).isNotNull();
assertThat(result).hasSize(1);
assertThat(result.get(0)).isEqualTo(5L);
}
@ParameterizedTest(name = "With {0} as path")
@ValueSource(strings = { "$..complete", "..complete" })
void jsonToggle(String path) {
JsonPath myPath = JsonPath.of(path);
List<Long> result = redis.jsonToggle(BIKES_INVENTORY, myPath);
assertThat(result).isNotNull();
assertThat(result).hasSize(1);
if (path.startsWith("$")) {
assertThat(result.get(0)).isEqualTo(1L);
} else {
// seems that for JSON.TOGGLE when we use a V1 path the resulting value is a list of string values and not a
// list of integer values as per the documentation
assertThat(result).isNotEmpty();
}
}
@ParameterizedTest(name = "With {0} as path")
@ValueSource(strings = { MOUNTAIN_BIKES_V1 + "[2:3]", MOUNTAIN_BIKES_V2 + "[2:3]" })
void jsonDel(String path) {
JsonPath myPath = JsonPath.of(path);
Long value = redis.jsonDel(BIKES_INVENTORY, myPath);
assertThat(value).isEqualTo(1);
}
@ParameterizedTest(name = "With {0} as path")
@ValueSource(strings = { MOUNTAIN_BIKES_V1, MOUNTAIN_BIKES_V2 })
void jsonType(String path) {
JsonPath myPath = JsonPath.of(path);
JsonType jsonType = redis.jsonType(BIKES_INVENTORY, myPath).get(0);
assertThat(jsonType).isEqualTo(JsonType.ARRAY);
}
@Test
void jsonAllTypes() {
JsonPath myPath = JsonPath.of("$..mountain_bikes[1]");
JsonType jsonType = redis.jsonType(BIKES_INVENTORY, myPath).get(0);
assertThat(jsonType).isEqualTo(JsonType.OBJECT);
myPath = JsonPath.of("$..mountain_bikes[0:1].price");
jsonType = redis.jsonType(BIKES_INVENTORY, myPath).get(0);
assertThat(jsonType).isEqualTo(JsonType.INTEGER);
myPath = JsonPath.of("$..weight");
jsonType = redis.jsonType(BIKES_INVENTORY, myPath).get(0);
assertThat(jsonType).isEqualTo(JsonType.NUMBER);
myPath = JsonPath.of("$..complete");
jsonType = redis.jsonType(BIKES_INVENTORY, myPath).get(0);
assertThat(jsonType).isEqualTo(JsonType.BOOLEAN);
myPath = JsonPath.of("$..inventory.owner");
jsonType = redis.jsonType(BIKES_INVENTORY, myPath).get(0);
assertThat(jsonType).isEqualTo(JsonType.UNKNOWN);
}
@Test
void jsonGetToObject() {
JsonPath myPath = JsonPath.of("$..mountain_bikes[1]");
JsonValue value = redis.jsonGet(BIKES_INVENTORY, myPath).get(0);
assertThat(value).isNotNull();
assertThat(value.isNull()).isFalse();
assertThat(value.asJsonArray().get(0).isJsonObject()).isTrue();
MountainBike bike = value.asJsonArray().get(0).asJsonObject().toObject(MountainBike.class);
assertThat(bike).isNotNull();
assertThat(bike).isInstanceOf(MountainBike.class);
assertThat(bike.id).isEqualTo("bike:2");
assertThat(bike.model).isEqualTo("Quaoar");
assertThat(bike.description).contains("Redesigned for the 2020 model year, this bike impressed");
}
static | RedisJsonIntegrationTests |
java | google__dagger | javatests/artifacts/dagger-android/simple/app/src/main/java/dagger/android/simple/SimpleApplication.java | {
"start": 1419,
"end": 1807
} | interface ____ extends AndroidInjector.Factory<SimpleApplication> {}
}
@Inject @Model String model;
@Override
public void onCreate() {
super.onCreate();
Log.i(TAG, "Injected with model: " + model);
}
@Override
protected AndroidInjector<SimpleApplication> applicationInjector() {
return DaggerSimpleApplication_SimpleComponent.factory().create(this);
}
}
| Factory |
java | elastic__elasticsearch | modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ConditionalWritesUnsupportedRestIT.java | {
"start": 1983,
"end": 3137
} | class ____ extends AbstractRepositoryS3RestTestCase {
private static final String PREFIX = getIdentifierPrefix("RepositoryS3BasicCredentialsRestIT");
private static final String BUCKET = PREFIX + "bucket";
private static final String BASE_PATH = PREFIX + "base_path";
private static final String ACCESS_KEY = PREFIX + "access-key";
private static final String SECRET_KEY = PREFIX + "secret-key";
private static final String CLIENT = "no_conditional_writes_client";
private static final Supplier<String> regionSupplier = new DynamicRegionSupplier();
private static final S3HttpFixture s3Fixture = new S3HttpFixture(
true,
BUCKET,
BASE_PATH,
fixedAccessKey(ACCESS_KEY, regionSupplier, "s3")
) {
@Override
@SuppressForbidden("HttpExchange and Headers are ok here")
protected HttpHandler createHandler() {
return new AssertNoConditionalWritesHandler(asInstanceOf(S3HttpHandler.class, super.createHandler()));
}
};
@SuppressForbidden("HttpExchange and Headers are ok here")
private static | RepositoryS3ConditionalWritesUnsupportedRestIT |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/UnnecessaryOptionalGetTest.java | {
"start": 7166,
"end": 7729
} | class ____ {
private void home() {
Optional<String> op1 = Optional.of("hello");
Optional<String> op2 = Optional.of("hello");
op1.ifPresent(x -> System.out.println(op2.get()));
}
}
""")
.expectUnchanged()
.doTest();
}
@Test
public void genericOptionalVars_differentMethodGet_doesNothing() {
refactoringTestHelper
.addInputLines(
"Test.java",
"""
import java.util.Optional;
public | Test |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ClassCanBeStaticTest.java | {
"start": 2544,
"end": 2731
} | class ____ anonymous and thus cannot be static
Object bar() {
return new Object() {};
}
// enums are already static
| is |
java | apache__hadoop | hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsIoUtils.java | {
"start": 1316,
"end": 2592
} | class ____ {
private static final Logger LOG = LoggerFactory.getLogger(AbfsIoUtils.class);
private AbfsIoUtils() {
}
/**
* Dump the headers of a request/response to the log at DEBUG level.
* @param origin header origin for log
* @param headers map of headers.
*/
public static void dumpHeadersToDebugLog(final String origin,
final Map<String, List<String>> headers) {
if (LOG.isDebugEnabled()) {
LOG.debug("{}", origin);
for (Map.Entry<String, List<String>> entry : headers.entrySet()) {
String key = entry.getKey();
if (key == null) {
key = "HTTP Response";
}
List<String> valuesList = entry.getValue();
if (valuesList == null) {
valuesList = Collections.emptyList();
} else {
valuesList = valuesList.stream()
.map(v -> v == null ? "" : v) // replace null with empty string
.collect(Collectors.toList());
}
String values = StringUtils.join(";", valuesList);
if (key.contains("Cookie")) {
values = "*cookie info*";
}
if (key.equals("sig")) {
values = "XXXX";
}
LOG.debug(" {}={}",
key,
values);
}
}
}
}
| AbfsIoUtils |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/ThreadSafeCheckerTest.java | {
"start": 8880,
"end": 9090
} | interface ____ {}
""")
.addSourceLines(
"Test.java",
"""
import com.google.errorprone.annotations.ThreadSafe;
@ThreadSafe
| MyInterface |
java | apache__flink | flink-table/flink-table-runtime/src/test/java/org/apache/flink/table/runtime/operators/aggregate/correlate/AsyncCorrelateRunnerTest.java | {
"start": 6621,
"end": 7131
} | class ____ implements AsyncFunction<RowData, Object> {
@Override
public void asyncInvoke(RowData input, ResultFuture<Object> resultFuture) throws Exception {
int val = input.getInt(0);
if (val == 0) {
throw new RuntimeException("Error!!!!");
} else {
resultFuture.completeExceptionally(new RuntimeException("Other Error!"));
}
}
}
/** Test result future. */
public static final | ExceptionFunction |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/transaction/jdbc/JdbcTransactionFactoryUnitTest.java | {
"start": 1571,
"end": 4562
} | class ____ extends TransactionFactoryBase {
@Mock
private Properties properties;
@Mock
private Connection connection;
@Mock
private DataSource dataSource;
private TransactionFactory transactionFactory;
@BeforeEach
void setup() {
this.transactionFactory = new JdbcTransactionFactory();
}
@Test
@Override
public void shouldSetProperties() throws Exception {
when(properties.getProperty("skipSetAutoCommitOnClose")).thenReturn("true");
transactionFactory.setProperties(properties);
assertTrue((Boolean) getValue(transactionFactory.getClass().getDeclaredField("skipSetAutoCommitOnClose"),
transactionFactory));
}
@Test
@Override
public void shouldNewTransactionWithConnection() throws SQLException {
Transaction result = transactionFactory.newTransaction(connection);
assertNotNull(result);
assertInstanceOf(JdbcTransaction.class, result);
assertEquals(connection, result.getConnection());
}
@Test
@Override
public void shouldNewTransactionWithDataSource() throws Exception {
when(dataSource.getConnection()).thenReturn(connection);
Transaction result = transactionFactory.newTransaction(dataSource, TransactionIsolationLevel.READ_COMMITTED, false);
assertNotNull(result);
assertInstanceOf(JdbcTransaction.class, result);
assertEquals(connection, result.getConnection());
verify(connection).setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED);
assertEquals(dataSource, getValue(result.getClass().getDeclaredField("dataSource"), result));
assertEquals(TransactionIsolationLevel.READ_COMMITTED,
getValue(result.getClass().getDeclaredField("level"), result));
assertEquals(false, getValue(result.getClass().getDeclaredField("autoCommit"), result));
assertEquals(false, getValue(result.getClass().getDeclaredField("skipSetAutoCommitOnClose"), result));
}
@Test
void shouldNewTransactionWithDataSourceAndCustomProperties() throws Exception {
when(dataSource.getConnection()).thenReturn(connection);
when(properties.getProperty("skipSetAutoCommitOnClose")).thenReturn("true");
transactionFactory.setProperties(properties);
Transaction result = transactionFactory.newTransaction(dataSource, TransactionIsolationLevel.READ_COMMITTED, true);
assertNotNull(result);
assertInstanceOf(JdbcTransaction.class, result);
assertEquals(connection, result.getConnection());
verify(connection).setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED);
assertEquals(dataSource, getValue(result.getClass().getDeclaredField("dataSource"), result));
assertEquals(TransactionIsolationLevel.READ_COMMITTED,
getValue(result.getClass().getDeclaredField("level"), result));
assertEquals(true, getValue(result.getClass().getDeclaredField("autoCommit"), result));
assertEquals(true, getValue(result.getClass().getDeclaredField("skipSetAutoCommitOnClose"), result));
}
}
| JdbcTransactionFactoryUnitTest |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/action/fieldcaps/MergedFieldCapabilitiesResponseTests.java | {
"start": 955,
"end": 8081
} | class ____ extends AbstractChunkedSerializingTestCase<FieldCapabilitiesResponse> {
@Override
protected FieldCapabilitiesResponse doParseInstance(XContentParser parser) throws IOException {
return FieldCapsUtils.parseFieldCapsResponse(parser);
}
@Override
protected FieldCapabilitiesResponse createTestInstance() {
// merged responses
Map<String, Map<String, FieldCapabilities>> responses = new HashMap<>();
String[] fields = generateRandomStringArray(5, 10, false, true);
assertNotNull(fields);
for (String field : fields) {
Map<String, FieldCapabilities> typesToCapabilities = new HashMap<>();
String[] types = generateRandomStringArray(5, 10, false, false);
assertNotNull(types);
for (String type : types) {
typesToCapabilities.put(type, FieldCapabilitiesTests.randomFieldCaps(field));
}
responses.put(field, typesToCapabilities);
}
int numIndices = randomIntBetween(1, 10);
String[] indices = new String[numIndices];
for (int i = 0; i < numIndices; i++) {
indices[i] = randomAlphaOfLengthBetween(5, 10);
}
return FieldCapabilitiesResponse.builder().withIndices(indices).withFields(responses).build();
}
@Override
protected Writeable.Reader<FieldCapabilitiesResponse> instanceReader() {
return FieldCapabilitiesResponse::new;
}
@Override
protected FieldCapabilitiesResponse mutateInstance(FieldCapabilitiesResponse response) {
Map<String, Map<String, FieldCapabilities>> mutatedResponses = new HashMap<>(response.get());
int mutation = response.get().isEmpty() ? 0 : randomIntBetween(0, 2);
switch (mutation) {
case 0 -> {
String toAdd = randomAlphaOfLength(10);
mutatedResponses.put(
toAdd,
Collections.singletonMap(randomAlphaOfLength(10), FieldCapabilitiesTests.randomFieldCaps(toAdd))
);
}
case 1 -> {
String toRemove = randomFrom(mutatedResponses.keySet());
mutatedResponses.remove(toRemove);
}
case 2 -> {
String toReplace = randomFrom(mutatedResponses.keySet());
mutatedResponses.put(
toReplace,
Collections.singletonMap(randomAlphaOfLength(10), FieldCapabilitiesTests.randomFieldCaps(toReplace))
);
}
}
// TODO pass real list
return FieldCapabilitiesResponse.builder().withFields(mutatedResponses).build();
}
@Override
protected Predicate<String> getRandomFieldsExcludeFilter() {
// Disallow random fields from being inserted under the 'fields' key, as this
// map only contains field names, and also under 'fields.FIELD_NAME', as these
// maps only contain type names.
return field -> field.matches("fields(\\.\\w+)?");
}
public void testToXContent() throws IOException {
FieldCapabilitiesResponse response = createSimpleResponse();
assertEquals("""
{
"indices": [ "index1", "index2", "index3", "index4" ],
"fields": {
"rating": {
"keyword": {
"type": "keyword",
"metadata_field": false,
"searchable": false,
"aggregatable": true,
"time_series_dimension": true,
"indices": [ "index3", "index4" ],
"non_searchable_indices": [ "index4" ]
},
"long": {
"type": "long",
"metadata_field": false,
"searchable": true,
"aggregatable": false,
"time_series_metric": "counter",
"indices": [ "index1", "index2" ],
"non_aggregatable_indices": [ "index1" ],
"non_dimension_indices": [ "index4" ]
}
},
"title": {
"text": {
"type": "text",
"metadata_field": false,
"searchable": true,
"aggregatable": false
}
}
},
"failed_indices": 2,
"failures": [
{
"indices": [ "errorindex", "errorindex2" ],
"failure": {
"error": {
"root_cause": [ { "type": "illegal_argument_exception", "reason": "test" } ],
"type": "illegal_argument_exception",
"reason": "test"
}
}
}
]
}""".replaceAll("\\s+", ""), Strings.toString(response));
}
private static FieldCapabilitiesResponse createSimpleResponse() {
Map<String, FieldCapabilities> titleCapabilities = new HashMap<>();
titleCapabilities.put("text", new FieldCapabilitiesBuilder("title", "text").isAggregatable(false).build());
Map<String, FieldCapabilities> ratingCapabilities = new HashMap<>();
ratingCapabilities.put(
"long",
new FieldCapabilitiesBuilder("rating", "long").isAggregatable(false)
.metricType(TimeSeriesParams.MetricType.COUNTER)
.indices("index1", "index2")
.nonAggregatableIndices("index1")
.nonDimensionIndices("index4")
.build()
);
ratingCapabilities.put(
"keyword",
new FieldCapabilitiesBuilder("rating", "keyword").isSearchable(false)
.isDimension(true)
.indices("index3", "index4")
.nonSearchableIndices("index4")
.build()
);
Map<String, Map<String, FieldCapabilities>> responses = new HashMap<>();
responses.put("title", titleCapabilities);
responses.put("rating", ratingCapabilities);
List<FieldCapabilitiesFailure> failureMap = List.of(
new FieldCapabilitiesFailure(new String[] { "errorindex", "errorindex2" }, new IllegalArgumentException("test"))
);
return FieldCapabilitiesResponse.builder()
.withIndices(new String[] { "index1", "index2", "index3", "index4" })
.withFields(responses)
.withFailures(failureMap)
.build();
}
public void testChunking() {
AbstractChunkedSerializingTestCase.assertChunkCount(
FieldCapabilitiesResponseTests.createResponseWithFailures(),
instance -> instance.getFailures().isEmpty() ? 2 : (3 + instance.get().size() + instance.getFailures().size())
);
AbstractChunkedSerializingTestCase.assertChunkCount(createTestInstance(), instance -> 2 + instance.get().size());
}
}
| MergedFieldCapabilitiesResponseTests |
java | quarkusio__quarkus | extensions/smallrye-reactive-messaging-kafka/runtime/src/main/java/io/quarkus/smallrye/reactivemessaging/kafka/CheckpointEntityId.java | {
"start": 252,
"end": 1930
} | class ____ implements Serializable {
private static final long serialVersionUID = -259817999246156947L;
@Column(name = "consumer_group_id", insertable = false)
private String consumerGroupId;
private String topic;
private int partition;
public CheckpointEntityId() {
}
public CheckpointEntityId(String consumerGroupId, TopicPartition topicPartition) {
this.consumerGroupId = consumerGroupId;
this.topic = topicPartition.topic();
this.partition = topicPartition.partition();
}
public String getConsumerGroupId() {
return consumerGroupId;
}
public void setConsumerGroupId(String consumerGroupId) {
this.consumerGroupId = consumerGroupId;
}
public String getTopic() {
return topic;
}
public void setTopic(String topic) {
this.topic = topic;
}
public int getPartition() {
return partition;
}
public void setPartition(int partition) {
this.partition = partition;
}
@Override
public boolean equals(Object o) {
if (this == o)
return true;
if (!(o instanceof CheckpointEntityId))
return false;
CheckpointEntityId that = (CheckpointEntityId) o;
return partition == that.getPartition() && Objects.equals(consumerGroupId, that.getConsumerGroupId())
&& Objects.equals(topic, that.getTopic());
}
@Override
public int hashCode() {
return Objects.hash(consumerGroupId, topic, partition);
}
@Override
public String toString() {
return consumerGroupId + ':' + topic + ':' + partition;
}
}
| CheckpointEntityId |
java | google__guice | core/test/com/google/inject/ImplicitBindingTest.java | {
"start": 1071,
"end": 1326
} | class ____ extends TestCase {
public void testCircularDependency() throws CreationException {
Injector injector = Guice.createInjector();
Foo foo = injector.getInstance(Foo.class);
assertSame(foo, foo.bar.foo);
}
static | ImplicitBindingTest |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirEncryptionZoneOp.java | {
"start": 26538,
"end": 29246
} | class ____ {
final CryptoProtocolVersion protocolVersion;
final CipherSuite suite;
final String ezKeyName;
final KeyProviderCryptoExtension.EncryptedKeyVersion edek;
EncryptionKeyInfo(
CryptoProtocolVersion protocolVersion, CipherSuite suite,
String ezKeyName, KeyProviderCryptoExtension.EncryptedKeyVersion edek) {
this.protocolVersion = protocolVersion;
this.suite = suite;
this.ezKeyName = ezKeyName;
this.edek = edek;
}
}
/**
* Get the current key version name for the given EZ. This will first drain
* the provider's local cache, then generate a new edek.
* <p>
* The encryption key version of the newly generated edek will be used as
* the target key version of this re-encryption - meaning all edeks'
* keyVersion are compared with it, and only sent to the KMS for re-encryption
* when the version is different.
* <p>
* Note: KeyProvider has a getCurrentKey interface, but that is under
* a different ACL. HDFS should not try to operate on additional ACLs, but
* rather use the generate ACL it already has.
*/
static String getCurrentKeyVersion(final FSDirectory dir,
final FSPermissionChecker pc, final String zone) throws IOException {
assert dir.getProvider() != null;
assert !dir.hasReadLock();
final String keyName = FSDirEncryptionZoneOp.getKeyNameForZone(dir,
pc, zone);
if (keyName == null) {
throw new IOException(zone + " is not an encryption zone.");
}
// drain the local cache of the key provider.
// Do not invalidateCache on the server, since that's the responsibility
// when rolling the key version.
dir.getProvider().drain(keyName);
final EncryptedKeyVersion edek;
try {
edek = dir.getProvider().generateEncryptedKey(keyName);
} catch (GeneralSecurityException gse) {
throw new IOException(gse);
}
Preconditions.checkNotNull(edek);
return edek.getEncryptionKeyVersionName();
}
/**
* Resolve the zone to an inode, find the encryption zone info associated with
* that inode, and return the key name. Does not contact the KMS.
*/
static String getKeyNameForZone(final FSDirectory dir,
final FSPermissionChecker pc, final String zone) throws IOException {
assert dir.getProvider() != null;
final INodesInPath iip;
dir.getFSNamesystem().readLock(RwLockMode.FS);
try {
iip = dir.resolvePath(pc, zone, DirOp.READ);
dir.ezManager.checkEncryptionZoneRoot(iip.getLastINode(), zone);
return dir.ezManager.getKeyName(iip);
} finally {
dir.getFSNamesystem().readUnlock(RwLockMode.FS, "getKeyNameForZone");
}
}
}
| EncryptionKeyInfo |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/cache/QueryCacheIncompleteTest.java | {
"start": 1332,
"end": 2529
} | class ____ {
private Long adminId;
@BeforeAll
public void setUp(SessionFactoryScope scope) {
adminId = scope.fromTransaction(
session -> {
Admin admin = new Admin();
admin.setAge( 42 );
session.persist( admin );
return admin.getId();
}
);
}
@Test
void testQueryWithEmbeddableParameter(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
// load uninitialized proxy
session.getReference( Admin.class, adminId );
// load entity
var multiLoader = session.byMultipleIds( Admin.class );
multiLoader.with( CacheMode.NORMAL );
multiLoader.multiLoad( adminId );
// store in query cache
Admin admin = queryAdmin( session );
assertThat( admin.getAge() ).isEqualTo( 42 );
}
);
scope.inTransaction(
session -> {
// use query cache
Admin admin = queryAdmin( session );
assertThat( admin.getAge() ).isEqualTo( 42 );
}
);
}
private Admin queryAdmin(Session s) {
return s.createQuery( "from Admin", Admin.class ).setCacheable( true ).getSingleResult();
}
@Entity(name = "Admin")
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE)
public static | QueryCacheIncompleteTest |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/BridgeMethodResolverTests.java | {
"start": 17699,
"end": 18041
} | class ____<T, S> implements Dao<T, S> {
protected T object;
protected S otherObject;
protected AbstractDaoImpl(T object, S otherObject) {
this.object = object;
this.otherObject = otherObject;
}
// @Transactional(readOnly = true)
@Override
public S loadFromParent() {
return otherObject;
}
}
static | AbstractDaoImpl |
java | netty__netty | transport/src/main/java/io/netty/channel/DefaultSelectStrategy.java | {
"start": 740,
"end": 1102
} | class ____ implements SelectStrategy {
static final SelectStrategy INSTANCE = new DefaultSelectStrategy();
private DefaultSelectStrategy() { }
@Override
public int calculateStrategy(IntSupplier selectSupplier, boolean hasTasks) throws Exception {
return hasTasks ? selectSupplier.get() : SelectStrategy.SELECT;
}
}
| DefaultSelectStrategy |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/interceptors/SimpleBean.java | {
"start": 189,
"end": 706
} | class ____ {
private String val;
private Counter counter;
@Inject
SimpleBean(Counter counter) {
this.counter = counter;
}
@PostConstruct
void superCoolInit() {
val = "foo";
}
@Logging
@Simple
String foo(String anotherVal) {
return val;
}
String bar() {
return new StringBuilder(val).reverse().toString();
}
@Simple
void baz(Integer dummy) {
}
Counter getCounter() {
return counter;
}
}
| SimpleBean |
java | quarkusio__quarkus | extensions/smallrye-graphql-client/deployment/src/test/java/io/quarkus/smallrye/graphql/client/deployment/DynamicGraphQLClientInjectionWithQuarkusConfigTest.java | {
"start": 1106,
"end": 2793
} | class ____ {
static String url = "http://" + System.getProperty("quarkus.http.host", "localhost") + ":" +
System.getProperty("quarkus.http.test-port", "8081") + "/graphql";
@RegisterExtension
static QuarkusUnitTest test = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(TestingGraphQLApi.class, Person.class, PersonDto.class)
.addAsResource(new StringAsset("quarkus.smallrye-graphql-client.people.url=" + url + "\n" +
"quarkus.smallrye-graphql-client.people.header.My-Header=My-Value"),
"application.properties")
.addAsManifestResource(EmptyAsset.INSTANCE, "beans.xml"));
@Inject
@GraphQLClient("people")
DynamicGraphQLClient client;
@Test
public void checkInjectedClient() {
Document query = document(
Operation.operation("PeopleQuery", field("people", field("firstName"), field("lastName"))));
List<Person> people = client.executeAsync(query)
.await().atMost(Duration.ofSeconds(30)).getList(Person.class, "people");
assertEquals("John", people.get(0).getFirstName());
assertEquals("Arthur", people.get(1).getFirstName());
}
@Test
public void checkHeaders() throws ExecutionException, InterruptedException {
Document query = document(
Operation.operation(field("returnHeader", arg("key", "My-Header"))));
String header = client.executeSync(query).getData().getString("returnHeader");
assertEquals("My-Value", header);
}
}
| DynamicGraphQLClientInjectionWithQuarkusConfigTest |
java | apache__flink | flink-core/src/main/java/org/apache/flink/configuration/CheckpointingOptions.java | {
"start": 4076,
"end": 47187
} | class ____ of a %s. "
+ "If a factory is specified it is instantiated via its "
+ "zero argument constructor and its %s "
+ " method is called.",
TextElement.code("CheckpointStorageFactory"),
TextElement.code(
"CheckpointStorageFactory#createFromConfig(ReadableConfig, ClassLoader)"))
.linebreak()
.text(
"Recognized shortcut names are 'jobmanager' and 'filesystem'.")
.linebreak()
.text(
"'execution.checkpointing.storage' and 'execution.checkpointing.dir' are usually combined to configure the checkpoint location."
+ " By default, the checkpoint meta data and actual program state will be stored in the JobManager's memory directly."
+ " When 'execution.checkpointing.storage' is set to 'jobmanager', if 'execution.checkpointing.dir' is configured,"
+ " the meta data of checkpoints will be persisted to the path specified by 'execution.checkpointing.dir'."
+ " Otherwise, the meta data will be stored in the JobManager's memory."
+ " When 'execution.checkpointing.storage' is set to 'filesystem', a valid path must be configured to 'execution.checkpointing.dir',"
+ " and the checkpoint meta data and actual program state will both be persisted to the path.")
.build());
/** The maximum number of completed checkpoints to retain. */
@Documentation.Section(Documentation.Sections.COMMON_CHECKPOINTING)
public static final ConfigOption<Integer> MAX_RETAINED_CHECKPOINTS =
ConfigOptions.key("execution.checkpointing.num-retained")
.intType()
.defaultValue(1)
.withDeprecatedKeys("state.checkpoints.num-retained")
.withDescription("The maximum number of completed checkpoints to retain.");
/* Option whether to clean individual checkpoint's operatorstates in parallel. If enabled,
* operator states are discarded in parallel using the ExecutorService passed to the cleaner.
* This speeds up checkpoints cleaning, but adds load to the IO.
*/
@Documentation.Section(Documentation.Sections.COMMON_CHECKPOINTING)
public static final ConfigOption<Boolean> CLEANER_PARALLEL_MODE =
ConfigOptions.key("execution.checkpointing.cleaner.parallel-mode")
.booleanType()
.defaultValue(true)
.withDeprecatedKeys("state.checkpoint.cleaner.parallel-mode")
.withDescription(
"Option whether to discard a checkpoint's states in parallel using"
+ " the ExecutorService passed into the cleaner");
/**
* Option whether to create incremental checkpoints, if possible. For an incremental checkpoint,
* only a diff from the previous checkpoint is stored, rather than the complete checkpoint
* state.
*
* <p>Once enabled, the state size shown in web UI or fetched from rest API only represents the
* delta checkpoint size instead of full checkpoint size.
*
* <p>Some state backends may not support incremental checkpoints and ignore this option.
*/
@Documentation.Section(Documentation.Sections.COMMON_CHECKPOINTING)
public static final ConfigOption<Boolean> INCREMENTAL_CHECKPOINTS =
ConfigOptions.key("execution.checkpointing.incremental")
.booleanType()
.defaultValue(false)
.withDeprecatedKeys("state.backend.incremental")
.withDescription(
"Option whether to create incremental checkpoints, if possible. For"
+ " an incremental checkpoint, only a diff from the previous checkpoint is stored, rather than the"
+ " complete checkpoint state. Once enabled, the state size shown in web UI or fetched from rest API"
+ " only represents the delta checkpoint size instead of full checkpoint size."
+ " Some state backends may not support incremental checkpoints and ignore this option.");
/**
* The config parameter defining the root directories for storing file-based state for local
* recovery.
*
* <p>Local recovery currently only covers keyed state backends. Currently, MemoryStateBackend
* does not support local recovery and ignore this option.
*/
@Documentation.Section(Documentation.Sections.COMMON_CHECKPOINTING)
public static final ConfigOption<String> LOCAL_RECOVERY_TASK_MANAGER_STATE_ROOT_DIRS =
ConfigOptions.key("execution.checkpointing.local-backup.dirs")
.stringType()
.noDefaultValue()
.withDeprecatedKeys("taskmanager.state.local.root-dirs")
.withDescription(
Description.builder()
.text(
"The config parameter defining the root directories for storing file-based "
+ "state for local recovery. Local recovery currently only covers keyed "
+ "state backends. If not configured it will default to <WORKING_DIR>/localState. "
+ "The <WORKING_DIR> can be configured via %s",
TextElement.code(
ClusterOptions
.TASK_MANAGER_PROCESS_WORKING_DIR_BASE
.key()))
.build());
// ------------------------------------------------------------------------
// Options specific to the file-system-based state backends
// ------------------------------------------------------------------------
/**
* The default directory for savepoints. Used by the state backends that write savepoints to
* file systems (HashMapStateBackend, EmbeddedRocksDBStateBackend).
*/
@Documentation.Section(value = Documentation.Sections.COMMON_CHECKPOINTING, position = 4)
public static final ConfigOption<String> SAVEPOINT_DIRECTORY =
ConfigOptions.key("execution.checkpointing.savepoint-dir")
.stringType()
.noDefaultValue()
.withDeprecatedKeys("state.savepoints.dir", "savepoints.state.backend.fs.dir")
.withDescription(
"The default directory for savepoints. Used by the state backends that write savepoints to"
+ " file systems (HashMapStateBackend, EmbeddedRocksDBStateBackend).");
/**
* The default directory used for storing the data files and meta data of checkpoints in a Flink
* supported filesystem. The storage path must be accessible from all participating
* processes/nodes(i.e. all TaskManagers and JobManagers). If {@link #CHECKPOINT_STORAGE} is set
* to 'jobmanager', only the meta data of checkpoints will be stored in this directory.
*/
@Documentation.Section(value = Documentation.Sections.COMMON_CHECKPOINTING, position = 3)
public static final ConfigOption<String> CHECKPOINTS_DIRECTORY =
ConfigOptions.key("execution.checkpointing.dir")
.stringType()
.noDefaultValue()
.withDeprecatedKeys("state.checkpoints.dir", "state.backend.fs.checkpointdir")
.withDescription(
"The default directory used for storing the data files and meta data of checkpoints "
+ "in a Flink supported filesystem. The storage path must be accessible from all participating processes/nodes"
+ "(i.e. all TaskManagers and JobManagers). If the '"
+ CHECKPOINT_STORAGE.key()
+ "' is set to 'jobmanager', only the meta data of checkpoints will be stored in this directory.");
    /**
     * Whether to create sub-directories named by job id to store the data files and meta data of
     * checkpoints. The default value is true to enable user could run several jobs with the same
     * checkpoint directory at the same time. If this value is set to false, pay attention not to
     * run several jobs with the same directory simultaneously.
     */
    @Documentation.Section(Documentation.Sections.EXPERT_CHECKPOINTING)
    public static final ConfigOption<Boolean> CREATE_CHECKPOINT_SUB_DIR =
            ConfigOptions.key("execution.checkpointing.create-subdir")
                    .booleanType()
                    .defaultValue(true)
                    .withDeprecatedKeys("state.checkpoints.create-subdir")
                    .withDescription(
                            Description.builder()
                                    .text(
                                            "Whether to create sub-directories named by job id under the '%s' to store the data files and meta data "
                                                    + "of checkpoints. The default value is true to enable user could run several jobs with the same "
                                                    + "checkpoint directory at the same time. If this value is set to false, pay attention not to "
                                                    + "run several jobs with the same directory simultaneously. ",
                                            TextElement.code(CHECKPOINTS_DIRECTORY.key()))
                                    .linebreak()
                                    .text(
                                            "WARNING: This is an advanced configuration. If set to false, users must ensure that no multiple jobs are run "
                                                    + "with the same checkpoint directory, and that no files exist other than those necessary for the "
                                                    + "restoration of the current job when starting a new job.")
                                    .build());
    /**
     * The minimum size of state data files. All state chunks smaller than that are stored inline in
     * the root checkpoint metadata file.
     */
    @Documentation.Section(Documentation.Sections.EXPERT_CHECKPOINTING)
    public static final ConfigOption<MemorySize> FS_SMALL_FILE_THRESHOLD =
            ConfigOptions.key("execution.checkpointing.data-inline-threshold")
                    .memoryType()
                    // Per the description below, values are capped at 1MB.
                    .defaultValue(MemorySize.parse("20kb"))
                    .withDescription(
                            "The minimum size of state data files. All state chunks smaller than that are stored"
                                    + " inline in the root checkpoint metadata file. The max memory threshold for this configuration is 1MB.")
                    .withDeprecatedKeys(
                            "state.storage.fs.memory-threshold",
                            "state.backend.fs.memory-threshold");
    /**
     * The default size of the write buffer for the checkpoint streams that write to file systems.
     */
    @Documentation.Section(Documentation.Sections.EXPERT_CHECKPOINTING)
    public static final ConfigOption<Integer> FS_WRITE_BUFFER_SIZE =
            ConfigOptions.key("execution.checkpointing.write-buffer-size")
                    .intType()
                    .defaultValue(4 * 1024)
                    .withDescription(
                            String.format(
                                    "The default size of the write buffer for the checkpoint streams that write to file systems. "
                                            + "The actual write buffer size is determined to be the maximum of the value of this option and option '%s'.",
                                    FS_SMALL_FILE_THRESHOLD.key()))
                    .withDeprecatedKeys(
                            "state.storage.fs.write-buffer-size",
                            "state.backend.fs.write-buffer-size");
    /**
     * This option configures local backup for the state backend, which indicates whether to make
     * backup checkpoint on local disk. If not configured, fallback to {@link
     * StateRecoveryOptions#LOCAL_RECOVERY}. By default, local backup is deactivated. Local backup
     * currently only covers keyed state backends (including both the EmbeddedRocksDBStateBackend
     * and the HashMapStateBackend).
     */
    @Documentation.Section(value = Documentation.Sections.COMMON_CHECKPOINTING)
    public static final ConfigOption<Boolean> LOCAL_BACKUP_ENABLED =
            ConfigOptions.key("execution.checkpointing.local-backup.enabled")
                    .booleanType()
                    // Default and fallback mirror the legacy local-recovery setting.
                    .defaultValue(StateRecoveryOptions.LOCAL_RECOVERY.defaultValue())
                    .withFallbackKeys(StateRecoveryOptions.LOCAL_RECOVERY.key())
                    .withDeprecatedKeys("state.backend.local-recovery")
                    .withDescription(
                            "This option configures local backup for the state backend, "
                                    + "which indicates whether to make backup checkpoint on local disk. "
                                    + "If not configured, fallback to "
                                    + StateRecoveryOptions.LOCAL_RECOVERY.key()
                                    + ". By default, local backup is deactivated. Local backup currently only "
                                    + "covers keyed state backends (including both the EmbeddedRocksDBStateBackend and the HashMapStateBackend).");
// ------------------------------------------------------------------------
// Options related to file merging
// ------------------------------------------------------------------------
    /**
     * Whether to enable merging multiple checkpoint files into one, which will greatly reduce the
     * number of small checkpoint files. See FLIP-306 for details.
     *
     * <p>Note: This is an experimental feature under evaluation, make sure you're aware of the
     * possible effects of enabling it.
     */
    @Experimental
    @Documentation.Section(value = Documentation.Sections.CHECKPOINT_FILE_MERGING, position = 1)
    public static final ConfigOption<Boolean> FILE_MERGING_ENABLED =
            ConfigOptions.key("execution.checkpointing.file-merging.enabled")
                    .booleanType()
                    .defaultValue(false)
                    .withDescription(
                            "Whether to enable merging multiple checkpoint files into one, which will greatly reduce"
                                    + " the number of small checkpoint files. This is an experimental feature under evaluation, "
                                    + "make sure you're aware of the possible effects of enabling it.");
    /**
     * Whether to allow merging data of multiple checkpoints into one physical file. If this option
     * is set to false, only merge files within checkpoint boundaries. Otherwise, it is possible for
     * the logical files of different checkpoints to share the same physical file.
     */
    @Experimental
    @Documentation.Section(value = Documentation.Sections.CHECKPOINT_FILE_MERGING, position = 2)
    public static final ConfigOption<Boolean> FILE_MERGING_ACROSS_BOUNDARY =
            ConfigOptions.key("execution.checkpointing.file-merging.across-checkpoint-boundary")
                    .booleanType()
                    .defaultValue(false)
                    .withDescription(
                            Description.builder()
                                    // Only effective when FILE_MERGING_ENABLED is set.
                                    .text(
                                            "Only relevant if %s is enabled.",
                                            TextElement.code(FILE_MERGING_ENABLED.key()))
                                    .linebreak()
                                    .text(
                                            "Whether to allow merging data of multiple checkpoints into one physical file. "
                                                    + "If this option is set to false, "
                                                    + "only merge files within checkpoint boundaries. "
                                                    + "Otherwise, it is possible for the logical files of different "
                                                    + "checkpoints to share the same physical file.")
                                    .build());
    /** The max size of a physical file for merged checkpoints. */
    @Experimental
    @Documentation.Section(value = Documentation.Sections.CHECKPOINT_FILE_MERGING, position = 3)
    public static final ConfigOption<MemorySize> FILE_MERGING_MAX_FILE_SIZE =
            ConfigOptions.key("execution.checkpointing.file-merging.max-file-size")
                    .memoryType()
                    .defaultValue(MemorySize.parse("32MB"))
                    .withDescription("Max size of a physical file for merged checkpoints.");
/**
* Whether to use Blocking or Non-Blocking pool for merging physical files. A Non-Blocking pool
* will always provide usable physical file without blocking. It may create many physical files
* if poll file frequently. When poll a small file from a Blocking pool, it may be blocked until
* the file is returned.
*/
@Experimental
@Documentation.Section(value = Documentation.Sections.CHECKPOINT_FILE_MERGING, position = 4)
public static final ConfigOption<Boolean> FILE_MERGING_POOL_BLOCKING =
ConfigOptions.key("execution.checkpointing.file-merging.pool-blocking")
.booleanType()
.defaultValue(false)
.withDescription(
"Whether to use Blocking or Non-Blocking pool for merging physical files. "
+ "A Non-Blocking pool will always provide usable physical file without blocking. It may create many physical files if poll file frequently. "
+ "When poll a small file from a Blocking pool, it may be blocked until the file is returned.");
/**
* The upper limit of the file pool size based on the number of subtasks within each TM (only
* for merging private state at Task Manager level).
*
* <p>TODO: remove '@Documentation.ExcludeFromDocumentation' after the feature is implemented.
*/
@Experimental @Documentation.ExcludeFromDocumentation
public static final ConfigOption<Integer> FILE_MERGING_MAX_SUBTASKS_PER_FILE =
ConfigOptions.key("execution.checkpointing.file-merging.max-subtasks-per-file")
.intType()
.defaultValue(4)
.withDescription(
"The upper limit of the file pool size based on the number of subtasks within each TM"
+ "(only for merging private state at Task Manager level).");
    /**
     * Space amplification stands for the magnification of the occupied space compared to the amount
     * of valid data. The more space amplification is, the more waste of space will be. This configs
     * a space amplification above which a re-uploading for physical files will be triggered to
     * reclaim space.
     */
    @Experimental
    @Documentation.Section(value = Documentation.Sections.CHECKPOINT_FILE_MERGING, position = 6)
    public static final ConfigOption<Float> FILE_MERGING_MAX_SPACE_AMPLIFICATION =
            ConfigOptions.key("execution.checkpointing.file-merging.max-space-amplification")
                    .floatType()
                    // Per the description, any value below 1f disables the space control.
                    .defaultValue(2f)
                    .withDescription(
                            "Space amplification stands for the magnification of the occupied space compared to the amount of valid data. "
                                    + "The more space amplification is, the more waste of space will be. This configs a space amplification "
                                    + "above which a re-uploading for physical files will be triggered to reclaim space. Any value below 1f "
                                    + "means disabling the space control.")
;
    /**
     * The checkpointing mode (exactly-once vs. at-least-once).
     *
     * <p><strong>Note:</strong> Instead of accessing this configuration option directly with {@code
     * config.get(CHECKPOINTING_CONSISTENCY_MODE)}, use {@link #getCheckpointingMode(Configuration)}
     * which handles the case when checkpointing is disabled and provides the appropriate default
     * behavior.
     */
    public static final ConfigOption<CheckpointingMode> CHECKPOINTING_CONSISTENCY_MODE =
            ConfigOptions.key("execution.checkpointing.mode")
                    .enumType(CheckpointingMode.class)
                    .defaultValue(CheckpointingMode.EXACTLY_ONCE)
                    .withDescription("The checkpointing mode (exactly-once vs. at-least-once).");
    /** The maximum time that a checkpoint may take before being discarded. */
    public static final ConfigOption<Duration> CHECKPOINTING_TIMEOUT =
            ConfigOptions.key("execution.checkpointing.timeout")
                    .durationType()
                    .defaultValue(Duration.ofMinutes(10))
                    .withDescription(
                            "The maximum time that a checkpoint may take before being discarded.");
    /**
     * The maximum number of checkpoint attempts that may be in progress at the same time; no new
     * checkpoint is triggered while that many attempts are still in flight.
     */
    public static final ConfigOption<Integer> MAX_CONCURRENT_CHECKPOINTS =
            ConfigOptions.key("execution.checkpointing.max-concurrent-checkpoints")
                    .intType()
                    .defaultValue(1)
                    .withDescription(
                            "The maximum number of checkpoint attempts that may be in progress at the same time. If "
                                    + "this value is n, then no checkpoints will be triggered while n checkpoint attempts are currently in "
                                    + "flight. For the next checkpoint to be triggered, one checkpoint attempt would need to finish or "
                                    + "expire.");
public static final ConfigOption<Duration> MIN_PAUSE_BETWEEN_CHECKPOINTS =
ConfigOptions.key("execution.checkpointing.min-pause")
.durationType()
.defaultValue(Duration.ZERO)
.withDescription(
Description.builder()
.text(
"The minimal pause between checkpointing attempts. This setting defines how soon the"
+ "checkpoint coordinator may trigger another checkpoint after it becomes possible to trigger"
+ "another checkpoint with respect to the maximum number of concurrent checkpoints"
+ "(see %s).",
TextElement.code(MAX_CONCURRENT_CHECKPOINTS.key()))
.linebreak()
.linebreak()
.text(
"If the maximum number of concurrent checkpoints is set to one, this setting makes effectively "
+ "sure that a minimum amount of time passes where no checkpoint is in progress at all.")
.build());
public static final ConfigOption<Integer> TOLERABLE_FAILURE_NUMBER =
ConfigOptions.key("execution.checkpointing.tolerable-failed-checkpoints")
.intType()
.defaultValue(0)
.withDescription(
"The tolerable checkpoint consecutive failure number. If set to 0, that means "
+ "we do not tolerance any checkpoint failure. This only applies to the following failure reasons: IOException on the "
+ "Job Manager, failures in the async phase on the Task Managers and checkpoint expiration due to a timeout. Failures "
+ "originating from the sync phase on the Task Managers are always forcing failover of an affected task. Other types of "
+ "checkpoint failures (such as checkpoint being subsumed) are being ignored.");
    /**
     * How externalized checkpoint meta data is cleaned up when the job reaches a terminal state.
     * Defaults to not externalizing checkpoints at all; see the description for the semantics of
     * each retention mode.
     */
    public static final ConfigOption<ExternalizedCheckpointRetention>
            EXTERNALIZED_CHECKPOINT_RETENTION =
                    ConfigOptions.key("execution.checkpointing.externalized-checkpoint-retention")
                            .enumType(ExternalizedCheckpointRetention.class)
                            .defaultValue(
                                    ExternalizedCheckpointRetention.NO_EXTERNALIZED_CHECKPOINTS)
                            .withDescription(
                                    Description.builder()
                                            .text(
                                                    "Externalized checkpoints write their meta data out to persistent storage and are not "
                                                            + "automatically cleaned up when the owning job fails or is suspended (terminating with job "
                                                            + "status %s or %s). In this case, you have to manually clean up the checkpoint state, both the "
                                                            + "meta data and actual program state.",
                                                    TextElement.code("JobStatus#FAILED"),
                                                    TextElement.code("JobStatus#SUSPENDED"))
                                            .linebreak()
                                            .linebreak()
                                            .text(
                                                    "The mode defines how an externalized checkpoint should be cleaned up on job cancellation. If "
                                                            + "you choose to retain externalized checkpoints on cancellation you have to handle checkpoint "
                                                            + "clean up manually when you cancel the job as well (terminating with job status %s).",
                                                    TextElement.code("JobStatus#CANCELED"))
                                            .linebreak()
                                            .linebreak()
                                            .text(
                                                    "The target directory for externalized checkpoints is configured via %s.",
                                                    TextElement.code(
                                                            CheckpointingOptions
                                                                    .CHECKPOINTS_DIRECTORY
                                                                    .key()))
                                            .build());
    /**
     * The checkpoint interval used while any source reports {@code isProcessingBacklog=true}. If
     * set, it must be 0 (checkpointing disabled during backlog) or at least the regular interval.
     */
    public static final ConfigOption<Duration> CHECKPOINTING_INTERVAL_DURING_BACKLOG =
            ConfigOptions.key("execution.checkpointing.interval-during-backlog")
                    .durationType()
                    .noDefaultValue()
                    .withDescription(
                            Description.builder()
                                    .text(
                                            "If it is not null and any source reports isProcessingBacklog=true, "
                                                    + "it is the interval in which checkpoints are periodically scheduled.")
                                    .linebreak()
                                    .linebreak()
                                    .text(
                                            "Checkpoint triggering may be delayed by the settings %s and %s.",
                                            TextElement.code(MAX_CONCURRENT_CHECKPOINTS.key()),
                                            TextElement.code(MIN_PAUSE_BETWEEN_CHECKPOINTS.key()))
                                    .linebreak()
                                    .linebreak()
                                    .text(
                                            "Note: if it is not null, the value must either be 0, "
                                                    + "which means the checkpoint is disabled during backlog, "
                                                    + "or be larger than or equal to execution.checkpointing.interval.")
                                    .build());
    /**
     * The base interval in which checkpoints are periodically scheduled. No default: checkpointing
     * is considered disabled unless this is set (see {@code isCheckpointingEnabled}).
     */
    public static final ConfigOption<Duration> CHECKPOINTING_INTERVAL =
            ConfigOptions.key("execution.checkpointing.interval")
                    .durationType()
                    .noDefaultValue()
                    .withDescription(
                            Description.builder()
                                    .text(
                                            "Gets the interval in which checkpoints are periodically scheduled.")
                                    .linebreak()
                                    .linebreak()
                                    .text(
                                            "This setting defines the base interval. Checkpoint triggering may be delayed by the settings "
                                                    + "%s, %s and %s",
                                            TextElement.code(MAX_CONCURRENT_CHECKPOINTS.key()),
                                            TextElement.code(MIN_PAUSE_BETWEEN_CHECKPOINTS.key()),
                                            TextElement.code(
                                                    CHECKPOINTING_INTERVAL_DURING_BACKLOG.key()))
                                    .build());
    /**
     * Enables unaligned checkpoints, which greatly reduce checkpointing times under backpressure.
     *
     * <p><strong>Note:</strong> Instead of accessing this configuration option directly with {@code
     * config.get(ENABLE_UNALIGNED)}, use {@link #isUnalignedCheckpointEnabled(Configuration)} which
     * validates that the checkpointing mode is EXACTLY_ONCE before checking this setting. Unaligned
     * checkpoints are only supported with exactly-once semantics.
     */
    public static final ConfigOption<Boolean> ENABLE_UNALIGNED =
            ConfigOptions.key("execution.checkpointing.unaligned.enabled")
                    .booleanType()
                    .defaultValue(false)
                    .withDeprecatedKeys("execution.checkpointing.unaligned")
                    .withDescription(
                            Description.builder()
                                    .text(
                                            "Enables unaligned checkpoints, which greatly reduce checkpointing times under backpressure.")
                                    .linebreak()
                                    .linebreak()
                                    .text(
                                            "Unaligned checkpoints contain data stored in buffers as part of the checkpoint state, which "
                                                    + "allows checkpoint barriers to overtake these buffers. Thus, the checkpoint duration becomes "
                                                    + "independent of the current throughput as checkpoint barriers are effectively not embedded into "
                                                    + "the stream of data anymore.")
                                    .linebreak()
                                    .linebreak()
                                    .text(
                                            "Unaligned checkpoints can only be enabled if %s is %s and if %s is 1",
                                            TextElement.code(CHECKPOINTING_CONSISTENCY_MODE.key()),
                                            TextElement.code(
                                                    CheckpointingMode.EXACTLY_ONCE.toString()),
                                            TextElement.code(MAX_CONCURRENT_CHECKPOINTS.key()))
                                    .build());
    /**
     * Timeout after which an aligned checkpoint switches to unaligned handling. Only relevant when
     * {@link #ENABLE_UNALIGNED} is set; 0 means checkpoints always start unaligned.
     */
    public static final ConfigOption<Duration> ALIGNED_CHECKPOINT_TIMEOUT =
            ConfigOptions.key("execution.checkpointing.aligned-checkpoint-timeout")
                    .durationType()
                    .defaultValue(Duration.ofSeconds(0L))
                    .withDeprecatedKeys("execution.checkpointing.alignment-timeout")
                    .withDescription(
                            Description.builder()
                                    .text(
                                            "Only relevant if %s is enabled.",
                                            TextElement.code(ENABLE_UNALIGNED.key()))
                                    .linebreak()
                                    .linebreak()
                                    .text(
                                            "If timeout is 0, checkpoints will always start unaligned.")
                                    .linebreak()
                                    .linebreak()
                                    .text(
                                            "If timeout has a positive value, checkpoints will start aligned. "
                                                    + "If during checkpointing, checkpoint start delay exceeds this timeout, alignment "
                                                    + "will timeout and checkpoint barrier will start working as unaligned checkpoint.")
                                    .build());
    /** Forces unaligned checkpoints, particularly allowing them for iterative jobs. */
    public static final ConfigOption<Boolean> FORCE_UNALIGNED =
            ConfigOptions.key("execution.checkpointing.unaligned.forced")
                    .booleanType()
                    .defaultValue(false)
                    .withDescription(
                            Description.builder()
                                    .text(
                                            "Forces unaligned checkpoints, particularly allowing them for iterative jobs.")
                                    .build());
    /**
     * Allows unaligned checkpoints to skip timers that are currently being fired.
     *
     * <p><strong>Note:</strong> Instead of accessing this configuration option directly with {@code
     * config.get(ENABLE_UNALIGNED_INTERRUPTIBLE_TIMERS)}, use {@link
     * #isUnalignedCheckpointInterruptibleTimersEnabled(Configuration)} which validates that
     * unaligned checkpoints are enabled before checking this setting.
     */
    @Experimental
    public static final ConfigOption<Boolean> ENABLE_UNALIGNED_INTERRUPTIBLE_TIMERS =
            ConfigOptions.key("execution.checkpointing.unaligned.interruptible-timers.enabled")
                    .booleanType()
                    .defaultValue(false)
                    .withDescription(
                            "Allows unaligned checkpoints to skip timers that are currently being fired."
                                    + " For this feature to be enabled, it must be also supported by the operator."
                                    + " Currently this is supported by all TableStreamOperators and CepOperator.");
    /**
     * Feature toggle for enabling checkpointing even if some of the tasks have finished; enabled by
     * default.
     */
    public static final ConfigOption<Boolean> ENABLE_CHECKPOINTS_AFTER_TASKS_FINISH =
            ConfigOptions.key("execution.checkpointing.checkpoints-after-tasks-finish")
                    .booleanType()
                    .defaultValue(true)
                    .withDeprecatedKeys(
                            "execution.checkpointing.checkpoints-after-tasks-finish.enabled")
                    .withDescription(
                            Description.builder()
                                    .text(
                                            "Feature toggle for enabling checkpointing even if some of tasks"
                                                    + " have finished. Before you enable it, please take a look at %s ",
                                            link(
                                                    "{{.Site.BaseURL}}{{.Site.LanguagePrefix}}/docs/dev/datastream/fault-tolerance/checkpointing/#checkpointing-with-parts-of-the-graph-finished",
                                                    "the important considerations"))
                                    .build());
// TODO: deprecated
// Currently, both two file merging mechanism can work simultaneously:
// 1. If UNALIGNED_MAX_SUBTASKS_PER_CHANNEL_STATE_FILE=1 and
// execution.checkpointing.file-merging.enabled: true, only the unified file merging mechanism
// takes
// effect.
// 2. if UNALIGNED_MAX_SUBTASKS_PER_CHANNEL_STATE_FILE>1 and
// execution.checkpointing.file-merging.enabled: false, only the current mechanism takes effect.
// 3. if UNALIGNED_MAX_SUBTASKS_PER_CHANNEL_STATE_FILE>1 and
// execution.checkpointing.file-merging.enabled: true, both two mechanism take effect.
public static final ConfigOption<Integer> UNALIGNED_MAX_SUBTASKS_PER_CHANNEL_STATE_FILE =
key("execution.checkpointing.unaligned.max-subtasks-per-channel-state-file")
.intType()
.defaultValue(5)
.withDescription(
"Defines the maximum number of subtasks that share the same channel state file. "
+ "It can reduce the number of small files when enable unaligned checkpoint. "
+ "Each subtask will create a new channel state file when this is configured to 1.");
/**
* Determines whether checkpointing is enabled based on the configuration.
*
* <p>Checkpointing is considered enabled if a valid checkpointing interval is configured (i.e.,
* the interval value is greater than 0 milliseconds). If no checkpointing interval is specified
* or if the interval is 0 or negative, checkpointing is considered disabled.
*
* @param config the configuration to check
* @return {@code true} if checkpointing is enabled, {@code false} otherwise
*/
@Internal
public static boolean isCheckpointingEnabled(Configuration config) {
if (config.get(ExecutionOptions.RUNTIME_MODE) == RuntimeExecutionMode.BATCH) {
return false;
}
return config.getOptional(CheckpointingOptions.CHECKPOINTING_INTERVAL)
.map(Duration::toMillis)
.map(interval -> interval > 0)
.orElse(false);
}
/**
* Gets the checkpointing mode from the configuration.
*
* <p>If checkpointing is enabled, this method returns the configured consistency mode ({@link
* CheckpointingMode#EXACTLY_ONCE} or {@link CheckpointingMode#AT_LEAST_ONCE}). If checkpointing
* is disabled, it returns {@link CheckpointingMode#AT_LEAST_ONCE} as the default mode since the
* "at-least-once" input handler is slightly more efficient when checkpoints are not being
* performed.
*
* @param config the configuration to check
* @return the checkpointing mode based on the configuration
* @see #isCheckpointingEnabled(Configuration)
*/
@Internal
public static CheckpointingMode getCheckpointingMode(Configuration config) {
if (isCheckpointingEnabled(config)) {
return config.get(CHECKPOINTING_CONSISTENCY_MODE);
} else {
// the "at-least-once" input handler is slightly cheaper (in the absence of
// checkpoints), so we use that one if checkpointing is not enabled
return CheckpointingMode.AT_LEAST_ONCE;
}
}
/**
* Determines whether unaligned checkpoints are enabled based on the configuration.
*
* <p>Unaligned checkpoints can only be enabled when the checkpointing mode is set to {@link
* CheckpointingMode#EXACTLY_ONCE}. If the mode is {@link CheckpointingMode#AT_LEAST_ONCE},
* unaligned checkpoints are not supported and this method will return {@code false}.
*
* <p>When the checkpointing mode is exactly-once, this method returns the value of the {@link
* #ENABLE_UNALIGNED} configuration option.
*
* @param config the configuration to check
* @return {@code true} if unaligned checkpoints are enabled and supported, {@code false}
* otherwise
* @see #getCheckpointingMode(Configuration)
*/
@Internal
public static boolean isUnalignedCheckpointEnabled(Configuration config) {
if (getCheckpointingMode(config) != CheckpointingMode.EXACTLY_ONCE) {
return false;
}
return config.get(ENABLE_UNALIGNED);
}
/**
* Determines whether unaligned checkpoints with interruptible timers are enabled based on the
* configuration.
*
* <p>Unaligned checkpoints with interruptible timers can only be enabled when:
*
* <ol>
* <li>Unaligned checkpoints are enabled (see {@link
* #isUnalignedCheckpointEnabled(Configuration)})
* <li>The {@link #ENABLE_UNALIGNED_INTERRUPTIBLE_TIMERS} option is set to {@code true}
* </ol>
*
* <p>If unaligned checkpoints are not enabled, this method will return {@code false} regardless
* of the interruptible timers setting.
*
* @param config the configuration to check
* @return {@code true} if unaligned checkpoints with interruptible timers are enabled, {@code
* false} otherwise
* @see #isUnalignedCheckpointEnabled(Configuration)
*/
@Internal
public static boolean isUnalignedCheckpointInterruptibleTimersEnabled(Configuration config) {
if (!isUnalignedCheckpointEnabled(config)) {
return false;
}
return config.get(ENABLE_UNALIGNED_INTERRUPTIBLE_TIMERS);
}
}
| name |
java | google__dagger | javatests/dagger/hilt/android/ActivityInjectedSavedStateViewModelTest.java | {
"start": 3301,
"end": 3626
} | class ____ extends FragmentActivity {
@Inject Object someObject;
@Override
protected void onCreate(Bundle savedInstanceState) {
assertThat(someObject).isNull(); // not yet injected
super.onCreate(savedInstanceState);
}
}
@Module
@InstallIn(ActivityComponent.class)
static final | SuperActivity |
java | apache__spark | common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/FinalizeShuffleMerge.java | {
"start": 1083,
"end": 2867
} | class ____ extends BlockTransferMessage {
public final String appId;
public final int appAttemptId;
public final int shuffleId;
public final int shuffleMergeId;
public FinalizeShuffleMerge(
String appId,
int appAttemptId,
int shuffleId,
int shuffleMergeId) {
this.appId = appId;
this.appAttemptId = appAttemptId;
this.shuffleId = shuffleId;
this.shuffleMergeId = shuffleMergeId;
}
@Override
protected BlockTransferMessage.Type type() {
return Type.FINALIZE_SHUFFLE_MERGE;
}
@Override
public int hashCode() {
return Objects.hash(appId, appAttemptId, shuffleId, shuffleMergeId);
}
@Override
public String toString() {
return "FinalizeShuffleMerge[appId=" + appId + ",attemptId=" + appAttemptId +
",shuffleId=" + shuffleId + ",shuffleMergeId=" + shuffleMergeId + "]";
}
@Override
public boolean equals(Object other) {
if (other instanceof FinalizeShuffleMerge o) {
return Objects.equals(appId, o.appId)
&& appAttemptId == o.appAttemptId
&& shuffleId == o.shuffleId
&& shuffleMergeId == o.shuffleMergeId;
}
return false;
}
@Override
public int encodedLength() {
return Encoders.Strings.encodedLength(appId) + 4 + 4 + 4;
}
@Override
public void encode(ByteBuf buf) {
Encoders.Strings.encode(buf, appId);
buf.writeInt(appAttemptId);
buf.writeInt(shuffleId);
buf.writeInt(shuffleMergeId);
}
public static FinalizeShuffleMerge decode(ByteBuf buf) {
String appId = Encoders.Strings.decode(buf);
int attemptId = buf.readInt();
int shuffleId = buf.readInt();
int shuffleMergeId = buf.readInt();
return new FinalizeShuffleMerge(appId, attemptId, shuffleId, shuffleMergeId);
}
}
| FinalizeShuffleMerge |
java | apache__camel | dsl/camel-yaml-dsl/camel-yaml-dsl-deserializers/src/generated/java/org/apache/camel/dsl/yaml/deserializers/ModelDeserializers.java | {
"start": 428577,
"end": 434726
} | class ____ extends YamlDeserializerBase<HeadDefinition> {
public HeadDefinitionDeserializer() {
super(HeadDefinition.class);
}
@Override
protected HeadDefinition newInstance() {
return new HeadDefinition();
}
@Override
protected boolean setProperty(HeadDefinition target, String propertyKey,
String propertyName, Node node) {
propertyKey = org.apache.camel.util.StringHelper.dashToCamelCase(propertyKey);
switch(propertyKey) {
case "apiDocs": {
String val = asText(node);
target.setApiDocs(val);
break;
}
case "bindingMode": {
String val = asText(node);
target.setBindingMode(val);
break;
}
case "clientRequestValidation": {
String val = asText(node);
target.setClientRequestValidation(val);
break;
}
case "clientResponseValidation": {
String val = asText(node);
target.setClientResponseValidation(val);
break;
}
case "consumes": {
String val = asText(node);
target.setConsumes(val);
break;
}
case "deprecated": {
String val = asText(node);
target.setDeprecated(val);
break;
}
case "disabled": {
String val = asText(node);
target.setDisabled(val);
break;
}
case "enableCORS": {
String val = asText(node);
target.setEnableCORS(val);
break;
}
case "enableNoContentResponse": {
String val = asText(node);
target.setEnableNoContentResponse(val);
break;
}
case "outType": {
String val = asText(node);
target.setOutType(val);
break;
}
case "param": {
java.util.List<org.apache.camel.model.rest.ParamDefinition> val = asFlatList(node, org.apache.camel.model.rest.ParamDefinition.class);
target.setParams(val);
break;
}
case "path": {
String val = asText(node);
target.setPath(val);
break;
}
case "produces": {
String val = asText(node);
target.setProduces(val);
break;
}
case "responseMessage": {
java.util.List<org.apache.camel.model.rest.ResponseMessageDefinition> val = asFlatList(node, org.apache.camel.model.rest.ResponseMessageDefinition.class);
target.setResponseMsgs(val);
break;
}
case "routeId": {
String val = asText(node);
target.setRouteId(val);
break;
}
case "security": {
java.util.List<org.apache.camel.model.rest.SecurityDefinition> val = asFlatList(node, org.apache.camel.model.rest.SecurityDefinition.class);
target.setSecurity(val);
break;
}
case "skipBindingOnErrorCode": {
String val = asText(node);
target.setSkipBindingOnErrorCode(val);
break;
}
case "streamCache": {
String val = asText(node);
target.setStreamCache(val);
break;
}
case "to": {
org.apache.camel.model.ToDefinition val = asType(node, org.apache.camel.model.ToDefinition.class);
target.setTo(val);
break;
}
case "type": {
String val = asText(node);
target.setType(val);
break;
}
case "id": {
String val = asText(node);
target.setId(val);
break;
}
case "description": {
String val = asText(node);
target.setDescription(val);
break;
}
case "note": {
String val = asText(node);
target.setNote(val);
break;
}
default: {
return false;
}
}
return true;
}
}
@YamlType(
nodes = "header",
inline = true,
types = org.apache.camel.model.language.HeaderExpression.class,
order = org.apache.camel.dsl.yaml.common.YamlDeserializerResolver.ORDER_LOWEST - 1,
displayName = "Header",
description = "Gets a header from the Exchange.",
deprecated = false,
properties = {
@YamlProperty(name = "expression", type = "string", required = true, description = "The expression value in your chosen language syntax", displayName = "Expression"),
@YamlProperty(name = "id", type = "string", description = "Sets the id of this node", displayName = "Id"),
@YamlProperty(name = "trim", type = "boolean", defaultValue = "true", description = "Whether to trim the value to remove leading and trailing whitespaces and line breaks", displayName = "Trim")
}
)
public static | HeadDefinitionDeserializer |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/dialect/mysql/ast/statement/MySqlLockTableStatement.java | {
"start": 1160,
"end": 1697
} | class ____ extends MySqlStatementImpl implements SQLLockTableStatement {
private List<Item> items = new ArrayList<Item>();
@Override
protected void accept0(SQLASTVisitor visitor) {
if (visitor instanceof H2ASTVisitor) {
return;
}
super.accept0(visitor);
}
@Override
public void accept0(MySqlASTVisitor visitor) {
if (visitor.visit(this)) {
acceptChild(visitor, items);
}
visitor.endVisit(this);
}
public static | MySqlLockTableStatement |
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/ProcessTableFunctionTestUtils.java | {
"start": 35814,
"end": 36294
} | class ____ extends ProcessTableFunction<Row>
implements ChangelogFunction {
void collectUpdate(Context ctx, Row r) {
collect(
Row.ofKind(
r.getKind(),
r.getField(0),
r.getField(1),
toModeSummary(ctx.getChangelogMode())));
}
}
/** Testing function. */
public static | ChangelogProcessTableFunctionBase |
java | spring-projects__spring-framework | spring-beans/src/test/java/org/springframework/beans/factory/aot/BeanDefinitionPropertyValueCodeGeneratorDelegatesTests.java | {
"start": 7316,
"end": 8488
} | class ____ {
@Test
void generateWhenSimpleResolvableType() {
ResolvableType resolvableType = ResolvableType.forClass(String.class);
compile(resolvableType, (instance, compiled) -> assertThat(instance)
.isEqualTo(resolvableType));
}
@Test
void generateWhenNoneResolvableType() {
ResolvableType resolvableType = ResolvableType.NONE;
compile(resolvableType, (instance, compiled) ->
assertThat(instance).isEqualTo(resolvableType));
}
@Test
void generateWhenGenericResolvableType() {
ResolvableType resolvableType = ResolvableType
.forClassWithGenerics(List.class, String.class);
compile(resolvableType, (instance, compiled) ->
assertThat(instance).isEqualTo(resolvableType));
}
@Test
void generateWhenNestedGenericResolvableType() {
ResolvableType stringList = ResolvableType.forClassWithGenerics(List.class,
String.class);
ResolvableType resolvableType = ResolvableType.forClassWithGenerics(Map.class,
ResolvableType.forClass(Integer.class), stringList);
compile(resolvableType, (instance, compiled) -> assertThat(instance)
.isEqualTo(resolvableType));
}
}
@Nested
| ResolvableTypeTests |
java | mybatis__mybatis-3 | src/main/java/org/apache/ibatis/io/VFS.java | {
"start": 3334,
"end": 3510
} | class ____ add.
*/
public static void addImplClass(Class<? extends VFS> clazz) {
if (clazz != null) {
USER_IMPLEMENTATIONS.add(clazz);
}
}
/**
* Get a | to |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/query/spi/SqmQuery.java | {
"start": 717,
"end": 4786
} | interface ____<R> extends CommonQueryContract {
String getQueryString();
SqmStatement<R> getSqmStatement();
ParameterMetadata getParameterMetadata();
QueryOptions getQueryOptions();
@Override
SqmQuery<R> setParameter(String name, Object value);
@Override
<P> SqmQuery<R> setParameter(String name, P value, Class<P> type);
@Override
<P> SqmQuery<R> setParameter(String name, P value, Type<P> type);
@Override @Deprecated(since = "7")
SqmQuery<R> setParameter(String name, Instant value, TemporalType temporalType);
@Override @Deprecated(since = "7")
SqmQuery<R> setParameter(String name, Calendar value, TemporalType temporalType);
@Override @Deprecated(since = "7")
SqmQuery<R> setParameter(String name, Date value, TemporalType temporalType);
@Override
SqmQuery<R> setParameter(int position, Object value);
@Override
<P> SqmQuery<R> setParameter(int position, P value, Class<P> type);
@Override
<P> SqmQuery<R> setParameter(int position, P value, Type<P> type);
@Override @Deprecated(since = "7")
SqmQuery<R> setParameter(int position, Instant value, TemporalType temporalType);
@Override @Deprecated(since = "7")
SqmQuery<R> setParameter(int position, Date value, TemporalType temporalType);
@Override @Deprecated(since = "7")
SqmQuery<R> setParameter(int position, Calendar value, TemporalType temporalType);
@Override
<T> SqmQuery<R> setParameter(QueryParameter<T> parameter, T value);
@Override
<P> SqmQuery<R> setParameter(QueryParameter<P> parameter, P value, Class<P> type);
@Override
<P> SqmQuery<R> setParameter(QueryParameter<P> parameter, P val, Type<P> type);
@Override
<T> SqmQuery<R> setParameter(Parameter<T> param, T value);
@Override @Deprecated(since = "7")
SqmQuery<R> setParameter(Parameter<Calendar> param, Calendar value, TemporalType temporalType);
@Override @Deprecated(since = "7")
SqmQuery<R> setParameter(Parameter<Date> param, Date value, TemporalType temporalType);
@Override
SqmQuery<R> setParameterList(String name, @SuppressWarnings("rawtypes") Collection values);
@Override
<P> SqmQuery<R> setParameterList(String name, Collection<? extends P> values, Class<P> javaType);
@Override
<P> SqmQuery<R> setParameterList(String name, Collection<? extends P> values, Type<P> type);
@Override
SqmQuery<R> setParameterList(String name, Object[] values);
@Override
<P> SqmQuery<R> setParameterList(String name, P[] values, Class<P> javaType);
@Override
<P> SqmQuery<R> setParameterList(String name, P[] values, Type<P> type);
@Override
SqmQuery<R> setParameterList(int position, @SuppressWarnings("rawtypes") Collection values);
@Override
<P> SqmQuery<R> setParameterList(int position, Collection<? extends P> values, Class<P> javaType);
@Override
<P> SqmQuery<R> setParameterList(int position, Collection<? extends P> values, Type<P> type);
@Override
SqmQuery<R> setParameterList(int position, Object[] values);
@Override
<P> SqmQuery<R> setParameterList(int position, P[] values, Class<P> javaType);
@Override
<P> SqmQuery<R> setParameterList(int position, P[] values, Type<P> type);
@Override
<P> SqmQuery<R> setParameterList(QueryParameter<P> parameter, Collection<? extends P> values);
@Override
<P> SqmQuery<R> setParameterList(QueryParameter<P> parameter, Collection<? extends P> values, Class<P> javaType);
@Override
<P> SqmQuery<R> setParameterList(QueryParameter<P> parameter, Collection<? extends P> values, Type<P> type);
@Override
<P> SqmQuery<R> setParameterList(QueryParameter<P> parameter, P[] values);
@Override
<P> SqmQuery<R> setParameterList(QueryParameter<P> parameter, P[] values, Class<P> javaType);
@Override
<P> SqmQuery<R> setParameterList(QueryParameter<P> parameter, P[] values, Type<P> type);
@Override
SqmQuery<R> setProperties(Object bean);
@Override
SqmQuery<R> setProperties(@SuppressWarnings("rawtypes") Map bean);
@Override @Deprecated(since = "7")
SqmQuery<R> setHibernateFlushMode(FlushMode flushMode);
@Override
SqmQuery<R> setQueryFlushMode(QueryFlushMode queryFlushMode);
}
| SqmQuery |
java | apache__hadoop | hadoop-tools/hadoop-datajoin/src/main/java/org/apache/hadoop/contrib/utils/join/JobBase.java | {
"start": 1293,
"end": 4687
} | class ____ implements Mapper, Reducer {
public static final Logger LOG = LoggerFactory.getLogger("datajoin.job");
private SortedMap<Object, Long> longCounters = null;
private SortedMap<Object, Double> doubleCounters = null;
/**
* Set the given counter to the given value
*
* @param name
* the counter name
* @param value
* the value for the counter
*/
protected void setLongValue(Object name, long value) {
this.longCounters.put(name, Long.valueOf(value));
}
/**
* Set the given counter to the given value
*
* @param name
* the counter name
* @param value
* the value for the counter
*/
protected void setDoubleValue(Object name, double value) {
this.doubleCounters.put(name, new Double(value));
}
/**
*
* @param name
* the counter name
* @return return the value of the given counter.
*/
protected Long getLongValue(Object name) {
return this.longCounters.get(name);
}
/**
*
* @param name
* the counter name
* @return return the value of the given counter.
*/
protected Double getDoubleValue(Object name) {
return this.doubleCounters.get(name);
}
/**
* Increment the given counter by the given incremental value If the counter
* does not exist, one is created with value 0.
*
* @param name
* the counter name
* @param inc
* the incremental value
* @return the updated value.
*/
protected Long addLongValue(Object name, long inc) {
Long val = this.longCounters.get(name);
Long retv = null;
if (val == null) {
retv = Long.valueOf(inc);
} else {
retv = Long.valueOf(val.longValue() + inc);
}
this.longCounters.put(name, retv);
return retv;
}
/**
* Increment the given counter by the given incremental value If the counter
* does not exist, one is created with value 0.
*
* @param name
* the counter name
* @param inc
* the incremental value
* @return the updated value.
*/
protected Double addDoubleValue(Object name, double inc) {
Double val = this.doubleCounters.get(name);
Double retv = null;
if (val == null) {
retv = new Double(inc);
} else {
retv = new Double(val.doubleValue() + inc);
}
this.doubleCounters.put(name, retv);
return retv;
}
/**
* log the counters
*
*/
protected void report() {
LOG.info(getReport());
}
/**
* log the counters
*
*/
protected String getReport() {
StringBuilder sb = new StringBuilder();
Iterator iter = this.longCounters.entrySet().iterator();
while (iter.hasNext()) {
Entry e = (Entry) iter.next();
sb.append(e.getKey().toString()).append("\t").append(e.getValue())
.append("\n");
}
iter = this.doubleCounters.entrySet().iterator();
while (iter.hasNext()) {
Entry e = (Entry) iter.next();
sb.append(e.getKey().toString()).append("\t").append(e.getValue())
.append("\n");
}
return sb.toString();
}
/**
* Initializes a new instance from a {@link JobConf}.
*
* @param job
* the configuration
*/
public void configure(JobConf job) {
this.longCounters = new TreeMap<Object, Long>();
this.doubleCounters = new TreeMap<Object, Double>();
}
}
| JobBase |
java | spring-projects__spring-security | oauth2/oauth2-authorization-server/src/test/java/org/springframework/security/oauth2/server/authorization/authentication/OAuth2AuthorizationConsentAuthenticationProviderTests.java | {
"start": 3151,
"end": 29791
} | class ____ {
private static final String AUTHORIZATION_URI = "https://provider.com/oauth2/authorize";
private static final String STATE = "state";
private static final OAuth2TokenType STATE_TOKEN_TYPE = new OAuth2TokenType(OAuth2ParameterNames.STATE);
private RegisteredClientRepository registeredClientRepository;
private OAuth2AuthorizationService authorizationService;
private OAuth2AuthorizationConsentService authorizationConsentService;
private OAuth2AuthorizationConsentAuthenticationProvider authenticationProvider;
private TestingAuthenticationToken principal;
@BeforeEach
public void setUp() {
this.registeredClientRepository = mock(RegisteredClientRepository.class);
this.authorizationService = mock(OAuth2AuthorizationService.class);
this.authorizationConsentService = mock(OAuth2AuthorizationConsentService.class);
this.authenticationProvider = new OAuth2AuthorizationConsentAuthenticationProvider(
this.registeredClientRepository, this.authorizationService, this.authorizationConsentService);
this.principal = new TestingAuthenticationToken("principalName", "password");
this.principal.setAuthenticated(true);
AuthorizationServerSettings authorizationServerSettings = AuthorizationServerSettings.builder()
.issuer("https://provider.com")
.build();
AuthorizationServerContextHolder
.setContext(new TestAuthorizationServerContext(authorizationServerSettings, null));
}
@Test
public void constructorWhenRegisteredClientRepositoryNullThenThrowIllegalArgumentException() {
assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> new OAuth2AuthorizationConsentAuthenticationProvider(null, this.authorizationService,
this.authorizationConsentService))
.withMessage("registeredClientRepository cannot be null");
}
@Test
public void constructorWhenAuthorizationServiceNullThenThrowIllegalArgumentException() {
assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> new OAuth2AuthorizationConsentAuthenticationProvider(this.registeredClientRepository,
null, this.authorizationConsentService))
.withMessage("authorizationService cannot be null");
}
@Test
public void constructorWhenAuthorizationConsentServiceNullThenThrowIllegalArgumentException() {
assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> new OAuth2AuthorizationConsentAuthenticationProvider(this.registeredClientRepository,
this.authorizationService, null))
.withMessage("authorizationConsentService cannot be null");
}
@Test
public void supportsWhenTypeOAuth2AuthorizationConsentAuthenticationTokenThenReturnTrue() {
assertThat(this.authenticationProvider.supports(OAuth2AuthorizationConsentAuthenticationToken.class)).isTrue();
}
@Test
public void setAuthorizationCodeGeneratorWhenNullThenThrowIllegalArgumentException() {
assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> this.authenticationProvider.setAuthorizationCodeGenerator(null))
.withMessage("authorizationCodeGenerator cannot be null");
}
@Test
public void setAuthorizationConsentCustomizerWhenNullThenThrowIllegalArgumentException() {
assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> this.authenticationProvider.setAuthorizationConsentCustomizer(null))
.withMessage("authorizationConsentCustomizer cannot be null");
}
@Test
public void authenticateWhenInvalidStateThenThrowOAuth2AuthorizationCodeRequestAuthenticationException() {
RegisteredClient registeredClient = TestRegisteredClients.registeredClient().build();
OAuth2AuthorizationConsentAuthenticationToken authentication = new OAuth2AuthorizationConsentAuthenticationToken(
AUTHORIZATION_URI, registeredClient.getClientId(), this.principal, STATE, registeredClient.getScopes(),
null);
given(this.authorizationService.findByToken(eq(authentication.getState()), eq(STATE_TOKEN_TYPE)))
.willReturn(null);
assertThatExceptionOfType(OAuth2AuthorizationCodeRequestAuthenticationException.class)
.isThrownBy(() -> this.authenticationProvider.authenticate(authentication))
.satisfies((ex) -> assertAuthenticationException(ex, OAuth2ErrorCodes.INVALID_REQUEST,
OAuth2ParameterNames.STATE, null));
}
@Test
public void authenticateWhenPrincipalNotAuthenticatedThenThrowOAuth2AuthorizationCodeRequestAuthenticationException() {
RegisteredClient registeredClient = TestRegisteredClients.registeredClient().build();
OAuth2Authorization authorization = TestOAuth2Authorizations.authorization(registeredClient)
.principalName(this.principal.getName())
.build();
OAuth2AuthorizationConsentAuthenticationToken authentication = new OAuth2AuthorizationConsentAuthenticationToken(
AUTHORIZATION_URI, registeredClient.getClientId(), this.principal, STATE, registeredClient.getScopes(),
null);
given(this.authorizationService.findByToken(eq(authentication.getState()), eq(STATE_TOKEN_TYPE)))
.willReturn(authorization);
this.principal.setAuthenticated(false);
assertThatExceptionOfType(OAuth2AuthorizationCodeRequestAuthenticationException.class)
.isThrownBy(() -> this.authenticationProvider.authenticate(authentication))
.satisfies((ex) -> assertAuthenticationException(ex, OAuth2ErrorCodes.INVALID_REQUEST,
OAuth2ParameterNames.STATE, null));
}
@Test
public void authenticateWhenInvalidPrincipalThenThrowOAuth2AuthorizationCodeRequestAuthenticationException() {
RegisteredClient registeredClient = TestRegisteredClients.registeredClient().build();
OAuth2Authorization authorization = TestOAuth2Authorizations.authorization(registeredClient)
.principalName(this.principal.getName().concat("-other"))
.build();
OAuth2AuthorizationConsentAuthenticationToken authentication = new OAuth2AuthorizationConsentAuthenticationToken(
AUTHORIZATION_URI, registeredClient.getClientId(), this.principal, STATE, registeredClient.getScopes(),
null);
given(this.authorizationService.findByToken(eq(authentication.getState()), eq(STATE_TOKEN_TYPE)))
.willReturn(authorization);
assertThatExceptionOfType(OAuth2AuthorizationCodeRequestAuthenticationException.class)
.isThrownBy(() -> this.authenticationProvider.authenticate(authentication))
.satisfies((ex) -> assertAuthenticationException(ex, OAuth2ErrorCodes.INVALID_REQUEST,
OAuth2ParameterNames.STATE, null));
}
@Test
public void authenticateWhenInvalidClientIdThenThrowOAuth2AuthorizationCodeRequestAuthenticationException() {
RegisteredClient registeredClient = TestRegisteredClients.registeredClient().build();
OAuth2Authorization authorization = TestOAuth2Authorizations.authorization(registeredClient)
.principalName(this.principal.getName())
.build();
given(this.authorizationService.findByToken(eq("state"), eq(STATE_TOKEN_TYPE))).willReturn(authorization);
RegisteredClient otherRegisteredClient = TestRegisteredClients.registeredClient2().build();
OAuth2AuthorizationConsentAuthenticationToken authentication = new OAuth2AuthorizationConsentAuthenticationToken(
AUTHORIZATION_URI, otherRegisteredClient.getClientId(), this.principal, STATE,
registeredClient.getScopes(), null);
assertThatExceptionOfType(OAuth2AuthorizationCodeRequestAuthenticationException.class)
.isThrownBy(() -> this.authenticationProvider.authenticate(authentication))
.satisfies((ex) -> assertAuthenticationException(ex, OAuth2ErrorCodes.INVALID_REQUEST,
OAuth2ParameterNames.CLIENT_ID, null));
}
@Test
public void authenticateWhenDoesNotMatchClientThenThrowOAuth2AuthorizationCodeRequestAuthenticationException() {
RegisteredClient registeredClient = TestRegisteredClients.registeredClient().build();
given(this.registeredClientRepository.findByClientId(eq(registeredClient.getClientId())))
.willReturn(registeredClient);
RegisteredClient otherRegisteredClient = TestRegisteredClients.registeredClient2().build();
OAuth2Authorization authorization = TestOAuth2Authorizations.authorization(otherRegisteredClient)
.principalName(this.principal.getName())
.build();
given(this.authorizationService.findByToken(eq("state"), eq(STATE_TOKEN_TYPE))).willReturn(authorization);
OAuth2AuthorizationConsentAuthenticationToken authentication = new OAuth2AuthorizationConsentAuthenticationToken(
AUTHORIZATION_URI, registeredClient.getClientId(), this.principal, STATE, registeredClient.getScopes(),
null);
assertThatExceptionOfType(OAuth2AuthorizationCodeRequestAuthenticationException.class)
.isThrownBy(() -> this.authenticationProvider.authenticate(authentication))
.satisfies((ex) -> assertAuthenticationException(ex, OAuth2ErrorCodes.INVALID_REQUEST,
OAuth2ParameterNames.CLIENT_ID, null));
}
@Test
public void authenticateWhenScopeNotRequestedThenThrowOAuth2AuthorizationCodeRequestAuthenticationException() {
RegisteredClient registeredClient = TestRegisteredClients.registeredClient().build();
given(this.registeredClientRepository.findByClientId(eq(registeredClient.getClientId())))
.willReturn(registeredClient);
OAuth2Authorization authorization = TestOAuth2Authorizations.authorization(registeredClient)
.principalName(this.principal.getName())
.build();
OAuth2AuthorizationRequest authorizationRequest = authorization
.getAttribute(OAuth2AuthorizationRequest.class.getName());
Set<String> authorizedScopes = new HashSet<>(authorizationRequest.getScopes());
authorizedScopes.add("scope-not-requested");
OAuth2AuthorizationConsentAuthenticationToken authentication = new OAuth2AuthorizationConsentAuthenticationToken(
AUTHORIZATION_URI, registeredClient.getClientId(), this.principal, STATE, authorizedScopes, null);
given(this.authorizationService.findByToken(eq(authentication.getState()), eq(STATE_TOKEN_TYPE)))
.willReturn(authorization);
assertThatExceptionOfType(OAuth2AuthorizationCodeRequestAuthenticationException.class)
.isThrownBy(() -> this.authenticationProvider.authenticate(authentication))
.satisfies((ex) -> assertAuthenticationException(ex, OAuth2ErrorCodes.INVALID_SCOPE,
OAuth2ParameterNames.SCOPE, authorizationRequest.getRedirectUri()));
}
@Test
public void authenticateWhenNotApprovedThenThrowOAuth2AuthorizationCodeRequestAuthenticationException() {
RegisteredClient registeredClient = TestRegisteredClients.registeredClient().build();
given(this.registeredClientRepository.findByClientId(eq(registeredClient.getClientId())))
.willReturn(registeredClient);
OAuth2Authorization authorization = TestOAuth2Authorizations.authorization(registeredClient)
.principalName(this.principal.getName())
.build();
OAuth2AuthorizationConsentAuthenticationToken authentication = new OAuth2AuthorizationConsentAuthenticationToken(
AUTHORIZATION_URI, registeredClient.getClientId(), this.principal, STATE, new HashSet<>(), null); // No
// scopes
// approved
given(this.authorizationService.findByToken(eq(authentication.getState()), eq(STATE_TOKEN_TYPE)))
.willReturn(authorization);
OAuth2AuthorizationRequest authorizationRequest = authorization
.getAttribute(OAuth2AuthorizationRequest.class.getName());
assertThatExceptionOfType(OAuth2AuthorizationCodeRequestAuthenticationException.class)
.isThrownBy(() -> this.authenticationProvider.authenticate(authentication))
.satisfies((ex) -> assertAuthenticationException(ex, OAuth2ErrorCodes.ACCESS_DENIED,
OAuth2ParameterNames.CLIENT_ID, authorizationRequest.getRedirectUri()));
verify(this.authorizationService).remove(eq(authorization));
}
@Test
public void authenticateWhenApproveAllThenReturnAuthorizationCode() {
RegisteredClient registeredClient = TestRegisteredClients.registeredClient().build();
given(this.registeredClientRepository.findByClientId(eq(registeredClient.getClientId())))
.willReturn(registeredClient);
OAuth2Authorization authorization = TestOAuth2Authorizations.authorization(registeredClient)
.principalName(this.principal.getName())
.build();
OAuth2AuthorizationRequest authorizationRequest = authorization
.getAttribute(OAuth2AuthorizationRequest.class.getName());
Set<String> authorizedScopes = authorizationRequest.getScopes();
OAuth2AuthorizationConsentAuthenticationToken authentication = new OAuth2AuthorizationConsentAuthenticationToken(
AUTHORIZATION_URI, registeredClient.getClientId(), this.principal, STATE, authorizedScopes, null); // Approve
// all
// scopes
given(this.authorizationService.findByToken(eq(authentication.getState()), eq(STATE_TOKEN_TYPE)))
.willReturn(authorization);
OAuth2AuthorizationCodeRequestAuthenticationToken authenticationResult = (OAuth2AuthorizationCodeRequestAuthenticationToken) this.authenticationProvider
.authenticate(authentication);
assertAuthorizationConsentRequestWithAuthorizationCodeResult(registeredClient, authorization,
authenticationResult);
}
@Test
public void authenticateWhenCustomAuthorizationConsentCustomizerThenUsed() {
RegisteredClient registeredClient = TestRegisteredClients.registeredClient().build();
given(this.registeredClientRepository.findByClientId(eq(registeredClient.getClientId())))
.willReturn(registeredClient);
OAuth2Authorization authorization = TestOAuth2Authorizations.authorization(registeredClient)
.principalName(this.principal.getName())
.build();
OAuth2AuthorizationRequest authorizationRequest = authorization
.getAttribute(OAuth2AuthorizationRequest.class.getName());
Set<String> authorizedScopes = authorizationRequest.getScopes();
OAuth2AuthorizationConsentAuthenticationToken authentication = new OAuth2AuthorizationConsentAuthenticationToken(
AUTHORIZATION_URI, registeredClient.getClientId(), this.principal, STATE, authorizedScopes, null); // Approve
// all
// scopes
given(this.authorizationService.findByToken(eq(authentication.getState()), eq(STATE_TOKEN_TYPE)))
.willReturn(authorization);
@SuppressWarnings("unchecked")
Consumer<OAuth2AuthorizationConsentAuthenticationContext> authorizationConsentCustomizer = mock(Consumer.class);
this.authenticationProvider.setAuthorizationConsentCustomizer(authorizationConsentCustomizer);
OAuth2AuthorizationCodeRequestAuthenticationToken authenticationResult = (OAuth2AuthorizationCodeRequestAuthenticationToken) this.authenticationProvider
.authenticate(authentication);
assertAuthorizationConsentRequestWithAuthorizationCodeResult(registeredClient, authorization,
authenticationResult);
ArgumentCaptor<OAuth2AuthorizationConsentAuthenticationContext> authenticationContextCaptor = ArgumentCaptor
.forClass(OAuth2AuthorizationConsentAuthenticationContext.class);
verify(authorizationConsentCustomizer).accept(authenticationContextCaptor.capture());
OAuth2AuthorizationConsentAuthenticationContext authenticationContext = authenticationContextCaptor.getValue();
assertThat(authenticationContext.<Authentication>getAuthentication()).isEqualTo(authentication);
assertThat(authenticationContext.getAuthorizationConsent()).isNotNull();
assertThat(authenticationContext.getRegisteredClient()).isEqualTo(registeredClient);
assertThat(authenticationContext.getAuthorization()).isEqualTo(authorization);
assertThat(authenticationContext.getAuthorizationRequest()).isEqualTo(authorizationRequest);
}
private void assertAuthorizationConsentRequestWithAuthorizationCodeResult(RegisteredClient registeredClient,
OAuth2Authorization authorization, OAuth2AuthorizationCodeRequestAuthenticationToken authenticationResult) {
OAuth2AuthorizationRequest authorizationRequest = authorization
.getAttribute(OAuth2AuthorizationRequest.class.getName());
Set<String> authorizedScopes = authorizationRequest.getScopes();
ArgumentCaptor<OAuth2AuthorizationConsent> authorizationConsentCaptor = ArgumentCaptor
.forClass(OAuth2AuthorizationConsent.class);
verify(this.authorizationConsentService).save(authorizationConsentCaptor.capture());
OAuth2AuthorizationConsent authorizationConsent = authorizationConsentCaptor.getValue();
assertThat(authorizationConsent.getRegisteredClientId()).isEqualTo(authorization.getRegisteredClientId());
assertThat(authorizationConsent.getPrincipalName()).isEqualTo(authorization.getPrincipalName());
assertThat(authorizationConsent.getAuthorities()).hasSize(authorizedScopes.size());
assertThat(authorizationConsent.getScopes()).containsExactlyInAnyOrderElementsOf(authorizedScopes);
ArgumentCaptor<OAuth2Authorization> authorizationCaptor = ArgumentCaptor.forClass(OAuth2Authorization.class);
verify(this.authorizationService).save(authorizationCaptor.capture());
OAuth2Authorization updatedAuthorization = authorizationCaptor.getValue();
assertThat(updatedAuthorization.getRegisteredClientId()).isEqualTo(authorization.getRegisteredClientId());
assertThat(updatedAuthorization.getPrincipalName()).isEqualTo(authorization.getPrincipalName());
assertThat(updatedAuthorization.getAuthorizationGrantType())
.isEqualTo(authorization.getAuthorizationGrantType());
assertThat(updatedAuthorization.<Authentication>getAttribute(Principal.class.getName()))
.isEqualTo(authorization.<Authentication>getAttribute(Principal.class.getName()));
assertThat(updatedAuthorization
.<OAuth2AuthorizationRequest>getAttribute(OAuth2AuthorizationRequest.class.getName()))
.isEqualTo(authorizationRequest);
OAuth2Authorization.Token<OAuth2AuthorizationCode> authorizationCode = updatedAuthorization
.getToken(OAuth2AuthorizationCode.class);
assertThat(authorizationCode).isNotNull();
assertThat(updatedAuthorization.<String>getAttribute(OAuth2ParameterNames.STATE)).isNull();
assertThat(updatedAuthorization.getAuthorizedScopes()).isEqualTo(authorizedScopes);
assertThat(authenticationResult.getClientId()).isEqualTo(registeredClient.getClientId());
assertThat(authenticationResult.getPrincipal()).isEqualTo(this.principal);
assertThat(authenticationResult.getAuthorizationUri()).isEqualTo(authorizationRequest.getAuthorizationUri());
assertThat(authenticationResult.getRedirectUri()).isEqualTo(authorizationRequest.getRedirectUri());
assertThat(authenticationResult.getScopes()).isEqualTo(authorizedScopes);
assertThat(authenticationResult.getState()).isEqualTo(authorizationRequest.getState());
assertThat(authenticationResult.getAuthorizationCode()).isEqualTo(authorizationCode.getToken());
assertThat(authenticationResult.isAuthenticated()).isTrue();
}
@Test
public void authenticateWhenApproveNoneAndRevokePreviouslyApprovedThenAuthorizationConsentRemoved() {
String previouslyApprovedScope = "message.read";
String requestedScope = "message.write";
RegisteredClient registeredClient = TestRegisteredClients.registeredClient().scopes((scopes) -> {
scopes.clear();
scopes.add(previouslyApprovedScope);
scopes.add(requestedScope);
}).build();
given(this.registeredClientRepository.findByClientId(eq(registeredClient.getClientId())))
.willReturn(registeredClient);
OAuth2Authorization authorization = TestOAuth2Authorizations.authorization(registeredClient)
.principalName(this.principal.getName())
.build();
OAuth2AuthorizationRequest authorizationRequest = authorization
.getAttribute(OAuth2AuthorizationRequest.class.getName());
OAuth2AuthorizationConsentAuthenticationToken authentication = new OAuth2AuthorizationConsentAuthenticationToken(
AUTHORIZATION_URI, registeredClient.getClientId(), this.principal, STATE, new HashSet<>(), null); // No
// scopes
// approved
given(this.authorizationService.findByToken(eq(authentication.getState()), eq(STATE_TOKEN_TYPE)))
.willReturn(authorization);
OAuth2AuthorizationConsent previousAuthorizationConsent = OAuth2AuthorizationConsent
.withId(authorization.getRegisteredClientId(), authorization.getPrincipalName())
.scope(previouslyApprovedScope)
.build();
given(this.authorizationConsentService.findById(eq(authorization.getRegisteredClientId()),
eq(authorization.getPrincipalName())))
.willReturn(previousAuthorizationConsent);
// Revoke all (including previously approved)
this.authenticationProvider.setAuthorizationConsentCustomizer(
(authorizationConsentContext) -> authorizationConsentContext.getAuthorizationConsent()
.authorities(Set::clear));
assertThatExceptionOfType(OAuth2AuthorizationCodeRequestAuthenticationException.class)
.isThrownBy(() -> this.authenticationProvider.authenticate(authentication))
.satisfies((ex) -> assertAuthenticationException(ex, OAuth2ErrorCodes.ACCESS_DENIED,
OAuth2ParameterNames.CLIENT_ID, authorizationRequest.getRedirectUri()));
verify(this.authorizationConsentService).remove(eq(previousAuthorizationConsent));
verify(this.authorizationService).remove(eq(authorization));
}
@Test
public void authenticateWhenApproveSomeAndPreviouslyApprovedThenAuthorizationConsentUpdated() {
String previouslyApprovedScope = "message.read";
String requestedScope = "message.write";
String otherPreviouslyApprovedScope = "other.scope";
RegisteredClient registeredClient = TestRegisteredClients.registeredClient().scopes((scopes) -> {
scopes.clear();
scopes.add(previouslyApprovedScope);
scopes.add(requestedScope);
}).build();
given(this.registeredClientRepository.findByClientId(eq(registeredClient.getClientId())))
.willReturn(registeredClient);
OAuth2Authorization authorization = TestOAuth2Authorizations.authorization(registeredClient)
.principalName(this.principal.getName())
.build();
OAuth2AuthorizationRequest authorizationRequest = authorization
.getAttribute(OAuth2AuthorizationRequest.class.getName());
Set<String> requestedScopes = authorizationRequest.getScopes();
OAuth2AuthorizationConsentAuthenticationToken authentication = new OAuth2AuthorizationConsentAuthenticationToken(
AUTHORIZATION_URI, registeredClient.getClientId(), this.principal, STATE, requestedScopes, null);
given(this.authorizationService.findByToken(eq(authentication.getState()), eq(STATE_TOKEN_TYPE)))
.willReturn(authorization);
OAuth2AuthorizationConsent previousAuthorizationConsent = OAuth2AuthorizationConsent
.withId(authorization.getRegisteredClientId(), authorization.getPrincipalName())
.scope(previouslyApprovedScope)
.scope(otherPreviouslyApprovedScope)
.build();
given(this.authorizationConsentService.findById(eq(authorization.getRegisteredClientId()),
eq(authorization.getPrincipalName())))
.willReturn(previousAuthorizationConsent);
OAuth2AuthorizationCodeRequestAuthenticationToken authenticationResult = (OAuth2AuthorizationCodeRequestAuthenticationToken) this.authenticationProvider
.authenticate(authentication);
ArgumentCaptor<OAuth2AuthorizationConsent> authorizationConsentCaptor = ArgumentCaptor
.forClass(OAuth2AuthorizationConsent.class);
verify(this.authorizationConsentService).save(authorizationConsentCaptor.capture());
OAuth2AuthorizationConsent updatedAuthorizationConsent = authorizationConsentCaptor.getValue();
assertThat(updatedAuthorizationConsent.getRegisteredClientId())
.isEqualTo(previousAuthorizationConsent.getRegisteredClientId());
assertThat(updatedAuthorizationConsent.getPrincipalName())
.isEqualTo(previousAuthorizationConsent.getPrincipalName());
assertThat(updatedAuthorizationConsent.getScopes()).containsExactlyInAnyOrder(previouslyApprovedScope,
otherPreviouslyApprovedScope, requestedScope);
ArgumentCaptor<OAuth2Authorization> authorizationCaptor = ArgumentCaptor.forClass(OAuth2Authorization.class);
verify(this.authorizationService).save(authorizationCaptor.capture());
OAuth2Authorization updatedAuthorization = authorizationCaptor.getValue();
assertThat(updatedAuthorization.getAuthorizedScopes()).isEqualTo(requestedScopes);
assertThat(authenticationResult.getScopes()).isEqualTo(requestedScopes);
}
@Test
public void authenticateWhenApproveNoneAndPreviouslyApprovedThenAuthorizationConsentNotUpdated() {
String previouslyApprovedScope = "message.read";
String requestedScope = "message.write";
RegisteredClient registeredClient = TestRegisteredClients.registeredClient().scopes((scopes) -> {
scopes.clear();
scopes.add(previouslyApprovedScope);
scopes.add(requestedScope);
}).build();
given(this.registeredClientRepository.findByClientId(eq(registeredClient.getClientId())))
.willReturn(registeredClient);
OAuth2Authorization authorization = TestOAuth2Authorizations.authorization(registeredClient)
.principalName(this.principal.getName())
.build();
OAuth2AuthorizationConsentAuthenticationToken authentication = new OAuth2AuthorizationConsentAuthenticationToken(
AUTHORIZATION_URI, registeredClient.getClientId(), this.principal, STATE, new HashSet<>(), null); // No
// scopes
// approved
given(this.authorizationService.findByToken(eq(authentication.getState()), eq(STATE_TOKEN_TYPE)))
.willReturn(authorization);
OAuth2AuthorizationConsent previousAuthorizationConsent = OAuth2AuthorizationConsent
.withId(authorization.getRegisteredClientId(), authorization.getPrincipalName())
.scope(previouslyApprovedScope)
.build();
given(this.authorizationConsentService.findById(eq(authorization.getRegisteredClientId()),
eq(authorization.getPrincipalName())))
.willReturn(previousAuthorizationConsent);
OAuth2AuthorizationCodeRequestAuthenticationToken authenticationResult = (OAuth2AuthorizationCodeRequestAuthenticationToken) this.authenticationProvider
.authenticate(authentication);
verify(this.authorizationConsentService, never()).save(any());
assertThat(authenticationResult.getScopes()).isEqualTo(Collections.singleton(previouslyApprovedScope));
}
private static void assertAuthenticationException(
OAuth2AuthorizationCodeRequestAuthenticationException authenticationException, String errorCode,
String parameterName, String redirectUri) {
OAuth2Error error = authenticationException.getError();
assertThat(error.getErrorCode()).isEqualTo(errorCode);
assertThat(error.getDescription()).contains(parameterName);
OAuth2AuthorizationCodeRequestAuthenticationToken authorizationCodeRequestAuthentication = authenticationException
.getAuthorizationCodeRequestAuthentication();
assertThat(authorizationCodeRequestAuthentication.getRedirectUri()).isEqualTo(redirectUri);
}
}
| OAuth2AuthorizationConsentAuthenticationProviderTests |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/inference/strategies/DecimalDivideTypeStrategy.java | {
"start": 1876,
"end": 3047
} | class ____ implements TypeStrategy {
@Override
public Optional<DataType> inferType(CallContext callContext) {
final List<DataType> argumentDataTypes = callContext.getArgumentDataTypes();
final LogicalType dividend = argumentDataTypes.get(0).getLogicalType();
final LogicalType divisor = argumentDataTypes.get(1).getLogicalType();
// a hack to make legacy types possible until we drop them
if (dividend instanceof LegacyTypeInformationType) {
return Optional.of(argumentDataTypes.get(0));
}
if (divisor instanceof LegacyTypeInformationType) {
return Optional.of(argumentDataTypes.get(1));
}
if (!isDecimalComputation(dividend, divisor)) {
return Optional.empty();
}
final DecimalType decimalType =
LogicalTypeMerging.findDivisionDecimalType(
getPrecision(dividend),
getScale(dividend),
getPrecision(divisor),
getScale(divisor));
return Optional.of(fromLogicalToDataType(decimalType));
}
}
| DecimalDivideTypeStrategy |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/search/runtime/BooleanScriptFieldExistsQueryTests.java | {
"start": 565,
"end": 2059
} | class ____ extends AbstractBooleanScriptFieldQueryTestCase<BooleanScriptFieldExistsQuery> {
@Override
protected BooleanScriptFieldExistsQuery createTestInstance() {
return new BooleanScriptFieldExistsQuery(randomScript(), leafFactory, randomAlphaOfLength(5));
}
@Override
protected BooleanScriptFieldExistsQuery copy(BooleanScriptFieldExistsQuery orig) {
return new BooleanScriptFieldExistsQuery(orig.script(), leafFactory, orig.fieldName());
}
@Override
protected BooleanScriptFieldExistsQuery mutate(BooleanScriptFieldExistsQuery orig) {
if (randomBoolean()) {
new BooleanScriptFieldExistsQuery(randomValueOtherThan(orig.script(), this::randomScript), leafFactory, orig.fieldName());
}
return new BooleanScriptFieldExistsQuery(orig.script(), leafFactory, orig.fieldName() + "modified");
}
@Override
public void testMatches() {
assertTrue(createTestInstance().matches(between(1, Integer.MAX_VALUE), 0));
assertTrue(createTestInstance().matches(0, between(1, Integer.MAX_VALUE)));
assertTrue(createTestInstance().matches(between(1, Integer.MAX_VALUE), between(1, Integer.MAX_VALUE)));
assertFalse(createTestInstance().matches(0, 0));
}
@Override
protected void assertToString(BooleanScriptFieldExistsQuery query) {
assertThat(query.toString(query.fieldName()), equalTo("BooleanScriptFieldExistsQuery"));
}
}
| BooleanScriptFieldExistsQueryTests |
java | apache__camel | components/camel-bean/src/main/java/org/apache/camel/component/bean/BeanProcessor.java | {
"start": 1290,
"end": 4444
} | class ____ extends ServiceSupport implements AsyncProcessor, ErrorHandlerAware, IdAware {
private final DelegateBeanProcessor delegate;
private String id;
public BeanProcessor(Object pojo, CamelContext camelContext) {
this(new ConstantBeanHolder(
pojo, camelContext, ParameterMappingStrategyHelper.createParameterMappingStrategy(camelContext),
camelContext.getComponent("bean", BeanComponent.class)));
}
public BeanProcessor(Object pojo, BeanInfo beanInfo) {
this.delegate = new DelegateBeanProcessor(pojo, beanInfo);
}
public BeanProcessor(BeanHolder beanHolder) {
this.delegate = new DelegateBeanProcessor(beanHolder);
}
@Override
public String getId() {
return id;
}
@Override
public void setId(String id) {
this.id = id;
}
@Override
public Processor getErrorHandler() {
return null;
}
@Override
public void setErrorHandler(Processor errorHandler) {
BeanHolder holder = delegate.getBeanHolder();
if (holder != null) {
holder.setErrorHandler(errorHandler);
}
}
@Override
public void process(Exchange exchange) throws Exception {
delegate.process(exchange);
}
@Override
public boolean process(Exchange exchange, AsyncCallback callback) {
return delegate.process(exchange, callback);
}
@Override
public CompletableFuture<Exchange> processAsync(Exchange exchange) {
return delegate.processAsync(exchange);
}
public Processor getProcessor() {
return delegate.getProcessor();
}
public BeanHolder getBeanHolder() {
return delegate.getBeanHolder();
}
public Object getBean() {
return delegate.getBean();
}
public String getMethod() {
return delegate.getMethod();
}
public void setMethod(String method) {
delegate.setMethod(method);
}
public BeanScope getScope() {
return delegate.getScope();
}
public void setScope(BeanScope scope) {
delegate.setScope(scope);
}
public boolean isShorthandMethod() {
return delegate.isShorthandMethod();
}
public void setShorthandMethod(boolean shorthandMethod) {
delegate.setShorthandMethod(shorthandMethod);
}
@Override
protected void doInit() throws Exception {
delegate.init();
}
@Override
protected void doResume() throws Exception {
delegate.resume();
}
@Override
protected void doSuspend() throws Exception {
delegate.suspend();
}
@Override
protected void doStart() throws Exception {
delegate.start();
}
@Override
protected void doStop() throws Exception {
delegate.doStop();
}
@Override
protected void doShutdown() throws Exception {
delegate.shutdown();
}
@Override
public void close() throws IOException {
delegate.close();
}
@Override
public String toString() {
return delegate.toString();
}
private static final | BeanProcessor |
java | junit-team__junit5 | documentation/src/test/java/example/extensions/RandomNumberExtension.java | {
"start": 983,
"end": 2908
} | class ____
implements BeforeAllCallback, TestInstancePostProcessor, ParameterResolver {
private final java.util.Random random = new java.util.Random(System.nanoTime());
/**
* Inject a random integer into static fields that are annotated with
* {@code @Random} and can be assigned an integer value.
*/
@Override
public void beforeAll(ExtensionContext context) {
Class<?> testClass = context.getRequiredTestClass();
injectFields(testClass, null, ModifierSupport::isStatic);
}
/**
* Inject a random integer into non-static fields that are annotated with
* {@code @Random} and can be assigned an integer value.
*/
@Override
public void postProcessTestInstance(Object testInstance, ExtensionContext context) {
Class<?> testClass = context.getRequiredTestClass();
injectFields(testClass, testInstance, ModifierSupport::isNotStatic);
}
/**
* Determine if the parameter is annotated with {@code @Random} and can be
* assigned an integer value.
*/
@Override
public boolean supportsParameter(ParameterContext pc, ExtensionContext ec) {
return pc.isAnnotated(Random.class) && isInteger(pc.getParameter().getType());
}
/**
* Resolve a random integer.
*/
@Override
public Integer resolveParameter(ParameterContext pc, ExtensionContext ec) {
return this.random.nextInt();
}
private void injectFields(Class<?> testClass, @Nullable Object testInstance,
Predicate<Field> predicate) {
predicate = predicate.and(field -> isInteger(field.getType()));
findAnnotatedFields(testClass, Random.class, predicate)
.forEach(field -> {
try {
field.setAccessible(true);
field.set(testInstance, this.random.nextInt());
}
catch (Exception ex) {
throw new RuntimeException(ex);
}
});
}
private static boolean isInteger(Class<?> type) {
return type == Integer.class || type == int.class;
}
}
// end::user_guide[]
// @formatter:on
| RandomNumberExtension |
java | spring-projects__spring-boot | module/spring-boot-kafka/src/main/java/org/springframework/boot/kafka/autoconfigure/KafkaProperties.java | {
"start": 7031,
"end": 8819
} | class ____ {
private final Ssl ssl = new Ssl();
private final Security security = new Security();
/**
* Frequency with which the consumer offsets are auto-committed to Kafka if
* 'enable.auto.commit' is set to true.
*/
private @Nullable Duration autoCommitInterval;
/**
* What to do when there is no initial offset in Kafka or if the current offset no
* longer exists on the server.
*/
private @Nullable String autoOffsetReset;
/**
* List of host:port pairs to use for establishing the initial connections to the
* Kafka cluster. Overrides the global property, for consumers.
*/
private @Nullable List<String> bootstrapServers;
/**
* ID to pass to the server when making requests. Used for server-side logging.
*/
private @Nullable String clientId;
/**
* Whether the consumer's offset is periodically committed in the background.
*/
private @Nullable Boolean enableAutoCommit;
/**
* Maximum amount of time the server blocks before answering the fetch request if
* there isn't sufficient data to immediately satisfy the requirement given by
* "fetch-min-size".
*/
private @Nullable Duration fetchMaxWait;
/**
* Minimum amount of data the server should return for a fetch request.
*/
private @Nullable DataSize fetchMinSize;
/**
* Unique string that identifies the consumer group to which this consumer
* belongs.
*/
private @Nullable String groupId;
/**
* Expected time between heartbeats to the consumer coordinator.
*/
private @Nullable Duration heartbeatInterval;
/**
* Isolation level for reading messages that have been written transactionally.
*/
private IsolationLevel isolationLevel = IsolationLevel.READ_UNCOMMITTED;
/**
* Deserializer | Consumer |
java | spring-projects__spring-boot | module/spring-boot-data-cassandra/src/test/java/org/springframework/boot/data/cassandra/autoconfigure/DataCassandraReactiveRepositoriesAutoConfigurationTests.java | {
"start": 2110,
"end": 4603
} | class ____ {
private final ApplicationContextRunner contextRunner = new ApplicationContextRunner()
.withConfiguration(AutoConfigurations.of(CassandraAutoConfiguration.class,
DataCassandraRepositoriesAutoConfiguration.class, DataCassandraAutoConfiguration.class,
DataCassandraReactiveAutoConfiguration.class, DataCassandraReactiveRepositoriesAutoConfiguration.class,
PropertyPlaceholderAutoConfiguration.class));
@Test
void testDefaultRepositoryConfiguration() {
this.contextRunner.withUserConfiguration(DefaultConfiguration.class).run((context) -> {
assertThat(context).hasSingleBean(ReactiveCityRepository.class);
assertThat(context).hasSingleBean(CqlSessionBuilder.class);
assertThat(getManagedTypes(context).toList()).hasSize(1);
});
}
@Test
void testNoRepositoryConfiguration() {
this.contextRunner.withUserConfiguration(EmptyConfiguration.class).run((context) -> {
assertThat(context).hasSingleBean(CqlSessionBuilder.class);
assertThat(getManagedTypes(context).toList()).isEmpty();
});
}
@Test
void doesNotTriggerDefaultRepositoryDetectionIfCustomized() {
this.contextRunner.withUserConfiguration(CustomizedConfiguration.class).run((context) -> {
assertThat(context).hasSingleBean(ReactiveCityRepository.class);
assertThat(getManagedTypes(context).toList()).hasSize(1).containsOnly(City.class);
});
}
@Test
void enablingImperativeRepositoriesDisablesReactiveRepositories() {
this.contextRunner.withUserConfiguration(DefaultConfiguration.class)
.withPropertyValues("spring.data.cassandra.repositories.type=imperative")
.run((context) -> assertThat(context).doesNotHaveBean(ReactiveCityRepository.class));
}
@Test
void enablingNoRepositoriesDisablesReactiveRepositories() {
this.contextRunner.withUserConfiguration(DefaultConfiguration.class)
.withPropertyValues("spring.data.cassandra.repositories.type=none")
.run((context) -> assertThat(context).doesNotHaveBean(ReactiveCityRepository.class));
}
private ManagedTypes getManagedTypes(ApplicationContext context) {
CassandraMappingContext mappingContext = context.getBean(CassandraMappingContext.class);
Object field = ReflectionTestUtils.getField(mappingContext, "managedTypes");
assertThat(field).isNotNull();
return (ManagedTypes) field;
}
@Configuration(proxyBeanMethods = false)
@TestAutoConfigurationPackage(EmptyDataPackage.class)
@Import(CassandraMockConfiguration.class)
static | DataCassandraReactiveRepositoriesAutoConfigurationTests |
java | apache__maven | impl/maven-di/src/test/java/org/apache/maven/di/impl/InjectorImplTest.java | {
"start": 10523,
"end": 11067
} | class ____ {
@Provides
static ArrayList<String> newStringList() {
return new ArrayList<>(Arrays.asList("foo", "bar"));
}
@Provides
static String newStringOfList(List<String> list) {
return list.toString();
}
}
@Test
void testInjectConstructor() {
Injector injector = Injector.create().bindImplicit(InjectConstructorContainer.class);
assertNotNull(injector.getInstance(InjectConstructorContainer.Bean.class));
}
static | ProvidesContainer |
java | quarkusio__quarkus | independent-projects/tools/devtools-testing/src/test/java/io/quarkus/devtools/project/create/QuarkusPlatformReferencingArchivedUpstreamVersionTest.java | {
"start": 391,
"end": 8500
} | class ____ extends MultiplePlatformBomsTestBase {
private static final String DOWNSTREAM_PLATFORM_KEY = "io.downstream.platform";
private static final String UPSTREAM_PLATFORM_KEY = "io.upstream.platform";
@BeforeAll
public static void setup() throws Exception {
TestRegistryClientBuilder.newInstance()
//.debug()
.baseDir(configDir())
// registry
.newRegistry("downstream.registry.test")
.recognizedQuarkusVersions("*-downstream")
// platform key
.newPlatform(DOWNSTREAM_PLATFORM_KEY)
// 2.0 STREAM
.newStream("2.0")
// 2.0.4 release
.newRelease("2.0.4-downstream")
.quarkusVersion("2.2.2-downstream")
.upstreamQuarkusVersion("2.2.2")
// default bom including quarkus-core + essential metadata
.addCoreMember().release()
// foo platform member
.newMember("acme-foo-bom").addExtension("io.acme", "ext-a", "2.0.4-downstream").release().stream().platform()
.newStream("1.0")
// 1.0.4 release
.newRelease("1.0.4-downstream")
.quarkusVersion("1.1.1-downstream")
.upstreamQuarkusVersion("1.1.1")
// default bom including quarkus-core + essential metadata
.addCoreMember().release()
// foo platform member
.newMember("acme-foo-bom").addExtension("io.acme", "ext-a", "1.0.4-downstream").release()
.newMember("acme-e-bom").addExtension("io.acme", "ext-e", "1.0.4-downstream").release()
.stream().platform().registry()
.newNonPlatformCatalog("1.1.1-downstream").addExtension("io.acme", "ext-d", "4.0-downstream").registry()
.clientBuilder()
.newRegistry("upstream.registry.test")
// platform key
.newPlatform(UPSTREAM_PLATFORM_KEY)
// 2.0 STREAM
.newStream("2.0")
// 2.0.5 release
.newRelease("2.0.5")
.quarkusVersion("2.2.5")
// default bom including quarkus-core + essential metadata
.addCoreMember().release()
.newMember("acme-foo-bom").addExtension("io.acme", "ext-a", "2.0.5").release()
.newMember("acme-e-bom").addExtension("io.acme", "ext-e", "2.0.5").release()
.newMember("acme-bar-bom").addExtension("io.acme", "ext-b", "2.0.5").release().stream()
// 2.0.4 release
.newArchivedRelease("2.0.4")
.quarkusVersion("2.2.2")
// default bom including quarkus-core + essential metadata
.addCoreMember().release()
.newMember("acme-foo-bom").addExtension("io.acme", "ext-a", "2.0.4").release()
.newMember("acme-e-bom").addExtension("io.acme", "ext-e", "2.0.4").release()
.newMember("acme-bar-bom").addExtension("io.acme", "ext-b", "2.0.4").release().stream().platform()
// 1.0 STREAM
.newStream("1.0")
.newRelease("1.0.5")
.quarkusVersion("1.1.5")
// default bom including quarkus-core + essential metadata
.addCoreMember().release()
.newMember("acme-foo-bom").addExtension("io.acme", "ext-a", "1.0.5").addExtension("io.acme", "ext-e", "1.0.5")
.release()
.newMember("acme-bar-bom").addExtension("io.acme", "ext-b", "1.0.5").release()
.stream()
.newArchivedRelease("1.0.4")
.quarkusVersion("1.1.1")
// default bom including quarkus-core + essential metadata
.addCoreMember().release()
.newMember("acme-foo-bom").addExtension("io.acme", "ext-a", "1.0.4").addExtension("io.acme", "ext-e", "1.0.4")
.release()
.newMember("acme-bar-bom").addExtension("io.acme", "ext-b", "1.0.4").release()
.stream().platform().registry()
.newNonPlatformCatalog("2.2.2").addExtension("io.acme", "ext-c", "5.1").addExtension("io.acme", "ext-d", "6.0")
.registry()
.clientBuilder()
.build();
enableRegistryClient();
}
protected String getMainPlatformKey() {
return DOWNSTREAM_PLATFORM_KEY;
}
@Test
public void addExtensionsFromAlreadyImportedPlatform() throws Exception {
final Path projectDir = newProjectDir("downstream-upstream-platform");
createProject(projectDir, Arrays.asList("ext-a"));
assertModel(projectDir,
toPlatformBomCoords("acme-foo-bom"),
List.of(ArtifactCoords.jar("io.acme", "ext-a", null)),
"2.0.4-downstream");
addExtensions(projectDir, Arrays.asList("ext-b", "ext-c", "ext-d", "ext-e"));
assertModel(projectDir,
List.of(mainPlatformBom(), platformMemberBomCoords("acme-foo-bom"),
ArtifactCoords.pom(UPSTREAM_PLATFORM_KEY, "acme-bar-bom", "2.0.4"),
ArtifactCoords.pom(UPSTREAM_PLATFORM_KEY, "acme-e-bom", "2.0.4")),
List.of(ArtifactCoords.jar("io.acme", "ext-a", null),
ArtifactCoords.jar("io.acme", "ext-b", null),
ArtifactCoords.jar("io.acme", "ext-e", null),
ArtifactCoords.jar("io.acme", "ext-c", "5.1"),
ArtifactCoords.jar("io.acme", "ext-d", "6.0")),
"2.0.4-downstream");
}
@Test
public void createWithExtensionsFromDifferentPlatforms() throws Exception {
final Path projectDir = newProjectDir("create-downstream-upstream-platform");
createProject(projectDir, Arrays.asList("ext-a", "ext-b"));
assertModel(projectDir,
List.of(mainPlatformBom(), platformMemberBomCoords("acme-foo-bom"),
ArtifactCoords.pom(UPSTREAM_PLATFORM_KEY, "acme-bar-bom", "2.0.4")),
List.of(ArtifactCoords.jar("io.acme", "ext-a", null),
ArtifactCoords.jar("io.acme", "ext-b", null)),
"2.0.4-downstream");
}
@Test
public void createPreferringOlderStreamToNewerStreamCoveringLessExtensions() throws Exception {
final Path projectDir = newProjectDir("create-downstream-upstream-platform");
createProject(projectDir, Arrays.asList("ext-a", "ext-b", "ext-e"));
assertModel(projectDir,
List.of(mainPlatformBom(), platformMemberBomCoords("acme-foo-bom"), platformMemberBomCoords("acme-e-bom"),
ArtifactCoords.pom(UPSTREAM_PLATFORM_KEY, "acme-bar-bom", "1.0.4")),
List.of(ArtifactCoords.jar("io.acme", "ext-a", null),
ArtifactCoords.jar("io.acme", "ext-b", null),
ArtifactCoords.jar("io.acme", "ext-e", null)),
"1.0.4-downstream");
}
@Test
public void createUsingStream2_0() throws Exception {
final Path projectDir = newProjectDir("created-using-downstream-stream");
createProject(projectDir, new PlatformStreamCoords(DOWNSTREAM_PLATFORM_KEY, "2.0"),
Arrays.asList("ext-a", "ext-b", "ext-e"));
assertModel(projectDir,
List.of(mainPlatformBom(), platformMemberBomCoords("acme-foo-bom"),
ArtifactCoords.pom(UPSTREAM_PLATFORM_KEY, "acme-e-bom", "2.0.4"),
ArtifactCoords.pom(UPSTREAM_PLATFORM_KEY, "acme-bar-bom", "2.0.4")),
List.of(ArtifactCoords.jar("io.acme", "ext-a", null),
ArtifactCoords.jar("io.acme", "ext-b", null),
ArtifactCoords.jar("io.acme", "ext-e", null)),
"2.0.4-downstream");
}
}
| QuarkusPlatformReferencingArchivedUpstreamVersionTest |
java | apache__camel | components/camel-paho-mqtt5/src/test/java/org/apache/camel/component/paho/mqtt5/PahoMqtt5SendDynamicAwareTest.java | {
"start": 1125,
"end": 2659
} | class ____ extends CamelTestSupport {
PahoMqtt5SendDynamicAware pahoMqtt5SendDynamicAware;
public void doPostSetup() {
this.pahoMqtt5SendDynamicAware = new PahoMqtt5SendDynamicAware();
}
@Test
public void testUriParsing() throws Exception {
this.pahoMqtt5SendDynamicAware.setScheme("paho-mqtt5");
Exchange exchange = createExchangeWithBody("The Body");
SendDynamicAware.DynamicAwareEntry entry
= new SendDynamicAware.DynamicAwareEntry("paho-mqtt5:destination", "paho-mqtt5:${header.test}", null, null);
Processor processor = this.pahoMqtt5SendDynamicAware.createPreProcessor(createExchangeWithBody("Body"), entry);
processor.process(exchange);
assertEquals("destination", exchange.getMessage().getHeader(PahoMqtt5Constants.CAMEL_PAHO_OVERRIDE_TOPIC));
}
@Test
public void testSlashedUriParsing() throws Exception {
this.pahoMqtt5SendDynamicAware.setScheme("paho-mqtt5");
Exchange exchange = createExchangeWithBody("The Body");
SendDynamicAware.DynamicAwareEntry entry
= new SendDynamicAware.DynamicAwareEntry("paho-mqtt5://destination", "paho-mqtt5://${header.test}", null, null);
Processor processor = this.pahoMqtt5SendDynamicAware.createPreProcessor(createExchangeWithBody("Body"), entry);
processor.process(exchange);
assertEquals("destination", exchange.getMessage().getHeader(PahoMqtt5Constants.CAMEL_PAHO_OVERRIDE_TOPIC));
}
}
| PahoMqtt5SendDynamicAwareTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/converted/converter/SimpleConvertsAnnotationTest.java | {
"start": 1348,
"end": 2532
} | class ____ {
@Test
public void testSimpleConvertsUsage(SessionFactoryScope scope) {
final EntityPersister ep = scope.getSessionFactory().getMappingMetamodel().getEntityDescriptor(Entity1.class.getName());
final Type websitePropertyType = ep.getPropertyType( "website" );
final ConvertedBasicTypeImpl type = assertTyping(
ConvertedBasicTypeImpl.class,
websitePropertyType
);
final JpaAttributeConverter converter = (JpaAttributeConverter) type.getValueConverter();
assertTrue( UrlConverter.class.isAssignableFrom( converter.getConverterJavaType().getJavaTypeClass() ) );
resetFlags();
scope.inTransaction( session -> {
try {
session.persist( new Entity1(1, "1", URI.create("http://hibernate.org" ).toURL()) );
}
catch (MalformedURLException e) {
// Ignore
}
} );
assertTrue( convertToDatabaseColumnCalled );
scope.dropData();
}
static boolean convertToDatabaseColumnCalled = false;
static boolean convertToEntityAttributeCalled = false;
private void resetFlags() {
convertToDatabaseColumnCalled = false;
convertToEntityAttributeCalled = false;
}
@Converter(autoApply = false)
public static | SimpleConvertsAnnotationTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/manytomany/PageAdvertisement.java | {
"start": 297,
"end": 493
} | class ____ extends SubjectAdvertisement {
@Id
@GeneratedValue
private Integer id;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
}
| PageAdvertisement |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StYErrorTests.java | {
"start": 800,
"end": 1393
} | class ____ extends ErrorsForCasesWithoutExamplesTestCase {
@Override
protected List<TestCaseSupplier> cases() {
return paramsToSuppliers(StYTests.parameters());
}
@Override
protected Expression build(Source source, List<Expression> args) {
return new StY(source, args.get(0));
}
@Override
protected Matcher<String> expectedTypeErrorMatcher(List<Set<DataType>> validPerPosition, List<DataType> signature) {
return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, p) -> "geo_point or cartesian_point"));
}
}
| StYErrorTests |
java | apache__kafka | metadata/src/main/java/org/apache/kafka/controller/PartitionReassignmentReplicas.java | {
"start": 1156,
"end": 5225
} | class ____ {
private final List<Integer> removing;
private final List<Integer> adding;
private final List<Integer> replicas;
public PartitionReassignmentReplicas(
List<Integer> removing,
List<Integer> adding,
List<Integer> replicas
) {
this.removing = removing;
this.adding = adding;
this.replicas = replicas;
}
private static Set<Integer> calculateDifference(List<Integer> a, List<Integer> b) {
Set<Integer> result = new TreeSet<>(a);
result.removeAll(b);
return result;
}
PartitionReassignmentReplicas(
PartitionAssignment currentAssignment,
PartitionAssignment targetAssignment
) {
Set<Integer> removing = calculateDifference(currentAssignment.replicas(), targetAssignment.replicas());
this.removing = new ArrayList<>(removing);
Set<Integer> adding = calculateDifference(targetAssignment.replicas(), currentAssignment.replicas());
this.adding = new ArrayList<>(adding);
this.replicas = new ArrayList<>(targetAssignment.replicas());
this.replicas.addAll(removing);
}
List<Integer> removing() {
return removing;
}
List<Integer> adding() {
return adding;
}
List<Integer> replicas() {
return replicas;
}
boolean isReassignmentInProgress() {
return isReassignmentInProgress(
removing,
adding);
}
static boolean isReassignmentInProgress(PartitionRegistration part) {
return isReassignmentInProgress(
Replicas.toList(part.removingReplicas),
Replicas.toList(part.addingReplicas));
}
private static boolean isReassignmentInProgress(
List<Integer> removingReplicas,
List<Integer> addingReplicas
) {
return !removingReplicas.isEmpty() || !addingReplicas.isEmpty();
}
Optional<CompletedReassignment> maybeCompleteReassignment(List<Integer> targetIsr) {
// Check if there is a reassignment to complete.
if (!isReassignmentInProgress()) {
return Optional.empty();
}
List<Integer> newTargetIsr = new ArrayList<>(targetIsr);
List<Integer> newTargetReplicas = replicas;
if (!removing.isEmpty()) {
newTargetIsr = new ArrayList<>(targetIsr.size());
for (int replica : targetIsr) {
if (!removing.contains(replica)) {
newTargetIsr.add(replica);
}
}
if (newTargetIsr.isEmpty()) return Optional.empty();
newTargetReplicas = new ArrayList<>(replicas.size());
for (int replica : replicas) {
if (!removing.contains(replica)) {
newTargetReplicas.add(replica);
}
}
if (newTargetReplicas.isEmpty()) return Optional.empty();
}
if (!newTargetIsr.containsAll(newTargetReplicas)) return Optional.empty();
return Optional.of(
new CompletedReassignment(
newTargetReplicas,
newTargetIsr
)
);
}
record CompletedReassignment(List<Integer> replicas, List<Integer> isr) {
}
List<Integer> originalReplicas() {
List<Integer> replicas = new ArrayList<>(this.replicas);
replicas.removeAll(adding);
return replicas;
}
@Override
public int hashCode() {
return Objects.hash(removing, adding, replicas);
}
@Override
public boolean equals(Object o) {
if (!(o instanceof PartitionReassignmentReplicas other)) return false;
return removing.equals(other.removing) &&
adding.equals(other.adding) &&
replicas.equals(other.replicas);
}
@Override
public String toString() {
return "PartitionReassignmentReplicas(" +
"removing=" + removing + ", " +
"adding=" + adding + ", " +
"replicas=" + replicas + ")";
}
}
| PartitionReassignmentReplicas |
java | grpc__grpc-java | grpclb/src/main/java/io/grpc/grpclb/GrpclbState.java | {
"start": 40148,
"end": 40893
} | class ____ implements RoundRobinEntry {
final PickResult result;
ErrorEntry(Status status) {
result = PickResult.withError(status);
}
@Override
public PickResult picked(Metadata headers) {
return result;
}
@Override
public int hashCode() {
return Objects.hashCode(result);
}
@Override
public boolean equals(Object other) {
if (!(other instanceof ErrorEntry)) {
return false;
}
return Objects.equal(result, ((ErrorEntry) other).result);
}
@Override
public String toString() {
// This is printed in logs. Only include useful information.
return result.getStatus().toString();
}
}
@VisibleForTesting
static final | ErrorEntry |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/cid/NestedCompositeIdWithOrderedUpdatesTest.java | {
"start": 3535,
"end": 4252
} | class ____
{
@ManyToOne(cascade={}, // cascade nothing
fetch=FetchType.LAZY,
optional=false)
private C c;
/**
* "key" won't work because h2 database considers it a reserved word, and hibernate doesn't escape it.
* furthermore, the variable name must be after {@link #c}, alphabetically, to account for hibernate's internal sorting.
*/
private String zkey;
public BId()
{
}
public BId(C c, String key)
{
this.c=c;
this.zkey=key;
}
public C getC()
{
return c;
}
public void setC(C c)
{
this.c=c;
}
public String getZkey()
{
return zkey;
}
public void setZkey(String zkey)
{
this.zkey=zkey;
}
}
@Entity(name = "C")
public static | BId |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/bug/SerDeserTest.java | {
"start": 935,
"end": 2952
} | class ____ extends TestCase {
protected void setUp() throws Exception {
com.alibaba.fastjson.parser.ParserConfig.global.addAccept("com.alibaba.json.bvtVO.");
}
/** 用于被FastJson序列和反序列化的对象 */
private static Map<OptionKey, OptionValue<?>> options;
static {
options = new HashMap<OptionKey, OptionValue<?>>();
TempAttachMetaOption attach = new TempAttachMetaOption();
attach.setId(1000);
attach.setName("test_name");
attach.setPath("http://alibaba-inc.com/test.txt");
ArrayList<TempAttachMetaOption> attachList = new ArrayList<TempAttachMetaOption>();
attachList.add(attach);
// 设置value
OptionValue<ArrayList<TempAttachMetaOption>> optionValue = new OptionValue<ArrayList<TempAttachMetaOption>>();
optionValue.setValue(attachList);
options.put(OptionKey.TEMPALTE_ATTACH_META, optionValue);
}
public void test_for_yaolei() {
// 序列化toJSONString()
String jsonString = JSON.toJSONString(options);
System.out.println(jsonString);
{
// 反序列化parse()
HashMap<OptionKey, OptionValue<?>> deserOptions = (HashMap<OptionKey, OptionValue<?>>) JSON.parseObject(jsonString,
new TypeReference<HashMap<OptionKey, OptionValue<?>>>() {
});
System.out.println(deserOptions.get(OptionKey.TEMPALTE_ATTACH_META));
}
// 序列化toJSONString(,)
jsonString = JSON.toJSONString(options, SerializerFeature.WriteClassName);
System.out.println(jsonString);
// 反序列化parse()
HashMap<OptionKey, OptionValue<?>> deserOptions = (HashMap<OptionKey, OptionValue<?>>) JSON.parse(jsonString);
System.out.println(deserOptions.get(OptionKey.TEMPALTE_ATTACH_META));
}
}
| SerDeserTest |
java | spring-projects__spring-boot | module/spring-boot-micrometer-metrics/src/test/java/org/springframework/boot/micrometer/metrics/autoconfigure/export/otlp/OtlpMetricsExportAutoConfigurationTests.java | {
"start": 1977,
"end": 6948
} | class ____ {
private final ApplicationContextRunner contextRunner = new ApplicationContextRunner()
.withConfiguration(AutoConfigurations.of(OtlpMetricsExportAutoConfiguration.class));
@Test
void backsOffWithoutAClock() {
this.contextRunner.run((context) -> assertThat(context).doesNotHaveBean(OtlpMeterRegistry.class));
}
@Test
void autoConfiguresConfigAndMeterRegistry() {
this.contextRunner.withUserConfiguration(BaseConfiguration.class)
.run((context) -> assertThat(context).hasSingleBean(OtlpMeterRegistry.class)
.hasSingleBean(OtlpConfig.class));
}
@Test
void autoConfigurationCanBeDisabledWithDefaultsEnabledProperty() {
this.contextRunner.withUserConfiguration(BaseConfiguration.class)
.withPropertyValues("management.defaults.metrics.export.enabled=false")
.run((context) -> assertThat(context).doesNotHaveBean(OtlpMeterRegistry.class)
.doesNotHaveBean(OtlpConfig.class));
}
@Test
void autoConfigurationCanBeDisabledWithSpecificEnabledProperty() {
this.contextRunner.withUserConfiguration(BaseConfiguration.class)
.withPropertyValues("management.otlp.metrics.export.enabled=false")
.run((context) -> assertThat(context).doesNotHaveBean(OtlpMeterRegistry.class)
.doesNotHaveBean(OtlpConfig.class));
}
@Test
void allowsCustomConfigToBeUsed() {
this.contextRunner.withUserConfiguration(CustomConfigConfiguration.class)
.run((context) -> assertThat(context).hasSingleBean(OtlpMeterRegistry.class)
.hasSingleBean(OtlpConfig.class)
.hasBean("customConfig"));
}
@Test
void allowsPlatformThreadsToBeUsed() {
this.contextRunner.withUserConfiguration(BaseConfiguration.class).run((context) -> {
assertThat(context).hasSingleBean(OtlpMeterRegistry.class);
OtlpMeterRegistry registry = context.getBean(OtlpMeterRegistry.class);
assertThat(registry).extracting("scheduledExecutorService")
.satisfies((executor) -> ScheduledExecutorServiceAssert.assertThat((ScheduledExecutorService) executor)
.usesPlatformThreads());
});
}
@Test
@EnabledForJreRange(min = JRE.JAVA_21)
void allowsVirtualThreadsToBeUsed() {
this.contextRunner.withUserConfiguration(BaseConfiguration.class)
.withPropertyValues("spring.threads.virtual.enabled=true")
.run((context) -> {
assertThat(context).hasSingleBean(OtlpMeterRegistry.class);
OtlpMeterRegistry registry = context.getBean(OtlpMeterRegistry.class);
assertThat(registry).extracting("scheduledExecutorService")
.satisfies(
(executor) -> ScheduledExecutorServiceAssert.assertThat((ScheduledExecutorService) executor)
.usesVirtualThreads());
});
}
@Test
void allowsRegistryToBeCustomized() {
this.contextRunner.withUserConfiguration(CustomRegistryConfiguration.class)
.run((context) -> assertThat(context).hasSingleBean(OtlpMeterRegistry.class)
.hasSingleBean(OtlpConfig.class)
.hasBean("customRegistry"));
}
@Test
void definesPropertiesBasedConnectionDetailsByDefault() {
this.contextRunner.withUserConfiguration(BaseConfiguration.class)
.run((context) -> assertThat(context).hasSingleBean(PropertiesOtlpMetricsConnectionDetails.class));
}
@Test
void testConnectionFactoryWithOverridesWhenUsingCustomConnectionDetails() {
this.contextRunner.withUserConfiguration(BaseConfiguration.class, ConnectionDetailsConfiguration.class)
.run((context) -> {
assertThat(context).hasSingleBean(OtlpMetricsConnectionDetails.class)
.doesNotHaveBean(PropertiesOtlpMetricsConnectionDetails.class);
OtlpConfig config = context.getBean(OtlpConfig.class);
assertThat(config.url()).isEqualTo("http://localhost:12345/v1/metrics");
});
}
@Test
void allowsCustomMetricsSenderToBeUsed() {
this.contextRunner.withUserConfiguration(BaseConfiguration.class, CustomMetricsSenderConfiguration.class)
.run(this::assertHasCustomMetricsSender);
}
@Test
@EnabledForJreRange(min = JRE.JAVA_21)
void allowsCustomMetricsSenderToBeUsedWithVirtualThreads() {
this.contextRunner.withUserConfiguration(BaseConfiguration.class, CustomMetricsSenderConfiguration.class)
.withPropertyValues("spring.threads.virtual.enabled=true")
.run(this::assertHasCustomMetricsSender);
}
@Test
void shouldBackOffIfSpringBootOpenTelemetryIsMissing() {
this.contextRunner.withUserConfiguration(BaseConfiguration.class)
.withClassLoader(new FilteredClassLoader("org.springframework.boot.opentelemetry"))
.run((context) -> assertThat(context).doesNotHaveBean(OtlpMetricsExportAutoConfiguration.class));
}
private void assertHasCustomMetricsSender(AssertableApplicationContext context) {
assertThat(context).hasSingleBean(OtlpMeterRegistry.class);
OtlpMeterRegistry registry = context.getBean(OtlpMeterRegistry.class);
assertThat(registry).extracting("metricsSender")
.satisfies((sender) -> assertThat(sender).isSameAs(CustomMetricsSenderConfiguration.customMetricsSender));
}
@Configuration(proxyBeanMethods = false)
static | OtlpMetricsExportAutoConfigurationTests |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/HdfsDataOutputStream.java | {
"start": 1412,
"end": 3742
} | class ____ extends FSDataOutputStream {
public HdfsDataOutputStream(DFSOutputStream out, FileSystem.Statistics stats,
long startPosition) throws IOException {
super(out, stats, startPosition);
}
public HdfsDataOutputStream(DFSOutputStream out, FileSystem.Statistics stats)
throws IOException {
this(out, stats, 0L);
}
public HdfsDataOutputStream(CryptoOutputStream out,
FileSystem.Statistics stats, long startPosition) throws IOException {
super(out, stats, startPosition);
Preconditions.checkArgument(
out.getWrappedStream() instanceof DFSOutputStream,
"CryptoOutputStream should wrap a DFSOutputStream");
}
public HdfsDataOutputStream(CryptoOutputStream out,
FileSystem.Statistics stats) throws IOException {
this(out, stats, 0L);
}
/**
* Get the actual number of replicas of the current block.
*
* This can be different from the designated replication factor of the file
* because the namenode does not maintain replication for the blocks which are
* currently being written to. Depending on the configuration, the client may
* continue to write to a block even if a few datanodes in the write pipeline
* have failed, or the client may add a new datanodes once a datanode has
* failed.
*
* @return the number of valid replicas of the current block
*/
public synchronized int getCurrentBlockReplication() throws IOException {
OutputStream wrappedStream = getWrappedStream();
if (wrappedStream instanceof CryptoOutputStream) {
wrappedStream = ((CryptoOutputStream) wrappedStream).getWrappedStream();
}
return ((DFSOutputStream) wrappedStream).getCurrentBlockReplication();
}
/**
* Sync buffered data to DataNodes (flush to disk devices).
*
* @param syncFlags
* Indicate the detailed semantic and actions of the hsync.
* @throws IOException
* @see FSDataOutputStream#hsync()
*/
public void hsync(EnumSet<SyncFlag> syncFlags) throws IOException {
OutputStream wrappedStream = getWrappedStream();
if (wrappedStream instanceof CryptoOutputStream) {
wrappedStream.flush();
wrappedStream = ((CryptoOutputStream) wrappedStream).getWrappedStream();
}
((DFSOutputStream) wrappedStream).hsync(syncFlags);
}
public | HdfsDataOutputStream |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java | {
"start": 11584,
"end": 11969
} | class ____ extends BaseTasksRequest<UnblockTestTasksRequest> {
UnblockTestTasksRequest() {}
UnblockTestTasksRequest(StreamInput in) throws IOException {
super(in);
}
@Override
public boolean match(Task task) {
return task instanceof TestTask && super.match(task);
}
}
public static | UnblockTestTasksRequest |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/entities/collection/MultipleCollectionRefEntity2.java | {
"start": 596,
"end": 2571
} | class ____ {
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
@Column(name = "ID", length = 10)
private Long id;
@Version
@Column(name = "VERSION", nullable = false)
private Integer version;
@Column(name = "TEXT", length = 50, nullable = false)
private String text;
@ManyToOne
@JoinColumn(name = "MCE_ID", nullable = false, insertable = false, updatable = false,
foreignKey = @ForeignKey(name = "FK_RE2_MCE"))
@NotAudited
private MultipleCollectionEntity multipleCollectionEntity;
@Column(name = "MCE_ID", insertable = false, updatable = false)
@NotAudited
private Long multipleCollectionEntityId;
public Long getId() {
return id;
}
public Integer getVersion() {
return version;
}
public String getText() {
return text;
}
public void setText(String text) {
this.text = text;
}
public MultipleCollectionEntity getMultipleCollectionEntity() {
return multipleCollectionEntity;
}
public void setMultipleCollectionEntity(MultipleCollectionEntity multipleCollectionEntity) {
this.multipleCollectionEntity = multipleCollectionEntity;
}
public Long getMultipleCollectionEntityId() {
return multipleCollectionEntityId;
}
public void setMultipleCollectionEntityId(Long multipleCollectionEntityId) {
this.multipleCollectionEntityId = multipleCollectionEntityId;
}
@Override
public String toString() {
return "MultipleCollectionRefEntity2 [id=" + id + ", text=" + text
+ ", multipleCollectionEntityId=" + multipleCollectionEntityId
+ "]";
}
@Override
public boolean equals(Object o) {
if ( this == o ) {
return true;
}
if ( !(o instanceof MultipleCollectionRefEntity2) ) {
return false;
}
MultipleCollectionRefEntity2 that = (MultipleCollectionRefEntity2) o;
if ( text != null ? !text.equals( that.text ) : that.text != null ) {
return false;
}
return true;
}
@Override
public int hashCode() {
return text != null ? text.hashCode() : 0;
}
}
| MultipleCollectionRefEntity2 |
java | apache__logging-log4j2 | log4j-layout-template-json/src/main/java/org/apache/logging/log4j/layout/template/json/resolver/TemplateResolvers.java | {
"start": 1271,
"end": 1430
} | class ____ {
private TemplateResolvers() {}
private static final String RESOLVER_FIELD_NAME = "$resolver";
private abstract static | TemplateResolvers |
java | apache__camel | core/camel-management-api/src/main/java/org/apache/camel/api/management/mbean/ManagedRouteControllerMBean.java | {
"start": 946,
"end": 1558
} | interface ____ extends ManagedServiceMBean {
@ManagedAttribute(description = "Indicates whether the route controller is doing initial starting of the routes.")
boolean isStartingRoutes();
@ManagedAttribute(description = "Indicates if the route controller has routes that are currently unhealthy")
boolean isHasUnhealthyRoutes();
@ManagedAttribute(description = "Controlled Routes")
Collection<String> getControlledRoutes();
@ManagedAttribute(description = "Logging level used for logging route startup activity")
String getRouteStartupLoggingLevel();
}
| ManagedRouteControllerMBean |
java | apache__flink | flink-core/src/test/java/org/apache/flink/util/OutputTagTest.java | {
"start": 1042,
"end": 1828
} | class ____ {
@Test
void testNullRejected() {
assertThatThrownBy(() -> new OutputTag<Integer>(null))
.isInstanceOf(NullPointerException.class);
}
@Test
void testNullRejectedWithTypeInfo() {
assertThatThrownBy(() -> new OutputTag<>(null, BasicTypeInfo.INT_TYPE_INFO))
.isInstanceOf(NullPointerException.class);
}
@Test
void testEmptyStringRejected() {
assertThatThrownBy(() -> new OutputTag<Integer>(""))
.isInstanceOf(IllegalArgumentException.class);
}
@Test
void testEmptyStringRejectedWithTypeInfo() {
assertThatThrownBy(() -> new OutputTag<>("", BasicTypeInfo.INT_TYPE_INFO))
.isInstanceOf(IllegalArgumentException.class);
}
}
| OutputTagTest |
java | google__dagger | javatests/dagger/internal/codegen/ProductionComponentProcessorTest.java | {
"start": 4038,
"end": 4382
} | interface ____ abstract class");
});
}
@Test public void nonModuleModule() {
Source componentFile =
CompilerTests.javaSource("test.NotAComponent",
"package test;",
"",
"import dagger.producers.ProductionComponent;",
"",
"@ProductionComponent(modules = Object.class)",
" | or |
java | elastic__elasticsearch | modules/lang-painless/src/test/java/org/elasticsearch/painless/UserFunctionTests.java | {
"start": 559,
"end": 8506
} | class ____ extends ScriptTestCase {
public void testZeroArgumentUserFunction() {
String source = "def twofive() { return 25; } twofive()";
assertEquals(25, exec(source));
}
public void testUserFunctionDefCallRef() {
String source = """
String getSource() { 'source'; }
int myCompare(int a, int b) { getMulti() * Integer.compare(a, b) }
int getMulti() { return -1 }
def l = [1, 100, -100];
if (myCompare(10, 50) > 0) { l.add(50 + getMulti()) }
l.sort(this::myCompare);
if (l[0] == 100) { l.remove(l.size() - 1) ; l.sort((a, b) -> -1 * myCompare(a, b)) }\s
if (getSource().startsWith('sour')) { l.add(255); }
return l;""";
assertEquals(List.of(1, 49, 100, 255), exec(source));
assertBytecodeExists(source, "public &getSource()Ljava/lang/String");
assertBytecodeExists(source, "public &getMulti()I");
assertBytecodeExists(source, "INVOKEVIRTUAL org/elasticsearch/painless/PainlessScript$Script.&getMulti ()I");
assertBytecodeExists(source, "public &myCompare(II)I");
assertBytecodeExists(source, "INVOKEVIRTUAL org/elasticsearch/painless/PainlessScript$Script.&myCompare (II)I");
}
public void testChainedUserMethods() {
String source = """
int myCompare(int a, int b) { getMulti() * (a - b) }
int getMulti() { -1 }
List l = [1, 100, -100];
l.sort(this::myCompare);
l;
""";
assertEquals(List.of(100, 1, -100), exec(source, Map.of("a", 1), false));
}
public void testChainedUserMethodsLambda() {
String source = """
int myCompare(int a, int b) { getMulti() * (a - b) }
int getMulti() { -1 }
List l = [1, 100, -100];
l.sort((a, b) -> myCompare(a, b));
l;
""";
assertEquals(List.of(100, 1, -100), exec(source, Map.of("a", 1), false));
}
public void testChainedUserMethodsDef() {
String source = """
int myCompare(int a, int b) { getMulti() * (a - b) }
int getMulti() { -1 }
def l = [1, 100, -100];
l.sort(this::myCompare);
l;
""";
assertEquals(List.of(100, 1, -100), exec(source, Map.of("a", 1), false));
}
public void testChainedUserMethodsLambdaDef() {
String source = """
int myCompare(int a, int b) { getMulti() * (a - b) }
int getMulti() { -1 }
def l = [1, 100, -100];
l.sort((a, b) -> myCompare(a, b));
l;
""";
assertEquals(List.of(100, 1, -100), exec(source, Map.of("a", 1), false));
}
public void testChainedUserMethodsLambdaCaptureDef() {
String source = """
int myCompare(int a, int b, int x, int m) { getMulti(m) * (a - b + x) }
int getMulti(int m) { -1 * m }
def l = [1, 100, -100];
int cx = 100;
int cm = 1;
l.sort((a, b) -> myCompare(a, b, cx, cm));
l;
""";
assertEquals(List.of(100, 1, -100), exec(source, Map.of("a", 1), false));
}
public void testMethodReferenceInUserFunction() {
String source = """
int myCompare(int a, int b, String s) { Map m = ['f': 5]; a - b + m.computeIfAbsent(s, this::getLength) }
int getLength(String s) { s.length() }
def l = [1, 0, -2];
String s = 'g';
l.sort((a, b) -> myCompare(a, b, s));
l;
""";
assertEquals(List.of(-2, 1, 0), exec(source, Map.of("a", 1), false));
}
public void testUserFunctionVirtual() {
String source = "int myCompare(int x, int y) { return -1 * (x - y) }\n" + "return myCompare(100, 90);";
assertEquals(-10, exec(source, Map.of("a", 1), false));
assertBytecodeExists(source, "INVOKEVIRTUAL org/elasticsearch/painless/PainlessScript$Script.&myCompare (II)I");
}
public void testUserFunctionRef() {
String source = """
int myCompare(int x, int y) { return -1 * x - y }
List l = [1, 100, -100];
l.sort(this::myCompare);
return l;""";
assertEquals(List.of(100, 1, -100), exec(source, Map.of("a", 1), false));
assertBytecodeExists(source, "public &myCompare(II)I");
}
public void testUserFunctionRefEmpty() {
String source = """
int myCompare(int x, int y) { return -1 * x - y }
[].sort((a, b) -> myCompare(a, b));
""";
assertNull(exec(source, Map.of("a", 1), false));
assertBytecodeExists(source, "public &myCompare(II)I");
assertBytecodeExists(source, "INVOKEVIRTUAL org/elasticsearch/painless/PainlessScript$Script.&myCompare (II)I");
}
public void testUserFunctionCallInLambda() {
String source = """
int myCompare(int x, int y) { -1 * ( x - y ) }
List l = [1, 100, -100];
l.sort((a, b) -> myCompare(a, b));
return l;""";
assertEquals(List.of(100, 1, -100), exec(source, Map.of("a", 1), false));
assertBytecodeExists(source, "public &myCompare(II)I");
assertBytecodeExists(source, "INVOKEVIRTUAL org/elasticsearch/painless/PainlessScript$Script.&myCompare (II)I");
}
public void testUserFunctionLambdaCapture() {
String source = """
int myCompare(Object o, int x, int y) { return o != null ? -1 * ( x - y ) : ( x - y ) }
List l = [1, 100, -100];
Object q = '';
l.sort((a, b) -> myCompare(q, a, b));
return l;""";
assertEquals(List.of(100, 1, -100), exec(source, Map.of("a", 1), false));
assertBytecodeExists(source, "public &myCompare(Ljava/lang/Object;II)I");
assertBytecodeExists(source, "INVOKEVIRTUAL org/elasticsearch/painless/PainlessScript$Script.&myCompare (Ljava/lang/Object;II)I");
}
public void testLambdaCapture() {
String source = """
List l = [1, 100, -100];
int q = -1;
l.sort((a, b) -> q * ( a - b ));
return l;""";
assertEquals(List.of(100, 1, -100), exec(source, Map.of("a", 1), false));
assertBytecodeExists(source, "public static synthetic lambda$synthetic$0(ILjava/lang/Object;Ljava/lang/Object;)I");
}
public void testCallUserMethodFromStatementWithinLambda() {
String source = ""
+ "int test1() { return 1; }"
+ "void test(Map params) { "
+ " int i = 0;"
+ " params.forEach("
+ " (k, v) -> { if (i == 0) { test1() } else { 20 } }"
+ " );"
+ "}"
+ "test(params)";
assertNull(exec(source, Map.of("a", 5), false));
assertBytecodeExists(source, "public synthetic lambda$synthetic$0(ILjava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;");
}
public void testCallUserMethodFromStatementWithinNestedLambda() {
String source = ""
+ "int test1() { return 1; }"
+ "void test(Map params) { "
+ " int i = 0;"
+ " int j = 5;"
+ " params.replaceAll( "
+ " (n, m) -> {"
+ " m.forEach("
+ " (k, v) -> { if (i == 0) { test1() } else { 20 } }"
+ " );"
+ " return ['aaa': j];"
+ " }"
+ " );"
+ "}"
+ "Map myParams = new HashMap(params);"
+ "test(myParams);"
+ "myParams['a']['aaa']";
assertEquals(5, exec(source, Map.of("a", Map.of("b", 1)), false));
assertBytecodeExists(source, "public synthetic lambda$synthetic$1(IILjava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;");
}
}
| UserFunctionTests |
java | spring-projects__spring-boot | module/spring-boot-web-server/src/test/java/org/springframework/boot/web/server/servlet/context/testcomponents/listener/TestListener.java | {
"start": 1152,
"end": 1601
} | class ____ implements ServletContextListener {
@Override
public void contextInitialized(ServletContextEvent sce) {
sce.getServletContext()
.addFilter("listenerAddedFilter", new ListenerAddedFilter())
.addMappingForUrlPatterns(EnumSet.of(DispatcherType.REQUEST), true, "/*");
sce.getServletContext().setAttribute("listenerAttribute", "alpha");
}
@Override
public void contextDestroyed(ServletContextEvent sce) {
}
static | TestListener |
java | apache__camel | catalog/camel-route-parser/src/test/java/org/apache/camel/parser/java/RoasterJavaRestDslTest.java | {
"start": 1372,
"end": 5399
} | class ____ extends CamelTestSupport {
@Test
void parseRestConfiguration() throws Exception {
JavaClassSource clazz = (JavaClassSource) Roaster
.parse(new File("src/test/java/org/apache/camel/parser/java/MyRestDslRouteBuilder.java"));
List<RestConfigurationDetails> list = RestDslParser.parseRestConfiguration(clazz,
"src/test/java/org/apache/camel/parser/java/MyRestDslRouteBuilder.java", true);
assertEquals(1, list.size());
RestConfigurationDetails details = list.get(0);
assertEquals("27", details.getLineNumber());
assertEquals("41", details.getLineNumberEnd());
assertEquals("src/test/java/org/apache/camel/parser/java/MyRestDslRouteBuilder.java", details.getFileName());
assertEquals("configure", details.getMethodName());
assertEquals("org.apache.camel.parser.java.MyRestDslRouteBuilder", details.getClassName());
assertEquals("1234", details.getPort());
assertEquals("myapi", details.getContextPath());
assertEquals("jetty", details.getComponent());
assertEquals("json", details.getBindingMode());
assertEquals("swagger", details.getApiComponent());
assertEquals("myapi/swagger", details.getApiContextPath());
assertEquals("localhost", details.getApiHost());
assertEquals("true", details.getSkipBindingOnErrorCode());
assertEquals("https", details.getScheme());
assertEquals("allLocalIp", details.getHostNameResolver());
assertEquals(1, details.getComponentProperties().size());
assertEquals("123", details.getComponentProperties().get("foo"));
assertEquals(1, details.getEndpointProperties().size());
assertEquals("false", details.getEndpointProperties().get("pretty"));
assertEquals(1, details.getEndpointProperties().size());
assertEquals("456", details.getConsumerProperties().get("bar"));
assertEquals(2, details.getCorsHeaders().size());
assertEquals("value1", details.getCorsHeaders().get("key1"));
assertEquals("value2", details.getCorsHeaders().get("key2"));
}
@Test
void parseRestService() throws Exception {
JavaClassSource clazz = (JavaClassSource) Roaster
.parse(new File("src/test/java/org/apache/camel/parser/java/MyRestDslRouteBuilder.java"));
List<RestServiceDetails> list = RestDslParser.parseRestService(clazz,
"src/test/java/org/apache/camel/parser/java/MyRestDslRouteBuilder.java", true);
assertEquals(1, list.size());
RestServiceDetails details = list.get(0);
assertEquals("43", details.getLineNumber());
assertEquals("49", details.getLineNumberEnd());
assertEquals("src/test/java/org/apache/camel/parser/java/MyRestDslRouteBuilder.java", details.getFileName());
assertEquals("configure", details.getMethodName());
assertEquals("org.apache.camel.parser.java.MyRestDslRouteBuilder", details.getClassName());
assertEquals("/foo", details.getPath());
assertEquals("my foo service", details.getDescription());
assertEquals("json", details.getProduces());
assertEquals("json", details.getProduces());
assertEquals(2, details.getVerbs().size());
assertEquals("get", details.getVerbs().get(0).getMethod());
assertEquals("{id}", details.getVerbs().get(0).getPath());
assertEquals("get by id", details.getVerbs().get(0).getDescription());
assertEquals("log:id", details.getVerbs().get(0).getTo());
assertEquals("false", details.getVerbs().get(0).getApiDocs());
assertEquals("post", details.getVerbs().get(1).getMethod());
assertEquals("post something", details.getVerbs().get(1).getDescription());
assertEquals("xml", details.getVerbs().get(1).getBindingMode());
assertEquals("log:post", details.getVerbs().get(1).getTo());
assertNull(details.getVerbs().get(1).getPath());
}
}
| RoasterJavaRestDslTest |
java | apache__kafka | server-common/src/main/java/org/apache/kafka/timeline/TimelineHashSet.java | {
"start": 1098,
"end": 1228
} | class ____ external synchronization. Null values are not supported.
*
* @param <T> The value type of the set.
*/
public | requires |
java | alibaba__nacos | maintainer-client/src/main/java/com/alibaba/nacos/maintainer/client/utils/RequestUtil.java | {
"start": 1017,
"end": 4929
} | class ____ {
/**
* Transfer {@link Service} to HTTP API request parameters.
*
* @param service {@link Service} object
* @return HTTP API request parameters
*/
public static Map<String, String> toParameters(Service service) {
Map<String, String> params = new HashMap<>(8);
params.put("namespaceId", service.getNamespaceId());
params.put("groupName", service.getGroupName());
params.put("serviceName", service.getName());
params.put("metadata", JacksonUtils.toJson(service.getMetadata()));
params.put("ephemeral", String.valueOf(service.isEphemeral()));
params.put("protectThreshold", String.valueOf(service.getProtectThreshold()));
params.put("selector", JacksonUtils.toJson(service.getSelector()));
return params;
}
/**
* Transfer {@link Service} and {@link Instance} to HTTP API request parameters.
*
* @param service {@link Service} object
* @param instance {@link Instance} object
* @return HTTP API request parameters
*/
public static Map<String, String> toParameters(Service service, Instance instance) {
Map<String, String> params = new HashMap<>(11);
params.put("namespaceId", service.getNamespaceId());
params.put("groupName", service.getGroupName());
params.put("serviceName", service.getName());
params.put("clusterName", instance.getClusterName());
params.put("ip", instance.getIp());
params.put("port", String.valueOf(instance.getPort()));
params.put("weight", String.valueOf(instance.getWeight()));
params.put("healthy", String.valueOf(instance.isHealthy()));
params.put("enabled", String.valueOf(instance.isEnabled()));
params.put("metadata", JacksonUtils.toJson(instance.getMetadata()));
params.put("ephemeral", String.valueOf(instance.isEphemeral()));
return params;
}
/**
* Transfer {@link Service}, list of {@link Instance} and new Metadata map to HTTP API request parameters.
*
* @param service {@link Service} object
* @param instances list of {@link Instance}
* @param newMetadata new Metadata map
* @return HTTP API request parameters
*/
public static Map<String, String> toParameters(Service service, List<Instance> instances,
Map<String, String> newMetadata) {
Map<String, String> params = new HashMap<>(6);
params.put("namespaceId", service.getNamespaceId());
params.put("groupName", service.getGroupName());
params.put("serviceName", service.getName());
params.put("instances", JacksonUtils.toJson(instances));
params.put("consistencyType", instances.get(0).isEphemeral() ? "ephemeral" : "persist");
params.put("metadata", JacksonUtils.toJson(newMetadata));
return params;
}
/**
* Transfer {@link Service}, list of {@link ClusterInfo} to HTTP API request parameters.
*
* @param service {@link Service} object
* @param cluster list of {@link ClusterInfo}
* @return HTTP API request parameters
*/
public static Map<String, String> toParameters(Service service, ClusterInfo cluster) {
Map<String, String> params = new HashMap<>(8);
params.put("namespaceId", service.getNamespaceId());
params.put("groupName", service.getGroupName());
params.put("serviceName", service.getName());
params.put("clusterName", cluster.getClusterName());
params.put("checkPort", String.valueOf(cluster.getHealthyCheckPort()));
params.put("useInstancePort4Check", String.valueOf(cluster.isUseInstancePortForCheck()));
params.put("healthChecker", JacksonUtils.toJson(cluster.getHealthChecker()));
params.put("metadata", JacksonUtils.toJson(cluster.getMetadata()));
return params;
}
}
| RequestUtil |
java | apache__flink | flink-connectors/flink-hadoop-compatibility/src/test/java/org/apache/flink/test/hadoopcompatibility/mapred/HadoopReduceFunctionITCase.java | {
"start": 8720,
"end": 9127
} | class ____
implements MapFunction<Tuple2<IntWritable, Text>, Tuple2<IntWritable, Text>> {
private static final long serialVersionUID = 1L;
@Override
public Tuple2<IntWritable, Text> map(Tuple2<IntWritable, Text> v) throws Exception {
v.f0 = new IntWritable(v.f0.get() / 5);
return v;
}
}
/** Test mapper. */
public static | Mapper1 |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/cache/annotation/AnnotationCacheOperationSourceTests.java | {
"start": 13199,
"end": 13599
} | class ____ {
@Cacheable("custom")
public void methodLevelCacheName() {
}
@Cacheable(keyGenerator = "custom")
public void methodLevelKeyGenerator() {
}
@Cacheable(cacheManager = "custom")
public void methodLevelCacheManager() {
}
@Cacheable(cacheResolver = "custom")
public void methodLevelCacheResolver() {
}
}
@CacheConfigFoo
private static | AnnotatedClassWithFullDefault |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/mixins/TestMixinSerForFields.java | {
"start": 1024,
"end": 2765
} | class ____ {
// Let's remove 'a'
@JsonIgnore
public String a;
// also: add a dummy field that is NOT to match anything
@JsonProperty public String xyz;
}
/*
/**********************************************************
/* Unit tests
/**********************************************************
*/
@Test
public void testFieldMixInsTopLevel() throws IOException
{
ObjectMapper mapper = new ObjectMapper();
Map<String,Object> result;
BaseClass bean = new BaseClass("1", "2");
// first: with no mix-ins:
result = writeAndMap(mapper, bean);
assertEquals(1, result.size());
assertEquals("1", result.get("a"));
// and then with simple mix-in
mapper = jsonMapperBuilder()
.addMixIn(BaseClass.class, MixIn.class)
.build();
result = writeAndMap(mapper, bean);
assertEquals(2, result.size());
assertEquals("1", result.get("a"));
assertEquals("2", result.get("banana"));
}
@Test
public void testMultipleFieldMixIns() throws IOException
{
// ordering here shouldn't matter really...
HashMap<Class<?>,Class<?>> mixins = new HashMap<Class<?>,Class<?>>();
mixins.put(SubClass.class, MixIn.class);
mixins.put(BaseClass.class, MixIn2.class);
ObjectMapper mapper = jsonMapperBuilder()
.addMixIns(mixins)
.build();
Map<String,Object> result = writeAndMap(mapper, new SubClass("1", "2"));
assertEquals(1, result.size());
// 'a' should be suppressed; 'b' mapped to 'banana'
assertEquals("2", result.get("banana"));
}
}
| MixIn2 |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/onetomany/IndexColumnListTest.java | {
"start": 4354,
"end": 5631
} | class ____ {
@Id
private Integer id;
@OneToMany(mappedBy = "parent")
@OrderColumn(name = "`index`")
private List<Child> children = new ArrayList<Child>();
Parent() {
}
Parent(Integer id) {
this.id = id;
}
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public List<Child> getChildren() {
return children;
}
public void setChildren(List<Child> children) {
this.children = children;
}
public void addChild(Child child) {
if ( child.getParent() != null ) {
removeChild( child );
}
child.setParent( this );
getChildren().add( child );
}
public void removeChild(Child child) {
if ( child != null ) {
final Parent p = child.getParent();
if ( p != null ) {
p.getChildren().remove( child );
child.setParent( null );
}
}
}
@Override
public boolean equals(Object o) {
if ( this == o ) {
return true;
}
if ( o == null || getClass() != o.getClass() ) {
return false;
}
Parent parent = (Parent) o;
return id != null ? id.equals( parent.id ) : parent.id == null;
}
@Override
public int hashCode() {
return id != null ? id.hashCode() : 0;
}
}
@Audited
@Entity(name = "Child")
public static | Parent |
java | apache__kafka | streams/src/test/java/org/apache/kafka/streams/processor/LogAndSkipOnInvalidTimestampTest.java | {
"start": 1187,
"end": 2079
} | class ____ extends TimestampExtractorTest {
@Test
public void extractMetadataTimestamp() {
testExtractMetadataTimestamp(new LogAndSkipOnInvalidTimestamp());
}
@Test
public void logAndSkipOnInvalidTimestamp() {
final long invalidMetadataTimestamp = -42;
final TimestampExtractor extractor = new LogAndSkipOnInvalidTimestamp();
final long timestamp = extractor.extract(
new ConsumerRecord<>(
"anyTopic",
0,
0,
invalidMetadataTimestamp,
TimestampType.NO_TIMESTAMP_TYPE,
0,
0,
null,
null,
new RecordHeaders(),
Optional.empty()),
0
);
assertThat(timestamp, is(invalidMetadataTimestamp));
}
}
| LogAndSkipOnInvalidTimestampTest |
java | apache__spark | sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedValuesReader.java | {
"start": 2785,
"end": 2846
} | interface ____ {
/**
* A functional | IntegerOutputWriter |
java | elastic__elasticsearch | x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java | {
"start": 2264,
"end": 82783
} | class ____ extends AbstractRemoteClusterSecurityTestCase {
private static final AtomicReference<Map<String, Object>> API_KEY_MAP_REF = new AtomicReference<>();
private static final AtomicReference<Map<String, Object>> REST_API_KEY_MAP_REF = new AtomicReference<>();
private static final AtomicBoolean SSL_ENABLED_REF = new AtomicBoolean();
private static final AtomicBoolean NODE1_RCS_SERVER_ENABLED = new AtomicBoolean();
private static final AtomicBoolean NODE2_RCS_SERVER_ENABLED = new AtomicBoolean();
private static final AtomicInteger INVALID_SECRET_LENGTH = new AtomicInteger();
static {
fulfillingCluster = ElasticsearchCluster.local()
.name("fulfilling-cluster")
.nodes(3)
.module("x-pack-autoscaling")
.module("x-pack-esql")
.module("x-pack-enrich")
.module("x-pack-ml")
.module("ingest-common")
.apply(commonClusterConfig)
.setting("remote_cluster.port", "0")
.setting("xpack.ml.enabled", "false")
.setting("xpack.security.remote_cluster_server.ssl.enabled", () -> String.valueOf(SSL_ENABLED_REF.get()))
.setting("xpack.security.remote_cluster_server.ssl.key", "remote-cluster.key")
.setting("xpack.security.remote_cluster_server.ssl.certificate", "remote-cluster.crt")
.setting("xpack.security.authc.token.enabled", "true")
.keystore("xpack.security.remote_cluster_server.ssl.secure_key_passphrase", "remote-cluster-password")
.node(0, spec -> spec.setting("remote_cluster_server.enabled", "true"))
.node(1, spec -> spec.setting("remote_cluster_server.enabled", () -> String.valueOf(NODE1_RCS_SERVER_ENABLED.get())))
.node(2, spec -> spec.setting("remote_cluster_server.enabled", () -> String.valueOf(NODE2_RCS_SERVER_ENABLED.get())))
.build();
queryCluster = ElasticsearchCluster.local()
.name("query-cluster")
.module("x-pack-autoscaling")
.module("x-pack-esql")
.module("x-pack-enrich")
.module("x-pack-ml")
.module("ingest-common")
.apply(commonClusterConfig)
.setting("xpack.ml.enabled", "false")
.setting("xpack.security.remote_cluster_client.ssl.enabled", () -> String.valueOf(SSL_ENABLED_REF.get()))
.setting("xpack.security.remote_cluster_client.ssl.certificate_authorities", "remote-cluster-ca.crt")
.setting("xpack.security.authc.token.enabled", "true")
.keystore("cluster.remote.my_remote_cluster.credentials", () -> {
if (API_KEY_MAP_REF.get() == null) {
final Map<String, Object> apiKeyMap = createCrossClusterAccessApiKey("""
{
"search": [
{
"names": ["index*", "alias*", "not_found_index", "employees", "employees2"]
},
{
"names": ["employees3"],
"query": {"term" : {"department" : "engineering"}}
}
]
}""");
API_KEY_MAP_REF.set(apiKeyMap);
}
return (String) API_KEY_MAP_REF.get().get("encoded");
})
// Define a bogus API key for another remote cluster
.keystore("cluster.remote.invalid_remote.credentials", randomEncodedApiKey())
// Define remote with a REST API key to observe expected failure
.keystore("cluster.remote.wrong_api_key_type.credentials", () -> {
if (REST_API_KEY_MAP_REF.get() == null) {
initFulfillingClusterClient();
final var createApiKeyRequest = new Request("POST", "/_security/api_key");
createApiKeyRequest.setJsonEntity("""
{
"name": "rest_api_key"
}""");
try {
final Response createApiKeyResponse = performRequestWithAdminUser(fulfillingClusterClient, createApiKeyRequest);
assertOK(createApiKeyResponse);
REST_API_KEY_MAP_REF.set(responseAsMap(createApiKeyResponse));
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
return (String) REST_API_KEY_MAP_REF.get().get("encoded");
})
// Define a remote with invalid API key secret length
.keystore(
"cluster.remote.invalid_secret_length.credentials",
() -> Base64.getEncoder()
.encodeToString(
(UUIDs.base64UUID() + ":" + randomAlphaOfLength(INVALID_SECRET_LENGTH.get())).getBytes(StandardCharsets.UTF_8)
)
)
.rolesFile(Resource.fromClasspath("roles.yml"))
.user(REMOTE_METRIC_USER, PASS.toString(), "read_remote_shared_metrics", false)
.build();
}
@ClassRule
// Use a RuleChain to ensure that fulfilling cluster is started before query cluster
// `SSL_ENABLED_REF` is used to control the SSL-enabled setting on the test clusters
// We set it here, since randomization methods are not available in the static initialize context above
public static TestRule clusterRule = RuleChain.outerRule(new RunnableTestRuleAdapter(() -> {
    SSL_ENABLED_REF.set(usually());
    // Each non-master node independently randomizes whether it exposes the remote-cluster server port
    NODE1_RCS_SERVER_ENABLED.set(randomBoolean());
    NODE2_RCS_SERVER_ENABLED.set(randomBoolean());
    // NOTE(review): 22 appears to be the length of a valid API key secret, so any other length
    // in [0, 99] is invalid for the `invalid_secret_length` remote — confirm against API key format
    INVALID_SECRET_LENGTH.set(randomValueOtherThan(22, () -> randomIntBetween(0, 99)));
})).around(fulfillingCluster).around(queryCluster);
/**
 * Seeds both clusters with the fixtures the tests rely on:
 * <ul>
 *   <li>a {@code countries} enrich policy on each cluster (built from a temporary
 *       {@code countries} source index that is deleted after the policy is executed)</li>
 *   <li>{@code employees}, {@code employees2} and {@code employees3} indices with
 *       disjoint {@code emp_id} ranges on the fulfilling and querying clusters</li>
 *   <li>the {@code remote_search} role (local + remote index privileges plus
 *       {@code monitor_enrich} on the remote cluster) and {@code remote_search_user}
 *       on the querying cluster</li>
 * </ul>
 */
public void populateData() throws Exception {
    // Builds the `countries` enrich policy on whichever cluster `client` points at:
    // index the source docs, create + execute the policy, then drop the source index.
    CheckedConsumer<RestClient, IOException> setupEnrich = client -> {
        Request createIndex = new Request("PUT", "countries");
        createIndex.setJsonEntity("""
            {
              "mappings": {
                "properties": {
                  "emp_id": { "type": "keyword" },
                  "country": { "type": "text" }
                }
              }
            }
            """);
        assertOK(performRequestWithAdminUser(client, createIndex));
        final Request bulkRequest = new Request("POST", "/_bulk?refresh=true");
        bulkRequest.setJsonEntity(Strings.format("""
            { "index": { "_index": "countries" } }
            { "emp_id": "1", "country": "usa"}
            { "index": { "_index": "countries" } }
            { "emp_id": "2", "country": "canada"}
            { "index": { "_index": "countries" } }
            { "emp_id": "3", "country": "germany"}
            { "index": { "_index": "countries" } }
            { "emp_id": "4", "country": "spain"}
            { "index": { "_index": "countries" } }
            { "emp_id": "5", "country": "japan"}
            { "index": { "_index": "countries" } }
            { "emp_id": "6", "country": "france"}
            { "index": { "_index": "countries" } }
            { "emp_id": "7", "country": "usa"}
            { "index": { "_index": "countries" } }
            { "emp_id": "8", "country": "canada"}
            { "index": { "_index": "countries" } }
            { "emp_id": "9", "country": "usa"}
            """));
        assertOK(performRequestWithAdminUser(client, bulkRequest));
        Request createEnrich = new Request("PUT", "/_enrich/policy/countries");
        createEnrich.setJsonEntity("""
            {
              "match": {
                "indices": "countries",
                "match_field": "emp_id",
                "enrich_fields": ["country"]
              }
            }
            """);
        assertOK(performRequestWithAdminUser(client, createEnrich));
        assertOK(performRequestWithAdminUser(client, new Request("PUT", "_enrich/policy/countries/_execute")));
        // Source index is no longer needed once the policy has been executed
        performRequestWithAdminUser(client, new Request("DELETE", "/countries"));
    };
    // Fulfilling cluster
    setupEnrich.accept(fulfillingClusterClient);
    String employeesMapping = """
        {
          "mappings": {
            "properties": {
              "emp_id": { "type": "keyword" },
              "department": { "type": "keyword" }
            }
          }
        }
        """;
    Request createIndex = new Request("PUT", "employees");
    createIndex.setJsonEntity(employeesMapping);
    assertOK(performRequestAgainstFulfillingCluster(createIndex));
    Request createIndex2 = new Request("PUT", "employees2");
    createIndex2.setJsonEntity(employeesMapping);
    assertOK(performRequestAgainstFulfillingCluster(createIndex2));
    Request createIndex3 = new Request("PUT", "employees3");
    createIndex3.setJsonEntity(employeesMapping);
    assertOK(performRequestAgainstFulfillingCluster(createIndex3));
    // Remote docs use odd emp_ids (and 11+ / 21+ for employees2/3) so tests can tell
    // remote hits apart from local ones.
    Request bulkRequest = new Request("POST", "/_bulk?refresh=true");
    bulkRequest.setJsonEntity(Strings.format("""
        { "index": { "_index": "employees" } }
        { "emp_id": "1", "department" : "engineering" }
        { "index": { "_index": "employees" } }
        { "emp_id": "3", "department" : "sales" }
        { "index": { "_index": "employees" } }
        { "emp_id": "5", "department" : "marketing" }
        { "index": { "_index": "employees" } }
        { "emp_id": "7", "department" : "engineering" }
        { "index": { "_index": "employees" } }
        { "emp_id": "9", "department" : "sales" }
        { "index": { "_index": "employees2" } }
        { "emp_id": "11", "department" : "engineering" }
        { "index": { "_index": "employees2" } }
        { "emp_id": "13", "department" : "sales" }
        { "index": { "_index": "employees3" } }
        { "emp_id": "21", "department" : "engineering" }
        { "index": { "_index": "employees3" } }
        { "emp_id": "23", "department" : "sales" }
        { "index": { "_index": "employees3" } }
        { "emp_id": "25", "department" : "engineering" }
        { "index": { "_index": "employees3" } }
        { "emp_id": "27", "department" : "sales" }
        """));
    assertOK(performRequestAgainstFulfillingCluster(bulkRequest));
    // Querying cluster
    // Index some documents, to use them in a mixed-cluster search
    setupEnrich.accept(client());
    createIndex = new Request("PUT", "employees");
    createIndex.setJsonEntity(employeesMapping);
    assertOK(adminClient().performRequest(createIndex));
    createIndex2 = new Request("PUT", "employees2");
    createIndex2.setJsonEntity(employeesMapping);
    assertOK(adminClient().performRequest(createIndex2));
    createIndex3 = new Request("PUT", "employees3");
    createIndex3.setJsonEntity(employeesMapping);
    assertOK(adminClient().performRequest(createIndex3));
    // Local docs use even emp_ids, disjoint from the remote data above.
    bulkRequest = new Request("POST", "/_bulk?refresh=true");
    bulkRequest.setJsonEntity(Strings.format("""
        { "index": { "_index": "employees" } }
        { "emp_id": "2", "department" : "management" }
        { "index": { "_index": "employees"} }
        { "emp_id": "4", "department" : "engineering" }
        { "index": { "_index": "employees" } }
        { "emp_id": "6", "department" : "marketing"}
        { "index": { "_index": "employees"} }
        { "emp_id": "8", "department" : "support"}
        { "index": { "_index": "employees2"} }
        { "emp_id": "10", "department" : "management"}
        { "index": { "_index": "employees2"} }
        { "emp_id": "12", "department" : "engineering"}
        { "index": { "_index": "employees3"} }
        { "emp_id": "20", "department" : "management"}
        { "index": { "_index": "employees3"} }
        { "emp_id": "22", "department" : "engineering"}
        """));
    assertOK(client().performRequest(bulkRequest));
    // Create user role with privileges for remote and local indices
    final var putRoleRequest = new Request("PUT", "/_security/role/" + REMOTE_SEARCH_ROLE);
    putRoleRequest.setJsonEntity("""
        {
          "indices": [
            {
              "names": ["employees"],
              "privileges": ["read"]
            }
          ],
          "cluster": [ "monitor_enrich", "manage_own_api_key" ],
          "remote_indices": [
            {
              "names": ["employees"],
              "privileges": ["read"],
              "clusters": ["my_remote_cluster"]
            }
          ],
          "remote_cluster": [
            {
              "privileges": ["monitor_enrich"],
              "clusters": ["my_remote_cluster"]
            }
          ]
        }""");
    assertOK(adminClient().performRequest(putRoleRequest));
    final var putUserRequest = new Request("PUT", "/_security/user/" + REMOTE_SEARCH_USER);
    putUserRequest.setJsonEntity("""
        {
          "password": "x-pack-test-password",
          "roles" : ["remote_search"]
        }""");
    assertOK(adminClient().performRequest(putUserRequest));
}
/**
 * Creates a second user (named {@code remote_search_user_other}) assigned the same
 * {@code remote_search} role as the primary test user.
 *
 * @return the name of the newly created user
 */
private static String populateOtherUser() throws IOException {
    final String otherUserName = REMOTE_SEARCH_USER + "_other";
    final Request createUserRequest = new Request("PUT", "/_security/user/" + otherUserName);
    createUserRequest.setJsonEntity("""
        {
          "password": "x-pack-test-password",
          "roles" : ["remote_search"]
        }""");
    assertOK(adminClient().performRequest(createUserRequest));
    return otherUserName;
}
/**
 * Executes {@code request} as the admin user against the given client, swallowing a
 * 404 response (logged and ignored) while rethrowing every other {@link ResponseException}.
 * Useful for best-effort cleanup of resources that may not exist.
 */
private void performRequestWithAdminUserIgnoreNotFound(RestClient targetFulfillingClusterClient, Request request) throws IOException {
    try {
        performRequestWithAdminUser(targetFulfillingClusterClient, request);
    } catch (ResponseException e) {
        final int statusCode = e.getResponse().getStatusLine().getStatusCode();
        if (statusCode == 404) {
            logger.info("Ignored \"not found\" exception", e);
        } else {
            throw e;
        }
    }
}
@After
public void wipeData() throws Exception {
    // Best-effort cleanup on the fulfilling cluster first, then the querying cluster;
    // "not found" responses are ignored since a test may not have created every resource.
    for (RestClient targetClient : List.of(fulfillingClusterClient, client())) {
        performRequestWithAdminUserIgnoreNotFound(targetClient, new Request("DELETE", "/employees"));
        performRequestWithAdminUserIgnoreNotFound(targetClient, new Request("DELETE", "/employees2"));
        performRequestWithAdminUserIgnoreNotFound(targetClient, new Request("DELETE", "/employees3"));
        performRequestWithAdminUserIgnoreNotFound(targetClient, new Request("DELETE", "/_enrich/policy/countries"));
    }
}
/**
 * Happy-path ES|QL cross-cluster queries: remote-only and remote+local, authenticated
 * both with user credentials and with a user-created API key. Also verifies that running
 * a CCS query records/updates the {@code esql-ccs} license feature usage
 * ({@code last_used}) on the querying cluster.
 */
@SuppressWarnings("unchecked")
public void testCrossClusterQuery() throws Exception {
    configureRemoteCluster();
    populateData();
    Map<String, Object> esqlCcsLicenseFeatureUsage = fetchEsqlCcsFeatureUsageFromNode(client());
    Object ccsLastUsedTimestampAtStartOfTest = null;
    if (esqlCcsLicenseFeatureUsage.isEmpty() == false) {
        // some test runs will have a usage value already, so capture that to compare at end of test
        ccsLastUsedTimestampAtStartOfTest = esqlCcsLicenseFeatureUsage.get("last_used");
    }
    // query remote cluster only
    Request request = esqlRequest("""
        FROM my_remote_cluster:employees
        | SORT emp_id ASC
        | LIMIT 2
        | KEEP emp_id, department""");
    Response response = performRequestWithRemoteSearchUser(request);
    assertRemoteOnlyResults(response);
    // same as above but authenticate with API key
    response = performRequestWithRemoteSearchUserViaAPIKey(request, createRemoteSearchUserAPIKey());
    assertRemoteOnlyResults(response);
    // query remote and local cluster
    response = performRequestWithRemoteSearchUser(esqlRequest("""
        FROM my_remote_cluster:employees,employees
        | SORT emp_id ASC
        | LIMIT 10"""));
    assertRemoteAndLocalResults(response);
    // update role to include both employees and employees2 for the remote cluster
    final var putRoleRequest = new Request("PUT", "/_security/role/" + REMOTE_SEARCH_ROLE);
    putRoleRequest.setJsonEntity("""
        {
          "indices": [{"names": [""], "privileges": ["read_cross_cluster"]}],
          "remote_indices": [
            {
              "names": ["employees*"],
              "privileges": ["read"],
              "clusters": ["my_remote_cluster"]
            }
          ]
        }""");
    response = adminClient().performRequest(putRoleRequest);
    assertOK(response);
    // query remote cluster only - but also include employees2 which the user now access
    response = performRequestWithRemoteSearchUser(esqlRequest("""
        FROM my_remote_cluster:employees,my_remote_cluster:employees2
        | SORT emp_id ASC
        | LIMIT 2
        | KEEP emp_id, department"""));
    assertRemoteOnlyAgainst2IndexResults(response);
    // check that the esql-ccs license feature is now present and that the last_used field has been updated
    esqlCcsLicenseFeatureUsage = fetchEsqlCcsFeatureUsageFromNode(client());
    assertThat(esqlCcsLicenseFeatureUsage.size(), equalTo(5));
    Object lastUsed = esqlCcsLicenseFeatureUsage.get("last_used");
    assertNotNull("lastUsed should not be null", lastUsed);
    if (ccsLastUsedTimestampAtStartOfTest != null) {
        // the CCS queries above must have advanced the pre-test last_used timestamp
        assertThat(lastUsed.toString(), not(equalTo(ccsLastUsedTimestampAtStartOfTest.toString())));
    }
}
/**
 * Verifies that document-level (DLS) and field-level (FLS) security on the user's
 * remote index privileges stack on top of the DLS already baked into the cross-cluster
 * API key (which restricts {@code employees3} to {@code department: engineering}).
 * Each stage tightens the role and checks that the visible rows/columns shrink accordingly.
 */
@SuppressWarnings("unchecked")
public void testCrossClusterQueryWithRemoteDLSAndFLS() throws Exception {
    configureRemoteCluster();
    populateData();
    // ensure user has access to the employees3 index
    final var putRoleRequest = new Request("PUT", "/_security/role/" + REMOTE_SEARCH_ROLE);
    putRoleRequest.setJsonEntity("""
        {
          "indices": [{"names": [""], "privileges": ["read_cross_cluster"]}],
          "remote_indices": [
            {
              "names": ["employees*"],
              "privileges": ["read"],
              "clusters": ["my_remote_cluster"]
            }
          ]
        }""");
    Response response = adminClient().performRequest(putRoleRequest);
    assertOK(response);
    response = performRequestWithRemoteSearchUser(esqlRequest("""
        FROM my_remote_cluster:employees3
        | SORT emp_id ASC
        | LIMIT 10
        | KEEP emp_id, department"""));
    assertOK(response);
    Map<String, Object> responseAsMap = entityAsMap(response);
    List<?> columns = (List<?>) responseAsMap.get("columns");
    List<?> values = (List<?>) responseAsMap.get("values");
    assertEquals(2, columns.size());
    assertEquals(2, values.size());
    List<String> flatList = values.stream()
        .flatMap(innerList -> innerList instanceof List ? ((List<String>) innerList).stream() : Stream.empty())
        .collect(Collectors.toList());
    // the APIKey has DLS set to : "query": {"term" : {"department" : "engineering"}}
    assertThat(flatList, containsInAnyOrder("21", "25", "engineering", "engineering"));
    // add DLS to the remote indices in the role to restrict access to only emp_id = 21
    putRoleRequest.setJsonEntity("""
        {
          "indices": [{"names": [""], "privileges": ["read_cross_cluster"]}],
          "remote_indices": [
            {
              "names": ["employees*"],
              "privileges": ["read"],
              "clusters": ["my_remote_cluster"],
              "query": {"term" : {"emp_id" : "21"}}
            }
          ]
        }""");
    response = adminClient().performRequest(putRoleRequest);
    assertOK(response);
    response = performRequestWithRemoteSearchUser(esqlRequest("""
        FROM my_remote_cluster:employees3
        | SORT emp_id ASC
        | LIMIT 2
        | KEEP emp_id, department"""));
    assertOK(response);
    responseAsMap = entityAsMap(response);
    columns = (List<?>) responseAsMap.get("columns");
    values = (List<?>) responseAsMap.get("values");
    assertEquals(2, columns.size());
    assertEquals(1, values.size());
    flatList = values.stream()
        .flatMap(innerList -> innerList instanceof List ? ((List<String>) innerList).stream() : Stream.empty())
        .collect(Collectors.toList());
    // the APIKey has DLS set to : "query": {"term" : {"department" : "engineering"}}
    // AND this role has DLS set to: "query": {"term" : {"emp_id" : "21"}}
    assertThat(flatList, containsInAnyOrder("21", "engineering"));
    // add FLS to the remote indices in the role to restrict access to only access department
    putRoleRequest.setJsonEntity("""
        {
          "indices": [{"names": [""], "privileges": ["read_cross_cluster"]}],
          "remote_indices": [
            {
              "names": ["employees*"],
              "privileges": ["read"],
              "clusters": ["my_remote_cluster"],
              "query": {"term" : {"emp_id" : "21"}},
              "field_security": {"grant": [ "department" ]}
            }
          ]
        }""");
    response = adminClient().performRequest(putRoleRequest);
    assertOK(response);
    response = performRequestWithRemoteSearchUser(esqlRequest("""
        FROM my_remote_cluster:employees3
        | LIMIT 2
        """));
    assertOK(response);
    responseAsMap = entityAsMap(response);
    columns = (List<?>) responseAsMap.get("columns");
    values = (List<?>) responseAsMap.get("values");
    // FLS removed emp_id, leaving a single visible column
    assertEquals(1, columns.size());
    assertEquals(1, values.size());
    flatList = values.stream()
        .flatMap(innerList -> innerList instanceof List ? ((List<String>) innerList).stream() : Stream.empty())
        .collect(Collectors.toList());
    // the APIKey has DLS set to : "query": {"term" : {"department" : "engineering"}}
    // AND this role has DLS set to: "query": {"term" : {"emp_id" : "21"}}
    // AND this role has FLS set to: "field_security": {"grant": [ "department" ]}
    assertThat(flatList, containsInAnyOrder("engineering"));
}
/**
 * Note: invalid_remote is "invalid" because it has a bogus API key.
 * <p>
 * Exercises both {@code skip_unavailable} settings: when {@code true} the broken remote
 * is reported as "skipped" in {@code _clusters.details} (local results, if any, are still
 * returned); when {@code false} the query fails outright. Both sniff and proxy remote
 * connection modes are randomized.
 */
@SuppressWarnings("unchecked")
public void testCrossClusterQueryAgainstInvalidRemote() throws Exception {
    configureRemoteCluster();
    populateData();
    final boolean skipUnavailable = randomBoolean();
    // avoids getting 404 errors
    updateClusterSettings(
        randomBoolean()
            ? Settings.builder()
                .put("cluster.remote.invalid_remote.seeds", fulfillingCluster.getRemoteClusterServerEndpoint(0))
                .put("cluster.remote.invalid_remote.skip_unavailable", Boolean.toString(skipUnavailable))
                .build()
            : Settings.builder()
                .put("cluster.remote.invalid_remote.mode", "proxy")
                .put("cluster.remote.invalid_remote.skip_unavailable", Boolean.toString(skipUnavailable))
                .put("cluster.remote.invalid_remote.proxy_address", fulfillingCluster.getRemoteClusterServerEndpoint(0))
                .build()
    );
    // invalid remote with local index should return local results
    {
        var q = "FROM invalid_remote:employees,employees | SORT emp_id DESC | LIMIT 10";
        if (skipUnavailable) {
            Response response = performRequestWithRemoteSearchUser(esqlRequest(q));
            // this does not yet happen because field-caps returns nothing for this cluster, rather
            // than an error, so the current code cannot detect that error. Follow on PR will handle this.
            assertLocalOnlyResultsAndSkippedRemote(response);
        } else {
            // errors from invalid remote should throw an exception if the cluster is marked with skip_unavailable=false
            ResponseException error = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(esqlRequest(q)));
            assertThat(error.getResponse().getStatusLine().getStatusCode(), equalTo(400));
            // TODO: in follow on PR, figure out why this is returning the wrong error - should be "cannot connect to invalid_remote"
            assertThat(error.getMessage(), containsString("Unknown index [invalid_remote:employees]"));
        }
    }
    {
        var q = "FROM invalid_remote:employees | SORT emp_id DESC | LIMIT 10";
        // errors from invalid remote should be ignored if the cluster is marked with skip_unavailable=true
        if (skipUnavailable) {
            // expected response:
            // {"took":1,"columns":[],"values":[],"_clusters":{"total":1,"successful":0,"running":0,"skipped":1,"partial":0,
            // "failed":0,"details":{"invalid_remote":{"status":"skipped","indices":"employees","took":1,"_shards":
            // {"total":0,"successful":0,"skipped":0,"failed":0},"failures":[{"shard":-1,"index":null,"reason":
            // {"type":"remote_transport_exception",
            // "reason":"[connect_transport_exception - unable to connect to remote cluster]"}}]}}}}
            Response response = performRequestWithRemoteSearchUser(esqlRequest(q));
            assertOK(response);
            Map<String, Object> responseAsMap = entityAsMap(response);
            List<?> columns = (List<?>) responseAsMap.get("columns");
            List<?> values = (List<?>) responseAsMap.get("values");
            assertThat(columns.size(), equalTo(1));
            Map<String, ?> column1 = (Map<String, ?>) columns.get(0);
            assertThat(column1.get("name").toString(), equalTo("<no-fields>"));
            assertThat(values.size(), equalTo(0));
            // drill into the per-cluster metadata and verify the remote was marked skipped
            Map<String, ?> clusters = (Map<String, ?>) responseAsMap.get("_clusters");
            Map<String, ?> details = (Map<String, ?>) clusters.get("details");
            Map<String, ?> invalidRemoteEntry = (Map<String, ?>) details.get("invalid_remote");
            assertThat(invalidRemoteEntry.get("status").toString(), equalTo("skipped"));
            List<?> failures = (List<?>) invalidRemoteEntry.get("failures");
            assertThat(failures.size(), equalTo(1));
            Map<String, ?> failuresMap = (Map<String, ?>) failures.get(0);
            Map<String, ?> reason = (Map<String, ?>) failuresMap.get("reason");
            assertThat(reason.get("type").toString(), equalTo("remote_transport_exception"));
            assertThat(reason.get("reason").toString(), containsString("unable to connect to remote cluster"));
        } else {
            // errors from invalid remote should throw an exception if the cluster is marked with skip_unavailable=false
            ResponseException error = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(esqlRequest(q)));
            assertThat(error.getResponse().getStatusLine().getStatusCode(), equalTo(401));
            // TODO: in follow on PR, figure out why this is returning the wrong error - should be "cannot connect to invalid_remote"
            assertThat(error.getMessage(), containsString("unable to find apikey"));
        }
    }
}
/**
 * Authorization-failure matrix for ES|QL CCS: a role with only remote privileges works
 * for remote-only queries, but missing remote index grants yield 400 (unknown index,
 * since unauthorized indices are filtered out of resolution) and missing local grants
 * yield 403 with the standard "granted by the index privileges" message.
 */
@SuppressWarnings("unchecked")
public void testCrossClusterQueryWithOnlyRemotePrivs() throws Exception {
    configureRemoteCluster();
    populateData();
    // Query cluster
    var putRoleRequest = new Request("PUT", "/_security/role/" + REMOTE_SEARCH_ROLE);
    putRoleRequest.setJsonEntity("""
        {
          "indices": [{"names": [""], "privileges": ["read_cross_cluster"]}],
          "remote_indices": [
            {
              "names": ["employees"],
              "privileges": ["read"],
              "clusters": ["my_remote_cluster"]
            }
          ]
        }""");
    assertOK(adminClient().performRequest(putRoleRequest));
    // query appropriate privs
    Response response = performRequestWithRemoteSearchUser(esqlRequest("""
        FROM my_remote_cluster:employees
        | SORT emp_id ASC
        | LIMIT 2
        | KEEP emp_id, department"""));
    assertRemoteOnlyResults(response);
    // without the remote index priv
    putRoleRequest.setJsonEntity("""
        {
          "indices": [{"names": [""], "privileges": ["read_cross_cluster"]}],
          "remote_indices": [
            {
              "names": ["idontexist"],
              "privileges": ["read"],
              "clusters": ["my_remote_cluster"]
            }
          ]
        }""");
    assertOK(adminClient().performRequest(putRoleRequest));
    ResponseException error = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(esqlRequest("""
        FROM my_remote_cluster:employees
        | SORT emp_id ASC
        | LIMIT 2
        | KEEP emp_id, department""")));
    // unauthorized remote indices surface as "unknown index" (400), not 403
    assertThat(error.getResponse().getStatusLine().getStatusCode(), equalTo(400));
    assertThat(error.getMessage(), containsString("Unknown index [my_remote_cluster:employees]"));
    // no local privs at all will fail
    final var putRoleNoLocalPrivs = new Request("PUT", "/_security/role/" + REMOTE_SEARCH_ROLE);
    putRoleNoLocalPrivs.setJsonEntity("""
        {
          "indices": [],
          "remote_indices": [
            {
              "names": ["employees"],
              "privileges": ["read"],
              "clusters": ["my_remote_cluster"]
            }
          ]
        }""");
    assertOK(adminClient().performRequest(putRoleNoLocalPrivs));
    error = expectThrows(ResponseException.class, () -> { performRequestWithRemoteSearchUser(esqlRequest("""
        FROM my_remote_cluster:employees
        | SORT emp_id ASC
        | LIMIT 2
        | KEEP emp_id, department""")); });
    assertThat(error.getResponse().getStatusLine().getStatusCode(), equalTo(403));
    assertThat(
        error.getMessage(),
        containsString(
            "action [indices:data/read/esql] is unauthorized for user [remote_search_user] with effective roles [remote_search], "
                + "this action is granted by the index privileges [read,read_cross_cluster,all]"
        )
    );
    // query remote cluster only - but also include employees2 which the user does not have access to
    error = expectThrows(ResponseException.class, () -> { performRequestWithRemoteSearchUser(esqlRequest("""
        FROM my_remote_cluster:employees,my_remote_cluster:employees2
        | SORT emp_id ASC
        | LIMIT 2
        | KEEP emp_id, department""")); });
    assertThat(error.getResponse().getStatusLine().getStatusCode(), equalTo(403));
    assertThat(
        error.getMessage(),
        containsString(
            "action [indices:data/read/esql] is unauthorized for user [remote_search_user] with effective roles "
                + "[remote_search], this action is granted by the index privileges [read,read_cross_cluster,all]"
        )
    );
    // query remote and local cluster - but also include employees2 which the user does not have access to
    error = expectThrows(ResponseException.class, () -> { performRequestWithRemoteSearchUser(esqlRequest("""
        FROM my_remote_cluster:employees,my_remote_cluster:employees2,employees,employees2
        | SORT emp_id ASC
        | LIMIT 10""")); });
    assertThat(error.getResponse().getStatusLine().getStatusCode(), equalTo(403));
    assertThat(
        error.getMessage(),
        containsString(
            "action [indices:data/read/esql] is unauthorized for user [remote_search_user] with effective roles "
                + "[remote_search], this action is granted by the index privileges [read,read_cross_cluster,all]"
        )
    );
}
/**
 * ES|QL {@code ENRICH} across clusters: succeeds (with both user credentials and an
 * API key) while the role carries the {@code monitor_enrich} remote-cluster privilege,
 * then fails with 403 on the remote enrich policy-resolution action once that
 * privilege is removed.
 */
@SuppressWarnings("unchecked")
public void testCrossClusterEnrich() throws Exception {
    boolean isProxyMode = randomBoolean();
    boolean skipUnavailable = randomBoolean();
    configureRemoteCluster(REMOTE_CLUSTER_ALIAS, fulfillingCluster, false, isProxyMode, skipUnavailable);
    populateData();
    // Query cluster
    {
        // ESQL with enrich is okay when user has access to enrich polices
        Request request = esqlRequest("""
            FROM my_remote_cluster:employees,employees
            | ENRICH countries
            | STATS size=count(*) by country
            | SORT size DESC
            | LIMIT 2""");
        Response response = performRequestWithRemoteSearchUser(request);
        assertWithEnrich(response);
        // same as above but authenticate with API key
        response = performRequestWithRemoteSearchUserViaAPIKey(request, createRemoteSearchUserAPIKey());
        assertWithEnrich(response);
        // Query cluster
        final var putRoleRequest = new Request("PUT", "/_security/role/" + REMOTE_SEARCH_ROLE);
        // no remote_cluster privs should fail the request
        putRoleRequest.setJsonEntity("""
            {
              "indices": [
                {
                  "names": ["employees"],
                  "privileges": ["read"]
                }
              ],
              "cluster": [ "monitor_enrich" ],
              "remote_indices": [
                {
                  "names": ["employees"],
                  "privileges": ["read"],
                  "clusters": ["my_remote_cluster"]
                }
              ]
            }""");
        assertOK(adminClient().performRequest(putRoleRequest));
        ResponseException error = expectThrows(ResponseException.class, () -> { performRequestWithRemoteSearchUser(esqlRequest("""
            FROM my_remote_cluster:employees,employees
            | ENRICH countries
            | STATS size=count(*) by country
            | SORT size DESC
            | LIMIT 2""")); });
        assertThat(error.getResponse().getStatusLine().getStatusCode(), equalTo(403));
        // the failing action is the remote enrich policy resolution, authorized via the
        // cross-cluster API key identity
        assertThat(
            error.getMessage(),
            containsString(
                "action [cluster:monitor/xpack/enrich/esql/resolve_policy] towards remote cluster is unauthorized for user "
                    + "[remote_search_user] with assigned roles [remote_search] authenticated by API key id ["
            )
        );
        assertThat(
            error.getMessage(),
            containsString(
                "this action is granted by the cluster privileges "
                    + "[cross_cluster_search,monitor_enrich,manage_enrich,monitor,manage,all]"
            )
        );
    }
}
/**
 * ES|QL {@code ENRICH} against the remote cluster only, using a role whose index
 * privileges are purely remote (plus the local {@code cross_cluster_search} cluster
 * privilege, which is still required). Verifies the enriched aggregation results.
 */
@SuppressWarnings("unchecked")
public void testCrossClusterEnrichWithOnlyRemotePrivs() throws Exception {
    configureRemoteCluster();
    populateData();
    // Query cluster
    final var putRoleRequest = new Request("PUT", "/_security/role/" + REMOTE_SEARCH_ROLE);
    // local cross_cluster_search cluster priv is required for enrich
    // ideally, remote only enrichment wouldn't need this local privilege, however remote only enrichment is not currently supported
    putRoleRequest.setJsonEntity("""
        {
          "indices": [{"names": [""], "privileges": ["read_cross_cluster"]}],
          "cluster": ["cross_cluster_search"],
          "remote_indices": [
            {
              "names": ["employees"],
              "privileges": ["read"],
              "clusters": ["my_remote_cluster"]
            }
          ],
          "remote_cluster": [
            {
              "privileges": ["monitor_enrich"],
              "clusters": ["my_remote_cluster"]
            }
          ]
        }""");
    assertOK(adminClient().performRequest(putRoleRequest));
    // Query cluster
    // ESQL with enrich is okay when user has access to enrich polices
    Response response = performRequestWithRemoteSearchUser(esqlRequest("""
        FROM my_remote_cluster:employees
        | ENRICH countries
        | STATS size=count(*) by country
        | SORT size DESC
        | LIMIT 3"""));
    assertOK(response);
    Map<String, Object> responseAsMap = entityAsMap(response);
    List<?> columns = (List<?>) responseAsMap.get("columns");
    List<?> values = (List<?>) responseAsMap.get("values");
    assertEquals(2, columns.size());
    assertEquals(3, values.size());
    List<?> flatList = values.stream()
        .flatMap(innerList -> innerList instanceof List ? ((List<?>) innerList).stream() : Stream.empty())
        .collect(Collectors.toList());
    // remote employees docs enriched by emp_id -> country: 3 in usa, 1 each in germany/japan
    assertThat(flatList, containsInAnyOrder(1, 1, 3, "usa", "germany", "japan"));
}
/**
 * Creates aliases on the fulfilling cluster for alias-resolution tests:
 * {@code alias-employees} (unfiltered) plus filtered {@code alias-engineering} /
 * {@code alias-management} over {@code employees}, and {@code alias-employees2}
 * over {@code employees2}. Paired with {@link #removeAliases()}.
 */
private void createAliases() throws Exception {
    Request createAlias = new Request("POST", "_aliases");
    createAlias.setJsonEntity("""
        {
          "actions": [
            {
              "add": {
                "index": "employees",
                "alias": "alias-employees"
              }
            },
            {
              "add": {
                "index": "employees",
                "alias": "alias-engineering",
                "filter": { "match": { "department": "engineering" }}
              }
            },
            {
              "add": {
                "index": "employees",
                "alias": "alias-management",
                "filter": { "match": { "department": "management" }}
              }
            },
            {
              "add": {
                "index": "employees2",
                "alias": "alias-employees2"
              }
            }
          ]
        }
        """);
    assertOK(performRequestAgainstFulfillingCluster(createAlias));
}
/**
 * Removes the four aliases created by {@link #createAliases()} from the
 * fulfilling cluster.
 */
private void removeAliases() throws Exception {
    // NOTE(review): endpoint has a trailing slash ("/_aliases/") unlike createAliases'
    // "_aliases" — both appear to be accepted, but aligning them would be tidier; confirm
    var removeAlias = new Request("POST", "/_aliases/");
    removeAlias.setJsonEntity("""
        {
          "actions": [
            {
              "remove": {
                "index": "employees",
                "alias": "alias-employees"
              }
            },
            {
              "remove": {
                "index": "employees",
                "alias": "alias-engineering"
              }
            },
            {
              "remove": {
                "index": "employees",
                "alias": "alias-management"
              }
            },
            {
              "remove": {
                "index": "employees2",
                "alias": "alias-employees2"
              }
            }
          ]
        }
        """);
    assertOK(performRequestAgainstFulfillingCluster(removeAlias));
}
/**
 * Alias resolution and authorization for ES|QL CCS: the user is granted
 * {@code alias-engineering}, {@code employees2}, and (metadata-only privileges on)
 * {@code employees3}. Verifies that wildcard and concrete patterns return exactly the
 * docs visible through granted aliases/indices, that fully-unauthorized patterns fail
 * with 400 (unknown index), and that patterns mixing granted and concrete-but-ungranted
 * names fail with 403.
 */
public void testAlias() throws Exception {
    configureRemoteCluster();
    populateData();
    createAliases();
    var putRoleRequest = new Request("PUT", "/_security/role/" + REMOTE_SEARCH_ROLE);
    putRoleRequest.setJsonEntity("""
        {
          "indices": [{"names": [""], "privileges": ["read"]}],
          "cluster": ["cross_cluster_search"],
          "remote_indices": [
            {
              "names": ["alias-engineering"],
              "privileges": ["read"],
              "clusters": ["my_remote_cluster"]
            },
            {
              "names": ["employees2"],
              "privileges": ["read"],
              "clusters": ["my_remote_cluster"]
            },
            {
              "names": ["employees3"],
              "privileges": ["view_index_metadata", "read_cross_cluster"],
              "clusters": ["my_remote_cluster"]
            }
          ]
        }""");
    assertOK(adminClient().performRequest(putRoleRequest));
    // query `employees2`
    for (String index : List.of("*:employees2", "*:employee*")) {
        Request request = esqlRequest("FROM " + index + " | KEEP emp_id | SORT emp_id | LIMIT 100");
        Response response = performRequestWithRemoteSearchUser(request);
        assertOK(response);
        Map<String, Object> responseAsMap = entityAsMap(response);
        List<?> ids = (List<?>) responseAsMap.get("values");
        assertThat(ids, equalTo(List.of(List.of("11"), List.of("13"))));
    }
    // query `employees2` and `alias-engineering`
    for (var index : List.of("*:employees2,*:alias-engineering", "*:emp*,*:alias-engineering", "*:emp*,my*:alias*")) {
        Request request = esqlRequest("FROM " + index + " | KEEP emp_id | SORT emp_id | LIMIT 100");
        Response response = performRequestWithRemoteSearchUser(request);
        assertOK(response);
        Map<String, Object> responseAsMap = entityAsMap(response);
        List<?> ids = (List<?>) responseAsMap.get("values");
        // emp_ids sort lexicographically (keyword field), hence "7" after "13"
        assertThat(ids, equalTo(List.of(List.of("1"), List.of("11"), List.of("13"), List.of("7"))));
    }
    // none
    for (var index : List.of("*:employees1", "*:employees3", "*:employees1,employees3", "*:alias-employees,*:alias-management")) {
        Request request = esqlRequest("FROM " + index + " | KEEP emp_id | SORT emp_id | LIMIT 100");
        ResponseException error = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(request));
        assertThat(error.getResponse().getStatusLine().getStatusCode(), equalTo(400));
        assertThat(error.getMessage(), containsString("Unknown index [" + index + "]"));
    }
    // mixing a granted pattern with concrete ungranted names is a hard 403;
    // each tuple is (query pattern, indices named in the expected error message)
    for (var index : List.of(
        Tuple.tuple("*:employee*,*:alias-employees,*:employees3", "alias-employees,employees3"),
        Tuple.tuple("*:alias*,my*:employees1", "employees1"),
        Tuple.tuple("*:alias*,my*:employees3", "employees3")
    )) {
        Request request = esqlRequest("FROM " + index.v1() + " | KEEP emp_id | SORT emp_id | LIMIT 100");
        ResponseException error = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(request));
        assertThat(error.getResponse().getStatusLine().getStatusCode(), equalTo(403));
        assertThat(
            error.getMessage(),
            containsString("unauthorized for user [remote_search_user] with assigned roles [remote_search]")
        );
        assertThat(error.getMessage(), containsString("user [test_user] on indices [" + index.v2() + "]"));
    }
    // query `alias-engineering`
    Request request = esqlRequest("FROM *:alias* | KEEP emp_id | SORT emp_id | LIMIT 100");
    Response response = performRequestWithRemoteSearchUser(request);
    assertOK(response);
    Map<String, Object> responseAsMap = entityAsMap(response);
    List<?> ids = (List<?>) responseAsMap.get("values");
    assertThat(ids, equalTo(List.of(List.of("1"), List.of("7"))));
    removeAliases();
}
/**
 * Exercises ES|QL cross-cluster queries where some index expressions do not match any index,
 * covering both skip_unavailable=true and skip_unavailable=false remotes.
 * Verifies which combinations are fatal errors vs. recorded as "skipped" clusters in the
 * _clusters response metadata, for both basic-auth and API-key authentication, and for
 * LIMIT 1 (data-returning) as well as LIMIT 0 (metadata-only) queries.
 */
@SuppressWarnings("unchecked")
public void testSearchesAgainstNonMatchingIndices() throws Exception {
    boolean skipUnavailable = randomBoolean();
    configureRemoteCluster(REMOTE_CLUSTER_ALIAS, fulfillingCluster, false, randomBoolean(), skipUnavailable);
    populateData();
    {
        // Grant the remote-search role read access to employees* locally and remotely.
        final var putRoleRequest = new Request("PUT", "/_security/role/" + REMOTE_SEARCH_ROLE);
        putRoleRequest.setJsonEntity("""
            {
              "indices": [{"names": ["employees*"], "privileges": ["read","read_cross_cluster"]}],
              "cluster": [ "manage_own_api_key" ],
              "remote_indices": [
                {
                  "names": ["employees*"],
                  "privileges": ["read"],
                  "clusters": ["my_remote_cluster"]
                }
              ]
            }""");
        Response response = adminClient().performRequest(putRoleRequest);
        assertOK(response);
    }
    String remoteSearchUserAPIKey = createRemoteSearchUserAPIKey();

    // sanity check - init queries to ensure we can query employees on local and employees,employees2 on remote
    {
        Request request = esqlRequest("""
            FROM employees,my_remote_cluster:employees,my_remote_cluster:employees2
            | SORT emp_id ASC
            | LIMIT 5
            | KEEP emp_id, department""");

        CheckedConsumer<Response, Exception> verifier = resp -> {
            assertOK(resp);
            Map<String, Object> map = responseAsMap(resp);
            assertThat(((List<?>) map.get("columns")).size(), greaterThanOrEqualTo(1));
            assertThat(((List<?>) map.get("values")).size(), greaterThanOrEqualTo(1));
            assertExpectedClustersForMissingIndicesTests(
                map,
                List.of(
                    // local cluster is never marked as SKIPPED even when no matching indices - just marked as 0 shards searched
                    new ExpectedCluster("(local)", "nomatch*", "successful", null),
                    new ExpectedCluster(REMOTE_CLUSTER_ALIAS, "employees,employees2", "successful", null)
                )
            );
        };

        final Response response = performRequestWithRemoteSearchUser(request);
        assertOK(response);
        verifier.accept(performRequestWithRemoteSearchUser(request));
        verifier.accept(performRequestWithRemoteSearchUserViaAPIKey(request, remoteSearchUserAPIKey));
    }

    // missing concrete local index is an error
    {
        String q = "FROM employees_nomatch,my_remote_cluster:employees";

        Request limit1 = esqlRequest(q + " | LIMIT 1");
        ResponseException e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit1));
        assertThat(e.getMessage(), containsString("Unknown index [employees_nomatch]"));
        e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey));
        assertThat(e.getMessage(), containsString("Unknown index [employees_nomatch]"));

        Request limit0 = esqlRequest(q + " | LIMIT 0");
        e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit0));
        assertThat(e.getMessage(), containsString("Unknown index [employees_nomatch]"));
        e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey));
        assertThat(e.getMessage(), containsString("Unknown index [employees_nomatch]"));
    }

    // missing concrete remote index is fatal error when skip_unavailable=false
    {
        String q = "FROM employees,my_remote_cluster:employees_nomatch";

        CheckedBiConsumer<Response, Boolean, Exception> verifier = (resp, limit0) -> {
            assertOK(resp);
            Map<String, Object> map = responseAsMap(resp);
            assertThat(((List<?>) map.get("columns")).size(), greaterThanOrEqualTo(1));
            assertThat(((List<?>) map.get("values")).size(), limit0 ? equalTo(0) : greaterThanOrEqualTo(1));
            assertExpectedClustersForMissingIndicesTests(
                map,
                List.of(
                    new ExpectedCluster("(local)", "employees", "successful", limit0 ? 0 : null),
                    new ExpectedCluster(
                        REMOTE_CLUSTER_ALIAS,
                        "employees_nomatch",
                        "skipped",
                        0,
                        new ExpectedFailure("verification_exception", List.of("Unknown index", "my_remote_cluster:employees_nomatch"))
                    )
                )
            );
        };

        Request limit1 = esqlRequest(q + " | LIMIT 1");
        if (skipUnavailable == false) {
            // skip_unavailable=false: the missing remote index surfaces as a 4xx verification error
            ResponseException e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit1));
            assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]"));
            e = expectThrows(
                ResponseException.class,
                () -> performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey)
            );
            assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]"));
        } else {
            verifier.accept(performRequestWithRemoteSearchUser(limit1), false);
        }

        Request limit0 = esqlRequest(q + " | LIMIT 0");
        if (skipUnavailable == false) {
            ResponseException e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit0));
            assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]"));
            e = expectThrows(
                ResponseException.class,
                () -> performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey)
            );
            assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]"));
        } else {
            verifier.accept(performRequestWithRemoteSearchUser(limit0), true);
        }
    }

    // since there is at least one matching index in the query, the missing wildcarded local index is not an error
    {
        String q = "FROM employees_nomatch*,my_remote_cluster:employees";

        CheckedBiConsumer<Response, Boolean, Exception> verifier = (response, limit0) -> {
            assertOK(response);
            Map<String, Object> map = responseAsMap(response);
            assertThat(((List<?>) map.get("columns")).size(), greaterThanOrEqualTo(1));
            if (limit0) {
                assertThat(((List<?>) map.get("values")).size(), equalTo(0));
            } else {
                assertThat(((List<?>) map.get("values")).size(), greaterThanOrEqualTo(1));
            }
            assertExpectedClustersForMissingIndicesTests(
                map,
                List.of(
                    // local cluster is never marked as SKIPPED even when no matching indices - just marked as 0 shards searched
                    new ExpectedCluster("(local)", "employees_nomatch*", "successful", 0),
                    new ExpectedCluster(REMOTE_CLUSTER_ALIAS, "employees", "successful", limit0 ? 0 : null)
                )
            );
        };

        Request limit1 = esqlRequest(q + " | LIMIT 1");
        verifier.accept(performRequestWithRemoteSearchUser(limit1), false);
        verifier.accept(performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey), false);
        Request limit0 = esqlRequest(q + " | LIMIT 0");
        verifier.accept(performRequestWithRemoteSearchUser(limit0), true);
        verifier.accept(performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey), true);
    }

    // an error is thrown if there are no matching indices at all
    {
        // with non-matching concrete index
        String q = "FROM my_remote_cluster:employees_nomatch";

        Request limit1 = esqlRequest(q + " | LIMIT 1");
        ResponseException e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit1));
        assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]"));
        e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey));
        assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]"));

        Request limit0 = esqlRequest(q + " | LIMIT 0");
        e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit0));
        assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]"));
        e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey));
        assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]"));
    }

    // an error is thrown if there are no matching indices at all
    {
        String localExpr = randomFrom("nomatch", "nomatch*");
        String remoteExpr = randomFrom("nomatch", "nomatch*");
        String q = Strings.format("FROM %s,%s:%s", localExpr, REMOTE_CLUSTER_ALIAS, remoteExpr);

        Request limit1 = esqlRequest(q + " | LIMIT 1");
        ResponseException e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit1));
        assertThat(e.getMessage(), containsString("Unknown index"));
        assertThat(e.getMessage(), containsString(Strings.format("%s:%s", REMOTE_CLUSTER_ALIAS, remoteExpr)));
        e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey));
        assertThat(e.getMessage(), containsString("Unknown index"));
        assertThat(e.getMessage(), containsString(Strings.format("%s:%s", REMOTE_CLUSTER_ALIAS, remoteExpr)));

        Request limit0 = esqlRequest(q + " | LIMIT 0");
        e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit0));
        assertThat(e.getMessage(), containsString("Unknown index"));
        assertThat(e.getMessage(), containsString(Strings.format("%s:%s", REMOTE_CLUSTER_ALIAS, remoteExpr)));
        e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey));
        assertThat(e.getMessage(), containsString("Unknown index"));
        assertThat(e.getMessage(), containsString(Strings.format("%s:%s", REMOTE_CLUSTER_ALIAS, remoteExpr)));
    }

    // error since the remote cluster specified a concrete index that is not found
    {
        String q = "FROM employees,my_remote_cluster:employees_nomatch,my_remote_cluster:employees*";

        Request limit1 = esqlRequest(q + " | LIMIT 1");
        Request limit0 = esqlRequest(q + " | LIMIT 0");
        /* Example error:
         *{"error":{"root_cause":[{"type":"security_exception","reason":"action [indices:data/read/esql/cluster] towards
         * remote cluster is unauthorized for user [remote_search_user] with assigned roles [remote_search] authenticated by
         * API key id [zaeMK5MBeGk5jCIiFtqB] of user [test_user] on indices [employees_nomatch], this action is granted by
         * the index privileges [read,all]"}],"type":"security_exception","reason":"action [indices:data/read/esql/cluster]
         * towards remote cluster is unauthorized for user [remote_search_user] with assigned roles [remote_search] authenticated
         * by API key id [zaeMK5MBeGk5jCIiFtqB] of user [test_user] on indices [employees_nomatch], this action is granted by the
         * index privileges [read,all]"},"status":403}"
         */
        var userErrors = List.of("unauthorized for user [remote_search_user]", "of user [test_user]", "on indices [employees_nomatch]");
        /* Example error:
         * {"error":{"root_cause":[{"type":"security_exception","reason":"action [indices:data/read/esql/cluster] towards
         * remote cluster is unauthorized for API key id [sxuSK5MBSfGSGj4YFLyv] of user [remote_search_user] authenticated by
         * API key id [cUiRK5MB5j18U5stsvQj] of user [test_user] on indices [employees_nomatch], this action is granted by
         * the index privileges [read,all]"}],"type":"security_exception","reason":"action [indices:data/read/esql/cluster]
         * towards remote cluster is unauthorized for API key id [sxuSK5MBSfGSGj4YFLyv] of user [remote_search_user] authenticated
         * by API key id [cUiRK5MB5j18U5stsvQj] of user [test_user] on indices [employees_nomatch], this action is granted by the
         * index privileges [read,all]"},"status":403}"
         */
        var keyErrors = List.of("unauthorized for API key id", "of user [remote_search_user]", "on indices [employees_nomatch]");

        if (skipUnavailable == false) {
            ResponseException e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit1));
            assertThat(e.getMessage(), containsString("security_exception"));
            for (String error : userErrors) {
                assertThat(e.getMessage(), containsString(error));
            }
            e = expectThrows(
                ResponseException.class,
                () -> performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey)
            );
            assertThat(e.getMessage(), containsString("security_exception"));
            for (String error : keyErrors) {
                assertThat(e.getMessage(), containsString(error));
            }
            // TODO: in follow on PR, add support for throwing a VerificationException for this scenario - no exception is currently
            // thrown
            // Request limit0 = esqlRequest(q + " | LIMIT 0");
            // e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit0));
            // assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]"));
        } else {
            // skip_unavailable=true: the authorization failure is recorded as a skipped cluster, not an error
            TriConsumer<Response, Boolean, Collection<String>> verifier = (response, isLimit0, errors) -> {
                assertOK(response);
                Map<String, Object> map;
                try {
                    map = responseAsMap(response);
                } catch (Exception e) {
                    throw new RuntimeException(e);
                }
                assertThat(((List<?>) map.get("columns")).size(), greaterThanOrEqualTo(1));
                if (isLimit0) {
                    assertThat(((List<?>) map.get("values")).size(), equalTo(0));
                } else {
                    assertThat(((List<?>) map.get("values")).size(), greaterThanOrEqualTo(1));
                }
                assertExpectedClustersForMissingIndicesTests(
                    map,
                    List.of(
                        new ExpectedCluster("(local)", "employees", "successful", isLimit0 ? 0 : null),
                        // FIXME: this actually should produce SUCCESS since "employees" exists, but not implemented yet
                        new ExpectedCluster(
                            REMOTE_CLUSTER_ALIAS,
                            "employees_nomatch,employees*",
                            // TODO: LIMIT 0 produces "successful" here since no runtime check is performed. This is probably wrong.
                            isLimit0 ? "successful" : "skipped",
                            0,
                            isLimit0 ? null : new ExpectedFailure("security_exception", errors)
                        )
                    )
                );
            };
            verifier.apply(performRequestWithRemoteSearchUser(limit1), false, userErrors);
            verifier.apply(performRequestWithRemoteSearchUser(limit0), true, userErrors);
            verifier.apply(performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey), false, keyErrors);
            verifier.apply(performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey), true, keyErrors);
        }
    }
}
/**
 * Verifies async ES|QL cross-cluster queries: the query can be submitted and polled while running,
 * a different user cannot read the async result (404), and the owner can delete it.
 * Uses the snapshot-only {@code delay()} function to keep the query running long enough to poll.
 */
public void testCrossClusterAsyncQuery() throws Exception {
    assumeTrue("delay() is only available in snapshot builds", Build.current().isSnapshot());
    configureRemoteCluster();
    populateData();
    String otherUser = populateOtherUser();

    // Adding a delay there so that the async query is not completed before we check the status
    Request request = esqlRequestAsync("""
        FROM employees, *:employees
        | SORT emp_id ASC
        | LIMIT 10
        | WHERE delay(10ms)
        | KEEP emp_id, department""");

    Response response = performRequestWithRemoteSearchUser(request);
    assertOK(response);
    Map<String, Object> responseAsMap = entityAsMap(response);
    // If the query already finished there is nothing to poll; bail out rather than fail spuriously.
    assumeTrue("Query finished too fast, can not test", (boolean) responseAsMap.get("is_running"));
    String asyncId = (String) responseAsMap.get("id");

    response = performRequestWithRemoteSearchUser(esqlAsyncGetRequest(asyncId));
    assertOK(response);
    responseAsMap = entityAsMap(response);
    assertThat(responseAsMap.get("is_running"), equalTo(true));

    // Other user can't see the async query
    ResponseException error = expectThrows(
        ResponseException.class,
        () -> performRequestWithUser(esqlAsyncGetRequest(asyncId), otherUser)
    );
    assertThat(error.getResponse().getStatusLine().getStatusCode(), equalTo(404));
    assertThat(error.getMessage(), containsString("resource_not_found_exception"));

    // Clean up
    response = performRequestWithRemoteSearchUser(esqlAsyncDeleteRequest(asyncId));
    assertOK(response);
}
/**
 * Verifies the async-query stop endpoint for cross-cluster ES|QL: the owner can stop a running
 * query (after which {@code is_running} is false), while a different user gets a 404, and the
 * owner can delete the stopped query afterwards.
 */
public void testCrossClusterAsyncQueryStop() throws Exception {
    assumeTrue("delay() is only available in snapshot builds", Build.current().isSnapshot());
    configureRemoteCluster();
    populateData();
    String otherUser = populateOtherUser();

    // query remote cluster only
    Request request = esqlRequestAsync("""
        FROM employees, *:employees
        | SORT emp_id ASC
        | LIMIT 10
        | WHERE delay(10ms)
        | KEEP emp_id, department""");

    Response response = performRequestWithRemoteSearchUser(request);
    assertOK(response);
    Map<String, Object> responseAsMap = entityAsMap(response);
    assertThat(responseAsMap.get("is_running"), equalTo(true));
    String asyncId = (String) responseAsMap.get("id");

    response = performRequestWithRemoteSearchUser(esqlAsyncGetRequest(asyncId));
    assertOK(response);
    responseAsMap = entityAsMap(response);
    assertThat(responseAsMap.get("is_running"), equalTo(true));

    // Other user can't see the async query
    ResponseException error = expectThrows(
        ResponseException.class,
        () -> performRequestWithUser(esqlAsyncStopRequest(asyncId), otherUser)
    );
    assertThat(error.getResponse().getStatusLine().getStatusCode(), equalTo(404));
    assertThat(error.getMessage(), containsString("resource_not_found_exception"));

    // Stopping as the owner succeeds and the query is no longer running.
    response = performRequestWithRemoteSearchUser(esqlAsyncStopRequest(asyncId));
    assertOK(response);
    responseAsMap = entityAsMap(response);
    assertThat(responseAsMap.get("is_running"), equalTo(false));

    // Clean up
    response = performRequestWithRemoteSearchUser(esqlAsyncDeleteRequest(asyncId));
    assertOK(response);
}
/**
 * Builds a synchronous {@code POST _query} request carrying the given ES|QL command.
 */
protected Request esqlRequest(String command) throws IOException {
    Request queryRequest = new Request("POST", "_query");
    queryRequest.setJsonEntity(org.elasticsearch.common.Strings.toString(getBody(command, null)));
    return queryRequest;
}
/**
 * Builds an async {@code POST _query/async} request for the given ES|QL command with a very
 * short wait_for_completion_timeout so the call returns while the query is still running.
 */
protected Request esqlRequestAsync(String command) throws IOException {
    Request asyncRequest = new Request("POST", "_query/async");
    XContentBuilder payload = getBody(command, Map.of("wait_for_completion_timeout", "1ms"));
    asyncRequest.setJsonEntity(org.elasticsearch.common.Strings.toString(payload));
    return asyncRequest;
}
/**
 * Builds a {@code GET _query/async/<id>} poll request with a minimal completion timeout.
 */
protected Request esqlAsyncGetRequest(String asyncID) {
    var pollRequest = new Request("GET", "_query/async/" + asyncID);
    pollRequest.addParameter("wait_for_completion_timeout", "1ms");
    return pollRequest;
}
/**
 * Builds a {@code POST _query/async/<id>/stop} request for the given async query id.
 */
protected Request esqlAsyncStopRequest(String asyncID) {
    return new Request("POST", "_query/async/" + asyncID + "/stop");
}
/**
 * Builds a {@code DELETE _query/async/<id>} request for the given async query id.
 */
protected Request esqlAsyncDeleteRequest(String asyncID) {
    return new Request("DELETE", "_query/async/" + asyncID);
}
/**
 * Builds the JSON request body for an ES|QL query.
 * <p>
 * Always sets {@code query} and {@code include_ccs_metadata: true}. On snapshot builds it
 * randomly adds a {@code pragma} object with randomized execution settings to widen coverage.
 *
 * @param command     the ES|QL query text
 * @param extraParams optional extra top-level body fields (e.g. wait_for_completion_timeout), may be null
 * @return the populated builder (object already closed)
 * @throws IOException if building the JSON content fails
 */
private static XContentBuilder getBody(String command, @Nullable Map<String, String> extraParams) throws IOException {
    XContentBuilder body = JsonXContent.contentBuilder();
    body.startObject();
    body.field("query", command);
    body.field("include_ccs_metadata", true);
    if (Build.current().isSnapshot() && randomBoolean()) {
        Settings.Builder settings = Settings.builder();
        if (randomBoolean()) {
            settings.put("page_size", between(1, 5));
        }
        if (randomBoolean()) {
            settings.put("exchange_buffer_size", between(1, 2));
        }
        if (randomBoolean()) {
            settings.put("data_partitioning", randomFrom("shard", "segment", "doc"));
        }
        if (randomBoolean()) {
            settings.put("enrich_max_workers", between(1, 5));
        }
        Settings pragmas = settings.build();
        // Bug fix: the previous reference comparison (pragmas != Settings.EMPTY) was effectively
        // always true for a freshly built Settings instance, so an empty "pragma": {} object could
        // be emitted. Compare contents instead.
        if (pragmas.isEmpty() == false) {
            body.startObject("pragma");
            body.value(pragmas);
            body.endObject();
        }
    }
    if (extraParams != null) {
        // Plain loop lets the declared IOException propagate instead of being wrapped in an
        // opaque RuntimeException inside a forEach lambda.
        for (Map.Entry<String, String> param : extraParams.entrySet()) {
            body.field(param.getKey(), param.getValue());
        }
    }
    body.endObject();
    return body;
}
/**
 * Executes the request as the remote-search test user, authenticating via a randomly chosen
 * auth method (see headerFromRandomAuthMethod).
 */
private Response performRequestWithRemoteSearchUser(final Request request) throws IOException {
    RequestOptions.Builder authOptions = RequestOptions.DEFAULT.toBuilder();
    authOptions.addHeader("Authorization", headerFromRandomAuthMethod(REMOTE_SEARCH_USER, PASS));
    request.setOptions(authOptions);
    return client().performRequest(request);
}
/**
 * Executes the request as the given user, authenticating via a randomly chosen auth method.
 */
private Response performRequestWithUser(final Request request, String user) throws IOException {
    RequestOptions.Builder authOptions = RequestOptions.DEFAULT.toBuilder();
    authOptions.addHeader("Authorization", headerFromRandomAuthMethod(user, PASS));
    request.setOptions(authOptions);
    return client().performRequest(request);
}
/**
 * Executes the request authenticated with the given base64-encoded API key.
 */
private Response performRequestWithRemoteSearchUserViaAPIKey(Request request, String encodedApiKey) throws IOException {
    RequestOptions.Builder apiKeyOptions = RequestOptions.DEFAULT.toBuilder();
    apiKeyOptions.addHeader("Authorization", "ApiKey " + encodedApiKey);
    request.setOptions(apiKeyOptions);
    return client().performRequest(request);
}
/**
 * Creates an API key owned by the remote-search test user and returns its base64-encoded value.
 */
private String createRemoteSearchUserAPIKey() throws IOException {
    final Request createKey = new Request("POST", "_security/api_key");
    createKey.setJsonEntity("""
        {
          "name": "myapikey"
        }""");
    // ensure that the API key is created with the correct user
    createKey.setOptions(
        RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", headerFromRandomAuthMethod(REMOTE_SEARCH_USER, PASS))
    );
    Response createResponse = client().performRequest(createKey);
    assertOK(createResponse);
    return (String) responseAsMap(createResponse).get("encoded");
}
/**
 * Asserts the response contains exactly the two remote-only rows: emp_ids 1 and 3 with
 * departments "engineering" and "sales" (order-independent).
 */
@SuppressWarnings("unchecked")
private void assertRemoteOnlyResults(Response response) throws IOException {
    assertOK(response);
    Map<String, Object> responseAsMap = entityAsMap(response);
    List<?> columns = (List<?>) responseAsMap.get("columns");
    List<?> values = (List<?>) responseAsMap.get("values");
    assertEquals(2, columns.size());
    assertEquals(2, values.size());
    // Flatten the row-of-cells structure into a single list for an order-independent check.
    List<String> flatList = new ArrayList<>();
    for (Object row : values) {
        if (row instanceof List) {
            flatList.addAll((List<String>) row);
        }
    }
    assertThat(flatList, containsInAnyOrder("1", "3", "engineering", "sales"));
}
/**
 * Asserts the response (a query over two remote indices) contains exactly emp_ids 1 and 11,
 * both in "engineering" (order-independent).
 */
@SuppressWarnings("unchecked")
private void assertRemoteOnlyAgainst2IndexResults(Response response) throws IOException {
    assertOK(response);
    Map<String, Object> responseAsMap = entityAsMap(response);
    List<?> columns = (List<?>) responseAsMap.get("columns");
    List<?> values = (List<?>) responseAsMap.get("values");
    assertEquals(2, columns.size());
    assertEquals(2, values.size());
    // Flatten rows of cells into one list so the contents can be checked order-independently.
    List<String> flatList = values.stream()
        .flatMap(innerList -> innerList instanceof List ? ((List<String>) innerList).stream() : Stream.empty())
        .collect(Collectors.toList());
    assertThat(flatList, containsInAnyOrder("1", "11", "engineering", "engineering"));
}
/**
 * Asserts the response contains only local-cluster rows (emp_ids 2/4/6/8) and that the
 * _clusters metadata marks the unreachable "invalid_remote" cluster as skipped with a
 * connect_transport_exception failure, while the local cluster is successful.
 */
@SuppressWarnings("unchecked")
private void assertLocalOnlyResultsAndSkippedRemote(Response response) throws IOException {
    assertOK(response);
    Map<String, Object> responseAsMap = entityAsMap(response);
    List<?> columns = (List<?>) responseAsMap.get("columns");
    List<?> values = (List<?>) responseAsMap.get("values");
    assertEquals(2, columns.size());
    assertEquals(4, values.size());
    // Flatten rows of cells into one list for an order-independent content check.
    List<String> flatList = values.stream()
        .flatMap(innerList -> innerList instanceof List ? ((List<String>) innerList).stream() : Stream.empty())
        .collect(Collectors.toList());
    // local results
    assertThat(flatList, containsInAnyOrder("2", "4", "6", "8", "support", "management", "engineering", "marketing"));

    Map<String, ?> clusters = (Map<String, ?>) responseAsMap.get("_clusters");
    /*
    clusters map:
    {running=0, total=2, details={
       invalid_remote={_shards={total=0, failed=0, successful=0, skipped=0}, took=176, indices=employees,
                      failures=[{reason={reason=Unable to connect to [invalid_remote], type=connect_transport_exception},
                      index=null, shard=-1}], status=skipped},
       (local)={_shards={total=1, failed=0, successful=1, skipped=0}, took=298, indices=employees, status=successful}},
     failed=0, partial=0, successful=1, skipped=1}
     */
    assertThat((int) clusters.get("total"), equalTo(2));
    assertThat((int) clusters.get("successful"), equalTo(1));
    assertThat((int) clusters.get("skipped"), equalTo(1));

    Map<String, ?> details = (Map<String, ?>) clusters.get("details");
    Map<String, ?> invalidRemoteMap = (Map<String, ?>) details.get("invalid_remote");
    assertThat(invalidRemoteMap.get("status").toString(), equalTo("skipped"));
    List<?> failures = (List<?>) invalidRemoteMap.get("failures");
    assertThat(failures.size(), equalTo(1));
    Map<String, ?> failureMap = (Map<String, ?>) failures.get(0);
    Map<String, ?> reasonMap = (Map<String, ?>) failureMap.get("reason");
    assertThat(reasonMap.get("reason").toString(), containsString("Unable to connect to [invalid_remote]"));
    assertThat(reasonMap.get("type").toString(), containsString("connect_transport_exception"));

    Map<String, ?> localCluster = (Map<String, ?>) details.get("(local)");
    assertThat(localCluster.get("status").toString(), equalTo("successful"));
}
/**
 * Asserts the response contains the combined local + remote result set: emp_ids 1-9 with their
 * expected departments (order-independent).
 */
@SuppressWarnings("unchecked")
private void assertRemoteAndLocalResults(Response response) throws IOException {
    assertOK(response);
    Map<String, Object> responseAsMap = entityAsMap(response);
    List<?> columns = (List<?>) responseAsMap.get("columns");
    List<?> values = (List<?>) responseAsMap.get("values");
    assertEquals(2, columns.size());
    assertEquals(9, values.size());
    // Flatten rows of cells into one list for an order-independent content check.
    List<String> flatList = values.stream()
        .flatMap(innerList -> innerList instanceof List ? ((List<String>) innerList).stream() : Stream.empty())
        .collect(Collectors.toList());
    assertThat(
        flatList,
        containsInAnyOrder(
            "1",
            "2",
            "3",
            "4",
            "5",
            "6",
            "7",
            "8",
            "9",
            "engineering",
            "engineering",
            "engineering",
            "management",
            "sales",
            "sales",
            "marketing",
            "marketing",
            "support"
        )
    );
}
/**
 * Asserts the enrich-query response contains counts 2 and 3 for the countries "usa" and
 * "canada" (order-independent).
 */
private void assertWithEnrich(Response response) throws IOException {
    assertOK(response);
    Map<String, Object> responseAsMap = entityAsMap(response);
    List<?> columns = (List<?>) responseAsMap.get("columns");
    List<?> values = (List<?>) responseAsMap.get("values");
    assertEquals(2, columns.size());
    assertEquals(2, values.size());
    // Flatten the row-of-cells structure into a single list for an order-independent check.
    List<Object> flatList = new ArrayList<>();
    for (Object row : values) {
        if (row instanceof List) {
            flatList.addAll((List<?>) row);
        }
    }
    assertThat(flatList, containsInAnyOrder(2, 3, "usa", "canada"));
}
/** Expected failure for a skipped cluster: the exception type plus substrings the reason message must contain. */
record ExpectedFailure(String type, Collection<String> messages) {}
/**
 * Expected per-cluster entry in the _clusters.details response metadata.
 * {@code totalShards} of null means "any positive shard count"; {@code failure} is only
 * checked for skipped clusters.
 */
record ExpectedCluster(String clusterAlias, String indexExpression, String status, Integer totalShards, ExpectedFailure failure) {
    // Convenience constructor for clusters where no failure is expected.
    ExpectedCluster(String clusterAlias, String indexExpression, String status, Integer totalShards) {
        this(clusterAlias, indexExpression, status, totalShards, null);
    }
}
/**
 * Asserts the _clusters section of an ES|QL response matches the given expectations:
 * aggregate successful/skipped/failed counters, plus per-cluster status, shard accounting,
 * and (for skipped clusters) the recorded failure type and reason substrings.
 */
@SuppressWarnings("unchecked")
void assertExpectedClustersForMissingIndicesTests(Map<String, Object> responseMap, List<ExpectedCluster> expected) {
    Map<String, ?> clusters = (Map<String, ?>) responseMap.get("_clusters");
    assertThat((int) responseMap.get("took"), greaterThan(0));

    Map<String, ?> detailsMap = (Map<String, ?>) clusters.get("details");
    assertThat(detailsMap.size(), is(expected.size()));

    // Aggregate counters must agree with the per-cluster expected statuses.
    assertThat((int) clusters.get("total"), is(expected.size()));
    assertThat((int) clusters.get("successful"), is((int) expected.stream().filter(ec -> ec.status().equals("successful")).count()));
    assertThat((int) clusters.get("skipped"), is((int) expected.stream().filter(ec -> ec.status().equals("skipped")).count()));
    assertThat((int) clusters.get("failed"), is((int) expected.stream().filter(ec -> ec.status().equals("failed")).count()));

    for (ExpectedCluster expectedCluster : expected) {
        Map<String, ?> clusterDetails = (Map<String, ?>) detailsMap.get(expectedCluster.clusterAlias());
        String msg = expectedCluster.clusterAlias();

        assertThat(msg, (int) clusterDetails.get("took"), greaterThan(0));
        assertThat(msg, clusterDetails.get("status"), is(expectedCluster.status()));
        Map<String, ?> shards = (Map<String, ?>) clusterDetails.get("_shards");
        if (expectedCluster.totalShards() == null) {
            // null totalShards means "any positive count" (see ExpectedCluster)
            assertThat(msg, (int) shards.get("total"), greaterThan(0));
        } else {
            assertThat(msg, (int) shards.get("total"), is(expectedCluster.totalShards()));
        }

        if (expectedCluster.status().equals("successful")) {
            assertThat((int) shards.get("successful"), is((int) shards.get("total")));
            assertThat((int) shards.get("skipped"), is(0));

        } else if (expectedCluster.status().equals("skipped")) {
            assertThat((int) shards.get("successful"), is(0));
            assertThat((int) shards.get("skipped"), is((int) shards.get("total")));
            ArrayList<?> failures = (ArrayList<?>) clusterDetails.get("failures");
            assertThat(failures.size(), is(1));
            Map<String, ?> failure1 = (Map<String, ?>) failures.get(0);
            Map<String, ?> innerReason = (Map<String, ?>) failure1.get("reason");
            if (expectedCluster.failure() != null) {
                for (var f : expectedCluster.failure().messages()) {
                    assertThat(innerReason.get("reason").toString(), containsString(f));
                }
                assertThat(innerReason.get("type").toString(), containsString(expectedCluster.failure().type()));
            }
        } else {
            fail(msg + "; Unexpected status: " + expectedCluster.status());
        }
        // currently failed shards is always zero - change this once we start allowing partial data for individual shard failures
        assertThat((int) shards.get("failed"), is(0));
    }
}
/**
 * Fetches the license feature-usage list from the given node and returns the entry named
 * "esql-ccs", or an empty map if that feature has not been recorded.
 */
private static Map<String, Object> fetchEsqlCcsFeatureUsageFromNode(RestClient client) throws IOException {
    Request usageRequest = new Request(HttpGet.METHOD_NAME, "_license/feature_usage");
    usageRequest.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", basicAuthHeaderValue(USER, PASS)));
    ObjectPath responsePath = ObjectPath.createFromResponse(client.performRequest(usageRequest));
    List<Map<String, Object>> features = responsePath.evaluate("features");
    // Linear scan for the ES|QL CCS feature entry; absence is reported as an empty map.
    for (Map<String, Object> feature : features) {
        if ("esql-ccs".equals(feature.get("name"))) {
            return feature;
        }
    }
    return Collections.emptyMap();
}
}
| RemoteClusterSecurityEsqlIT |
java | elastic__elasticsearch | x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DataframeCpsIT.java | {
"start": 1367,
"end": 3288
} | class ____ extends MlSingleNodeTestCase {
@Override
protected Settings nodeSettings() {
return Settings.builder().put(super.nodeSettings()).put("serverless.cross_project.enabled", "true").build();
}
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return Stream.concat(super.getPlugins().stream(), Stream.of(CpsPlugin.class)).toList();
}
public void testCrossProjectFailsForDataFrameAnalytics() throws IOException {
var id = "test-cross-project-fails";
var sourceIndex = "project1:" + id + "_source_index";
var destIndex = id + "_results";
var config = new DataFrameAnalyticsConfig.Builder().setId(id)
.setSource(
new DataFrameAnalyticsSource(
new String[] { sourceIndex },
QueryProvider.fromParsedQuery(QueryBuilders.matchAllQuery()),
null,
Collections.emptyMap()
)
)
.setDest(new DataFrameAnalyticsDest(destIndex, null))
.setAnalysis(
new Classification(
"keyword-field",
BoostedTreeParams.builder().setNumTopFeatureImportanceValues(1).build(),
null,
null,
null,
null,
null,
null,
null
)
)
.build();
var request = new PutDataFrameAnalyticsAction.Request(config);
var response = client().execute(PutDataFrameAnalyticsAction.INSTANCE, request);
var validationException = assertThrows(ValidationException.class, response::actionGet);
assertThat(validationException.getMessage(), containsString("remote source and cross-project indices are not supported"));
}
public static | DataframeCpsIT |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/long2darrays/Long2DArrays_assertNullOrEmpty_Test.java | {
"start": 1017,
"end": 1282
} | class ____ extends Long2DArraysBaseTest {
@Test
void should_delegate_to_Arrays2D() {
// WHEN
long2dArrays.assertNullOrEmpty(info, actual);
// THEN
verify(arrays2d).assertNullOrEmpty(info, failures, actual);
}
}
| Long2DArrays_assertNullOrEmpty_Test |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/SchedulerEndpointBuilderFactory.java | {
"start": 30486,
"end": 32678
} | interface ____ {
/**
* Scheduler (camel-scheduler)
* Generate messages in specified intervals using
* java.util.concurrent.ScheduledExecutorService.
*
* Category: core,scheduling
* Since: 2.15
* Maven coordinates: org.apache.camel:camel-scheduler
*
* @return the dsl builder for the headers' name.
*/
default SchedulerHeaderNameBuilder scheduler() {
return SchedulerHeaderNameBuilder.INSTANCE;
}
/**
* Scheduler (camel-scheduler)
* Generate messages in specified intervals using
* java.util.concurrent.ScheduledExecutorService.
*
* Category: core,scheduling
* Since: 2.15
* Maven coordinates: org.apache.camel:camel-scheduler
*
* Syntax: <code>scheduler:name</code>
*
* Path parameter: name (required)
* The name of the scheduler
*
* @param path name
* @return the dsl builder
*/
default SchedulerEndpointBuilder scheduler(String path) {
return SchedulerEndpointBuilderFactory.endpointBuilder("scheduler", path);
}
/**
* Scheduler (camel-scheduler)
* Generate messages in specified intervals using
* java.util.concurrent.ScheduledExecutorService.
*
* Category: core,scheduling
* Since: 2.15
* Maven coordinates: org.apache.camel:camel-scheduler
*
* Syntax: <code>scheduler:name</code>
*
* Path parameter: name (required)
* The name of the scheduler
*
* @param componentName to use a custom component name for the endpoint
* instead of the default name
* @param path name
* @return the dsl builder
*/
default SchedulerEndpointBuilder scheduler(String componentName, String path) {
return SchedulerEndpointBuilderFactory.endpointBuilder(componentName, path);
}
}
/**
* The builder of headers' name for the Scheduler component.
*/
public static | SchedulerBuilders |
java | apache__maven | impl/maven-impl/src/main/java/org/apache/maven/impl/cache/Cache.java | {
"start": 4776,
"end": 5445
} | interface ____ {
/**
* Called when a key is evicted from the cache.
*/
void onKeyEviction();
/**
* Called when a value is evicted from the cache.
*/
void onValueEviction();
}
/**
* A concurrent map implementation that uses configurable reference types for both keys and values,
* and supports automatic cleanup of garbage-collected entries.
* <p>
* This implementation is package-private and accessed through the {@link Cache} interface.
*
* @param <K> the type of keys maintained by this map
* @param <V> the type of mapped values
*/
| EvictionListener |
java | google__dagger | javatests/dagger/hilt/android/testing/testinstallin/TestInstallInModules.java | {
"start": 1788,
"end": 2027
} | interface ____ {
@Provides
static Bar provideFoo() {
return new Bar(SingletonBarModule.class);
}
}
@Module
@TestInstallIn(components = SingletonComponent.class, replaces = SingletonFooModule.class)
| SingletonBarModule |
java | apache__spark | sql/core/src/test/java/test/org/apache/spark/sql/execution/datasources/xml/JavaXmlSuite.java | {
"start": 1364,
"end": 4137
} | class ____ {
private static final int numBooks = 12;
private static final String booksFile = "src/test/resources/test-data/xml-resources/books.xml";
private static final String booksFileTag = "book";
private SparkSession spark;
private Path tempDir;
private static void setEnv(String key, String value) {
try {
Map<String, String> env = System.getenv();
Class<?> cl = env.getClass();
Field field = cl.getDeclaredField("m");
field.setAccessible(true);
Map<String, String> writableEnv = (Map<String, String>) field.get(env);
writableEnv.put(key, value);
} catch (Exception e) {
throw new IllegalStateException("Failed to set environment variable", e);
}
}
@BeforeEach
public void setUp() throws IOException {
setEnv("SPARK_LOCAL_IP", "127.0.0.1");
spark = SparkSession.builder()
.master("local[2]")
.appName("XmlSuite")
.config("spark.ui.enabled", false)
.getOrCreate();
spark.sparkContext().setLogLevel("WARN");
tempDir = Files.createTempDirectory("JavaXmlSuite");
tempDir.toFile().deleteOnExit();
}
@AfterEach
public void tearDown() {
spark.stop();
spark = null;
}
private Path getEmptyTempDir() throws IOException {
return Files.createTempDirectory(tempDir, "test");
}
@Test
public void testXmlParser() {
Map<String, String> options = new HashMap<>();
options.put("rowTag", booksFileTag);
Dataset<Row> df = spark.read().options(options).xml(booksFile);
String prefix = XmlOptions.DEFAULT_ATTRIBUTE_PREFIX();
long result = df.select(prefix + "id").count();
Assertions.assertEquals(result, numBooks);
}
@Test
public void testLoad() {
Map<String, String> options = new HashMap<>();
options.put("rowTag", booksFileTag);
Dataset<Row> df = spark.read().options(options).xml(booksFile);
long result = df.select("description").count();
Assertions.assertEquals(result, numBooks);
}
@Test
public void testSave() throws IOException {
Map<String, String> options = new HashMap<>();
options.put("rowTag", booksFileTag);
Path booksPath = getEmptyTempDir().resolve("booksFile");
Dataset<Row> df = spark.read().options(options).xml(booksFile);
df.select("price", "description").write().options(options).xml(booksPath.toString());
Dataset<Row> newDf = spark.read().options(options).xml(booksPath.toString());
long result = newDf.select("price").count();
Assertions.assertEquals(result, numBooks);
}
}
| JavaXmlSuite |
java | spring-projects__spring-framework | spring-web/src/test/java/org/springframework/web/filter/reactive/ServerWebExchangeContextFilterTests.java | {
"start": 1754,
"end": 2198
} | class ____ {
private final AtomicReference<ServerWebExchange> exchangeRef = new AtomicReference<>();
public ServerWebExchange getExchange() {
return this.exchangeRef.get();
}
public Mono<String> service() {
return Mono.just("result")
.transformDeferredContextual((mono, contextView) -> {
ServerWebExchangeContextFilter.getExchange(contextView).ifPresent(exchangeRef::set);
return mono;
});
}
}
}
| MyService |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.