language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
google__dagger
|
javatests/dagger/internal/codegen/ComponentProcessorTest.java
|
{
"start": 33686,
"end": 33863
}
|
class ____ {}");
Source otherAFile =
CompilerTests.javaSource(
"other.test.A",
"package other.test;",
"",
"public final
|
A
|
java
|
apache__logging-log4j2
|
log4j-core/src/main/java/org/apache/logging/log4j/core/net/UrlConnectionFactory.java
|
{
"start": 1839,
"end": 1892
}
|
class ____ be considered to be internal
*/
public
|
should
|
java
|
playframework__playframework
|
documentation/manual/working/javaGuide/main/ws/code/javaguide/ws/StandaloneWithConfig.java
|
{
"start": 797,
"end": 1835
}
|
class ____ {
@Test
public void testMe() throws IOException {
// #ws-standalone-with-config
// Set up Pekko
String name = "wsclient";
ActorSystem system = ActorSystem.create(name);
Materializer materializer = Materializer.matFromSystem(system);
// Read in config file from application.conf
Config conf = ConfigFactory.load();
WSConfigParser parser = new WSConfigParser(conf, ClassLoader.getSystemClassLoader());
AhcWSClientConfig clientConf = AhcWSClientConfigFactory.forClientConfig(parser.parse());
// Start up asynchttpclient
final DefaultAsyncHttpClientConfig asyncHttpClientConfig =
new AhcConfigBuilder(clientConf).configure().build();
final DefaultAsyncHttpClient asyncHttpClient =
new DefaultAsyncHttpClient(asyncHttpClientConfig);
// Create a new WSClient, and then close the client.
WSClient client = new AhcWSClient(asyncHttpClient, materializer);
client.close();
system.terminate();
// #ws-standalone-with-config
}
}
|
StandaloneWithConfig
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/id/usertype/json/JsonType.java
|
{
"start": 284,
"end": 570
}
|
class ____ extends AbstractSingleColumnStandardBasicType<Json> {
public static final JsonType INSTANCE = new JsonType();
public JsonType() {
super( new PostgreSQLJsonPGObjectJsonbType(), JsonJavaType.INSTANCE );
}
@Override
public String getName() {
return "json";
}
}
|
JsonType
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/merge/MergeUnsavedEntitiesTest.java
|
{
"start": 5425,
"end": 5782
}
|
class ____ {
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
private Long id;
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "BookID")
private Book book;
private String note;
public Long getId() {
return id;
}
public Book getBook() {
return book;
}
public String getNote() {
return note;
}
}
}
|
BookNote
|
java
|
spring-projects__spring-framework
|
spring-messaging/src/test/java/org/springframework/messaging/simp/SimpAttributesTests.java
|
{
"start": 1129,
"end": 3765
}
|
class ____ {
private final Map<String, Object> map = new ConcurrentHashMap<>();
private final SimpAttributes simpAttributes = new SimpAttributes("session1", this.map);
@Test
void getAttribute() {
this.simpAttributes.setAttribute("name1", "value1");
assertThat(this.simpAttributes.getAttribute("name1")).isEqualTo("value1");
assertThat(this.simpAttributes.getAttribute("name2")).isNull();
}
@Test
void getAttributeNames() {
this.simpAttributes.setAttribute("name1", "value1");
this.simpAttributes.setAttribute("name2", "value1");
this.simpAttributes.setAttribute("name3", "value1");
assertThat(this.simpAttributes.getAttributeNames())
.containsExactlyInAnyOrder("name1", "name2", "name3");
}
@Test
void registerDestructionCallback() {
Runnable callback = mock();
this.simpAttributes.registerDestructionCallback("name1", callback);
assertThat(this.simpAttributes.getAttribute(
SimpAttributes.DESTRUCTION_CALLBACK_NAME_PREFIX + "name1")).isSameAs(callback);
}
@Test
void registerDestructionCallbackAfterSessionCompleted() {
this.simpAttributes.sessionCompleted();
assertThatIllegalStateException()
.isThrownBy(() -> this.simpAttributes.registerDestructionCallback("name1", mock()))
.withMessageContaining("already completed");
}
@Test
void removeDestructionCallback() {
Runnable callback1 = mock();
Runnable callback2 = mock();
this.simpAttributes.registerDestructionCallback("name1", callback1);
this.simpAttributes.registerDestructionCallback("name2", callback2);
assertThat(this.simpAttributes.getAttributeNames()).hasSize(2);
}
@Test
void getSessionMutex() {
assertThat(this.simpAttributes.getSessionMutex()).isSameAs(this.map);
}
@Test
void getSessionMutexExplicit() {
Object mutex = new Object();
this.simpAttributes.setAttribute(SimpAttributes.SESSION_MUTEX_NAME, mutex);
assertThat(this.simpAttributes.getSessionMutex()).isSameAs(mutex);
}
@Test
void sessionCompleted() {
Runnable callback1 = mock();
Runnable callback2 = mock();
this.simpAttributes.registerDestructionCallback("name1", callback1);
this.simpAttributes.registerDestructionCallback("name2", callback2);
this.simpAttributes.sessionCompleted();
verify(callback1, times(1)).run();
verify(callback2, times(1)).run();
}
@Test
void sessionCompletedIsIdempotent() {
Runnable callback1 = mock();
this.simpAttributes.registerDestructionCallback("name1", callback1);
this.simpAttributes.sessionCompleted();
this.simpAttributes.sessionCompleted();
this.simpAttributes.sessionCompleted();
verify(callback1, times(1)).run();
}
}
|
SimpAttributesTests
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/collections/type/QueueType.java
|
{
"start": 676,
"end": 2163
}
|
class ____ implements UserCollectionType {
@Override
public CollectionClassification getClassification() {
return CollectionClassification.BAG;
}
@Override
public Class<?> getCollectionClass() {
return Queue.class;
}
@Override
public PersistentCollection instantiate(
SharedSessionContractImplementor session,
CollectionPersister persister) throws HibernateException {
return new PersistentQueue(session);
}
@Override
public PersistentCollection wrap(
SharedSessionContractImplementor session,
Object collection) {
return new PersistentQueue(session, (List) collection);
}
@Override
public Iterator getElementsIterator(Object collection) {
return ((Queue) collection).iterator();
}
@Override
public boolean contains(Object collection, Object entity) {
return ((Queue) collection).contains(entity);
}
@Override
public Object indexOf(Object collection, Object entity) {
int i = ((List) collection).indexOf(entity);
return (i < 0) ? null : i;
}
@Override
public Object replaceElements(
Object original,
Object target,
CollectionPersister persister,
Object owner,
Map copyCache,
SharedSessionContractImplementor session)
throws HibernateException {
Queue result = (Queue) target;
result.clear();
result.addAll((Queue) original);
return result;
}
@Override
public Object instantiate(int anticipatedSize) {
return new LinkedList<>();
}
}
//end::collections-custom-collection-mapping-example[]
|
QueueType
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/shortarrays/ShortArrays_assertIsSortedAccordingToComparator_Test.java
|
{
"start": 1632,
"end": 3777
}
|
class ____ extends ShortArraysBaseTest {
private Comparator<Short> shortDescendingOrderComparator;
private Comparator<Short> shortAscendingOrderComparator;
@Override
@BeforeEach
public void setUp() {
super.setUp();
actual = new short[] { 4, 3, 2, 2, 1 };
shortDescendingOrderComparator = (short1, short2) -> -short1.compareTo(short2);
shortAscendingOrderComparator = (short1, short2) -> short1.compareTo(short2);
}
@Test
void should_pass_if_actual_is_sorted_according_to_given_comparator() {
arrays.assertIsSortedAccordingToComparator(someInfo(), actual, shortDescendingOrderComparator);
}
@Test
void should_pass_if_actual_is_empty_whatever_given_comparator_is() {
arrays.assertIsSortedAccordingToComparator(someInfo(), emptyArray(), shortDescendingOrderComparator);
arrays.assertIsSortedAccordingToComparator(someInfo(), emptyArray(), shortAscendingOrderComparator);
}
@Test
void should_fail_if_actual_is_null() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> arrays.assertIsSortedAccordingToComparator(someInfo(), null,
shortDescendingOrderComparator))
.withMessage(actualIsNull());
}
@Test
void should_fail_if_comparator_is_null() {
assertThatNullPointerException().isThrownBy(() -> arrays.assertIsSortedAccordingToComparator(someInfo(), emptyArray(), null));
}
@Test
void should_fail_if_actual_is_not_sorted_according_to_given_comparator() {
AssertionInfo info = someInfo();
actual = new short[] { 3, 2, 1, 9 };
Throwable error = catchThrowable(() -> arrays.assertIsSortedAccordingToComparator(info, actual,
shortDescendingOrderComparator));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldBeSortedAccordingToGivenComparator(2, actual, shortDescendingOrderComparator));
}
}
|
ShortArrays_assertIsSortedAccordingToComparator_Test
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/KMS2EndpointBuilderFactory.java
|
{
"start": 19269,
"end": 21291
}
|
interface ____ {
/**
* AWS Key Management Service (KMS) (camel-aws2-kms)
* Manage keys stored in AWS KMS instances.
*
* Category: cloud,management
* Since: 3.1
* Maven coordinates: org.apache.camel:camel-aws2-kms
*
* @return the dsl builder for the headers' name.
*/
default KMS2HeaderNameBuilder aws2Kms() {
return KMS2HeaderNameBuilder.INSTANCE;
}
/**
* AWS Key Management Service (KMS) (camel-aws2-kms)
* Manage keys stored in AWS KMS instances.
*
* Category: cloud,management
* Since: 3.1
* Maven coordinates: org.apache.camel:camel-aws2-kms
*
* Syntax: <code>aws2-kms:label</code>
*
* Path parameter: label (required)
* Logical name
*
* @param path label
* @return the dsl builder
*/
default KMS2EndpointBuilder aws2Kms(String path) {
return KMS2EndpointBuilderFactory.endpointBuilder("aws2-kms", path);
}
/**
* AWS Key Management Service (KMS) (camel-aws2-kms)
* Manage keys stored in AWS KMS instances.
*
* Category: cloud,management
* Since: 3.1
* Maven coordinates: org.apache.camel:camel-aws2-kms
*
* Syntax: <code>aws2-kms:label</code>
*
* Path parameter: label (required)
* Logical name
*
* @param componentName to use a custom component name for the endpoint
* instead of the default name
* @param path label
* @return the dsl builder
*/
default KMS2EndpointBuilder aws2Kms(String componentName, String path) {
return KMS2EndpointBuilderFactory.endpointBuilder(componentName, path);
}
}
/**
* The builder of headers' name for the AWS Key Management Service (KMS) component.
*/
public static
|
KMS2Builders
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/annotation/web/configurers/CsrfConfigurerIgnoringRequestMatchersTests.java
|
{
"start": 4787,
"end": 5276
}
|
class ____ {
RequestMatcher requestMatcher = (request) -> HttpMethod.POST.name().equals(request.getMethod());
@Bean
SecurityFilterChain filterChain(HttpSecurity http) throws Exception {
// @formatter:off
http
.csrf((csrf) -> csrf
.requireCsrfProtectionMatcher(pathPattern("/path"))
.ignoringRequestMatchers(this.requestMatcher));
return http.build();
// @formatter:on
}
}
@Configuration
@EnableWebSecurity
@EnableWebMvc
static
|
IgnoringRequestMatchers
|
java
|
apache__logging-log4j2
|
log4j-1.2-api/src/main/java/org/apache/log4j/helpers/QuietWriter.java
|
{
"start": 1119,
"end": 2178
}
|
class ____ extends FilterWriter {
protected ErrorHandler errorHandler;
public QuietWriter(final Writer writer, final ErrorHandler errorHandler) {
super(writer);
setErrorHandler(errorHandler);
}
@Override
public void write(final String string) {
if (string != null) {
try {
out.write(string);
} catch (Exception e) {
errorHandler.error("Failed to write [" + string + "].", e, ErrorCode.WRITE_FAILURE);
}
}
}
@Override
public void flush() {
try {
out.flush();
} catch (Exception e) {
errorHandler.error("Failed to flush writer,", e, ErrorCode.FLUSH_FAILURE);
}
}
public void setErrorHandler(final ErrorHandler eh) {
if (eh == null) {
// This is a programming error on the part of the enclosing appender.
throw new IllegalArgumentException("Attempted to set null ErrorHandler.");
}
this.errorHandler = eh;
}
}
|
QuietWriter
|
java
|
quarkusio__quarkus
|
integration-tests/kubernetes/quarkus-standard-way/src/test/java/io/quarkus/it/kubernetes/KubernetesWithNodeSelectorTest.java
|
{
"start": 534,
"end": 2229
}
|
class ____ {
@RegisterExtension
static final QuarkusProdModeTest config = new QuarkusProdModeTest()
.withApplicationRoot((jar) -> jar.addClasses(GreetingResource.class))
.setApplicationName("nodeselector")
.setApplicationVersion("0.1-SNAPSHOT")
.withConfigurationResource("kubernetes-with-nodeselector.properties");
@ProdBuildResults
private ProdModeTestResults prodModeTestResults;
@Test
public void assertGeneratedResources() throws IOException {
Map<String, String> expectedNodeSelector = Map.of("diskType", "ssd");
Path kubernetesDir = prodModeTestResults.getBuildDir().resolve("kubernetes");
assertThat(kubernetesDir)
.isDirectoryContaining(p -> p.getFileName().endsWith("kubernetes.json"))
.isDirectoryContaining(p -> p.getFileName().endsWith("kubernetes.yml"));
List<HasMetadata> kubernetesList = DeserializationUtil
.deserializeAsList(kubernetesDir.resolve("kubernetes.yml"));
assertThat(kubernetesList.get(0)).isInstanceOfSatisfying(Deployment.class, d -> {
assertThat(d.getMetadata()).satisfies(m -> {
assertThat(m.getName()).isEqualTo("nodeselector");
});
assertThat(d.getSpec()).satisfies(deploymentSpec -> {
assertThat(deploymentSpec.getTemplate()).satisfies(t -> {
assertThat(t.getSpec()).satisfies(podSpec -> {
assertThat(podSpec.getNodeSelector()).containsExactlyEntriesOf(expectedNodeSelector);
});
});
});
});
}
}
|
KubernetesWithNodeSelectorTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java
|
{
"start": 56799,
"end": 59024
}
|
class ____<T extends AuthorizationResult> implements ActionListener<T> {
private final Consumer<T> responseConsumer;
private final Consumer<Exception> failureConsumer;
private final RequestInfo requestInfo;
private final String requestId;
private final AuthorizationInfo authzInfo;
private AuthorizationResultListener(
Consumer<T> responseConsumer,
Consumer<Exception> failureConsumer,
RequestInfo requestInfo,
String requestId,
AuthorizationInfo authzInfo
) {
this.responseConsumer = responseConsumer;
this.failureConsumer = failureConsumer;
this.requestInfo = requestInfo;
this.requestId = requestId;
this.authzInfo = authzInfo;
}
@Override
public void onResponse(T result) {
if (result.isGranted()) {
auditTrailService.get()
.accessGranted(
requestId,
requestInfo.getAuthentication(),
requestInfo.getAction(),
requestInfo.getRequest(),
authzInfo
);
try {
responseConsumer.accept(result);
} catch (Exception e) {
failureConsumer.accept(e);
}
} else {
handleFailure(result.getFailureContext(requestInfo, restrictedIndices), null);
}
}
@Override
public void onFailure(Exception e) {
handleFailure(null, e);
}
private void handleFailure(@Nullable String context, @Nullable Exception e) {
Authentication authentication = requestInfo.getAuthentication();
String action = requestInfo.getAction();
TransportRequest request = requestInfo.getRequest();
auditTrailService.get().accessDenied(requestId, authentication, action, request, authzInfo);
failureConsumer.accept(actionDenied(authentication, authzInfo, action, request, context, e));
}
}
private static
|
AuthorizationResultListener
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/sort/IpBucketedSortTests.java
|
{
"start": 823,
"end": 2504
}
|
class ____ extends BucketedSortTestCase<IpBucketedSort, BytesRef> {
@Override
protected IpBucketedSort build(SortOrder sortOrder, int bucketSize) {
return new IpBucketedSort(bigArrays(), sortOrder, bucketSize);
}
@Override
protected BytesRef randomValue() {
return new BytesRef(InetAddressPoint.encode(randomIp(randomBoolean())));
}
@Override
protected List<BytesRef> threeSortedValues() {
return List.of(
new BytesRef(InetAddressPoint.encode(InetAddresses.forString("::"))),
new BytesRef(InetAddressPoint.encode(InetAddresses.forString("127.0.0.1"))),
new BytesRef(InetAddressPoint.encode(InetAddresses.forString("9999::")))
);
}
@Override
protected void collect(IpBucketedSort sort, BytesRef value, int bucket) {
sort.collect(value, bucket);
}
@Override
protected void merge(IpBucketedSort sort, int groupId, IpBucketedSort other, int otherGroupId) {
sort.merge(groupId, other, otherGroupId);
}
@Override
protected Block toBlock(IpBucketedSort sort, BlockFactory blockFactory, IntVector selected) {
return sort.toBlock(blockFactory, selected);
}
@Override
protected void assertBlockTypeAndValues(Block block, List<BytesRef> values) {
assertThat(block.elementType(), equalTo(ElementType.BYTES_REF));
var typedBlock = (BytesRefBlock) block;
var scratch = new BytesRef();
for (int i = 0; i < values.size(); i++) {
assertThat("expected value on block position " + i, typedBlock.getBytesRef(i, scratch), equalTo(values.get(i)));
}
}
}
|
IpBucketedSortTests
|
java
|
quarkusio__quarkus
|
extensions/elytron-security-jdbc/deployment/src/test/java/io/quarkus/elytron/security/jdbc/TestApplication.java
|
{
"start": 235,
"end": 313
}
|
class ____ extends Application {
// intentionally left empty
}
|
TestApplication
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/dialect/type/PostgreSQLJsonArrayPGObjectJsonJdbcTypeConstructor.java
|
{
"start": 549,
"end": 1258
}
|
class ____ implements JdbcTypeConstructor {
@Override
public JdbcType resolveType(
TypeConfiguration typeConfiguration,
Dialect dialect,
BasicType<?> elementType,
ColumnTypeInformation columnTypeInformation) {
return resolveType( typeConfiguration, dialect, elementType.getJdbcType(), columnTypeInformation );
}
@Override
public JdbcType resolveType(
TypeConfiguration typeConfiguration,
Dialect dialect,
JdbcType elementType,
ColumnTypeInformation columnTypeInformation) {
return new PostgreSQLJsonArrayPGObjectType( elementType, false );
}
@Override
public int getDefaultSqlTypeCode() {
return SqlTypes.JSON_ARRAY;
}
}
|
PostgreSQLJsonArrayPGObjectJsonJdbcTypeConstructor
|
java
|
apache__flink
|
flink-rpc/flink-rpc-akka/src/test/java/org/apache/flink/runtime/rpc/pekko/PekkoRpcActorOversizedResponseMessageTest.java
|
{
"start": 6737,
"end": 6905
}
|
interface ____ extends RpcGateway {
CompletableFuture<String> messageAsync();
String messageSync() throws RpcException;
}
static
|
MessageRpcGateway
|
java
|
apache__flink
|
flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/join/stream/MiniBatchStreamingJoinOperator.java
|
{
"start": 15396,
"end": 16345
}
|
class ____
extends MiniBatchStreamingJoinOperator {
public MiniBatchLeftOuterJoinStreamOperator(MiniBatchStreamingJoinParameter parameter) {
super(parameter);
}
@Override
protected void processBundles(BufferBundle<?> leftBuffer, BufferBundle<?> rightBuffer)
throws Exception {
// more efficient to process right first for left out join, i.e, some retractions can be
// avoided for timing-dependent left and right stream
// process right
this.processSingleSideBundles(
rightBuffer, rightRecordStateView, leftRecordStateView, false);
// process left
this.processSingleSideBundles(
leftBuffer, leftRecordStateView, rightRecordStateView, true);
}
}
/** MiniBatch Right outer join operator. */
private static final
|
MiniBatchLeftOuterJoinStreamOperator
|
java
|
google__truth
|
core/src/main/java/com/google/common/truth/ActualValueInference.java
|
{
"start": 3368,
"end": 5744
}
|
class ____ method visitors really need.
// TODO(cpovirk): Log a warning?
return null;
}
ClassLoader loader =
firstNonNull(
currentThread().getContextClassLoader(), ActualValueInference.class.getClassLoader());
/*
* We're assuming that classes were loaded in a simple way. In principle, we could do better
* with java.lang.instrument.
*/
InputStream stream = null;
try {
stream = loader.getResourceAsStream(className.replace('.', '/') + ".class");
// TODO(cpovirk): Disable inference if the bytecode version is newer than we've tested on?
new ClassReader(stream).accept(visitor, /* parsingOptions= */ 0);
ImmutableSet<StackEntry> actualsAtLine = visitor.actualValueAtLine.build().get(lineNumber);
/*
* It's very unlikely that more than one assertion would happen on the same line _but with
* different root actual values_.
*
* That is, it's common to have:
* assertThat(list).containsExactly(...).inOrder();
*
* But it's not common to have, all on one line:
* assertThat(list).isEmpty(); assertThat(list2).containsExactly(...);
*
* In principle, we could try to distinguish further by looking at what assertion method
* failed (which our caller could pass us by looking higher on the stack). But it's hard to
* imagine that it would be worthwhile.
*/
return actualsAtLine.size() == 1 ? getOnlyElement(actualsAtLine).description() : null;
} catch (IOException e) {
/*
* Likely "Class not found," perhaps from generated bytecode (or from StackTraceCleaner's
* pseudo-frames, which ideally ActualValueInference would tell it not to create).
*/
// TODO(cpovirk): Log a warning?
return null;
} catch (SecurityException e) {
// Inside Google, some tests run under a security manager that forbids filesystem access.
// TODO(cpovirk): Log a warning?
return null;
} finally {
closeQuietly(stream);
}
}
/**
* An entry on the stack (or the local-variable table) with a {@linkplain InferredType type} and
* sometimes a description of {@linkplain DescribedEntry how the value was produced} or, as a
* special case, whether {@linkplain SubjectEntry the value is a Truth subject}.
*/
abstract static
|
and
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/collectionelement/CharacterTrait.java
|
{
"start": 201,
"end": 286
}
|
enum ____ {
GENTLE,
NORMAL,
AGGRESSIVE,
ATTENTIVE,
VIOLENT,
CRAFTY
}
|
CharacterTrait
|
java
|
spring-projects__spring-framework
|
spring-core/src/test/java/org/springframework/aot/hint/predicate/ReflectionHintsPredicatesTests.java
|
{
"start": 15352,
"end": 17952
}
|
class ____ {
@Test
void shouldFailForMissingField() {
assertThatIllegalArgumentException().isThrownBy(() -> reflection.onField(SampleClass.class, "missingField"));
}
@Test
void shouldFailForUnknownClass() {
assertThatThrownBy(() -> reflection.onFieldAccess("com.example.DoesNotExist", "missingField"))
.isInstanceOf(ClassNotFoundException.class);
}
@Test
void publicFieldAccessMatchesFieldHint() {
runtimeHints.reflection().registerType(SampleClass.class, typeHint -> typeHint.withField("publicField"));
assertPredicateMatches(reflection.onField(SampleClass.class, "publicField"));
}
@Test
void publicFieldAccessMatchesPublicFieldsHint() {
runtimeHints.reflection().registerType(SampleClass.class, MemberCategory.PUBLIC_FIELDS);
assertPredicateMatches(reflection.onField(SampleClass.class, "publicField"));
}
@Test
void publicFieldAccessMatchesAccessPublicFieldsHint() {
runtimeHints.reflection().registerType(SampleClass.class, MemberCategory.ACCESS_PUBLIC_FIELDS);
assertPredicateMatches(reflection.onField(SampleClass.class, "publicField"));
}
@Test
void fieldAccessDoesNotMatchTypeHint() {
runtimeHints.reflection().registerType(SampleClass.class);
assertPredicateDoesNotMatch(reflection.onField(SampleClass.class, "publicField"));
}
@Test
void privateFieldAccessDoesNotMatchTypeHint() {
runtimeHints.reflection().registerType(SampleClass.class);
assertPredicateDoesNotMatch(reflection.onField(SampleClass.class, "privateField"));
}
@Test
void privateFieldAccessMatchesFieldHint() {
runtimeHints.reflection().registerType(SampleClass.class, typeHint -> typeHint.withField("privateField"));
assertPredicateMatches(reflection.onField(SampleClass.class, "privateField"));
}
@Test
void privateFieldAccessMatchesDeclaredFieldsHint() {
runtimeHints.reflection().registerType(SampleClass.class, MemberCategory.DECLARED_FIELDS);
assertPredicateMatches(reflection.onField(SampleClass.class, "privateField"));
}
@Test
void privateFieldAccessMatchesAccessDeclaredFieldsHint() {
runtimeHints.reflection().registerType(SampleClass.class, MemberCategory.ACCESS_DECLARED_FIELDS);
assertPredicateMatches(reflection.onField(SampleClass.class, "privateField"));
}
}
private void assertPredicateMatches(Predicate<RuntimeHints> predicate) {
assertThat(predicate).accepts(this.runtimeHints);
}
private void assertPredicateDoesNotMatch(Predicate<RuntimeHints> predicate) {
assertThat(predicate).rejects(this.runtimeHints);
}
@SuppressWarnings("unused")
static
|
ReflectionOnField
|
java
|
micronaut-projects__micronaut-core
|
inject-java/src/test/groovy/io/micronaut/inject/context/processor_legacy/SomeBean.java
|
{
"start": 861,
"end": 925
}
|
class ____ {
@Executable
void method() {
}
}
|
SomeBean
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/async/impl/TestAMRMClientAsync.java
|
{
"start": 21091,
"end": 22338
}
|
class ____
extends AMRMClientAsync.AbstractCallbackHandler {
Object notifier = new Object();
@SuppressWarnings("rawtypes")
AMRMClientAsync asynClient;
boolean stop = true;
volatile boolean notify = false;
boolean throwOutException = false;
@Override
public void onContainersCompleted(List<ContainerStatus> statuses) {
if (throwOutException) {
throw new YarnRuntimeException("Exception from callback handler");
}
}
@Override
public void onContainersAllocated(List<Container> containers) {}
@Override
public void onContainersUpdated(
List<UpdatedContainer> containers) {}
@Override
public void onShutdownRequest() {}
@Override
public void onNodesUpdated(List<NodeReport> updatedNodes) {}
@Override
public float getProgress() {
callStopAndNotify();
return 0;
}
@Override
public void onError(Throwable e) {
assertThat(e).hasMessage("Exception from callback handler");
callStopAndNotify();
}
void callStopAndNotify() {
if(stop) {
asynClient.stop();
}
notify = true;
synchronized (notifier) {
notifier.notifyAll();
}
}
}
}
|
TestCallbackHandler2
|
java
|
google__guice
|
core/test/com/googlecode/guice/bundle/OSGiTestActivator.java
|
{
"start": 9620,
"end": 9902
}
|
class ____ extends AbstractModule {
@Override
protected void configure() {
bindInterceptor(
new AbstractMatcher<Class<?>>() {
@Override
public boolean matches(Class<?> clazz) {
try {
// the
|
InterceptorModule
|
java
|
apache__camel
|
components/camel-dynamic-router/src/test/java/org/apache/camel/component/dynamicrouter/integration/DynamicRouterTwoRoutesIT.java
|
{
"start": 2047,
"end": 4838
}
|
class ____ {
@Autowired
CamelContext camelContext;
@EndpointInject("mock:one")
MockEndpoint mockOne;
@EndpointInject("mock:two")
MockEndpoint mockTwo;
@Produce("direct:start1")
ProducerTemplate start1;
@Produce("direct:start2")
ProducerTemplate start2;
@Produce(CONTROL_CHANNEL_URI + ":" + CONTROL_ACTION_SUBSCRIBE)
ProducerTemplate subscribe;
/**
* This test demonstrates how two different Dynamic Router channels are, indeed, separate. We send both routing
* channels ("test1" and "test2") the same content. Because the "test1" channel has a predicate that accepts message
* bodies with even numbers, and the "test2" channel has a predicate that accepts message bodies with odd numbers,
* the expected message bodies are received correctly by the subscribing participants on both channels.
*
* @throws InterruptedException if interrupted while waiting for mocks to be satisfied
*/
@Test
void testConsumersWithNonConflictingRules() throws InterruptedException {
mockOne.expectedBodiesReceived(0, 2, 4, 6, 8, 10);
mockTwo.expectedBodiesReceived(1, 3, 5, 7, 9);
// Create a subscription that accepts an exchange when the message body contains an even number
// The destination URI is for the endpoint "mockOne"
Predicate evenPredicate = body().regex("^\\d*[02468]$");
subscribe.sendBodyAndHeaders("direct:subscribe-no-url-predicate", evenPredicate,
Map.of("controlAction", "subscribe",
"subscribeChannel", "test1",
"subscriptionId", "evenNumberSubscription",
"destinationUri", mockOne.getEndpointUri(),
"priority", 2));
// Create a subscription that accepts an exchange when the message body contains an odd number
// The destination URI is for the endpoint "mockTwo"
Predicate oddPredicate = body().regex("^\\d*[13579]$");
subscribe.sendBodyAndHeaders("direct:subscribe-no-url-predicate", oddPredicate,
Map.of("controlAction", "subscribe",
"subscribeChannel", "test2",
"subscriptionId", "oddNumberSubscription",
"destinationUri", mockTwo.getEndpointUri(),
"priority", 2));
// Send both channels the same content: numbers from 0 to 10, inclusive
IntStream.rangeClosed(0, 10).forEach(n -> {
start1.sendBody(String.valueOf(n));
start2.sendBody(String.valueOf(n));
});
// Verify that both mocks received the expected messages
MockEndpoint.assertIsSatisfied(camelContext, 2, TimeUnit.SECONDS);
}
}
|
DynamicRouterTwoRoutesIT
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/staging/integration/ITestDirectoryCommitProtocol.java
|
{
"start": 4087,
"end": 6017
}
|
class ____ extends
DirectoryStagingCommitter implements CommitterFaultInjection {
private final CommitterFaultInjectionImpl injection;
CommitterWithFailedThenSucceed(Path outputPath,
TaskAttemptContext context) throws IOException {
super(outputPath, context);
injection = new CommitterFaultInjectionImpl(outputPath, context, true);
}
@Override
public void setupJob(JobContext context) throws IOException {
injection.setupJob(context);
super.setupJob(context);
}
@Override
public void abortJob(JobContext context, JobStatus.State state)
throws IOException {
injection.abortJob(context, state);
super.abortJob(context, state);
}
@Override
@SuppressWarnings("deprecation")
public void cleanupJob(JobContext context) throws IOException {
injection.cleanupJob(context);
super.cleanupJob(context);
}
@Override
public void setupTask(TaskAttemptContext context) throws IOException {
injection.setupTask(context);
super.setupTask(context);
}
@Override
public void commitTask(TaskAttemptContext context) throws IOException {
injection.commitTask(context);
super.commitTask(context);
}
@Override
public void abortTask(TaskAttemptContext context) throws IOException {
injection.abortTask(context);
super.abortTask(context);
}
@Override
public void commitJob(JobContext context) throws IOException {
injection.commitJob(context);
super.commitJob(context);
}
@Override
public boolean needsTaskCommit(TaskAttemptContext context)
throws IOException {
injection.needsTaskCommit(context);
return super.needsTaskCommit(context);
}
@Override
public void setFaults(CommitterFaultInjection.Faults... faults) {
injection.setFaults(faults);
}
}
}
|
CommitterWithFailedThenSucceed
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelAliasAction.java
|
{
"start": 1836,
"end": 4524
}
|
class ____ extends AcknowledgedRequest<Request> {
public static final String MODEL_ALIAS = "model_alias";
public static final String REASSIGN = "reassign";
private final String modelAlias;
private final String modelId;
private final boolean reassign;
public Request(String modelAlias, String modelId, boolean reassign) {
super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT);
this.modelAlias = ExceptionsHelper.requireNonNull(modelAlias, MODEL_ALIAS);
this.modelId = ExceptionsHelper.requireNonNull(modelId, TrainedModelConfig.MODEL_ID);
this.reassign = reassign;
}
public Request(StreamInput in) throws IOException {
super(in);
this.modelAlias = in.readString();
this.modelId = in.readString();
this.reassign = in.readBoolean();
}
public String getModelAlias() {
return modelAlias;
}
public String getModelId() {
return modelId;
}
public boolean isReassign() {
return reassign;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(modelAlias);
out.writeString(modelId);
out.writeBoolean(reassign);
}
@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = null;
if (modelAlias.equals(modelId)) {
validationException = addValidationError(
String.format(Locale.ROOT, "model_alias [%s] cannot equal model_id [%s]", modelAlias, modelId),
validationException
);
}
if (VALID_MODEL_ALIAS_CHAR_PATTERN.matcher(modelAlias).matches() == false) {
validationException = addValidationError(Messages.getMessage(INVALID_MODEL_ALIAS, modelAlias), validationException);
}
return validationException;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Request request = (Request) o;
return Objects.equals(modelAlias, request.modelAlias)
&& Objects.equals(modelId, request.modelId)
&& Objects.equals(reassign, request.reassign);
}
@Override
public int hashCode() {
return Objects.hash(modelAlias, modelId, reassign);
}
}
}
|
Request
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalStatsBucketTests.java
|
{
"start": 702,
"end": 1445
}
|
class ____ extends InternalStatsTests {
@Override
protected InternalStatsBucket createInstance(
String name,
long count,
double sum,
double min,
double max,
DocValueFormat formatter,
Map<String, Object> metadata
) {
return new InternalStatsBucket(name, count, sum, min, max, formatter, metadata);
}
@Override
public void testReduceRandom() {
expectThrows(UnsupportedOperationException.class, () -> createTestInstance("name", null).getReducer(null, 0));
}
@Override
protected void assertReduced(InternalStats reduced, List<InternalStats> inputs) {
// no test since reduce operation is unsupported
}
}
|
InternalStatsBucketTests
|
java
|
elastic__elasticsearch
|
x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/SearchUsageStatsIT.java
|
{
"start": 1226,
"end": 3790
}
|
class ____ extends ESIntegTestCase {
@Override
protected boolean addMockHttpTransport() {
return false; // enable http
}
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return List.of(AsyncSearch.class);
}
public void testSearchUsageStats() throws IOException {
{
SearchUsageStats stats = clusterAdmin().prepareClusterStats().get().getIndicesStats().getSearchUsageStats();
assertEquals(0, stats.getTotalSearchCount());
assertEquals(0, stats.getQueryUsage().size());
assertEquals(0, stats.getSectionsUsage().size());
}
{
Request request = new Request("POST", "/_async_search");
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().query(QueryBuilders.matchQuery("field", "value"))
.aggregation(new TermsAggregationBuilder("name").field("field"));
request.setJsonEntity(Strings.toString(searchSourceBuilder));
getRestClient().performRequest(request);
}
{
Request request = new Request("POST", "/_async_search");
// error at parsing: request not counted
request.setJsonEntity("{\"unknown]\":10}");
expectThrows(ResponseException.class, () -> getRestClient().performRequest(request));
}
{
// non existent index: request counted
Request request = new Request("POST", "/unknown/_async_search");
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().query(QueryBuilders.termQuery("field", "value"));
request.setJsonEntity(Strings.toString(searchSourceBuilder));
ResponseException responseException = expectThrows(ResponseException.class, () -> getRestClient().performRequest(request));
assertEquals(404, responseException.getResponse().getStatusLine().getStatusCode());
}
SearchUsageStats stats = clusterAdmin().prepareClusterStats().get().getIndicesStats().getSearchUsageStats();
assertEquals(2, stats.getTotalSearchCount());
assertEquals(2, stats.getQueryUsage().size());
assertEquals(1, stats.getQueryUsage().get("match").longValue());
assertEquals(1, stats.getQueryUsage().get("term").longValue());
assertEquals(2, stats.getSectionsUsage().size());
assertEquals(2, stats.getSectionsUsage().get("query").longValue());
assertEquals(1, stats.getSectionsUsage().get("aggs").longValue());
}
}
|
SearchUsageStatsIT
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/common/geo/GeoBoundingBoxTests.java
|
{
"start": 1046,
"end": 7486
}
|
class ____ extends ESTestCase {
public void testInvalidParseInvalidWKT() throws IOException {
XContentBuilder bboxBuilder = XContentFactory.jsonBuilder().startObject().field("wkt", "invalid").endObject();
try (XContentParser parser = createParser(bboxBuilder)) {
parser.nextToken();
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> GeoBoundingBox.parseBoundingBox(parser));
assertThat(e.getMessage(), equalTo("failed to parse WKT bounding box"));
}
}
public void testInvalidParsePoint() throws IOException {
XContentBuilder bboxBuilder = XContentFactory.jsonBuilder().startObject().field("wkt", "POINT (100.0 100.0)").endObject();
XContentParser parser = createParser(bboxBuilder);
parser.nextToken();
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> GeoBoundingBox.parseBoundingBox(parser));
assertThat(e.getMessage(), equalTo("failed to parse WKT bounding box. [POINT] found. expected [ENVELOPE]"));
}
public void testWKT() throws IOException {
GeoBoundingBox geoBoundingBox = randomBBox();
assertBBox(geoBoundingBox, XContentFactory.jsonBuilder().startObject().field("wkt", geoBoundingBox.toString()).endObject());
}
public void testTopBottomLeftRight() throws Exception {
GeoBoundingBox geoBoundingBox = randomBBox();
assertBBox(
geoBoundingBox,
XContentFactory.jsonBuilder()
.startObject()
.field("top", geoBoundingBox.top())
.field("bottom", geoBoundingBox.bottom())
.field("left", geoBoundingBox.left())
.field("right", geoBoundingBox.right())
.endObject()
);
}
public void testTopLeftBottomRight() throws Exception {
GeoBoundingBox geoBoundingBox = randomBBox();
assertBBox(
geoBoundingBox,
XContentFactory.jsonBuilder()
.startObject()
.field("top_left", geoBoundingBox.topLeft())
.field("bottom_right", geoBoundingBox.bottomRight())
.endObject()
);
}
public void testTopRightBottomLeft() throws Exception {
GeoBoundingBox geoBoundingBox = randomBBox();
assertBBox(
geoBoundingBox,
XContentFactory.jsonBuilder()
.startObject()
.field("top_right", new GeoPoint(geoBoundingBox.top(), geoBoundingBox.right()))
.field("bottom_left", new GeoPoint(geoBoundingBox.bottom(), geoBoundingBox.left()))
.endObject()
);
}
// test that no exception is thrown. BBOX parsing is not validated
public void testNullTopBottomLeftRight() throws Exception {
GeoBoundingBox geoBoundingBox = randomBBox();
XContentBuilder builder = XContentFactory.jsonBuilder().startObject();
for (String field : randomSubsetOf(List.of("top", "bottom", "left", "right"))) {
switch (field) {
case "top" -> builder.field("top", geoBoundingBox.top());
case "bottom" -> builder.field("bottom", geoBoundingBox.bottom());
case "left" -> builder.field("left", geoBoundingBox.left());
case "right" -> builder.field("right", geoBoundingBox.right());
default -> throw new IllegalStateException("unexpected branching");
}
}
builder.endObject();
try (XContentParser parser = createParser(builder)) {
parser.nextToken();
GeoBoundingBox.parseBoundingBox(parser);
}
}
public void testPointInBounds() {
for (int iter = 0; iter < 1000; iter++) {
GeoBoundingBox geoBoundingBox = randomBBox();
GeoBoundingBox bbox = new GeoBoundingBox(
new GeoPoint(quantizeLat(geoBoundingBox.top()), quantizeLon(geoBoundingBox.left())),
new GeoPoint(quantizeLat(geoBoundingBox.bottom()), quantizeLon(geoBoundingBox.right()))
);
if (bbox.left() > bbox.right()) {
double lonWithin = randomBoolean()
? randomDoubleBetween(bbox.left(), 180.0, true)
: randomDoubleBetween(-180.0, bbox.right(), true);
double latWithin = randomDoubleBetween(bbox.bottom(), bbox.top(), true);
double lonOutside = randomDoubleBetween(bbox.left(), bbox.right(), true);
double latOutside = randomBoolean()
? randomDoubleBetween(Math.max(bbox.top(), bbox.bottom()), 90, false)
: randomDoubleBetween(-90, Math.min(bbox.bottom(), bbox.top()), false);
assertTrue(bbox.pointInBounds(lonWithin, latWithin));
assertFalse(bbox.pointInBounds(lonOutside, latOutside));
} else {
double lonWithin = randomDoubleBetween(bbox.left(), bbox.right(), true);
double latWithin = randomDoubleBetween(bbox.bottom(), bbox.top(), true);
double lonOutside = GeoUtils.normalizeLon(randomDoubleBetween(bbox.right(), 180, false));
double latOutside = GeoUtils.normalizeLat(randomDoubleBetween(bbox.top(), 90, false));
assertTrue(bbox.pointInBounds(lonWithin, latWithin));
assertFalse(bbox.pointInBounds(lonOutside, latOutside));
}
}
}
private void assertBBox(GeoBoundingBox expected, XContentBuilder builder) throws IOException {
try (XContentParser parser = createParser(builder)) {
parser.nextToken();
assertThat(GeoBoundingBox.parseBoundingBox(parser), equalTo(expected));
}
}
public static GeoBoundingBox randomBBox() {
Rectangle rectangle = GeometryTestUtils.randomRectangle();
return new GeoBoundingBox(
new GeoPoint(rectangle.getMaxLat(), rectangle.getMinLon()),
new GeoPoint(rectangle.getMinLat(), rectangle.getMaxLon())
);
}
private static double quantizeLat(double lat) {
return GeoEncodingUtils.decodeLatitude(GeoEncodingUtils.encodeLatitude(lat));
}
private static double quantizeLon(double lon) {
return GeoEncodingUtils.decodeLongitude(GeoEncodingUtils.encodeLongitude(lon));
}
}
|
GeoBoundingBoxTests
|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/internal/jdk8/MaybeMapOptional.java
|
{
"start": 1631,
"end": 3355
}
|
class ____<T, R> implements MaybeObserver<T>, Disposable {
final MaybeObserver<? super R> downstream;
final Function<? super T, Optional<? extends R>> mapper;
Disposable upstream;
MapOptionalMaybeObserver(MaybeObserver<? super R> downstream, Function<? super T, Optional<? extends R>> mapper) {
this.downstream = downstream;
this.mapper = mapper;
}
@Override
public void dispose() {
Disposable d = this.upstream;
this.upstream = DisposableHelper.DISPOSED;
d.dispose();
}
@Override
public boolean isDisposed() {
return upstream.isDisposed();
}
@Override
public void onSubscribe(Disposable d) {
if (DisposableHelper.validate(this.upstream, d)) {
this.upstream = d;
downstream.onSubscribe(this);
}
}
@Override
public void onSuccess(T value) {
Optional<? extends R> v;
try {
v = Objects.requireNonNull(mapper.apply(value), "The mapper returned a null item");
} catch (Throwable ex) {
Exceptions.throwIfFatal(ex);
downstream.onError(ex);
return;
}
if (v.isPresent()) {
downstream.onSuccess(v.get());
} else {
downstream.onComplete();
}
}
@Override
public void onError(Throwable e) {
downstream.onError(e);
}
@Override
public void onComplete() {
downstream.onComplete();
}
}
}
|
MapOptionalMaybeObserver
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestRehashPartitioner.java
|
{
"start": 1146,
"end": 2730
}
|
class ____ {
/** number of partitions */
private static final int PARTITIONS = 32;
/** step in sequence */
private static final int STEP = 3;
/** end of test sequence */
private static final int END = 100000;
/** maximum error for considering too big/small bucket */
private static final double MAX_ERROR = 0.20;
/** maximum number of oddly sized buckets */
private static final double MAX_BADBUCKETS = 0.10;
/** test partitioner for patterns */
@Test
public void testPatterns() {
int results[] = new int[PARTITIONS];
RehashPartitioner <IntWritable, NullWritable> p = new RehashPartitioner < IntWritable, NullWritable> ();
/* test sequence 4, 8, 12, ... 128 */
for(int i = 0; i < END; i+= STEP) {
results[p.getPartition(new IntWritable(i), null, PARTITIONS)]++;
}
int badbuckets = 0;
Integer min = Collections.min(Arrays.asList(ArrayUtils.toObject(results)));
Integer max = Collections.max(Arrays.asList(ArrayUtils.toObject(results)));
Integer avg = (int) Math.round((max+min)/2.0);
System.out.println("Dumping buckets distribution: min="+min+" avg="+avg+" max="+max);
for (int i = 0; i < PARTITIONS; i++) {
double var = (results[i]-avg)/(double)(avg);
System.out.println("bucket "+i+" "+results[i]+" items, variance "+var);
if (Math.abs(var) > MAX_ERROR)
badbuckets++;
}
System.out.println(badbuckets + " of "+PARTITIONS+" are too small or large buckets");
assertTrue(badbuckets < PARTITIONS * MAX_BADBUCKETS, "too many overflow buckets");
}
}
|
TestRehashPartitioner
|
java
|
micronaut-projects__micronaut-core
|
test-suite/src/test/java/io/micronaut/docs/aop/lifecycle/ProductBean.java
|
{
"start": 418,
"end": 460
}
|
interface ____ {
}
// end::class[]
|
ProductBean
|
java
|
micronaut-projects__micronaut-core
|
inject-java/src/test/groovy/io/micronaut/inject/annotation/repeatable/SomeOther.java
|
{
"start": 712,
"end": 776
}
|
interface ____ {
Property[] properties() default {};
}
|
SomeOther
|
java
|
hibernate__hibernate-orm
|
hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/AltibaseSqlAstTranslator.java
|
{
"start": 1628,
"end": 6898
}
|
class ____<T extends JdbcOperation> extends AbstractSqlAstTranslator<T> {
public AltibaseSqlAstTranslator(SessionFactoryImplementor sessionFactory, Statement statement) {
super( sessionFactory, statement );
}
@Override
public void visitOffsetFetchClause(QueryPart queryPart) {
if ( !isRowNumberingCurrentQueryPart() ) {
// Use limit because Altibase does not support fetch first rows only.
renderCombinedLimitClause( queryPart );
}
}
@Override
protected void renderComparison(Expression lhs, ComparisonOperator operator, Expression rhs) {
// Altibase does not support is distinct from clause. So override renderComparion() and use not exists
renderComparisonEmulateIntersect( lhs, operator, rhs );
}
@Override
public void visitOver(Over<?> over) {
final Expression expression = over.getExpression();
if ( expression instanceof FunctionExpression functionExpression
&& "row_number".equals( functionExpression.getFunctionName() ) ) {
if ( over.getPartitions().isEmpty() && over.getOrderList().isEmpty()
&& over.getStartKind() == FrameKind.UNBOUNDED_PRECEDING
&& over.getEndKind() == FrameKind.CURRENT_ROW
&& over.getExclusion() == FrameExclusion.NO_OTHERS ) {
// Altibase doesn't allow an empty over clause for the row_number() function,
append( "row_number() over(order by 1)" );
return;
}
}
super.visitOver( over );
}
@Override
public void visitQuerySpec(QuerySpec querySpec) {
if ( shouldEmulateFetchClause( querySpec ) ) {
// Altibase does not support row_with_ties
emulateFetchOffsetWithWindowFunctions( querySpec, true );
}
else {
super.visitQuerySpec( querySpec );
}
}
protected boolean shouldEmulateFetchClause(QueryPart queryPart) {
// Check if current query part is already row numbering to avoid infinite recursion
return useOffsetFetchClause( queryPart ) && getQueryPartForRowNumbering() != queryPart
&& getDialect().supportsWindowFunctions() && !isRowsOnlyFetchClauseType( queryPart );
}
@Override
protected void renderPartitionItem(Expression expression) {
if ( expression instanceof Literal ) {
appendSql( "'0' || '0'" );
}
else if ( expression instanceof Summarization ) {
// This could theoretically be emulated by rendering all grouping variations of the query and
// connect them via union all but that's probably pretty inefficient and would have to happen
// on the query spec level
throw new UnsupportedOperationException( "Summarization is not supported by DBMS!" );
}
else {
expression.accept( this );
}
}
@Override
protected void renderOffsetExpression(Expression offsetExpression) {
// Altibase offset starts from 1
appendSql( "1+" );
offsetExpression.accept( this );
}
@Override
public void visitValuesTableReference(ValuesTableReference tableReference) {
emulateValuesTableReferenceColumnAliasing( tableReference );
}
@Override
protected void visitInsertStatementOnly(InsertSelectStatement statement) {
if ( statement.getConflictClause() == null || statement.getConflictClause().isDoNothing() ) {
// Render plain insert statement and possibly run into unique constraint violation
super.visitInsertStatementOnly( statement );
}
else {
visitInsertStatementEmulateMerge( statement );
}
}
@Override
protected void renderMergeUpdateClause(List<Assignment> assignments, Predicate wherePredicate) {
// In Altibase, where condition in merge can be placed next to the set clause."
appendSql( " then update" );
renderSetClause( assignments );
visitWhereClause( wherePredicate );
}
@Override
protected void renderDeleteClause(DeleteStatement statement) {
appendSql( "delete" );
final Stack<Clause> clauseStack = getClauseStack();
try {
clauseStack.push( Clause.DELETE );
renderTableReferenceIdentificationVariable( statement.getTargetTable() );
if ( statement.getFromClause().getRoots().isEmpty() ) {
appendSql( " from " );
renderDmlTargetTableExpression( statement.getTargetTable() );
}
else {
visitFromClause( statement.getFromClause() );
}
}
finally {
clauseStack.pop();
}
}
@Override
protected void renderDmlTargetTableExpression(NamedTableReference tableReference) {
super.renderDmlTargetTableExpression( tableReference );
if ( getClauseStack().getCurrent() != Clause.INSERT ) {
renderTableReferenceIdentificationVariable( tableReference );
}
}
@Override
protected void renderUpdateClause(UpdateStatement updateStatement) {
if ( updateStatement.getFromClause().getRoots().isEmpty() ) {
super.renderUpdateClause( updateStatement );
}
else {
appendSql( "update " );
renderFromClauseSpaces( updateStatement.getFromClause() );
}
}
@Override
public void visitBinaryArithmeticExpression(BinaryArithmeticExpression arithmeticExpression) {
if ( isIntegerDivisionEmulationRequired( arithmeticExpression ) ) {
appendSql( "floor" );
}
super.visitBinaryArithmeticExpression(arithmeticExpression);
}
@Override
public void visitQueryPartTableReference(QueryPartTableReference tableReference) {
emulateQueryPartTableReferenceColumnAliasing( tableReference );
}
@Override
protected boolean needsRecursiveKeywordInWithClause() {
return false;
}
}
|
AltibaseSqlAstTranslator
|
java
|
apache__thrift
|
lib/java/src/main/java/org/apache/thrift/protocol/TSimpleJSONProtocol.java
|
{
"start": 2573,
"end": 2918
}
|
class ____ extends Context {
protected boolean first_ = true;
protected boolean colon_ = true;
protected void write() throws TException {
if (first_) {
first_ = false;
colon_ = true;
} else {
trans_.write(colon_ ? COLON : COMMA);
colon_ = !colon_;
}
}
}
protected
|
StructContext
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/RedundantEditLogInputStream.java
|
{
"start": 1643,
"end": 3464
}
|
class ____ extends EditLogInputStream {
public static final Logger LOG = LoggerFactory.getLogger(
RedundantEditLogInputStream.class.getName());
private int curIdx;
private long prevTxId;
private final EditLogInputStream[] streams;
/** Limit logging about fast forwarding the stream to every 5 seconds max. */
private static final long FAST_FORWARD_LOGGING_INTERVAL_MS = 5000;
private static final LogThrottlingHelper FAST_FORWARD_LOGGING_HELPER =
new LogThrottlingHelper(FAST_FORWARD_LOGGING_INTERVAL_MS);
/**
* States that the RedundantEditLogInputStream can be in.
*
* <pre>
* start (if no streams)
* |
* V
* PrematureEOFException +----------------+
* +-------------->| EOF |<--------------+
* | +----------------+ |
* | |
* | start (if there are streams) |
* | | |
* | V | EOF
* | resync +----------------+ skipUntil +---------+
* | +---------->| SKIP_UNTIL |----------->| OK |
* | | +----------------+ +---------+
* | | | IOE ^ fail over to | IOE
* | | V | next stream |
* +----------------------+ +----------------+ |
* | STREAM_FAILED_RESYNC | | STREAM_FAILED |<----------+
* +----------------------+ +----------------+
* ^ Recovery mode |
* +--------------------+
* </pre>
*/
static private
|
RedundantEditLogInputStream
|
java
|
quarkusio__quarkus
|
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/interceptors/arcInvContext/ArcInvocationContextTest.java
|
{
"start": 454,
"end": 1944
}
|
class ____ {
@RegisterExtension
public ArcTestContainer container = new ArcTestContainer(Foo.class, SomeBinding.class,
ArcContextInterceptor.class, ArcContextInterceptorPrivate.class, ArcContextLifecycleInterceptor.class,
ArcContextLifecycleInterceptorPrivate.class);
@Test
public void testArcContextCanBeUsedAsParam() {
ArcContainer arc = Arc.container();
Assertions.assertFalse(ArcContextLifecycleInterceptor.POST_CONSTRUCT_INVOKED);
Assertions.assertFalse(ArcContextLifecycleInterceptor.PRE_DESTROY_INVOKED);
Assertions.assertFalse(ArcContextLifecycleInterceptorPrivate.POST_CONSTRUCT_INVOKED);
Assertions.assertFalse(ArcContextLifecycleInterceptorPrivate.PRE_DESTROY_INVOKED);
InstanceHandle<Foo> handle = arc.instance(Foo.class);
Foo bean = handle.get();
String expected = Foo.class.getSimpleName() + ArcContextInterceptorPrivate.class.getSimpleName()
+ ArcContextInterceptor.class.getSimpleName();
Assertions.assertEquals(expected, bean.ping());
Assertions.assertTrue(ArcContextLifecycleInterceptor.POST_CONSTRUCT_INVOKED);
Assertions.assertTrue(ArcContextLifecycleInterceptorPrivate.POST_CONSTRUCT_INVOKED);
handle.destroy();
Assertions.assertTrue(ArcContextLifecycleInterceptor.PRE_DESTROY_INVOKED);
Assertions.assertTrue(ArcContextLifecycleInterceptorPrivate.PRE_DESTROY_INVOKED);
}
}
|
ArcInvocationContextTest
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/SpringAiToolsEndpointBuilderFactory.java
|
{
"start": 7652,
"end": 13533
}
|
interface ____
extends
EndpointConsumerBuilder {
default SpringAiToolsEndpointConsumerBuilder basic() {
return (SpringAiToolsEndpointConsumerBuilder) this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer (advanced)
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default AdvancedSpringAiToolsEndpointConsumerBuilder bridgeErrorHandler(boolean bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: consumer (advanced)
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default AdvancedSpringAiToolsEndpointConsumerBuilder bridgeErrorHandler(String bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option is a: <code>org.apache.camel.spi.ExceptionHandler</code>
* type.
*
* Group: consumer (advanced)
*
* @param exceptionHandler the value to set
* @return the dsl builder
*/
default AdvancedSpringAiToolsEndpointConsumerBuilder exceptionHandler(org.apache.camel.spi.ExceptionHandler exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option will be converted to a
* <code>org.apache.camel.spi.ExceptionHandler</code> type.
*
* Group: consumer (advanced)
*
* @param exceptionHandler the value to set
* @return the dsl builder
*/
default AdvancedSpringAiToolsEndpointConsumerBuilder exceptionHandler(String exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option is a: <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*
* @param exchangePattern the value to set
* @return the dsl builder
*/
default AdvancedSpringAiToolsEndpointConsumerBuilder exchangePattern(org.apache.camel.ExchangePattern exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option will be converted to a
* <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*
* @param exchangePattern the value to set
* @return the dsl builder
*/
default AdvancedSpringAiToolsEndpointConsumerBuilder exchangePattern(String exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
}
/**
* Builder for endpoint producers for the Spring AI Tools component.
*/
public
|
AdvancedSpringAiToolsEndpointConsumerBuilder
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/inheritfromconfig/BaseVehicleEntity.java
|
{
"start": 248,
"end": 664
}
|
class ____ {
private long primaryKey;
private String auditTrail;
public long getPrimaryKey() {
return primaryKey;
}
public void setPrimaryKey(long primaryKey) {
this.primaryKey = primaryKey;
}
public String getAuditTrail() {
return auditTrail;
}
public void setAuditTrail(String auditTrail) {
this.auditTrail = auditTrail;
}
}
|
BaseVehicleEntity
|
java
|
quarkusio__quarkus
|
extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/RemoveOptionalFromSecretEnvSourceDecorator.java
|
{
"start": 580,
"end": 1120
}
|
class ____ extends ApplicationContainerDecorator<SecretEnvSourceFluent> {
@Override
public void andThenVisit(SecretEnvSourceFluent ref) {
ref.withOptional(null);
}
@Override
public Class<? extends Decorator>[] after() {
return new Class[] { AddEnvVarDecorator.class,
AddSecretVolumeDecorator.class, AddSecretVolumeToRevisionDecorator.class,
AddConfigMapVolumeToRevisionDecorator.class, AddConfigMapVolumeDecorator.class };
}
}
|
RemoveOptionalFromSecretEnvSourceDecorator
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/records/RMDelegationTokenIdentifierData.java
|
{
"start": 1279,
"end": 2529
}
|
class ____ {
RMDelegationTokenIdentifierDataProto.Builder builder =
RMDelegationTokenIdentifierDataProto.newBuilder();
public RMDelegationTokenIdentifierData() {}
public RMDelegationTokenIdentifierData(
YARNDelegationTokenIdentifier identifier, long renewdate) {
builder.setTokenIdentifier(identifier.getProto());
builder.setRenewDate(renewdate);
}
public void readFields(DataInput in) throws IOException {
builder.mergeFrom((DataInputStream) in);
}
public byte[] toByteArray() throws IOException {
return builder.build().toByteArray();
}
public RMDelegationTokenIdentifier getTokenIdentifier() throws IOException {
ByteArrayInputStream in =
new ByteArrayInputStream(builder.getTokenIdentifier().toByteArray());
RMDelegationTokenIdentifier identifer = new RMDelegationTokenIdentifier();
identifer.readFields(new DataInputStream(in));
return identifer;
}
public long getRenewDate() {
return builder.getRenewDate();
}
public void setIdentifier(YARNDelegationTokenIdentifier identifier) {
builder.setTokenIdentifier(identifier.getProto());
}
public void setRenewDate(long renewDate) {
builder.setRenewDate(renewDate);
}
}
|
RMDelegationTokenIdentifierData
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-router/src/main/java/org/apache/hadoop/yarn/server/router/clientrm/PassThroughClientRequestInterceptor.java
|
{
"start": 6569,
"end": 14197
}
|
class ____ extends AbstractClientRequestInterceptor {
@Override
public GetNewApplicationResponse getNewApplication(
GetNewApplicationRequest request) throws YarnException, IOException {
return getNextInterceptor().getNewApplication(request);
}
@Override
public SubmitApplicationResponse submitApplication(
SubmitApplicationRequest request) throws YarnException, IOException {
return getNextInterceptor().submitApplication(request);
}
@Override
public KillApplicationResponse forceKillApplication(
KillApplicationRequest request) throws YarnException, IOException {
return getNextInterceptor().forceKillApplication(request);
}
@Override
public GetClusterMetricsResponse getClusterMetrics(
GetClusterMetricsRequest request) throws YarnException, IOException {
return getNextInterceptor().getClusterMetrics(request);
}
@Override
public GetClusterNodesResponse getClusterNodes(GetClusterNodesRequest request)
throws YarnException, IOException {
return getNextInterceptor().getClusterNodes(request);
}
@Override
public GetQueueInfoResponse getQueueInfo(GetQueueInfoRequest request)
throws YarnException, IOException {
return getNextInterceptor().getQueueInfo(request);
}
@Override
public GetQueueUserAclsInfoResponse getQueueUserAcls(
GetQueueUserAclsInfoRequest request) throws YarnException, IOException {
return getNextInterceptor().getQueueUserAcls(request);
}
@Override
public MoveApplicationAcrossQueuesResponse moveApplicationAcrossQueues(
MoveApplicationAcrossQueuesRequest request)
throws YarnException, IOException {
return getNextInterceptor().moveApplicationAcrossQueues(request);
}
@Override
public GetNewReservationResponse getNewReservation(
GetNewReservationRequest request) throws YarnException, IOException {
return getNextInterceptor().getNewReservation(request);
}
@Override
public ReservationSubmissionResponse submitReservation(
ReservationSubmissionRequest request) throws YarnException, IOException {
return getNextInterceptor().submitReservation(request);
}
@Override
public ReservationListResponse listReservations(
ReservationListRequest request) throws YarnException, IOException {
return getNextInterceptor().listReservations(request);
}
@Override
public ReservationUpdateResponse updateReservation(
ReservationUpdateRequest request) throws YarnException, IOException {
return getNextInterceptor().updateReservation(request);
}
@Override
public ReservationDeleteResponse deleteReservation(
ReservationDeleteRequest request) throws YarnException, IOException {
return getNextInterceptor().deleteReservation(request);
}
@Override
public GetNodesToLabelsResponse getNodeToLabels(
GetNodesToLabelsRequest request) throws YarnException, IOException {
return getNextInterceptor().getNodeToLabels(request);
}
@Override
public GetLabelsToNodesResponse getLabelsToNodes(
GetLabelsToNodesRequest request) throws YarnException, IOException {
return getNextInterceptor().getLabelsToNodes(request);
}
@Override
public GetClusterNodeLabelsResponse getClusterNodeLabels(
GetClusterNodeLabelsRequest request) throws YarnException, IOException {
return getNextInterceptor().getClusterNodeLabels(request);
}
@Override
public GetApplicationReportResponse getApplicationReport(
GetApplicationReportRequest request) throws YarnException, IOException {
return getNextInterceptor().getApplicationReport(request);
}
@Override
public GetApplicationsResponse getApplications(GetApplicationsRequest request)
throws YarnException, IOException {
return getNextInterceptor().getApplications(request);
}
@Override
public GetApplicationAttemptReportResponse getApplicationAttemptReport(
GetApplicationAttemptReportRequest request)
throws YarnException, IOException {
return getNextInterceptor().getApplicationAttemptReport(request);
}
@Override
public GetApplicationAttemptsResponse getApplicationAttempts(
GetApplicationAttemptsRequest request) throws YarnException, IOException {
return getNextInterceptor().getApplicationAttempts(request);
}
@Override
public GetContainerReportResponse getContainerReport(
GetContainerReportRequest request) throws YarnException, IOException {
return getNextInterceptor().getContainerReport(request);
}
@Override
public GetContainersResponse getContainers(GetContainersRequest request)
throws YarnException, IOException {
return getNextInterceptor().getContainers(request);
}
@Override
public GetDelegationTokenResponse getDelegationToken(
GetDelegationTokenRequest request) throws YarnException, IOException {
return getNextInterceptor().getDelegationToken(request);
}
@Override
public RenewDelegationTokenResponse renewDelegationToken(
RenewDelegationTokenRequest request) throws YarnException, IOException {
return getNextInterceptor().renewDelegationToken(request);
}
@Override
public CancelDelegationTokenResponse cancelDelegationToken(
CancelDelegationTokenRequest request) throws YarnException, IOException {
return getNextInterceptor().cancelDelegationToken(request);
}
@Override
public FailApplicationAttemptResponse failApplicationAttempt(
FailApplicationAttemptRequest request) throws YarnException, IOException {
return getNextInterceptor().failApplicationAttempt(request);
}
@Override
public UpdateApplicationPriorityResponse updateApplicationPriority(
UpdateApplicationPriorityRequest request)
throws YarnException, IOException {
return getNextInterceptor().updateApplicationPriority(request);
}
@Override
public SignalContainerResponse signalToContainer(
SignalContainerRequest request) throws YarnException, IOException {
return getNextInterceptor().signalToContainer(request);
}
@Override
public UpdateApplicationTimeoutsResponse updateApplicationTimeouts(
UpdateApplicationTimeoutsRequest request)
throws YarnException, IOException {
return getNextInterceptor().updateApplicationTimeouts(request);
}
@Override
public GetAllResourceProfilesResponse getResourceProfiles(
GetAllResourceProfilesRequest request) throws YarnException, IOException {
return getNextInterceptor().getResourceProfiles(request);
}
@Override
public GetResourceProfileResponse getResourceProfile(
GetResourceProfileRequest request) throws YarnException, IOException {
return getNextInterceptor().getResourceProfile(request);
}
@Override
public GetAllResourceTypeInfoResponse getResourceTypeInfo(
GetAllResourceTypeInfoRequest request) throws YarnException, IOException {
return getNextInterceptor().getResourceTypeInfo(request);
}
@Override
public GetAttributesToNodesResponse getAttributesToNodes(
GetAttributesToNodesRequest request) throws YarnException, IOException {
return getNextInterceptor().getAttributesToNodes(request);
}
@Override
public GetClusterNodeAttributesResponse getClusterNodeAttributes(
GetClusterNodeAttributesRequest request)
throws YarnException, IOException {
return getNextInterceptor().getClusterNodeAttributes(request);
}
@Override
public GetNodesToAttributesResponse getNodesToAttributes(
GetNodesToAttributesRequest request) throws YarnException, IOException {
return getNextInterceptor().getNodesToAttributes(request);
}
}
|
PassThroughClientRequestInterceptor
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
|
{
"start": 2767,
"end": 5503
}
|
class ____ {
private static final File base =
GenericTestUtils.getTestDir("work-dir/localfs");
private static final String TEST_ROOT_DIR = base.getAbsolutePath();
private final Path TEST_PATH = new Path(TEST_ROOT_DIR, "test-file");
private Configuration conf;
private LocalFileSystem fileSys;
private void cleanupFile(FileSystem fs, Path name) throws IOException {
assertTrue(fs.exists(name));
fs.delete(name, true);
assertTrue(!fs.exists(name));
}
@BeforeEach
public void setup() throws IOException {
conf = new Configuration(false);
conf.set("fs.file.impl", LocalFileSystem.class.getName());
fileSys = FileSystem.getLocal(conf);
fileSys.delete(new Path(TEST_ROOT_DIR), true);
}
@AfterEach
public void after() throws IOException {
FileUtil.setWritable(base, true);
FileUtil.fullyDelete(base);
assertTrue(!base.exists());
RawLocalFileSystem.useStatIfAvailable();
}
/**
* Test the capability of setting the working directory.
*/
@Test
public void testWorkingDirectory() throws IOException {
Path origDir = fileSys.getWorkingDirectory();
Path subdir = new Path(TEST_ROOT_DIR, "new");
try {
// make sure it doesn't already exist
assertTrue(!fileSys.exists(subdir));
// make it and check for it
assertTrue(fileSys.mkdirs(subdir));
assertTrue(fileSys.isDirectory(subdir));
fileSys.setWorkingDirectory(subdir);
// create a directory and check for it
Path dir1 = new Path("dir1");
assertTrue(fileSys.mkdirs(dir1));
assertTrue(fileSys.isDirectory(dir1));
// delete the directory and make sure it went away
fileSys.delete(dir1, true);
assertTrue(!fileSys.exists(dir1));
// create files and manipulate them.
Path file1 = new Path("file1");
Path file2 = new Path("sub/file2");
String contents = writeFile(fileSys, file1, 1);
fileSys.copyFromLocalFile(file1, file2);
assertTrue(fileSys.exists(file1));
assertTrue(fileSys.isFile(file1));
cleanupFile(fileSys, file2);
fileSys.copyToLocalFile(file1, file2);
cleanupFile(fileSys, file2);
// try a rename
fileSys.rename(file1, file2);
assertTrue(!fileSys.exists(file1));
assertTrue(fileSys.exists(file2));
fileSys.rename(file2, file1);
// try reading a file
InputStream stm = fileSys.open(file1);
byte[] buffer = new byte[3];
int bytesRead = stm.read(buffer, 0, 3);
assertEquals(contents, new String(buffer, 0, bytesRead));
stm.close();
} finally {
fileSys.setWorkingDirectory(origDir);
}
}
/**
* test Syncable
|
TestLocalFileSystem
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/selection/methodgenerics/bounds/SourceTypeIsBoundedTypeVarMapper.java
|
{
"start": 1167,
"end": 1568
}
|
class ____ {
private Long prop1;
private Nested prop2;
public Long getProp1() {
return prop1;
}
public void setProp1(Long prop1) {
this.prop1 = prop1;
}
public Nested getProp2() {
return prop2;
}
public void setProp2(Nested prop2) {
this.prop2 = prop2;
}
}
|
Target
|
java
|
apache__camel
|
components/camel-xmlsecurity/src/main/java/org/apache/camel/component/xmlsecurity/processor/XmlSignerProcessor.java
|
{
"start": 7902,
"end": 11913
}
|
class ____ extends XmlSignatureProcessor {
private static final Logger LOG = LoggerFactory.getLogger(XmlSignerProcessor.class);
private static final String SHA512 = "sha512";
private static final String SHA384 = "sha384";
private static final String SHA256 = "sha256";
private static final String SHA224 = "sha224";
private static final String SHA1 = "sha1";
private static final String RIPEMD160 = "ripemd160";
private static final String HTTP_WWW_W3_ORG_2001_04_XMLDSIG_MORE_SHA224 = "http://www.w3.org/2001/04/xmldsig-more#sha224"; // see RFC 4051
private static final String HTTP_WWW_W3_ORG_2001_04_XMLDSIG_MORE_SHA384 = "http://www.w3.org/2001/04/xmldsig-more#sha384"; // see RFC 4051
private final XmlSignerConfiguration config;
public XmlSignerProcessor(CamelContext context, XmlSignerConfiguration config) {
super(context);
this.config = config;
}
@Override
public XmlSignerConfiguration getConfiguration() {
return config;
}
@Override
public void process(Exchange exchange) throws Exception {
try {
LOG.debug("XML signature generation started using algorithm {} and canonicalization method {}", getConfiguration()
.getSignatureAlgorithm(), getConfiguration().getCanonicalizationMethod().getAlgorithm());
// lets setup the out message before we invoke the signing
// so that it can mutate it if necessary
Message out = exchange.getOut();
out.copyFrom(exchange.getIn());
Document outputDoc = sign(out);
ByteArrayOutputStream outStream = new ByteArrayOutputStream();
XmlSignatureHelper.transformNonTextNodeToOutputStream(outputDoc, outStream, omitXmlDeclaration(out),
getConfiguration().getOutputXmlEncoding());
byte[] data = outStream.toByteArray();
out.setBody(data);
setOutputEncodingToMessageHeader(out);
clearMessageHeaders(out);
LOG.debug("XML signature generation finished");
} catch (Exception e) {
// remove OUT message, as an exception occurred
exchange.setOut(null);
throw e;
}
}
protected Document sign(final Message out) throws Exception {
try {
XMLSignatureFactory fac;
// Try to install the Santuario Provider - fall back to the JDK provider if this does
// not work
try {
fac = XMLSignatureFactory.getInstance("DOM", "ApacheXMLDSig");
} catch (NoSuchProviderException ex) {
fac = XMLSignatureFactory.getInstance("DOM");
}
final Node node = getMessageBodyNode(out);
if (getConfiguration().getKeyAccessor() == null) {
throw new XmlSignatureNoKeyException(
"Key accessor is missing for XML signature generation. Specify a key accessor in the configuration.");
}
final KeySelector keySelector = getConfiguration().getKeyAccessor().getKeySelector(out);
if (keySelector == null) {
throw new XmlSignatureNoKeyException(
"Key selector is missing for XML signature generation. Specify a key selector in the configuration.");
}
SignatureType signatureType = determineSignatureType(out);
final List<String> contentReferenceUris = getContentReferenceUris(out, signatureType, node);
Node lastParent = null;
// per content reference URI a signature is built; for enveloped and enveloping there is only one content reference URI;
// only in the detached case there can be several
for (final String contentReferenceUri : contentReferenceUris) {
// the method KeyAccessor.getKeyInfo must be called after the method KeyAccessor.getKeySelector, this is part of the
|
XmlSignerProcessor
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/annotation/method/configuration/PrePostMethodSecurityConfigurationTests.java
|
{
"start": 69720,
"end": 70008
}
|
class ____ {
@Bean
@Role(BeanDefinition.ROLE_INFRASTRUCTURE)
Advisor returnBeforeJsr250() {
return returnAdvisor(AuthorizationInterceptorsOrder.JSR250.getOrder() + OffsetConfig.OFFSET - 1);
}
}
@Configuration
@Import(OffsetConfig.class)
static
|
ReturnBeforeOffsetJsr250Config
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/engine/spi/AbstractDelegatingSharedSessionBuilder.java
|
{
"start": 675,
"end": 936
}
|
class ____ {@link SharedSessionBuilder} implementations that wish to implement only parts of that contract
* themselves while forwarding other method invocations to a delegate instance.
*
* @author Gunnar Morling
* @author Guillaume Smet
*/
public abstract
|
for
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/ingest/IngestDocument.java
|
{
"start": 84679,
"end": 86070
}
|
class ____ implements ListIterator<Object> {
private final ListIterator<Object> data;
UnmodifiableListIterator(ListIterator<Object> data) {
this.data = data;
}
@Override
public boolean hasNext() {
return data.hasNext();
}
@Override
public Object next() {
return wrapUnmodifiable(data.next());
}
@Override
public boolean hasPrevious() {
return data.hasPrevious();
}
@Override
public Object previous() {
return wrapUnmodifiable(data.previous());
}
@Override
public int nextIndex() {
return data.nextIndex();
}
@Override
public int previousIndex() {
return data.previousIndex();
}
@Override
public void remove() {
throw unmodifiableException();
}
@Override
public void set(final Object o) {
throw unmodifiableException();
}
@Override
public void add(final Object o) {
throw unmodifiableException();
}
}
}
private static final
|
UnmodifiableListIterator
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/expressions/converter/converters/CustomizedConverters.java
|
{
"start": 1388,
"end": 3946
}
|
class ____ {
private static final Map<FunctionDefinition, CustomizedConverter> CONVERTERS = new HashMap<>();
static {
CONVERTERS.put(BuiltInFunctionDefinitions.CAST, new CastConverter());
CONVERTERS.put(BuiltInFunctionDefinitions.TRY_CAST, new TryCastConverter());
CONVERTERS.put(BuiltInFunctionDefinitions.REINTERPRET_CAST, new ReinterpretCastConverter());
CONVERTERS.put(BuiltInFunctionDefinitions.IN, new InConverter());
CONVERTERS.put(BuiltInFunctionDefinitions.GET, new GetConverter());
CONVERTERS.put(BuiltInFunctionDefinitions.TRIM, new TrimConverter());
CONVERTERS.put(BuiltInFunctionDefinitions.AS, new AsConverter());
CONVERTERS.put(BuiltInFunctionDefinitions.BETWEEN, new BetweenConverter());
CONVERTERS.put(BuiltInFunctionDefinitions.NOT_BETWEEN, new NotBetweenConverter());
CONVERTERS.put(BuiltInFunctionDefinitions.REPLACE, new ReplaceConverter());
CONVERTERS.put(BuiltInFunctionDefinitions.PLUS, new PlusConverter());
CONVERTERS.put(
BuiltInFunctionDefinitions.TEMPORAL_OVERLAPS, new TemporalOverlapsConverter());
CONVERTERS.put(BuiltInFunctionDefinitions.TIMESTAMP_DIFF, new TimestampDiffConverter());
CONVERTERS.put(BuiltInFunctionDefinitions.ARRAY, new ArrayConverter());
CONVERTERS.put(BuiltInFunctionDefinitions.MAP, new MapConverter());
CONVERTERS.put(BuiltInFunctionDefinitions.ROW, new RowConverter());
CONVERTERS.put(BuiltInFunctionDefinitions.ORDER_ASC, new OrderAscConverter());
CONVERTERS.put(BuiltInFunctionDefinitions.SQRT, new SqrtConverter());
CONVERTERS.put(BuiltInFunctionDefinitions.IS_JSON, new IsJsonConverter());
CONVERTERS.put(BuiltInFunctionDefinitions.JSON_EXISTS, new JsonExistsConverter());
CONVERTERS.put(BuiltInFunctionDefinitions.JSON_VALUE, new JsonValueConverter());
CONVERTERS.put(BuiltInFunctionDefinitions.JSON_QUERY, new JsonQueryConverter());
CONVERTERS.put(BuiltInFunctionDefinitions.JSON_OBJECT, new JsonObjectConverter());
CONVERTERS.put(BuiltInFunctionDefinitions.JSON_ARRAY, new JsonArrayConverter());
CONVERTERS.put(BuiltInFunctionDefinitions.DEFAULT, new DefaultConverter());
CONVERTERS.put(InternalFunctionDefinitions.THROW_EXCEPTION, new ThrowExceptionConverter());
}
public Optional<CustomizedConverter> getConverter(FunctionDefinition functionDefinition) {
return Optional.ofNullable(CONVERTERS.get(functionDefinition));
}
}
|
CustomizedConverters
|
java
|
apache__camel
|
components/camel-aws/camel-aws2-kinesis/src/test/java/org/apache/camel/component/aws2/firehose/KinesisFirehoseComponentConfigurationTest.java
|
{
"start": 1163,
"end": 5628
}
|
class ____ extends CamelTestSupport {
@Test
public void createEndpointWithAccessAndSecretKey() throws Exception {
KinesisFirehose2Component component = context.getComponent("aws2-kinesis-firehose", KinesisFirehose2Component.class);
KinesisFirehose2Endpoint endpoint = (KinesisFirehose2Endpoint) component
.createEndpoint("aws2-kinesis-firehose://some_stream_name?accessKey=xxxxx&secretKey=yyyyy");
assertEquals("some_stream_name", endpoint.getConfiguration().getStreamName());
assertEquals("xxxxx", endpoint.getConfiguration().getAccessKey());
assertEquals("yyyyy", endpoint.getConfiguration().getSecretKey());
}
@Test
public void createEndpointWithComponentElements() throws Exception {
KinesisFirehose2Component component = context.getComponent("aws2-kinesis-firehose", KinesisFirehose2Component.class);
component.getConfiguration().setAccessKey("XXX");
component.getConfiguration().setSecretKey("YYY");
KinesisFirehose2Endpoint endpoint
= (KinesisFirehose2Endpoint) component.createEndpoint("aws2-kinesis-firehose://some_stream_name");
assertEquals("some_stream_name", endpoint.getConfiguration().getStreamName());
assertEquals("XXX", endpoint.getConfiguration().getAccessKey());
assertEquals("YYY", endpoint.getConfiguration().getSecretKey());
}
@Test
public void createEndpointWithComponentAndEndpointElements() throws Exception {
KinesisFirehose2Component component = context.getComponent("aws2-kinesis-firehose", KinesisFirehose2Component.class);
component.getConfiguration().setAccessKey("XXX");
component.getConfiguration().setSecretKey("YYY");
component.getConfiguration().setRegion(Region.US_WEST_1.toString());
KinesisFirehose2Endpoint endpoint = (KinesisFirehose2Endpoint) component
.createEndpoint("aws2-kinesis-firehose://some_stream_name?accessKey=xxxxxx&secretKey=yyyyy®ion=US_EAST_1");
assertEquals("some_stream_name", endpoint.getConfiguration().getStreamName());
assertEquals("xxxxxx", endpoint.getConfiguration().getAccessKey());
assertEquals("yyyyy", endpoint.getConfiguration().getSecretKey());
assertEquals("US_EAST_1", endpoint.getConfiguration().getRegion());
}
@Test
public void createEndpointWithComponentEndpointElementsAndProxy() throws Exception {
KinesisFirehose2Component component = context.getComponent("aws2-kinesis-firehose", KinesisFirehose2Component.class);
component.getConfiguration().setAccessKey("XXX");
component.getConfiguration().setSecretKey("YYY");
component.getConfiguration().setRegion(Region.US_WEST_1.toString());
KinesisFirehose2Endpoint endpoint = (KinesisFirehose2Endpoint) component
.createEndpoint(
"aws2-kinesis-firehose://label?accessKey=xxxxxx&secretKey=yyyyy®ion=US_EAST_1&proxyHost=localhost&proxyPort=9000&proxyProtocol=HTTP");
assertEquals("xxxxxx", endpoint.getConfiguration().getAccessKey());
assertEquals("yyyyy", endpoint.getConfiguration().getSecretKey());
assertEquals("US_EAST_1", endpoint.getConfiguration().getRegion());
assertEquals(Protocol.HTTP, endpoint.getConfiguration().getProxyProtocol());
assertEquals("localhost", endpoint.getConfiguration().getProxyHost());
assertEquals(Integer.valueOf(9000), endpoint.getConfiguration().getProxyPort());
}
@Test
public void createEndpointWithOverride() throws Exception {
KinesisFirehose2Component component = context.getComponent("aws2-kinesis-firehose", KinesisFirehose2Component.class);
KinesisFirehose2Endpoint endpoint = (KinesisFirehose2Endpoint) component
.createEndpoint(
"aws2-kinesis-firehose://some_stream_name?accessKey=xxxxx&secretKey=yyyyy&overrideEndpoint=true&uriEndpointOverride=http://localhost:4567");
assertEquals("some_stream_name", endpoint.getConfiguration().getStreamName());
assertEquals("xxxxx", endpoint.getConfiguration().getAccessKey());
assertEquals("yyyyy", endpoint.getConfiguration().getSecretKey());
assertEquals("http://localhost:4567", endpoint.getConfiguration().getUriEndpointOverride());
assertTrue(endpoint.getConfiguration().isOverrideEndpoint());
}
}
|
KinesisFirehoseComponentConfigurationTest
|
java
|
hibernate__hibernate-orm
|
hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/DerbyDialect.java
|
{
"start": 5918,
"end": 35256
}
|
class ____ extends Dialect {
// KNOWN LIMITATIONS:
// no support for nationalized data (nchar, nvarchar, nclob)
// * limited set of fields for extract()
// (no 'day of xxxx', nor 'week of xxxx')
// * no support for format()
// * pad() can only pad with blanks
// * can't cast String to Binary
// * can't select a parameter unless wrapped
// in a cast or function call
private final static DatabaseVersion MINIMUM_VERSION = DatabaseVersion.make( 10, 15, 2 );
private final LimitHandler limitHandler = new DerbyLimitHandler( true );
private final UniqueDelegate uniqueDelegate = new CreateTableUniqueDelegate(this);
public DerbyDialect() {
this( MINIMUM_VERSION);
}
public DerbyDialect(DatabaseVersion version) {
super(version);
}
public DerbyDialect(DialectResolutionInfo info) {
super(info);
}
@Override
protected DatabaseVersion getMinimumSupportedVersion() {
return MINIMUM_VERSION;
}
@Override
protected String columnType(int sqlTypeCode) {
return switch ( sqlTypeCode ) {
//no tinyint
case TINYINT -> "smallint";
// HHH-12827: map them both to the same type to avoid problems with schema update
// Note that 31 is the maximum precision Derby supports
case NUMERIC -> columnType( DECIMAL );
case VARBINARY -> "varchar($l) for bit data";
case NCHAR -> columnType( CHAR );
case NVARCHAR -> columnType( VARCHAR );
case BLOB -> "blob";
case CLOB, NCLOB -> "clob";
case TIME, TIME_WITH_TIMEZONE -> "time";
case TIMESTAMP, TIMESTAMP_WITH_TIMEZONE -> "timestamp";
default -> super.columnType( sqlTypeCode );
};
}
@Override
protected void registerColumnTypes(TypeContributions typeContributions, ServiceRegistry serviceRegistry) {
super.registerColumnTypes( typeContributions, serviceRegistry );
final DdlTypeRegistry ddlTypeRegistry = typeContributions.getTypeConfiguration().getDdlTypeRegistry();
int varcharDdlTypeCapacity = 32_672;
ddlTypeRegistry.addDescriptor(
CapacityDependentDdlType.builder(
VARBINARY,
isLob( LONG32VARBINARY )
? CapacityDependentDdlType.LobKind.BIGGEST_LOB
: CapacityDependentDdlType.LobKind.NONE,
columnType( LONG32VARBINARY ),
columnType( VARBINARY ),
this
)
.withTypeCapacity( varcharDdlTypeCapacity, columnType( VARBINARY ) )
.build()
);
ddlTypeRegistry.addDescriptor(
CapacityDependentDdlType.builder(
VARCHAR,
isLob( LONG32VARCHAR )
? CapacityDependentDdlType.LobKind.BIGGEST_LOB
: CapacityDependentDdlType.LobKind.NONE,
columnType( LONG32VARCHAR ),
columnType( VARCHAR ),
this
)
.withTypeCapacity( varcharDdlTypeCapacity, columnType( VARCHAR ) )
.build()
);
ddlTypeRegistry.addDescriptor(
CapacityDependentDdlType.builder(
NVARCHAR,
isLob( LONG32NVARCHAR )
? CapacityDependentDdlType.LobKind.BIGGEST_LOB
: CapacityDependentDdlType.LobKind.NONE,
columnType( LONG32VARCHAR ),
columnType( NVARCHAR ),
this
)
.withTypeCapacity( varcharDdlTypeCapacity, columnType( NVARCHAR ) )
.build()
);
ddlTypeRegistry.addDescriptor(
CapacityDependentDdlType.builder(
BINARY,
isLob( LONG32VARBINARY )
? CapacityDependentDdlType.LobKind.BIGGEST_LOB
: CapacityDependentDdlType.LobKind.NONE,
columnType( LONG32VARBINARY ),
columnType( VARBINARY ),
this
)
.withTypeCapacity( 254, "char($l) for bit data" )
.withTypeCapacity( varcharDdlTypeCapacity, columnType( VARBINARY ) )
.build()
);
// This is the maximum size for the CHAR datatype on Derby
ddlTypeRegistry.addDescriptor(
CapacityDependentDdlType.builder(
CHAR,
isLob( LONG32VARCHAR )
? CapacityDependentDdlType.LobKind.BIGGEST_LOB
: CapacityDependentDdlType.LobKind.NONE,
columnType( LONG32VARCHAR ),
columnType( CHAR ),
this
)
.withTypeCapacity( 254, columnType( CHAR ) )
.withTypeCapacity( getMaxVarcharLength(), columnType( VARCHAR ) )
.build()
);
ddlTypeRegistry.addDescriptor(
CapacityDependentDdlType.builder(
NCHAR,
isLob( LONG32NVARCHAR )
? CapacityDependentDdlType.LobKind.BIGGEST_LOB
: CapacityDependentDdlType.LobKind.NONE,
columnType( LONG32NVARCHAR ),
columnType( NCHAR ),
this
)
.withTypeCapacity( 254, columnType( NCHAR ) )
.withTypeCapacity( getMaxVarcharLength(), columnType( NVARCHAR ) )
.build()
);
}
@Override
public int getMaxVarcharLength() {
return 32_672;
}
@Override
public int getMaxVarcharCapacity() {
return 32_700;
}
@Override
public int getDefaultDecimalPrecision() {
//this is the maximum allowed in Derby
return 31;
}
@Override
public NationalizationSupport getNationalizationSupport() {
return NationalizationSupport.IMPLICIT;
}
@Override
public int getDefaultStatementBatchSize() {
return 15;
}
@Override
public int getFloatPrecision() {
return 23;
}
@Override
public int getDoublePrecision() {
return 52;
}
@Override
public int getDefaultTimestampPrecision() {
return 9;
}
@Override
public void initializeFunctionRegistry(FunctionContributions functionContributions) {
super.initializeFunctionRegistry(functionContributions);
final BasicTypeRegistry basicTypeRegistry = functionContributions.getTypeConfiguration().getBasicTypeRegistry();
final BasicType<String> stringType = basicTypeRegistry.resolve( StandardBasicTypes.STRING );
final DdlTypeRegistry ddlTypeRegistry = functionContributions.getTypeConfiguration().getDdlTypeRegistry();
final CommonFunctionFactory functionFactory = new CommonFunctionFactory(functionContributions);
// Derby needs an actual argument type for aggregates like SUM, AVG, MIN, MAX to determine the result type
functionFactory.aggregates( this, SqlAstNodeRenderingMode.NO_PLAIN_PARAMETER );
functionContributions.getFunctionRegistry().register(
"count",
new CountFunction(
this,
functionContributions.getTypeConfiguration(),
SqlAstNodeRenderingMode.NO_PLAIN_PARAMETER,
"||",
ddlTypeRegistry.getDescriptor( VARCHAR )
.getCastTypeName( Size.nil(), stringType, ddlTypeRegistry ),
true
)
);
// AVG by default uses the input type, so we possibly need to cast the argument type, hence a special function
functionFactory.avg_castingNonDoubleArguments( this, SqlAstNodeRenderingMode.DEFAULT );
// Note that Derby does not have chr() / ascii() functions.
// It does have a function named char(), but it's really a
// sort of to_char() function.
// We register an emulation instead, that can at least translate integer literals
functionContributions.getFunctionRegistry().register(
"chr",
new ChrLiteralEmulation( functionContributions.getTypeConfiguration() )
);
functionFactory.concat_pipeOperator();
functionFactory.cot();
functionFactory.degrees();
functionFactory.radians();
functionFactory.log10();
functionFactory.sinh();
functionFactory.cosh();
functionFactory.tanh();
functionFactory.pi();
functionFactory.rand();
functionFactory.trim1();
functionFactory.hourMinuteSecond();
functionFactory.yearMonthDay();
functionFactory.varPopSamp();
functionFactory.stddevPopSamp();
functionFactory.substring_substr();
functionFactory.leftRight_substrLength();
functionFactory.characterLength_length( SqlAstNodeRenderingMode.NO_PLAIN_PARAMETER );
functionFactory.power_expLn();
functionFactory.round_floor();
functionFactory.trunc_floor();
functionFactory.octetLength_pattern( "length(?1)", SqlAstNodeRenderingMode.NO_PLAIN_PARAMETER );
functionFactory.bitLength_pattern( "length(?1)*8", SqlAstNodeRenderingMode.NO_PLAIN_PARAMETER );
functionContributions.getFunctionRegistry().register(
"concat",
new CastingConcatFunction(
this,
"||",
true,
SqlAstNodeRenderingMode.NO_PLAIN_PARAMETER,
functionContributions.getTypeConfiguration()
)
);
//no way I can see to pad with anything other than spaces
functionContributions.getFunctionRegistry().register( "lpad", new DerbyLpadEmulation( functionContributions.getTypeConfiguration() ) );
functionContributions.getFunctionRegistry().register( "rpad", new DerbyRpadEmulation( functionContributions.getTypeConfiguration() ) );
functionContributions.getFunctionRegistry().register( "least", new CaseLeastGreatestEmulation( true ) );
functionContributions.getFunctionRegistry().register( "greatest", new CaseLeastGreatestEmulation( false ) );
functionContributions.getFunctionRegistry().register( "overlay", new InsertSubstringOverlayEmulation( functionContributions.getTypeConfiguration(), true ) );
}
@Override
public SqlAstTranslatorFactory getSqlAstTranslatorFactory() {
return new StandardSqlAstTranslatorFactory() {
@Override
protected <T extends JdbcOperation> SqlAstTranslator<T> buildTranslator(
SessionFactoryImplementor sessionFactory, Statement statement) {
return new DerbySqlAstTranslator<>( sessionFactory, statement );
}
};
}
/**
* Derby doesn't have an extract() function, and has
* no functions at all for calendaring, but we can
* emulate the most basic functionality of extract()
* using the functions it does have.
* <p>
* The only supported {@link TemporalUnit}s are:
* {@link TemporalUnit#YEAR},
* {@link TemporalUnit#MONTH}
* {@link TemporalUnit#DAY},
* {@link TemporalUnit#HOUR},
* {@link TemporalUnit#MINUTE},
* {@link TemporalUnit#SECOND} (along with
* {@link TemporalUnit#NANOSECOND},
* {@link TemporalUnit#DATE}, and
* {@link TemporalUnit#TIME}, which are desugared
* by the parser).
*/
@Override
public String extractPattern(TemporalUnit unit) {
switch (unit) {
case DAY_OF_MONTH:
return "day(?2)";
case DAY_OF_YEAR:
return "({fn timestampdiff(sql_tsi_day,date(char(year(?2),4)||'-01-01'),?2)}+1)";
case DAY_OF_WEEK:
// Use the approach as outlined here: https://stackoverflow.com/questions/36357013/day-of-week-from-seconds-since-epoch
return "(mod(mod({fn timestampdiff(sql_tsi_day,{d '1970-01-01'},?2)}+4,7)+7,7)+1)";
case WEEK:
// Use the approach as outlined here: https://www.sqlservercentral.com/articles/a-simple-formula-to-calculate-the-iso-week-number
// In SQL Server terms this is (DATEPART(dy,DATEADD(dd,DATEDIFF(dd,'17530101',@SomeDate)/7*7,'17530104'))+6)/7
return "(({fn timestampdiff(sql_tsi_day,date(char(year(?2),4)||'-01-01'),{fn timestampadd(sql_tsi_day,{fn timestampdiff(sql_tsi_day,{d '1753-01-01'},?2)}/7*7,{d '1753-01-04'})})}+7)/7)";
case QUARTER:
return "((month(?2)+2)/3)";
case EPOCH:
return "{fn timestampdiff(sql_tsi_second,{ts '1970-01-01 00:00:00'},?2)}";
default:
return "?1(?2)";
}
}
@Override
public String translateExtractField(TemporalUnit unit) {
switch (unit) {
case WEEK:
case DAY_OF_YEAR:
case DAY_OF_WEEK:
throw new UnsupportedOperationException("field type not supported on Derby: " + unit);
case DAY_OF_MONTH:
return "day";
default:
return super.translateExtractField(unit);
}
}
/**
* Derby does have a real {@link Types#BOOLEAN}
* type, but it doesn't know how to cast to it. Worse,
* Derby makes us use the {@code double()} function to
* cast things to its floating point types.
*/
@Override
public String castPattern(CastType from, CastType to) {
switch ( to ) {
case FLOAT:
return "cast(double(?1) as real)";
case DOUBLE:
return "double(?1)";
case STRING:
// Derby madness http://db.apache.org/derby/docs/10.8/ref/rrefsqlj33562.html
// With a nice rant: https://blog.jooq.org/2011/10/29/derby-casting-madness-the-sequel/
// See https://issues.apache.org/jira/browse/DERBY-2072
// Since numerics can't be cast to varchar directly, use char(254) i.e. with the maximum char capacity
// as an intermediate type before converting to varchar
switch ( from ) {
case FLOAT:
case DOUBLE:
// Derby can't cast to char directly, but needs to be cast to decimal first...
return "cast(trim(cast(cast(?1 as decimal("
+ getDefaultDecimalPrecision() + ","
+ BigDecimalJavaType.INSTANCE.getDefaultSqlScale( this, null )
+ ")) as char(254))) as ?2)";
case INTEGER:
case LONG:
case FIXED:
return "cast(trim(cast(?1 as char(254))) as ?2)";
case DATE:
// The maximum length of a date
return "cast(?1 as varchar(10))";
case TIME:
// The maximum length of a time
return "cast(?1 as varchar(8))";
case TIMESTAMP:
// The maximum length of a timestamp
return "cast(?1 as varchar(30))";
}
break;
}
return super.castPattern( from, to );
}
@Override
public String timestampaddPattern(TemporalUnit unit, TemporalType temporalType, IntervalType intervalType) {
switch (unit) {
case NANOSECOND:
case NATIVE:
return "{fn timestampadd(sql_tsi_frac_second,mod(bigint(?2),1000000000),{fn timestampadd(sql_tsi_second,bigint((?2)/1000000000),?3)})}";
default:
final String addExpression = "{fn timestampadd(sql_tsi_?1,bigint(?2),?3)}";
// Since timestampadd will always produce a TIMESTAMP, we have to cast back to the intended type
return temporalType == TemporalType.TIMESTAMP
? addExpression
: "cast(" + addExpression + " as " + temporalType.name().toLowerCase( Locale.ROOT ) + ")" ;
}
}
@Override
public String timestampdiffPattern(TemporalUnit unit, TemporalType fromTemporalType, TemporalType toTemporalType) {
switch (unit) {
case NANOSECOND:
case NATIVE:
return "{fn timestampdiff(sql_tsi_frac_second,?2,?3)}";
default:
return "{fn timestampdiff(sql_tsi_?1,?2,?3)}";
}
}
@Override
public void appendBooleanValueString(SqlAppender appender, boolean bool) {
appender.appendSql( bool );
}
@Override
public SequenceSupport getSequenceSupport() {
return DerbySequenceSupport.INSTANCE;
}
@Override
public String getQuerySequencesString() {
return "select sys.sysschemas.schemaname as sequence_schema,sys.syssequences.* from sys.syssequences left join sys.sysschemas on sys.syssequences.schemaid=sys.sysschemas.schemaid";
}
@Override
public SequenceInformationExtractor getSequenceInformationExtractor() {
return SequenceInformationExtractorDerbyDatabaseImpl.INSTANCE;
}
@Override
public String[] getDropSchemaCommand(String schemaName) {
return new String[] {"drop schema " + schemaName + " restrict"};
}
@Override
public String getSelectClauseNullString(int sqlType, TypeConfiguration typeConfiguration) {
return DB2Dialect.selectNullString( sqlType );
}
@Override
public boolean supportsCommentOn() {
//HHH-4531
return false;
}
@Override
public LockingSupport getLockingSupport() {
return LockingSupportSimple.NO_OUTER_JOIN;
}
@Override
protected LockingClauseStrategy buildLockingClauseStrategy(
PessimisticLockKind lockKind,
RowLockStrategy rowLockStrategy,
LockOptions lockOptions,
Set<NavigablePath> rootPathsForLocking) {
return new DerbyLockingClauseStrategy( this, lockKind, rowLockStrategy, lockOptions, rootPathsForLocking );
}
@Override
public String getForUpdateString() {
return " for update with rs";
}
@Override
public String getWriteLockString(Timeout timeout) {
return " for update with rs";
}
@Override
public String getReadLockString(Timeout timeout) {
return " for read only with rs";
}
@Override
public String getWriteLockString(int timeout) {
return " for update with rs";
}
@Override
public String getReadLockString(int timeout) {
return " for read only with rs";
}
	@Override
	public boolean supportsExistsInSelect() {
		// TODO: check this! NOTE(review): assumes Derby rejects EXISTS predicates in the
		// select list — confirm against the Derby reference manual before changing.
		return false;
	}
@Override
public boolean supportsCurrentTimestampSelection() {
return true;
}
@Override
public String getCurrentTimestampSelectString() {
return "values current timestamp";
}
@Override
public boolean isCurrentTimestampSelectStringCallable() {
return false;
}
@Override
public LimitHandler getLimitHandler() {
return limitHandler;
}
@Override
public IdentityColumnSupport getIdentityColumnSupport() {
return DB2IdentityColumnSupport.INSTANCE;
}
@Override
public boolean doesReadCommittedCauseWritersToBlockReaders() {
//TODO: check this
return true;
}
@Override
public boolean supportsResultSetPositionQueryMethodsOnForwardOnlyCursor() {
return false;
}
@Override
public boolean supportsTupleDistinctCounts() {
//checked on Derby 10.14
return false;
}
@Override
public boolean supportsOrderByInSubquery() {
// As of version 10.5 Derby supports OFFSET and FETCH as well as ORDER BY in subqueries
return true;
}
@Override
public boolean requiresCastForConcatenatingNonStrings() {
return true;
}
@Override
public void contributeTypes(TypeContributions typeContributions, ServiceRegistry serviceRegistry) {
super.contributeTypes( typeContributions, serviceRegistry );
final JdbcTypeRegistry jdbcTypeRegistry = typeContributions.getTypeConfiguration()
.getJdbcTypeRegistry();
jdbcTypeRegistry.addDescriptor( Types.TIMESTAMP_WITH_TIMEZONE, TimestampJdbcType.INSTANCE );
// Derby requires a custom binder for binding untyped nulls that resolves the type through the statement
typeContributions.contributeJdbcType( ObjectNullResolvingJdbcType.INSTANCE );
// Until we remove StandardBasicTypes, we have to keep this
typeContributions.contributeType(
new JavaObjectType(
ObjectNullResolvingJdbcType.INSTANCE,
typeContributions.getTypeConfiguration()
.getJavaTypeRegistry()
.getDescriptor( Object.class )
)
);
}
// Overridden informational metadata ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@Override
public boolean supportsLobValueChangePropagation() {
return false;
}
@Override
public boolean supportsUnboundedLobLocatorMaterialization() {
return false;
}
@Override
public int getInExpressionCountLimit() {
// Derby does not have a limit on the number of expressions/parameters per-se (it may, I just
// don't know). It does, however, have a limit on the size of the SQL text it will accept as a
// PreparedStatement; so let's limit this to a sensible value to avoid that.
return 512;
}
@Override
public ViolatedConstraintNameExtractor getViolatedConstraintNameExtractor() {
return new TemplatedViolatedConstraintNameExtractor( sqle -> {
final String sqlState = JdbcExceptionHelper.extractSqlState( sqle );
if ( sqlState != null ) {
switch ( sqlState ) {
case "23505":
return TemplatedViolatedConstraintNameExtractor.extractUsingTemplate(
"'", "'",
sqle.getMessage()
);
}
}
return null;
} );
}
	@Override
	public SQLExceptionConversionDelegate buildSQLExceptionConversionDelegate() {
		// Maps Derby SQLSTATEs to Hibernate's JDBC exception hierarchy:
		//   23505        -> unique ConstraintViolationException (constraint name extracted from the message)
		//   40XL1/40XL2  -> LockTimeoutException
		// Returning null lets the default conversion take over for anything else.
		return (sqlException, message, sql) -> {
			final String sqlState = JdbcExceptionHelper.extractSqlState( sqlException );
//			final int errorCode = JdbcExceptionHelper.extractErrorCode( sqlException );
			final String constraintName;
			if ( sqlState != null ) {
				switch ( sqlState ) {
					case "23505":
						// Unique constraint violation
						constraintName = getViolatedConstraintNameExtractor().extractConstraintName(sqlException);
						return new ConstraintViolationException(
								message,
								sqlException,
								sql,
								ConstraintViolationException.ConstraintKind.UNIQUE,
								constraintName
						);
					case "40XL1":
					case "40XL2":
						// Lock could not be obtained within the configured wait period
						return new LockTimeoutException( message, sqlException, sql );
				}
			}
			return null;
		};
	}
@Override
public void appendDatetimeFormat(SqlAppender appender, String format) {
throw new UnsupportedOperationException("format() function not supported on Derby");
}
@Override
protected void registerDefaultKeywords() {
super.registerDefaultKeywords();
registerKeyword( "ADD" );
registerKeyword( "ALL" );
registerKeyword( "ALLOCATE" );
registerKeyword( "ALTER" );
registerKeyword( "AND" );
registerKeyword( "ANY" );
registerKeyword( "ARE" );
registerKeyword( "AS" );
registerKeyword( "ASC" );
registerKeyword( "ASSERTION" );
registerKeyword( "AT" );
registerKeyword( "AUTHORIZATION" );
registerKeyword( "AVG" );
registerKeyword( "BEGIN" );
registerKeyword( "BETWEEN" );
registerKeyword( "BIT" );
registerKeyword( "BOOLEAN" );
registerKeyword( "BOTH" );
registerKeyword( "BY" );
registerKeyword( "CALL" );
registerKeyword( "CASCADE" );
registerKeyword( "CASCADED" );
registerKeyword( "CASE" );
registerKeyword( "CAST" );
registerKeyword( "CHAR" );
registerKeyword( "CHARACTER" );
registerKeyword( "CHECK" );
registerKeyword( "CLOSE" );
registerKeyword( "COLLATE" );
registerKeyword( "COLLATION" );
registerKeyword( "COLUMN" );
registerKeyword( "COMMIT" );
registerKeyword( "CONNECT" );
registerKeyword( "CONNECTION" );
registerKeyword( "CONSTRAINT" );
registerKeyword( "CONSTRAINTS" );
registerKeyword( "CONTINUE" );
registerKeyword( "CONVERT" );
registerKeyword( "CORRESPONDING" );
registerKeyword( "COUNT" );
registerKeyword( "CREATE" );
registerKeyword( "CURRENT" );
registerKeyword( "CURRENT_DATE" );
registerKeyword( "CURRENT_TIME" );
registerKeyword( "CURRENT_TIMESTAMP" );
registerKeyword( "CURRENT_USER" );
registerKeyword( "CURSOR" );
registerKeyword( "DEALLOCATE" );
registerKeyword( "DEC" );
registerKeyword( "DECIMAL" );
registerKeyword( "DECLARE" );
registerKeyword( "DEFERRABLE" );
registerKeyword( "DEFERRED" );
registerKeyword( "DELETE" );
registerKeyword( "DESC" );
registerKeyword( "DESCRIBE" );
registerKeyword( "DIAGNOSTICS" );
registerKeyword( "DISCONNECT" );
registerKeyword( "DISTINCT" );
registerKeyword( "DOUBLE" );
registerKeyword( "DROP" );
registerKeyword( "ELSE" );
registerKeyword( "END" );
registerKeyword( "ENDEXEC" );
registerKeyword( "ESCAPE" );
registerKeyword( "EXCEPT" );
registerKeyword( "EXCEPTION" );
registerKeyword( "EXEC" );
registerKeyword( "EXECUTE" );
registerKeyword( "EXISTS" );
registerKeyword( "EXPLAIN" );
registerKeyword( "EXTERNAL" );
registerKeyword( "FALSE" );
registerKeyword( "FETCH" );
registerKeyword( "FIRST" );
registerKeyword( "FLOAT" );
registerKeyword( "FOR" );
registerKeyword( "FOREIGN" );
registerKeyword( "FOUND" );
registerKeyword( "FROM" );
registerKeyword( "FULL" );
registerKeyword( "FUNCTION" );
registerKeyword( "GET" );
registerKeyword( "GET_CURRENT_CONNECTION" );
registerKeyword( "GLOBAL" );
registerKeyword( "GO" );
registerKeyword( "GOTO" );
registerKeyword( "GRANT" );
registerKeyword( "GROUP" );
registerKeyword( "HAVING" );
registerKeyword( "HOUR" );
registerKeyword( "IDENTITY" );
registerKeyword( "IMMEDIATE" );
registerKeyword( "IN" );
registerKeyword( "INDICATOR" );
registerKeyword( "INITIALLY" );
registerKeyword( "INNER" );
registerKeyword( "INOUT" );
registerKeyword( "INPUT" );
registerKeyword( "INSENSITIVE" );
registerKeyword( "INSERT" );
registerKeyword( "INT" );
registerKeyword( "INTEGER" );
registerKeyword( "INTERSECT" );
registerKeyword( "INTO" );
registerKeyword( "IS" );
registerKeyword( "ISOLATION" );
registerKeyword( "JOIN" );
registerKeyword( "KEY" );
registerKeyword( "LAST" );
registerKeyword( "LEFT" );
registerKeyword( "LIKE" );
registerKeyword( "LONGINT" );
registerKeyword( "LOWER" );
registerKeyword( "LTRIM" );
registerKeyword( "MATCH" );
registerKeyword( "MAX" );
registerKeyword( "MIN" );
registerKeyword( "MINUTE" );
registerKeyword( "NATIONAL" );
registerKeyword( "NATURAL" );
registerKeyword( "NCHAR" );
registerKeyword( "NVARCHAR" );
registerKeyword( "NEXT" );
registerKeyword( "NO" );
registerKeyword( "NOT" );
registerKeyword( "NULL" );
registerKeyword( "NULLIF" );
registerKeyword( "NUMERIC" );
registerKeyword( "OF" );
registerKeyword( "ON" );
registerKeyword( "ONLY" );
registerKeyword( "OPEN" );
registerKeyword( "OPTION" );
registerKeyword( "OR" );
registerKeyword( "ORDER" );
registerKeyword( "OUT" );
registerKeyword( "OUTER" );
registerKeyword( "OUTPUT" );
registerKeyword( "OVERLAPS" );
registerKeyword( "PAD" );
registerKeyword( "PARTIAL" );
registerKeyword( "PREPARE" );
registerKeyword( "PRESERVE" );
registerKeyword( "PRIMARY" );
registerKeyword( "PRIOR" );
registerKeyword( "PRIVILEGES" );
registerKeyword( "PROCEDURE" );
registerKeyword( "PUBLIC" );
registerKeyword( "READ" );
registerKeyword( "REAL" );
registerKeyword( "REFERENCES" );
registerKeyword( "RELATIVE" );
registerKeyword( "RESTRICT" );
registerKeyword( "REVOKE" );
registerKeyword( "RIGHT" );
registerKeyword( "ROLLBACK" );
registerKeyword( "ROWS" );
registerKeyword( "RTRIM" );
registerKeyword( "SCHEMA" );
registerKeyword( "SCROLL" );
registerKeyword( "SECOND" );
registerKeyword( "SELECT" );
registerKeyword( "SESSION_USER" );
registerKeyword( "SET" );
registerKeyword( "SMALLINT" );
registerKeyword( "SOME" );
registerKeyword( "SPACE" );
registerKeyword( "SQL" );
registerKeyword( "SQLCODE" );
registerKeyword( "SQLERROR" );
registerKeyword( "SQLSTATE" );
registerKeyword( "SUBSTR" );
registerKeyword( "SUBSTRING" );
registerKeyword( "SUM" );
registerKeyword( "SYSTEM_USER" );
registerKeyword( "TABLE" );
registerKeyword( "TEMPORARY" );
registerKeyword( "TIMEZONE_HOUR" );
registerKeyword( "TIMEZONE_MINUTE" );
registerKeyword( "TO" );
registerKeyword( "TRAILING" );
registerKeyword( "TRANSACTION" );
registerKeyword( "TRANSLATE" );
registerKeyword( "TRANSLATION" );
registerKeyword( "TRUE" );
registerKeyword( "UNION" );
registerKeyword( "UNIQUE" );
registerKeyword( "UNKNOWN" );
registerKeyword( "UPDATE" );
registerKeyword( "UPPER" );
registerKeyword( "USER" );
registerKeyword( "USING" );
registerKeyword( "VALUES" );
registerKeyword( "VARCHAR" );
registerKeyword( "VARYING" );
registerKeyword( "VIEW" );
registerKeyword( "WHENEVER" );
registerKeyword( "WHERE" );
registerKeyword( "WITH" );
registerKeyword( "WORK" );
registerKeyword( "WRITE" );
registerKeyword( "XML" );
registerKeyword( "XMLEXISTS" );
registerKeyword( "XMLPARSE" );
registerKeyword( "XMLSERIALIZE" );
registerKeyword( "YEAR" );
}
@Override
public SqmMultiTableMutationStrategy getFallbackSqmMutationStrategy(
EntityMappingType rootEntityDescriptor,
RuntimeModelCreationContext runtimeModelCreationContext) {
return new LocalTemporaryTableMutationStrategy( rootEntityDescriptor, runtimeModelCreationContext );
}
@Override
public SqmMultiTableInsertStrategy getFallbackSqmInsertStrategy(
EntityMappingType rootEntityDescriptor,
RuntimeModelCreationContext runtimeModelCreationContext) {
return new LocalTemporaryTableInsertStrategy( rootEntityDescriptor, runtimeModelCreationContext );
}
@Override
public TemporaryTableKind getSupportedTemporaryTableKind() {
return TemporaryTableKind.LOCAL;
}
@Override
public TemporaryTableStrategy getLocalTemporaryTableStrategy() {
return DerbyLocalTemporaryTableStrategy.INSTANCE;
}
@Override
public String getTemporaryTableCreateOptions() {
return DerbyLocalTemporaryTableStrategy.INSTANCE.getTemporaryTableCreateOptions();
}
@Override
public String getTemporaryTableCreateCommand() {
return DerbyLocalTemporaryTableStrategy.INSTANCE.getTemporaryTableCreateCommand();
}
@Override
public BeforeUseAction getTemporaryTableBeforeUseAction() {
return DerbyLocalTemporaryTableStrategy.INSTANCE.getTemporaryTableBeforeUseAction();
}
@Override
public boolean supportsTemporaryTablePrimaryKey() {
return DerbyLocalTemporaryTableStrategy.INSTANCE.supportsTemporaryTablePrimaryKey();
}
@Override
public boolean supportsPartitionBy() {
return false;
}
@Override
public boolean supportsWindowFunctions() {
// It seems at least the row_number function is supported as of 10.4
return true;
}
@Override
public boolean supportsValuesList() {
return true;
}
	@Override
	public IdentifierHelper buildIdentifierHelper(IdentifierHelperBuilder builder, DatabaseMetaData metadata)
			throws SQLException {
		// NOTE(review): auto-quoting identifiers that start with an underscore —
		// presumably because Derby does not accept such identifiers unquoted; confirm
		// against the Derby identifier rules before relying on this.
		builder.setAutoQuoteInitialUnderscore(true);
		return super.buildIdentifierHelper(builder, metadata );
	}
@Override
public UniqueDelegate getUniqueDelegate() {
return uniqueDelegate;
}
@Override
public DmlTargetColumnQualifierSupport getDmlTargetColumnQualifierSupport() {
return DmlTargetColumnQualifierSupport.TABLE_ALIAS;
}
@Override
public String getDual() {
return "(values 0)";
}
@Override
public String getFromDualForSelectOnly() {
return " from " + getDual() + " dual";
}
@Override
public boolean supportsJoinInMutationStatementSubquery() {
return false;
}
@Override
public boolean supportsRowValueConstructorSyntax() {
return false;
}
@Override
public boolean supportsWithClause() {
return false;
}
@Override
public boolean supportsRowValueConstructorSyntaxInQuantifiedPredicates() {
return false;
}
@Override
public boolean supportsRowValueConstructorSyntaxInInList() {
return false;
}
}
|
DerbyDialect
|
java
|
mockito__mockito
|
mockito-core/src/test/java/org/mockitousage/basicapi/MocksSerializationForAnnotationTest.java
|
{
"start": 11466,
"end": 12039
}
|
class ____ {
@Mock(serializable = true)
SerializableAndNoDefaultConstructor serializableAndNoDefaultConstructor;
}
@Test
public void
should_be_able_to_serialize_type_that_implements_Serializable_but_but_dont_declare_a_no_arg_constructor()
throws Exception {
TestClassThatHoldValidField testClass = new TestClassThatHoldValidField();
MockitoAnnotations.openMocks(testClass);
serializeAndBack(testClass.serializableAndNoDefaultConstructor);
}
public static
|
TestClassThatHoldValidField
|
java
|
elastic__elasticsearch
|
build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestDistribution.java
|
{
"start": 605,
"end": 660
}
|
enum ____ {
    // Minimal distribution intended for integration tests
    INTEG_TEST,
    // The default (full) distribution
    DEFAULT,
}
|
TestDistribution
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/EmbeddingFloatResults.java
|
{
"start": 1424,
"end": 1528
}
|
class ____ which other dense embedding float result classes inherit their behaviour
*/
public abstract
|
from
|
java
|
netty__netty
|
buffer/src/main/java/io/netty/buffer/CompositeByteBuf.java
|
{
"start": 67954,
"end": 77744
}
|
class ____ {
    final ByteBuf srcBuf; // the originally added buffer
    final ByteBuf buf; // srcBuf unwrapped zero or more times
    int srcAdjustment; // index of the start of this CompositeByteBuf relative to srcBuf
    int adjustment; // index of the start of this CompositeByteBuf relative to buf
    int offset; // offset of this component within this CompositeByteBuf
    int endOffset; // end offset of this component within this CompositeByteBuf
    private ByteBuf slice; // cached slice, may be null
    Component(ByteBuf srcBuf, int srcOffset, ByteBuf buf, int bufOffset,
            int offset, int len, ByteBuf slice) {
        this.srcBuf = srcBuf;
        // adjustments are stored as deltas so that idx()/srcIdx() are a single addition
        this.srcAdjustment = srcOffset - offset;
        this.buf = buf;
        this.adjustment = bufOffset - offset;
        this.offset = offset;
        this.endOffset = offset + len;
        this.slice = slice;
    }
    // translate a composite index into an index into srcBuf
    int srcIdx(int index) {
        return index + srcAdjustment;
    }
    // translate a composite index into an index into the unwrapped buf
    int idx(int index) {
        return index + adjustment;
    }
    int length() {
        return endOffset - offset;
    }
    // move this component to a new offset within the composite, keeping both
    // adjustments consistent so idx()/srcIdx() still map to the same bytes
    void reposition(int newOffset) {
        int move = newOffset - offset;
        endOffset += move;
        srcAdjustment -= move;
        adjustment -= move;
        offset = newOffset;
    }
    // copy then release
    void transferTo(ByteBuf dst) {
        dst.writeBytes(buf, idx(offset), length());
        free();
    }
    // lazily created and cached; invalidated by free()
    ByteBuf slice() {
        ByteBuf s = slice;
        if (s == null) {
            slice = s = srcBuf.slice(srcIdx(offset), length());
        }
        return s;
    }
    ByteBuf duplicate() {
        return srcBuf.duplicate();
    }
    ByteBuffer internalNioBuffer(int index, int length) {
        // Some buffers override this so we must use srcBuf
        return srcBuf.internalNioBuffer(srcIdx(index), length);
    }
    void free() {
        slice = null;
        // Release the original buffer since it may have a different
        // refcount to the unwrapped buf (e.g. if PooledSlicedByteBuf)
        srcBuf.release();
    }
}
@Override
public CompositeByteBuf readerIndex(int readerIndex) {
super.readerIndex(readerIndex);
return this;
}
@Override
public CompositeByteBuf writerIndex(int writerIndex) {
super.writerIndex(writerIndex);
return this;
}
@Override
public CompositeByteBuf setIndex(int readerIndex, int writerIndex) {
super.setIndex(readerIndex, writerIndex);
return this;
}
@Override
public CompositeByteBuf clear() {
super.clear();
return this;
}
@Override
public CompositeByteBuf markReaderIndex() {
super.markReaderIndex();
return this;
}
@Override
public CompositeByteBuf resetReaderIndex() {
super.resetReaderIndex();
return this;
}
@Override
public CompositeByteBuf markWriterIndex() {
super.markWriterIndex();
return this;
}
@Override
public CompositeByteBuf resetWriterIndex() {
super.resetWriterIndex();
return this;
}
@Override
public CompositeByteBuf ensureWritable(int minWritableBytes) {
super.ensureWritable(minWritableBytes);
return this;
}
@Override
public CompositeByteBuf getBytes(int index, ByteBuf dst) {
return getBytes(index, dst, dst.writableBytes());
}
@Override
public CompositeByteBuf getBytes(int index, ByteBuf dst, int length) {
getBytes(index, dst, dst.writerIndex(), length);
dst.writerIndex(dst.writerIndex() + length);
return this;
}
@Override
public CompositeByteBuf getBytes(int index, byte[] dst) {
return getBytes(index, dst, 0, dst.length);
}
@Override
public CompositeByteBuf setBoolean(int index, boolean value) {
return setByte(index, value? 1 : 0);
}
@Override
public CompositeByteBuf setChar(int index, int value) {
return setShort(index, value);
}
@Override
public CompositeByteBuf setFloat(int index, float value) {
return setInt(index, Float.floatToRawIntBits(value));
}
@Override
public CompositeByteBuf setDouble(int index, double value) {
return setLong(index, Double.doubleToRawLongBits(value));
}
@Override
public CompositeByteBuf setBytes(int index, ByteBuf src) {
super.setBytes(index, src, src.readableBytes());
return this;
}
@Override
public CompositeByteBuf setBytes(int index, ByteBuf src, int length) {
super.setBytes(index, src, length);
return this;
}
@Override
public CompositeByteBuf setBytes(int index, byte[] src) {
return setBytes(index, src, 0, src.length);
}
@Override
public CompositeByteBuf setZero(int index, int length) {
super.setZero(index, length);
return this;
}
@Override
public CompositeByteBuf readBytes(ByteBuf dst) {
super.readBytes(dst, dst.writableBytes());
return this;
}
@Override
public CompositeByteBuf readBytes(ByteBuf dst, int length) {
super.readBytes(dst, length);
return this;
}
@Override
public CompositeByteBuf readBytes(ByteBuf dst, int dstIndex, int length) {
super.readBytes(dst, dstIndex, length);
return this;
}
@Override
public CompositeByteBuf readBytes(byte[] dst) {
super.readBytes(dst, 0, dst.length);
return this;
}
@Override
public CompositeByteBuf readBytes(byte[] dst, int dstIndex, int length) {
super.readBytes(dst, dstIndex, length);
return this;
}
@Override
public CompositeByteBuf readBytes(ByteBuffer dst) {
super.readBytes(dst);
return this;
}
@Override
public CompositeByteBuf readBytes(OutputStream out, int length) throws IOException {
super.readBytes(out, length);
return this;
}
@Override
public CompositeByteBuf skipBytes(int length) {
super.skipBytes(length);
return this;
}
@Override
public CompositeByteBuf writeBoolean(boolean value) {
writeByte(value ? 1 : 0);
return this;
}
@Override
public CompositeByteBuf writeByte(int value) {
ensureWritable0(1);
_setByte(writerIndex++, value);
return this;
}
@Override
public CompositeByteBuf writeShort(int value) {
super.writeShort(value);
return this;
}
@Override
public CompositeByteBuf writeMedium(int value) {
super.writeMedium(value);
return this;
}
@Override
public CompositeByteBuf writeInt(int value) {
super.writeInt(value);
return this;
}
@Override
public CompositeByteBuf writeLong(long value) {
super.writeLong(value);
return this;
}
@Override
public CompositeByteBuf writeChar(int value) {
super.writeShort(value);
return this;
}
@Override
public CompositeByteBuf writeFloat(float value) {
super.writeInt(Float.floatToRawIntBits(value));
return this;
}
@Override
public CompositeByteBuf writeDouble(double value) {
super.writeLong(Double.doubleToRawLongBits(value));
return this;
}
@Override
public CompositeByteBuf writeBytes(ByteBuf src) {
super.writeBytes(src, src.readableBytes());
return this;
}
@Override
public CompositeByteBuf writeBytes(ByteBuf src, int length) {
super.writeBytes(src, length);
return this;
}
@Override
public CompositeByteBuf writeBytes(ByteBuf src, int srcIndex, int length) {
super.writeBytes(src, srcIndex, length);
return this;
}
@Override
public CompositeByteBuf writeBytes(byte[] src) {
super.writeBytes(src, 0, src.length);
return this;
}
@Override
public CompositeByteBuf writeBytes(byte[] src, int srcIndex, int length) {
super.writeBytes(src, srcIndex, length);
return this;
}
@Override
public CompositeByteBuf writeBytes(ByteBuffer src) {
super.writeBytes(src);
return this;
}
@Override
public CompositeByteBuf writeZero(int length) {
super.writeZero(length);
return this;
}
@Override
public CompositeByteBuf retain(int increment) {
super.retain(increment);
return this;
}
@Override
public CompositeByteBuf retain() {
super.retain();
return this;
}
@Override
public CompositeByteBuf touch() {
return this;
}
@Override
public CompositeByteBuf touch(Object hint) {
return this;
}
@Override
public ByteBuffer[] nioBuffers() {
return nioBuffers(readerIndex(), readableBytes());
}
@Override
public CompositeByteBuf discardSomeReadBytes() {
return discardReadComponents();
}
    @Override
    protected void deallocate() {
        // Idempotent: a second call is a no-op once the components are freed.
        if (freed) {
            return;
        }
        freed = true;
        // We're not using foreach to avoid creating an iterator.
        // see https://github.com/netty/netty/issues/2642
        for (int i = 0, size = componentCount; i < size; i++) {
            components[i].free();
        }
    }
@Override
boolean isAccessible() {
return !freed;
}
@Override
public ByteBuf unwrap() {
return null;
}
private final
|
Component
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-annotations/src/main/java8/org/apache/hadoop/classification/tools/RootDocProcessor.java
|
{
"start": 1870,
"end": 3196
}
|
class ____ {
static String stability = StabilityOptions.UNSTABLE_OPTION;
static boolean treatUnannotatedClassesAsPrivate = false;
public static RootDoc process(RootDoc root) {
return (RootDoc) process(root, RootDoc.class);
}
  // Recursively wraps doclet API objects (com.sun.* implementations) in filtering
  // proxies. Arrays are rebuilt element-by-element with the declared component
  // type so the returned array remains assignable to the caller's expectation;
  // everything else passes through unchanged.
  private static Object process(Object obj, Class<?> type) {
    if (obj == null) {
      return null;
    }
    Class<?> cls = obj.getClass();
    if (cls.getName().startsWith("com.sun.")) {
      return getProxy(obj);
    } else if (obj instanceof Object[]) {
      // Prefer the declared array component type over the runtime one, so the
      // proxies fit the interface the caller iterates over.
      Class<?> componentType = type.isArray() ? type.getComponentType()
          : cls.getComponentType();
      Object[] array = (Object[]) obj;
      Object[] newArray = (Object[]) Array.newInstance(componentType,
          array.length);
      for (int i = 0; i < array.length; ++i) {
        newArray[i] = process(array[i], componentType);
      }
      return newArray;
    }
    return obj;
  }
private static Map<Object, Object> proxies =
new WeakHashMap<Object, Object>();
  // Returns a (cached) dynamic proxy over all interfaces of obj, delegating to
  // ExcludeHandler. The cache keeps proxy identity stable for repeated lookups.
  // NOTE(review): the WeakHashMap cache is not synchronized — assumes the doclet
  // runs single-threaded; confirm before invoking concurrently.
  private static Object getProxy(Object obj) {
    Object proxy = proxies.get(obj);
    if (proxy == null) {
      proxy = Proxy.newProxyInstance(obj.getClass().getClassLoader(),
          obj.getClass().getInterfaces(), new ExcludeHandler(obj));
      proxies.put(obj, proxy);
    }
    return proxy;
  }
private static
|
RootDocProcessor
|
java
|
apache__dubbo
|
dubbo-compatible/src/main/java/com/alibaba/dubbo/rpc/support/RpcUtils.java
|
{
"start": 982,
"end": 2814
}
|
/**
 * Backwards-compatibility facade for the legacy {@code com.alibaba.dubbo} API.
 * Every method delegates directly to {@code org.apache.dubbo.rpc.support.RpcUtils};
 * no behaviour is added here.
 */
class ____ extends org.apache.dubbo.rpc.support.RpcUtils {
    public static Class<?> getReturnType(Invocation invocation) {
        return org.apache.dubbo.rpc.support.RpcUtils.getReturnType(invocation);
    }
    // TODO why not get return type when initialize Invocation?
    public static Type[] getReturnTypes(Invocation invocation) {
        return org.apache.dubbo.rpc.support.RpcUtils.getReturnTypes(invocation);
    }
    public static Long getInvocationId(Invocation inv) {
        return org.apache.dubbo.rpc.support.RpcUtils.getInvocationId(inv);
    }
    /**
     * Idempotent operation: invocation id will be added in async operation by default
     *
     * @param url
     * @param inv
     */
    public static void attachInvocationIdIfAsync(URL url, Invocation inv) {
        // Legacy URL wrapper is unwrapped before delegating.
        org.apache.dubbo.rpc.support.RpcUtils.attachInvocationIdIfAsync(url.getOriginalURL(), inv);
    }
    public static String getMethodName(Invocation invocation) {
        return org.apache.dubbo.rpc.support.RpcUtils.getMethodName(invocation);
    }
    public static Object[] getArguments(Invocation invocation) {
        return org.apache.dubbo.rpc.support.RpcUtils.getArguments(invocation);
    }
    public static Class<?>[] getParameterTypes(Invocation invocation) {
        return org.apache.dubbo.rpc.support.RpcUtils.getParameterTypes(invocation);
    }
    public static boolean isAsync(URL url, Invocation inv) {
        return org.apache.dubbo.rpc.support.RpcUtils.isAsync(url.getOriginalURL(), inv);
    }
    public static boolean isReturnTypeFuture(Invocation inv) {
        return org.apache.dubbo.rpc.support.RpcUtils.isReturnTypeFuture(inv);
    }
    public static boolean isOneway(URL url, Invocation inv) {
        return org.apache.dubbo.rpc.support.RpcUtils.isOneway(url.getOriginalURL(), inv);
    }
}
|
RpcUtils
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantLongTerms.java
|
{
"start": 1281,
"end": 5217
}
|
class ____ extends InternalSignificantTerms.Bucket<Bucket> {
    // The numeric term value this bucket represents
    long term;
    public Bucket(long subsetDf, long supersetDf, long term, InternalAggregations aggregations, DocValueFormat format, double score) {
        super(subsetDf, supersetDf, aggregations, format);
        this.term = term;
        this.score = score;
    }
    // Wire order must mirror writeTo(): subsetDf, supersetDf, term, score, aggregations.
    Bucket(StreamInput in, DocValueFormat format) throws IOException {
        super(format);
        subsetDf = in.readVLong();
        supersetDf = in.readVLong();
        term = in.readLong();
        score = in.readDouble();
        aggregations = InternalAggregations.readFrom(in);
    }
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeVLong(subsetDf);
        out.writeVLong(supersetDf);
        out.writeLong(term);
        out.writeDouble(getSignificanceScore());
        aggregations.writeTo(out);
    }
    @Override
    public Object getKey() {
        return term;
    }
    @Override
    public String getKeyAsString() {
        return format.format(term).toString();
    }
    @Override
    protected XContentBuilder keyToXContent(XContentBuilder builder) throws IOException {
        builder.field(CommonFields.KEY.getPreferredName(), term);
        // Only emit key_as_string when a non-trivial formatter is configured
        if (format != DocValueFormat.RAW) {
            builder.field(CommonFields.KEY_AS_STRING.getPreferredName(), format.format(term));
        }
        return builder;
    }
    @Override
    public boolean equals(Object obj) {
        // Extends the superclass equality with the term value
        return super.equals(obj) && Objects.equals(term, ((Bucket) obj).term);
    }
    @Override
    public int hashCode() {
        return Objects.hash(super.hashCode(), term);
    }
}
public SignificantLongTerms(
String name,
int requiredSize,
long minDocCount,
Map<String, Object> metadata,
DocValueFormat format,
long subsetSize,
long supersetSize,
SignificanceHeuristic significanceHeuristic,
List<Bucket> buckets
) {
super(name, requiredSize, minDocCount, metadata, format, subsetSize, supersetSize, significanceHeuristic, buckets);
}
/**
* Read from a stream.
*/
public SignificantLongTerms(StreamInput in) throws IOException {
super(in, Bucket::new);
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public SignificantLongTerms create(List<SignificantLongTerms.Bucket> buckets) {
return new SignificantLongTerms(
name,
requiredSize,
minDocCount,
metadata,
format,
subsetSize,
supersetSize,
significanceHeuristic,
buckets
);
}
@Override
public Bucket createBucket(InternalAggregations aggregations, SignificantLongTerms.Bucket prototype) {
return new Bucket(prototype.subsetDf, prototype.supersetDf, prototype.term, aggregations, prototype.format, prototype.score);
}
@Override
protected SignificantLongTerms create(long subsetSize, long supersetSize, List<Bucket> buckets) {
return new SignificantLongTerms(
getName(),
requiredSize,
minDocCount,
getMetadata(),
format,
subsetSize,
supersetSize,
significanceHeuristic,
buckets
);
}
@Override
protected Bucket[] createBucketsArray(int size) {
return new Bucket[size];
}
@Override
Bucket createBucket(long subsetDf, long supersetDf, InternalAggregations aggregations, SignificantLongTerms.Bucket prototype) {
return new Bucket(subsetDf, supersetDf, prototype.term, aggregations, format, prototype.score);
}
}
|
Bucket
|
java
|
apache__camel
|
test-infra/camel-test-infra-triton/src/main/java/org/apache/camel/test/infra/triton/services/TritonLocalContainerInfraService.java
|
{
"start": 1335,
"end": 3784
}
|
/**
 * Test-infra service that runs a Triton Inference Server in a local Docker
 * container, exposing its HTTP, gRPC and metrics ports and publishing the
 * mapped port numbers as system properties for tests to pick up.
 */
class ____ implements TritonInfraService, ContainerService<GenericContainer<?>> {
    private static final Logger LOG = LoggerFactory.getLogger(TritonLocalContainerInfraService.class);
    public static final int HTTP_PORT = 8000;
    public static final int GRPC_PORT = 8001;
    public static final int METRICS_PORT = 8002;
    private static final String CONTAINER_COMMAND = "tritonserver --model-repository=/models";
    private final GenericContainer<?> container;
    public TritonLocalContainerInfraService() {
        // The container image name is resolved from the test-infra properties file
        String imageName = LocalPropertyResolver.getProperty(
                TritonLocalContainerInfraService.class,
                TritonProperties.TRITON_CONTAINER);
        container = initContainer(imageName);
    }
    @SuppressWarnings("resource")
    protected GenericContainer<?> initContainer(String imageName) {
        // Models are copied from the test classpath into the container's /models
        // directory, which CONTAINER_COMMAND points the server at.
        return new GenericContainer<>(DockerImageName.parse(imageName))
                .withExposedPorts(HTTP_PORT, GRPC_PORT, METRICS_PORT)
                .withCopyFileToContainer(MountableFile.forClasspathResource("models"), "/models")
                .waitingFor(Wait.forListeningPorts(HTTP_PORT, GRPC_PORT, METRICS_PORT))
                .withCommand(CONTAINER_COMMAND);
    }
    @Override
    public void registerProperties() {
        // Publish the dynamically mapped host ports so tests can connect
        System.setProperty(TritonProperties.TRITON_HTTP_PORT, String.valueOf(httpPort()));
        System.setProperty(TritonProperties.TRITON_GPRC_PORT, String.valueOf(grpcPort()));
        System.setProperty(TritonProperties.TRITON_METRICS_PORT, String.valueOf(metricsPort()));
    }
    @Override
    public void initialize() {
        LOG.info("Trying to start the Triton Inference Server container");
        container.start();
        registerProperties();
        LOG.info("Triton Inference Server instance running at {}, {} and {}", httpPort(), grpcPort(), metricsPort());
    }
    @Override
    public void shutdown() {
        LOG.info("Stopping the Triton Inference Server container");
        container.stop();
    }
    @Override
    public GenericContainer<?> getContainer() {
        return container;
    }
    @Override
    public int httpPort() {
        return container.getMappedPort(HTTP_PORT);
    }
    @Override
    public int grpcPort() {
        return container.getMappedPort(GRPC_PORT);
    }
    @Override
    public int metricsPort() {
        return container.getMappedPort(METRICS_PORT);
    }
}
|
TritonLocalContainerInfraService
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestResourceManagerAdministrationProtocolPBClientImpl.java
|
{
"start": 3374,
"end": 8472
}
|
class ____ {
private static ResourceManager resourceManager;
private static final Logger LOG = LoggerFactory
.getLogger(TestResourceManagerAdministrationProtocolPBClientImpl.class);
private final RecordFactory recordFactory = RecordFactoryProvider
.getRecordFactory(null);
private static ResourceManagerAdministrationProtocol client;
/**
* Start resource manager server
*/
@BeforeAll
public static void setUpResourceManager() throws IOException,
InterruptedException {
Configuration.addDefaultResource("config-with-security.xml");
Configuration configuration = new YarnConfiguration();
resourceManager = new ResourceManager() {
@Override
protected void doSecureLogin() throws IOException {
}
};
// a reliable way to wait for resource manager to fully start
final CountDownLatch rmStartedSignal = new CountDownLatch(1);
ServiceStateChangeListener rmStateChangeListener =
new ServiceStateChangeListener() {
@Override
public void stateChanged(Service service) {
if (service.getServiceState() == STATE.STARTED) {
rmStartedSignal.countDown();
}
}
};
resourceManager.registerServiceListener(rmStateChangeListener);
resourceManager.init(configuration);
new SubjectInheritingThread() {
public void work() {
resourceManager.start();
}
}.start();
boolean rmStarted = rmStartedSignal.await(60000L, TimeUnit.MILLISECONDS);
assertTrue(rmStarted, "ResourceManager failed to start up.");
LOG.info("ResourceManager RMAdmin address: {}.",
configuration.get(YarnConfiguration.RM_ADMIN_ADDRESS));
client = new ResourceManagerAdministrationProtocolPBClientImpl(1L,
getProtocolAddress(configuration), configuration);
}
/**
* Test method refreshQueues. This method is present and it works.
*/
@Test
public void testRefreshQueues() throws Exception {
RefreshQueuesRequest request = recordFactory
.newRecordInstance(RefreshQueuesRequest.class);
RefreshQueuesResponse response = client.refreshQueues(request);
assertNotNull(response);
}
/**
* Test method refreshNodes. This method is present and it works.
*/
@Test
public void testRefreshNodes() throws Exception {
resourceManager.getClientRMService();
RefreshNodesRequest request = RefreshNodesRequest
.newInstance(DecommissionType.NORMAL);
RefreshNodesResponse response = client.refreshNodes(request);
assertNotNull(response);
}
/**
* Test method refreshSuperUserGroupsConfiguration. This method present and it works.
*/
@Test
public void testRefreshSuperUserGroupsConfiguration() throws Exception {
RefreshSuperUserGroupsConfigurationRequest request = recordFactory
.newRecordInstance(RefreshSuperUserGroupsConfigurationRequest.class);
RefreshSuperUserGroupsConfigurationResponse response = client
.refreshSuperUserGroupsConfiguration(request);
assertNotNull(response);
}
/**
* Test method refreshUserToGroupsMappings. This method is present and it works.
*/
@Test
public void testRefreshUserToGroupsMappings() throws Exception {
RefreshUserToGroupsMappingsRequest request = recordFactory
.newRecordInstance(RefreshUserToGroupsMappingsRequest.class);
RefreshUserToGroupsMappingsResponse response = client
.refreshUserToGroupsMappings(request);
assertNotNull(response);
}
/**
* Test method refreshAdminAcls. This method is present and it works.
*/
@Test
public void testRefreshAdminAcls() throws Exception {
RefreshAdminAclsRequest request = recordFactory
.newRecordInstance(RefreshAdminAclsRequest.class);
RefreshAdminAclsResponse response = client.refreshAdminAcls(request);
assertNotNull(response);
}
@Test
public void testUpdateNodeResource() throws Exception {
UpdateNodeResourceRequest request = recordFactory
.newRecordInstance(UpdateNodeResourceRequest.class);
UpdateNodeResourceResponse response = client.updateNodeResource(request);
assertNotNull(response);
}
@Test
public void testRefreshServiceAcls() throws Exception {
RefreshServiceAclsRequest request = recordFactory
.newRecordInstance(RefreshServiceAclsRequest.class);
RefreshServiceAclsResponse response = client.refreshServiceAcls(request);
assertNotNull(response);
}
/**
* Stop server
*/
@AfterAll
public static void tearDownResourceManager() throws InterruptedException {
if (resourceManager != null) {
LOG.info("Stopping ResourceManager...");
resourceManager.stop();
}
}
private static InetSocketAddress getProtocolAddress(Configuration conf)
throws IOException {
return conf.getSocketAddr(YarnConfiguration.RM_ADMIN_ADDRESS,
YarnConfiguration.DEFAULT_RM_ADMIN_ADDRESS,
YarnConfiguration.DEFAULT_RM_ADMIN_PORT);
}
}
|
TestResourceManagerAdministrationProtocolPBClientImpl
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/RequestLeakDetectionTest.java
|
{
"start": 5213,
"end": 5581
}
|
class ____ {
private final Context context;
private final Runnable runnable;
private Task(Context context, Runnable runnable) {
this.context = context;
this.runnable = runnable;
}
void run() {
context.runOnContext(x -> runnable.run());
}
}
@RequestScoped
public static
|
Task
|
java
|
alibaba__nacos
|
common/src/test/java/com/alibaba/nacos/common/utils/JacksonUtilsTest.java
|
{
"start": 2137,
"end": 20085
}
|
class ____ {
@Test
void testToJson1() {
assertEquals("null", JacksonUtils.toJson(null));
assertEquals("\"string\"", JacksonUtils.toJson("string"));
assertEquals("30", JacksonUtils.toJson(new BigDecimal(30)));
assertEquals("{\"key\":\"value\"}", JacksonUtils.toJson(Collections.singletonMap("key", "value")));
assertEquals("[{\"key\":\"value\"}]",
JacksonUtils.toJson(Collections.singletonList(Collections.singletonMap("key", "value"))));
assertEquals("{\"aLong\":0,\"aInteger\":1,\"aBoolean\":false}", JacksonUtils.toJson(new TestOfAtomicObject()));
assertEquals("{\"date\":1626192000000}", JacksonUtils.toJson(new TestOfDate()));
// only public
assertEquals("{\"publicAccessModifier\":\"public\"}", JacksonUtils.toJson(new TestOfAccessModifier()));
// getter is also recognized
assertEquals("{\"value\":\"value\",\"key\":\"key\"}", JacksonUtils.toJson(new TestOfGetter()));
// annotation available
assertEquals(
"{\"@type\":\"JacksonUtilsTest$TestOfAnnotationSub\",\"date\":\"2021-07-14\",\"subField\":\"subField\","
+ "\"camelCase\":\"value\"}", JacksonUtils.toJson(new TestOfAnnotationSub()));
}
@Test
void testToJson2() {
assertThrows(NacosSerializationException.class, () -> {
// object without field will throw exceptions
JacksonUtils.toJson(new Object());
});
}
@Test
void testToJsonBytes1() {
assertArrayEquals("null".getBytes(), JacksonUtils.toJsonBytes(null));
assertArrayEquals("\"string\"".getBytes(), JacksonUtils.toJsonBytes("string"));
assertArrayEquals("30".getBytes(), JacksonUtils.toJsonBytes(new BigDecimal(30)));
assertArrayEquals("{\"key\":\"value\"}".getBytes(), JacksonUtils.toJsonBytes(Collections.singletonMap("key", "value")));
assertArrayEquals("[{\"key\":\"value\"}]".getBytes(),
JacksonUtils.toJsonBytes(Collections.singletonList(Collections.singletonMap("key", "value"))));
assertArrayEquals("{\"aLong\":0,\"aInteger\":1,\"aBoolean\":false}".getBytes(),
JacksonUtils.toJsonBytes(new TestOfAtomicObject()));
assertArrayEquals("{\"date\":1626192000000}".getBytes(), JacksonUtils.toJsonBytes(new TestOfDate()));
// only public
assertArrayEquals("{\"publicAccessModifier\":\"public\"}".getBytes(),
JacksonUtils.toJsonBytes(new TestOfAccessModifier()));
// getter is also recognized
assertArrayEquals("{\"value\":\"value\",\"key\":\"key\"}".getBytes(), JacksonUtils.toJsonBytes(new TestOfGetter()));
// annotation available
assertArrayEquals(
("{\"@type\":\"JacksonUtilsTest$TestOfAnnotationSub\",\"date\":\"2021-07-14\",\"subField\":\"subField\","
+ "\"camelCase\":\"value\"}").getBytes(), JacksonUtils.toJsonBytes(new TestOfAnnotationSub()));
}
@Test
void testToJsonBytes2() {
assertThrows(NacosSerializationException.class, () -> {
// object without field will throw exceptions
JacksonUtils.toJsonBytes(new Object());
});
}
/**
* JacksonUtils.toObj(byte[], Class)
*/
@Test
void testToObject1() {
assertNull(JacksonUtils.toObj("null".getBytes(), Object.class));
assertEquals("string", JacksonUtils.toObj("\"string\"".getBytes(), String.class));
assertEquals(new BigDecimal(30), JacksonUtils.toObj("30".getBytes(), BigDecimal.class));
assertEquals(Collections.singletonMap("key", "value"), JacksonUtils.toObj("{\"key\":\"value\"}".getBytes(), Map.class));
assertEquals(Collections.singletonList(Collections.singletonMap("key", "value")),
JacksonUtils.toObj("[{\"key\":\"value\"}]".getBytes(), List.class));
assertEquals(new TestOfAtomicObject(),
JacksonUtils.toObj("{\"aLong\":0,\"aInteger\":1,\"aBoolean\":false}".getBytes(), TestOfAtomicObject.class));
assertEquals(new TestOfDate(), JacksonUtils.toObj("{\"date\":1626192000000}".getBytes(), TestOfDate.class));
assertEquals(new TestOfAccessModifier(),
JacksonUtils.toObj("{\"publicAccessModifier\":\"public\"}".getBytes(), TestOfAccessModifier.class));
assertEquals(new TestOfGetter(),
JacksonUtils.toObj("{\"value\":\"value\",\"key\":\"key\"}".getBytes(), TestOfGetter.class));
assertEquals(new TestOfAnnotationSub(), JacksonUtils.toObj(
("{\"@type\":\"JacksonUtilsTest$TestOfAnnotationSub\",\"date\":\"2021-07-14\","
+ "\"subField\":\"subField\",\"camelCase\":\"value\"}").getBytes(), TestOfAnnotation.class));
}
/**
* JacksonUtils.toObj(byte[], Class)
*/
@Test
void testToObject2() {
assertThrows(Exception.class, () -> {
JacksonUtils.toObj(("{not_A}Json:String}").getBytes(), TestOfAnnotationSub.class);
});
}
/**
* JacksonUtils.toObj(byte[], Type)
*/
@Test
void testToObject3() {
assertEquals(Collections.singletonMap("key", "value"), JacksonUtils.toObj("{\"key\":\"value\"}".getBytes(),
TypeUtils.parameterize(Map.class, String.class, String.class)));
assertEquals(Collections.singletonList(Collections.singletonMap("key", "value")),
JacksonUtils.toObj("[{\"key\":\"value\"}]".getBytes(),
TypeUtils.parameterize(List.class, TypeUtils.parameterize(Map.class, String.class, String.class))));
}
/**
* JacksonUtils.toObj(byte[], Type)
*/
@Test
void testToObject4() {
assertThrows(Exception.class, () -> {
JacksonUtils.toObj("{not_A}Json:String}".getBytes(), TypeUtils.parameterize(Map.class, String.class, String.class));
});
}
/**
* JacksonUtils.toObj(byte[], Type)
*/
@Test
void testToObject5() {
assertThrows(Exception.class, () -> {
JacksonUtils.toObj("{\"key\":\"value\"}".getBytes(), Object.class.getGenericSuperclass());
});
}
/**
* JacksonUtils.toObj(InputStream, Class)
*/
@Test
void testToObject6() {
assertNull(JacksonUtils.toObj(new ByteArrayInputStream("null".getBytes()), Object.class));
assertEquals("string", JacksonUtils.toObj(new ByteArrayInputStream("\"string\"".getBytes()), String.class));
assertEquals(new BigDecimal(30), JacksonUtils.toObj(new ByteArrayInputStream("30".getBytes()), BigDecimal.class));
assertEquals(Collections.singletonMap("key", "value"),
JacksonUtils.toObj(new ByteArrayInputStream("{\"key\":\"value\"}".getBytes()), Map.class));
assertEquals(Collections.singletonList(Collections.singletonMap("key", "value")),
JacksonUtils.toObj(new ByteArrayInputStream("[{\"key\":\"value\"}]".getBytes()), List.class));
assertEquals(new TestOfAtomicObject(),
JacksonUtils.toObj(new ByteArrayInputStream("{\"aLong\":0,\"aInteger\":1,\"aBoolean\":false}".getBytes()),
TestOfAtomicObject.class));
assertEquals(new TestOfDate(),
JacksonUtils.toObj(new ByteArrayInputStream("{\"date\":1626192000000}".getBytes()), TestOfDate.class));
assertEquals(new TestOfAccessModifier(),
JacksonUtils.toObj(new ByteArrayInputStream("{\"publicAccessModifier\":\"public\"}".getBytes()),
TestOfAccessModifier.class));
assertEquals(new TestOfGetter(),
JacksonUtils.toObj(new ByteArrayInputStream("{\"value\":\"value\",\"key\":\"key\"}".getBytes()),
TestOfGetter.class));
assertEquals(new TestOfAnnotationSub(), JacksonUtils.toObj((new ByteArrayInputStream(
("{\"@type\":\"JacksonUtilsTest$TestOfAnnotationSub\","
+ "\"date\":\"2021-07-14\",\"subField\":\"subField\",\"camelCase\":\"value\"}").getBytes())),
TestOfAnnotation.class));
}
/**
* JacksonUtils.toObj(InputStream, Class)
*/
@Test
void testToObject7() {
assertThrows(Exception.class, () -> {
JacksonUtils.toObj((ByteArrayInputStream) null, BigDecimal.class);
});
}
/**
* JacksonUtils.toObj(InputStream, Class)
*/
@Test
void testToObject8() {
assertThrows(Exception.class, () -> {
JacksonUtils.toObj(new ByteArrayInputStream("{not_A}Json:String}".getBytes()), Object.class);
});
}
/**
* JacksonUtils.toObj(byte[], TypeReference)
*/
@Test
void testToObject9() {
assertNull(JacksonUtils.toObj("null".getBytes(), new TypeReference<Object>() {
}));
assertEquals("string", JacksonUtils.toObj("\"string\"".getBytes(), new TypeReference<String>() {
}));
assertEquals(new BigDecimal(30), JacksonUtils.toObj("30".getBytes(), new TypeReference<BigDecimal>() {
}));
assertEquals(Collections.singletonMap("key", "value"),
JacksonUtils.toObj("{\"key\":\"value\"}".getBytes(), new TypeReference<Map<String, String>>() {
}));
assertEquals(Collections.singletonList(Collections.singletonMap("key", "value")),
JacksonUtils.toObj("[{\"key\":\"value\"}]".getBytes(), new TypeReference<List<Map<String, String>>>() {
}));
assertEquals(new TestOfAtomicObject(), JacksonUtils.toObj("{\"aLong\":0,\"aInteger\":1,\"aBoolean\":false}".getBytes(),
new TypeReference<TestOfAtomicObject>() {
}));
assertEquals(new TestOfDate(), JacksonUtils.toObj("{\"date\":1626192000000}".getBytes(), new TypeReference<TestOfDate>() {
}));
assertEquals(new TestOfAccessModifier(),
JacksonUtils.toObj("{\"publicAccessModifier\":\"public\"}".getBytes(), new TypeReference<TestOfAccessModifier>() {
}));
assertEquals(new TestOfGetter(),
JacksonUtils.toObj("{\"value\":\"value\",\"key\":\"key\"}".getBytes(), new TypeReference<TestOfGetter>() {
}));
assertEquals(new TestOfAnnotationSub(), JacksonUtils.toObj(
("{\"@type\":\"JacksonUtilsTest$TestOfAnnotationSub\",\"date\":\"2021-07-14\","
+ "\"subField\":\"subField\",\"camelCase\":\"value\"}").getBytes(),
new TypeReference<TestOfAnnotation>() {
}));
}
/**
* JacksonUtils.toObj(byte[], TypeReference)
*/
@Test
void testToObject10() {
assertThrows(Exception.class, () -> {
JacksonUtils.toObj("{not_A}Json:String}".getBytes(), new TypeReference<Object>() {
});
});
}
/**
* JacksonUtils.toObj(InputStream, Type)
*/
@Test
void testToObject11() {
assertEquals(Collections.singletonMap("key", "value"),
JacksonUtils.toObj(new ByteArrayInputStream("{\"key\":\"value\"}".getBytes()),
TypeUtils.parameterize(Map.class, String.class, String.class)));
assertEquals(Collections.singletonList(Collections.singletonMap("key", "value")),
JacksonUtils.toObj(new ByteArrayInputStream("[{\"key\":\"value\"}]".getBytes()),
TypeUtils.parameterize(List.class, TypeUtils.parameterize(Map.class, String.class, String.class))));
}
/**
* JacksonUtils.toObj(InputStream, Type)
*/
@Test
void testToObject12() {
assertThrows(Exception.class, () -> {
JacksonUtils.toObj(new ByteArrayInputStream("{not_A}Json:String}".getBytes()),
TypeUtils.parameterize(Map.class, String.class, String.class));
});
}
/**
* JacksonUtils.toObj(InputStream, Type)
*/
@Test
void testToObject13() {
assertThrows(Exception.class, () -> {
JacksonUtils.toObj(new ByteArrayInputStream("{\"key\":\"value\"}".getBytes()), Object.class.getGenericSuperclass());
});
}
/**
* JacksonUtils.toObj(InputStream, Type)
*/
@Test
void testToObject14() {
assertThrows(Exception.class, () -> {
JacksonUtils.toObj((InputStream) null, Object.class.getGenericSuperclass());
});
}
/**
* JacksonUtils.toObj(String)
*/
@Test
void testToObject15() {
assertEquals("null", JacksonUtils.toObj("null").asText());
assertEquals("string", JacksonUtils.toObj("\"string\"").asText());
assertEquals(30, JacksonUtils.toObj("30").asInt());
assertEquals("value", JacksonUtils.toObj("{\"key\":\"value\"}").get("key").asText());
assertEquals("value", JacksonUtils.toObj("[{\"key\":\"value\"}]").get(0).get("key").asText());
JsonNode jsonNode = JacksonUtils.toObj("{\"aLong\":0,\"aInteger\":1,\"aBoolean\":false}");
assertEquals(0L, jsonNode.get("aLong").asLong());
assertEquals(1, jsonNode.get("aInteger").asInt());
}
/**
* JacksonUtils.toObj(String)
*/
@Test
void testToObject16() {
assertThrows(Exception.class, () -> {
JacksonUtils.toObj("{not_A}Json:String}");
});
}
@Test
void testRegisterSubtype() {
JacksonUtils.registerSubtype(TestOfChild.class, "JacksonUtilsTest$TestOfChild");
assertEquals(new TestOfChild(), JacksonUtils.toObj(
"{\"@type\":\"JacksonUtilsTest$TestOfChild\",\"parentField\":\"parentValue\"," + "\"childField\":\"childValue\"}",
TestOfParent.class));
}
@Test
void testCreateEmptyJsonNode() {
assertEquals("", JacksonUtils.createEmptyJsonNode().asText());
assertTrue(JacksonUtils.createEmptyJsonNode().isEmpty());
}
@Test
void testCreateEmptyArrayNode() {
assertEquals("", JacksonUtils.createEmptyJsonNode().asText());
assertEquals(0, JacksonUtils.createEmptyArrayNode().size());
assertTrue(JacksonUtils.createEmptyArrayNode().isEmpty());
}
@Test
void testTransferToJsonNode() {
JsonNode jsonNode1 = JacksonUtils.transferToJsonNode(Collections.singletonMap("key", "value"));
assertEquals("value", jsonNode1.get("key").asText());
JsonNode jsonNode2 = JacksonUtils.transferToJsonNode(new TestOfAtomicObject());
assertEquals("0", jsonNode2.get("aLong").asText());
assertEquals("1", jsonNode2.get("aInteger").asText());
assertEquals("false", jsonNode2.get("aBoolean").asText());
}
@Test
void testConstructJavaType() {
assertEquals("java.lang.String", JacksonUtils.constructJavaType(String.class).getRawClass().getName());
assertTrue(JacksonUtils.constructJavaType(String.class).isFinal());
}
@Test
void testToJsonBytes() {
Map<String, Object> map = new LinkedHashMap<String, Object>();
map.put("string", "你好,中国!");
map.put("integer", 999);
RestResult<Map<String, Object>> restResult = new RestResult();
restResult.setData(map);
byte[] bytes = JacksonUtils.toJsonBytes(restResult);
String jsonFromBytes = ByteUtils.toString(bytes);
assertTrue(jsonFromBytes.contains("\"code\":0"));
assertTrue(jsonFromBytes.contains("\"data\":{\"string\":\"你好,中国!\",\"integer\":999}"));
// old `toJsonBytes` method implementation:
// public static byte[] toJsonBytes(Object obj) {
// try {
// return ByteUtils.toBytes(mapper.writeValueAsString(obj));
// } catch (JsonProcessingException e) {
// throw new NacosSerializationException(obj.getClass(), e);
// }
// }
// here is a verification to compare with the old implementation
byte[] bytesFromOldImplementation = ByteUtils.toBytes(JacksonUtils.toJson(restResult));
String jsonFromBytesOldImplementation = new String(bytesFromOldImplementation, Charset.forName(Constants.ENCODE));
assertTrue(jsonFromBytesOldImplementation.contains("\"code\":0"));
assertTrue(jsonFromBytesOldImplementation.contains("\"data\":{\"string\":\"你好,中国!\",\"integer\":999}"));
}
@Test
void testToObjFromBytes() {
String json = "{\"code\":0,\"data\":{\"string\":\"你好,中国!\",\"integer\":999}}";
RestResult<Map<String, Object>> restResult = JacksonUtils.toObj(json, RestResult.class);
assertEquals(0, restResult.getCode());
assertEquals("你好,中国!", restResult.getData().get("string"));
assertEquals(999, restResult.getData().get("integer"));
restResult = JacksonUtils.toObj(json, new TypeReference<RestResult<Map<String, Object>>>() {
});
assertEquals(0, restResult.getCode());
assertEquals("你好,中国!", restResult.getData().get("string"));
assertEquals(999, restResult.getData().get("integer"));
}
@Test
void tesToObjForClassWithException() {
assertThrows(NacosDeserializationException.class, () -> {
JacksonUtils.toObj("aaa", JsonNode.class);
});
}
@Test
void tesToObjForTypeWithException() {
assertThrows(NacosDeserializationException.class, () -> {
JacksonUtils.toObj("aaa", TypeUtils.parameterize(JsonNode.class));
});
}
@Test
void tesToObjForTypeTypeReferenceWithException() {
assertThrows(NacosDeserializationException.class, () -> {
JacksonUtils.toObj("aaa", new TypeReference<JsonNode>() {
});
});
}
@JsonPropertyOrder({"aLong", "aInteger", "aBoolean"})
static
|
JacksonUtilsTest
|
java
|
elastic__elasticsearch
|
modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureClientProviderTests.java
|
{
"start": 1076,
"end": 4747
}
|
class ____ extends ESTestCase {
private static final AzureClientProvider.RequestMetricsHandler NOOP_HANDLER = (purpose, method, url, metrics) -> {};
private ThreadPool threadPool;
private AzureClientProvider azureClientProvider;
@Before
public void setUpThreadPool() {
threadPool = new TestThreadPool(
getTestName(),
AzureRepositoryPlugin.executorBuilder(Settings.EMPTY),
AzureRepositoryPlugin.nettyEventLoopExecutorBuilder(Settings.EMPTY)
);
azureClientProvider = AzureClientProvider.create(threadPool, Settings.EMPTY);
}
@After
public void tearDownThreadPool() {
azureClientProvider.close();
ThreadPool.terminate(threadPool, 10, TimeUnit.SECONDS);
}
public void testCanCreateAClientWithSecondaryLocation() {
final MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString("azure.client.azure1.account", "myaccount1");
secureSettings.setString("azure.client.azure1.key", encodeKey("mykey1"));
final String endpoint;
if (randomBoolean()) {
endpoint = "ignored;BlobEndpoint=https://myaccount1.blob.core.windows.net;"
+ "BlobSecondaryEndpoint=https://myaccount1-secondary.blob.core.windows.net";
} else {
endpoint = "core.windows.net";
}
final Settings settings = Settings.builder()
.setSecureSettings(secureSettings)
.put("azure.client.azure1.endpoint_suffix", endpoint)
.build();
Map<String, AzureStorageSettings> clientSettings = AzureStorageSettings.load(settings);
AzureStorageSettings storageSettings = clientSettings.get("azure1");
assertNotNull(storageSettings);
LocationMode locationMode = LocationMode.SECONDARY_ONLY;
RequestRetryOptions requestRetryOptions = new RequestRetryOptions();
azureClientProvider.createClient(
storageSettings,
locationMode,
requestRetryOptions,
null,
NOOP_HANDLER,
randomFrom(OperationPurpose.values())
);
}
public void testCanNotCreateAClientWithSecondaryLocationWithoutAProperEndpoint() {
final MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString("azure.client.azure1.account", "myaccount1");
secureSettings.setString("azure.client.azure1.key", encodeKey("mykey1"));
final String endpoint = "ignored;BlobEndpoint=https://myaccount1.blob.core.windows.net";
final Settings settings = Settings.builder()
.setSecureSettings(secureSettings)
.put("azure.client.azure1.endpoint_suffix", endpoint)
.build();
Map<String, AzureStorageSettings> clientSettings = AzureStorageSettings.load(settings);
AzureStorageSettings storageSettings = clientSettings.get("azure1");
assertNotNull(storageSettings);
LocationMode locationMode = LocationMode.SECONDARY_ONLY;
RequestRetryOptions requestRetryOptions = new RequestRetryOptions();
expectThrows(
IllegalArgumentException.class,
() -> azureClientProvider.createClient(
storageSettings,
locationMode,
requestRetryOptions,
null,
NOOP_HANDLER,
randomFrom(OperationPurpose.values())
)
);
}
private static String encodeKey(final String value) {
return Base64.getEncoder().encodeToString(value.getBytes(StandardCharsets.UTF_8));
}
}
|
AzureClientProviderTests
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/index/mapper/BlockLoader.java
|
{
"start": 11724,
"end": 11963
}
|
interface ____ extends Reader {
/**
* Reads the values of the given document into the builder.
*/
void read(int docId, StoredFields storedFields, Builder builder) throws IOException;
}
|
RowStrideReader
|
java
|
elastic__elasticsearch
|
x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/ConstantKeywordPainlessExtension.java
|
{
"start": 728,
"end": 1448
}
|
class ____ implements PainlessExtension {
private static final Whitelist WHITELIST = WhitelistLoader.loadFromResourceFiles(
ConstantKeywordPainlessExtension.class,
"org.elasticsearch.xpack.constantkeyword.txt"
);
@Override
public Map<ScriptContext<?>, List<Whitelist>> getContextWhitelists() {
List<Whitelist> whitelist = singletonList(WHITELIST);
Map<ScriptContext<?>, List<Whitelist>> contextWhitelists = Maps.newMapWithExpectedSize(CORE_CONTEXTS.size());
for (ScriptContext<?> scriptContext : CORE_CONTEXTS.values()) {
contextWhitelists.put(scriptContext, whitelist);
}
return contextWhitelists;
}
}
|
ConstantKeywordPainlessExtension
|
java
|
elastic__elasticsearch
|
qa/packaging/src/test/java/org/elasticsearch/packaging/util/FileMatcher.java
|
{
"start": 1507,
"end": 5666
}
|
enum ____ {
File,
Directory
}
public static final Set<PosixFilePermission> p444 = fromString("r--r--r--");
public static final Set<PosixFilePermission> p555 = fromString("r-xr-xr-x");
public static final Set<PosixFilePermission> p600 = fromString("rw-------");
public static final Set<PosixFilePermission> p644 = fromString("rw-r--r--");
public static final Set<PosixFilePermission> p660 = fromString("rw-rw----");
public static final Set<PosixFilePermission> p664 = fromString("rw-rw-r--");
public static final Set<PosixFilePermission> p750 = fromString("rwxr-x---");
public static final Set<PosixFilePermission> p755 = fromString("rwxr-xr-x");
public static final Set<PosixFilePermission> p770 = fromString("rwxrwx---");
public static final Set<PosixFilePermission> p775 = fromString("rwxrwxr-x");
protected final Fileness fileness;
protected final String owner;
protected final String group;
protected final Set<PosixFilePermission> posixPermissions;
protected String mismatch;
public FileMatcher(Fileness fileness, String owner, String group, Set<PosixFilePermission> posixPermissions) {
this.fileness = Objects.requireNonNull(fileness);
this.owner = owner;
this.group = group;
this.posixPermissions = posixPermissions;
}
@Override
protected boolean matchesSafely(Path path) {
if (Files.exists(path) == false) {
mismatch = "Does not exist";
return false;
}
if (Platforms.WINDOWS) {
final BasicFileAttributes attributes = getBasicFileAttributes(path);
if (fileness.equals(Fileness.Directory) != attributes.isDirectory()) {
mismatch = "Is " + (attributes.isDirectory() ? "a directory" : "a file");
return false;
}
if (owner != null) {
final String attributeViewOwner = getFileOwner(path);
if (attributeViewOwner.contains(owner) == false) {
mismatch = "Owned by " + attributeViewOwner;
return false;
}
}
} else {
final PosixFileAttributes attributes = getPosixFileAttributes(path);
if (fileness.equals(Fileness.Directory) != attributes.isDirectory()) {
mismatch = "Is " + (attributes.isDirectory() ? "a directory" : "a file");
return false;
}
if (owner != null && owner.equals(attributes.owner().getName()) == false) {
mismatch = "Owned by " + attributes.owner().getName();
return false;
}
if (group != null && group.equals(attributes.group().getName()) == false) {
mismatch = "Owned by group " + attributes.group().getName();
return false;
}
if (posixPermissions != null && posixPermissions.equals(attributes.permissions()) == false) {
mismatch = "Has permissions " + attributes.permissions();
return false;
}
}
return true;
}
@Override
public void describeMismatchSafely(Path path, Description description) {
description.appendText("path ").appendValue(path);
if (mismatch != null) {
description.appendText(mismatch);
}
}
@Override
public void describeTo(Description description) {
description.appendValue("file/directory: ")
.appendValue(fileness)
.appendText(" with owner ")
.appendValue(owner)
.appendText(" with group ")
.appendValue(group)
.appendText(" with posix permissions ")
.appendValueList("[", ",", "]", posixPermissions);
}
public static FileMatcher file(Fileness fileness, String owner) {
return file(fileness, owner, null, null);
}
public static FileMatcher file(Fileness fileness, String owner, String group, Set<PosixFilePermission> permissions) {
return new FileMatcher(fileness, owner, group, permissions);
}
}
|
Fileness
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncTaskIndexService.java
|
{
"start": 3902,
"end": 27004
}
|
class ____<R extends AsyncResponse<R>> {
private static final Logger logger = LogManager.getLogger(AsyncTaskIndexService.class);
public static final String HEADERS_FIELD = "headers";
public static final String RESPONSE_HEADERS_FIELD = "response_headers";
public static final String EXPIRATION_TIME_FIELD = "expiration_time";
public static final String RESULT_FIELD = "result";
private static final int ASYNC_TASK_INDEX_MAPPINGS_VERSION = 0;
// Usually the settings, mappings and system index descriptor below
// would be co-located with the SystemIndexPlugin implementation,
// however in this case this service is in a different project to
// AsyncResultsIndexPlugin, as are tests that need access to
// #settings().
static Settings settings() {
return Settings.builder()
.put("index.codec", "best_compression")
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetadata.SETTING_AUTO_EXPAND_REPLICAS, "0-1")
.build();
}
private static XContentBuilder mappings() {
try {
XContentBuilder builder = jsonBuilder().startObject()
.startObject(SINGLE_MAPPING_NAME)
.startObject("_meta")
.field("version", Version.CURRENT)
.field(SystemIndexDescriptor.VERSION_META_KEY, ASYNC_TASK_INDEX_MAPPINGS_VERSION)
.endObject()
.field("dynamic", "strict")
.startObject("properties")
.startObject(HEADERS_FIELD)
.field("type", "object")
.field("enabled", "false")
.endObject()
.startObject(RESPONSE_HEADERS_FIELD)
.field("type", "object")
.field("enabled", "false")
.endObject()
.startObject(RESULT_FIELD)
.field("type", "object")
.field("enabled", "false")
.endObject()
.startObject(EXPIRATION_TIME_FIELD)
.field("type", "long")
.endObject()
.endObject()
.endObject()
.endObject();
return builder;
} catch (IOException e) {
throw new UncheckedIOException("Failed to build mappings for " + XPackPlugin.ASYNC_RESULTS_INDEX, e);
}
}
public static SystemIndexDescriptor getSystemIndexDescriptor() {
return SystemIndexDescriptor.builder()
.setIndexPattern(XPackPlugin.ASYNC_RESULTS_INDEX + "*")
.setDescription("Async search results")
.setPrimaryIndex(XPackPlugin.ASYNC_RESULTS_INDEX)
.setMappings(mappings())
.setSettings(settings())
.setOrigin(ASYNC_SEARCH_ORIGIN)
.build();
}
private final String index;
private final ThreadContext threadContext;
private final Client client;
final AsyncSearchSecurity security;
private final Client clientWithOrigin;
private final NamedWriteableRegistry registry;
private final Writeable.Reader<R> reader;
private final BigArrays bigArrays;
private volatile long maxResponseSize;
private final ClusterService clusterService;
private final CircuitBreaker circuitBreaker;
public AsyncTaskIndexService(
String index,
ClusterService clusterService,
ThreadContext threadContext,
Client client,
String origin,
Writeable.Reader<R> reader,
NamedWriteableRegistry registry,
BigArrays bigArrays
) {
this.index = index;
this.threadContext = threadContext;
this.client = client;
this.security = new AsyncSearchSecurity(
index,
new SecurityContext(clusterService.getSettings(), client.threadPool().getThreadContext()),
client,
origin
);
this.clientWithOrigin = new OriginSettingClient(client, origin);
this.registry = registry;
this.reader = reader;
this.bigArrays = bigArrays;
this.maxResponseSize = MAX_ASYNC_SEARCH_RESPONSE_SIZE_SETTING.get(clusterService.getSettings()).getBytes();
clusterService.getClusterSettings()
.addSettingsUpdateConsumer(MAX_ASYNC_SEARCH_RESPONSE_SIZE_SETTING, (v) -> maxResponseSize = v.getBytes());
this.clusterService = clusterService;
this.circuitBreaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST);
}
/**
* Returns the internal client wrapped with the async user origin.
*/
public Client getClientWithOrigin() {
return clientWithOrigin;
}
/**
* Returns the internal client.
*/
public Client getClient() {
return client;
}
public AsyncSearchSecurity getSecurity() {
return security;
}
/**
 * Stores the initial response with the original headers of the authenticated user
 * and the expected expiration time.
 * Currently for EQL we don't set a limit for a stored async response.
 * TODO: add limit for stored async response in EQL, and instead of this method use createResponse
 */
public void createResponseForEQL(String docId, Map<String, String> headers, R response, ActionListener<DocWriteResponse> listener) {
    indexResponse(docId, headers, null, response, false, listener);
}

/**
 * Variant of the EQL create path that additionally stores the response headers
 * of the original request. No max-response-size limit is applied.
 */
public void createResponseForEQL(
    String docId,
    Map<String, String> headers,
    Map<String, List<String>> responseHeaders,
    R response,
    ActionListener<DocWriteResponse> listener
) {
    indexResponse(docId, headers, responseHeaders, response, false, listener);
}

/**
 * Stores the initial response with the original headers of the authenticated user
 * and the expected expiration time. The serialized response is subject to the
 * configured maximum response size.
 */
public void createResponse(String docId, Map<String, String> headers, R response, ActionListener<DocWriteResponse> listener) {
    indexResponse(docId, headers, null, response, true, listener);
}

/**
 * Stores the final response if the place-holder document is still present (update).
 */
public void updateResponse(
    String docId,
    Map<String, List<String>> responseHeaders,
    R response,
    ActionListener<UpdateResponse> listener
) {
    updateResponse(docId, responseHeaders, response, listener, false);
}
/**
 * Serializes {@code response} (plus headers and expiration time) and creates the
 * backing document with id {@code docId}. When {@code limitToMaxResponseSize} is
 * set, serialization fails once the buffer exceeds the configured maximum size.
 * Any failure is delivered to the listener; the buffer is released either way.
 */
private void indexResponse(
    String docId,
    Map<String, String> headers,
    @Nullable Map<String, List<String>> responseHeaders,
    R response,
    boolean limitToMaxResponseSize,
    ActionListener<DocWriteResponse> listener
) {
    try {
        final var output = allocateBuffer(limitToMaxResponseSize);
        // From here on, the buffer is released as soon as the listener completes.
        listener = ActionListener.runBefore(listener, output::close);
        final XContentBuilder builder = jsonBuilder(output).startObject()
            .field(HEADERS_FIELD, headers)
            .field(EXPIRATION_TIME_FIELD, response.getExpirationTime());
        if (responseHeaders != null) {
            builder.field(RESPONSE_HEADERS_FIELD, responseHeaders);
        }
        addResultFieldAndFinish(response, builder);
        final IndexRequest indexRequest = new IndexRequest(index).create(true)
            .id(docId)
            .source(output.bytes(), builder.contentType());
        clientWithOrigin.index(indexRequest, listener);
    } catch (Exception e) {
        listener.onFailure(e);
    }
}
/**
 * Stores the final response if the place-holder document is still present (update).
 * When {@code isFailure} is {@code true}, the response being stored is itself a
 * failure marker: the size limit is not applied and any further storage error is
 * reported directly to the listener (preventing infinite recursion).
 */
private void updateResponse(
    String docId,
    Map<String, List<String>> responseHeaders,
    R response,
    ActionListener<UpdateResponse> listener,
    boolean isFailure
) {
    ReleasableBytesStreamOutput buffer = null;
    try {
        // Only enforce the max-response-size limit for regular (non-failure) responses.
        buffer = allocateBuffer(isFailure == false);
        final XContentBuilder source = jsonBuilder(buffer).startObject().field(RESPONSE_HEADERS_FIELD, responseHeaders);
        addResultFieldAndFinish(response, source);
        clientWithOrigin.update(
            new UpdateRequest().index(index).id(docId).doc(buffer.bytes(), source.contentType()).retryOnConflict(5),
            ActionListener.runBefore(listener, buffer::close)
        );
    } catch (Exception e) {
        // release buffer right away to save memory, particularly in case the exception came from the circuit breaker
        Releasables.close(buffer);
        // even if we expect updating with a failure always succeed
        // this is just an extra precaution not to create infinite loops
        if (isFailure) {
            listener.onFailure(e);
        } else {
            Throwable cause = ExceptionsHelper.unwrapCause(e);
            if (cause instanceof DocumentMissingException == false && cause instanceof VersionConflictEngineException == false) {
                logger.error(() -> "failed to store async-search [" + docId + "]", e);
                // Try to store the failure itself; at the end, we still report the original failure to the listener.
                updateResponse(
                    docId,
                    responseHeaders,
                    response.convertToFailure(e),
                    ActionListener.running(() -> listener.onFailure(e)),
                    true
                );
            } else {
                // Document is gone or was concurrently modified; just surface the failure.
                listener.onFailure(e);
            }
        }
    }
}
/**
 * Allocates a circuit-breaking output buffer for serializing a response.
 * When {@code limitToMaxResponseSize} is set, the buffer enforces the configured
 * maximum stored-response size.
 */
private ReleasableBytesStreamOutput allocateBuffer(boolean limitToMaxResponseSize) {
    final BigArrays breakingArrays = bigArrays.withCircuitBreaking();
    if (limitToMaxResponseSize) {
        return new ReleasableBytesStreamOutputWithLimit(0, breakingArrays, maxResponseSize);
    }
    return new ReleasableBytesStreamOutput(0, breakingArrays);
}
/**
 * Serializes {@code response} into the result field as base-64 and closes the
 * top-level object of {@code source}. The payload starts with the cluster's
 * minimum transport version (written uncompressed so a reader can decode the
 * rest), followed by the compressed serialized response.
 */
private void addResultFieldAndFinish(Writeable response, XContentBuilder source) throws IOException {
    source.directFieldAsBase64(RESULT_FIELD, os -> {
        // do not close the output
        os = Streams.noCloseStream(os);
        // Version prefix first, before switching to the compressed stream.
        TransportVersion minNodeVersion = clusterService.state().getMinTransportVersion();
        TransportVersion.writeVersion(minNodeVersion, new OutputStreamStreamOutput(os));
        os = CompressorFactory.COMPRESSOR.threadLocalOutputStream(os);
        try (OutputStreamStreamOutput out = new OutputStreamStreamOutput(os)) {
            out.setTransportVersion(minNodeVersion);
            response.writeTo(out);
        }
    }).endObject();
    // do not close the buffer or the XContentBuilder until the request is completed (i.e., listener is notified);
    // otherwise, we underestimate the memory usage in case the circuit breaker does not use the real memory usage.
    source.flush();
}
/**
 * Updates the expiration time of the provided <code>docId</code> if the
 * place-holder document is still present (update).
 */
public void updateExpirationTime(String docId, long expirationTimeMillis, ActionListener<UpdateResponse> listener) {
    final UpdateRequest update = new UpdateRequest().index(index)
        .id(docId)
        .doc(Collections.singletonMap(EXPIRATION_TIME_FIELD, expirationTimeMillis), XContentType.JSON)
        .retryOnConflict(5);
    clientWithOrigin.update(update, listener);
}
/**
 * Deletes the document backing the provided <code>asyncExecutionId</code> from
 * the index, if present. Failures building or dispatching the request are
 * delivered to the listener.
 */
public void deleteResponse(AsyncExecutionId asyncExecutionId, ActionListener<DeleteResponse> listener) {
    try {
        clientWithOrigin.delete(new DeleteRequest(index).id(asyncExecutionId.getDocId()), listener);
    } catch (Exception e) {
        listener.onFailure(e);
    }
}
/**
 * Returns the {@link AsyncTask} registered in the task manager under the
 * provided <code>asyncExecutionId</code>, or <code>null</code> when no task of
 * the expected type with a matching execution id is found.
 */
public static <T extends AsyncTask> T getTask(TaskManager taskManager, AsyncExecutionId asyncExecutionId, Class<T> tClass)
    throws IOException {
    final Task task = taskManager.getTask(asyncExecutionId.getTaskId().getId());
    if (tClass.isInstance(task)) {
        @SuppressWarnings("unchecked")
        final T candidate = (T) task;
        if (candidate.getExecutionId().equals(asyncExecutionId)) {
            return candidate;
        }
    }
    return null;
}
/**
 * Returns the {@link AsyncTask} if the provided <code>asyncTaskId</code>
 * is registered in the task manager, <code>null</code> otherwise.
 *
 * This method throws a {@link ResourceNotFoundException} if the authenticated user
 * is not the creator of the original task.
 */
public <T extends AsyncTask> T getTaskAndCheckAuthentication(
    TaskManager taskManager,
    AsyncExecutionId asyncExecutionId,
    Class<T> tClass
) throws IOException {
    // Delegate to the static variant using this service's security helper.
    return getTaskAndCheckAuthentication(taskManager, security, asyncExecutionId, tClass);
}
/**
 * Returns the {@link AsyncTask} if the provided <code>asyncExecutionId</code>
 * is registered in the task manager, <code>null</code> otherwise.
 *
 * Throws a {@link ResourceNotFoundException} when the task exists but the
 * authenticated user did not create it, hiding the task's existence.
 */
public static <T extends AsyncTask> T getTaskAndCheckAuthentication(
    TaskManager taskManager,
    AsyncSearchSecurity security,
    AsyncExecutionId asyncExecutionId,
    Class<T> tClass
) throws IOException {
    final T task = getTask(taskManager, asyncExecutionId, tClass);
    if (task != null && security.currentUserHasAccessToTask(task) == false) {
        // Report "not found" rather than "forbidden" so unauthorized users cannot probe for task ids.
        throw new ResourceNotFoundException(asyncExecutionId.getEncoded() + " not found");
    }
    return task;
}
/**
 * Gets the response from the index if present, or delegates a {@link ResourceNotFoundException}
 * failure to the provided listener if not.
 * When the provided <code>restoreResponseHeaders</code> is <code>true</code>, this method also restores the
 * response headers of the original request in the current thread context.
 */
public void getResponse(AsyncExecutionId asyncExecutionId, boolean restoreResponseHeaders, ActionListener<R> listener) {
    // Always checks authentication; use getResponseFromIndex directly to skip it.
    getResponseFromIndex(asyncExecutionId, restoreResponseHeaders, true, listener);
}
/**
 * Fetches the stored document for {@code asyncExecutionId} and parses it into a
 * response. Fails the listener with {@link ResourceNotFoundException} when the
 * document does not exist or, when {@code checkAuthentication} is set, when the
 * current user may not access it.
 */
private void getResponseFromIndex(
    AsyncExecutionId asyncExecutionId,
    boolean restoreResponseHeaders,
    boolean checkAuthentication,
    ActionListener<R> outerListener
) {
    final GetRequest getRequest = new GetRequest(index).preference(asyncExecutionId.getEncoded())
        .id(asyncExecutionId.getDocId())
        .realtime(true)
    clientWithOrigin.get(getRequest, outerListener.delegateFailure((listener, getResponse) -> {
        if (getResponse.isExists() == false) {
            listener.onFailure(new ResourceNotFoundException(asyncExecutionId.getEncoded()));
            return;
        }
        final R resp;
        try {
            final BytesReference source = getResponse.getSourceInternal();
            // reserve twice the memory of the source length: one for the internal XContent parser and one for the response
            final long reservedBytes = source.length() * 2L;
            circuitBreaker.addEstimateBytesAndMaybeBreak(reservedBytes, "decode async response");
            // Give the reserved bytes back to the breaker once the listener has been notified.
            listener = ActionListener.runAfter(listener, () -> circuitBreaker.addWithoutBreaking(-reservedBytes));
            resp = parseResponseFromIndex(asyncExecutionId, source, restoreResponseHeaders, checkAuthentication);
        } catch (Exception e) {
            listener.onFailure(e);
            return;
        }
        ActionListener.respondAndRelease(listener, resp);
    }));
}
/**
 * Parses the stored document {@code source} into a response object. When
 * {@code checkAuthentication} is set, the current user is verified against the
 * headers stored with the original request; when {@code restoreResponseHeaders}
 * is set, the stored response headers are restored into the thread context.
 *
 * @throws ResourceNotFoundException if the current user may not access the task
 * @throws ElasticsearchParseException if the document cannot be parsed
 */
private R parseResponseFromIndex(
    AsyncExecutionId asyncExecutionId,
    BytesReference source,
    boolean restoreResponseHeaders,
    boolean checkAuthentication
) {
    try (
        XContentParser parser = XContentHelper.createParser(
            NamedXContentRegistry.EMPTY,
            DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
            source,
            XContentType.JSON
        )
    ) {
        ensureExpectedToken(parser.nextToken(), XContentParser.Token.START_OBJECT, parser);
        R resp = null;
        Long expirationTime = null;
        while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
            ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser);
            parser.nextToken();
            switch (parser.currentName()) {
                case RESULT_FIELD -> resp = decodeResponse(parser.charBuffer());
                case EXPIRATION_TIME_FIELD -> expirationTime = (long) parser.numberValue();
                case HEADERS_FIELD -> {
                    @SuppressWarnings("unchecked")
                    final Map<String, String> headers = (Map<String, String>) XContentParserUtils.parseFieldsValue(parser);
                    // check the authentication of the current user against the user that initiated the async task
                    if (checkAuthentication && false == security.currentUserHasAccessToTaskWithHeaders(headers)) {
                        throw new ResourceNotFoundException(asyncExecutionId.getEncoded());
                    }
                }
                case RESPONSE_HEADERS_FIELD -> {
                    @SuppressWarnings("unchecked")
                    final Map<String, List<String>> responseHeaders = (Map<String, List<String>>) XContentParserUtils.parseFieldsValue(
                        parser
                    );
                    if (restoreResponseHeaders) {
                        restoreResponseHeadersContext(threadContext, responseHeaders);
                    }
                }
                default -> XContentParserUtils.parseFieldsValue(parser); // consume and discard unknown fields
            }
        }
        Objects.requireNonNull(resp, "Get result doesn't include [" + RESULT_FIELD + "] field");
        Objects.requireNonNull(expirationTime, "Get result doesn't include [" + EXPIRATION_TIME_FIELD + "] field");
        try {
            // The document-level expiration time overrides whatever was serialized in the result.
            return resp.withExpirationTime(expirationTime);
        } finally {
            // Release our reference to the parsed response; withExpirationTime presumably
            // returns an independently ref-counted result — confirm with the response type.
            resp.decRef();
        }
    } catch (IOException e) {
        throw new ElasticsearchParseException("Failed to parse the get result", e);
    }
}
/**
 * Retrieves the status of the async search or async or stored eql search.
 * The status is taken from the task if the task is still available, otherwise
 * from the index.
 *
 * @param request                the status request carrying the encoded async execution id
 * @param taskManager            used to look up the in-flight task, if any
 * @param tClass                 the expected task type
 * @param statusProducerFromTask produces a status response from a live task
 * @param statusProducerFromIndex produces a status response from a stored response,
 *                               its expiration time, and the encoded execution id
 * @param originalListener       notified with the status, or with a
 *                               {@link ResourceNotFoundException} when missing or expired
 */
public <T extends AsyncTask, SR extends SearchStatusResponse> void retrieveStatus(
    GetAsyncStatusRequest request,
    TaskManager taskManager,
    Class<T> tClass,
    Function<T, SR> statusProducerFromTask,
    TriFunction<R, Long, String, SR> statusProducerFromIndex,
    ActionListener<SR> originalListener
) {
    // check if the result has expired
    final ActionListener<SR> outerListener = originalListener.delegateFailure((listener, resp) -> {
        if (resp.getExpirationTime() < System.currentTimeMillis()) {
            listener.onFailure(new ResourceNotFoundException(request.getId()));
        } else {
            listener.onResponse(resp);
        }
    });
    security.currentUserCanSeeStatusOfAllSearches(ActionListener.wrap(canSeeAll -> {
        AsyncExecutionId asyncExecutionId = AsyncExecutionId.decode(request.getId());
        try {
            T asyncTask = getTask(taskManager, asyncExecutionId, tClass);
            if (asyncTask != null) { // get status response from task
                if (canSeeAll || security.currentUserHasAccessToTask(asyncTask)) {
                    var response = statusProducerFromTask.apply(asyncTask);
                    outerListener.onResponse(response);
                } else {
                    // Hide the task's existence from users without access.
                    outerListener.onFailure(new ResourceNotFoundException(request.getId()));
                }
            } else {
                // get status response from index
                final boolean checkAuthentication = canSeeAll == false;
                getResponseFromIndex(
                    asyncExecutionId,
                    false,
                    checkAuthentication,
                    outerListener.map(
                        resp -> statusProducerFromIndex.apply(resp, resp.getExpirationTime(), asyncExecutionId.getEncoded())
                    )
                );
            }
        } catch (Exception exc) {
            outerListener.onFailure(exc);
        }
    }, outerListener::onFailure));
}
/**
 * Decodes the base-64 encoded result field back into a response object.
 * The payload begins with the {@link TransportVersion} it was written with,
 * followed by the compressed, serialized response.
 *
 * @param encodedBuffer the base-64 characters read from the stored document
 * @return the deserialized response
 * @throws IOException if the stored bytes cannot be read
 * @throws IllegalArgumentException if the stored version is no longer wire-compatible
 */
private R decodeResponse(CharBuffer encodedBuffer) throws IOException {
    // Adapt the char buffer to an InputStream so it can be fed through the base-64 decoder.
    InputStream encodedIn = Base64.getDecoder().wrap(new InputStream() {
        @Override
        public int read() {
            if (encodedBuffer.hasRemaining()) {
                return encodedBuffer.get();
            } else {
                return -1; // end of stream
            }
        }
    });
    TransportVersion version = TransportVersion.readVersion(new InputStreamStreamInput(encodedIn));
    // The assertion fires only when the stored version is after the current one,
    // so the message reports ">" (the original message incorrectly said ">=").
    assert version.onOrBefore(TransportVersion.current()) : version + " > " + TransportVersion.current();
    if (TransportVersion.isCompatible(version) == false) {
        throw new IllegalArgumentException(
            "Unable to retrieve async search results. Stored results were created with an incompatible version of Elasticsearch."
        );
    }
    // The remainder of the payload is compressed; wrap it before deserializing.
    final StreamInput input = CompressorFactory.COMPRESSOR.threadLocalStreamInput(encodedIn);
    try (StreamInput in = new NamedWriteableAwareStreamInput(input, registry)) {
        in.setTransportVersion(version);
        return reader.read(in);
    }
}
/**
 * Copies every entry of <code>responseHeaders</code> into the given thread
 * context as response headers.
 */
public static void restoreResponseHeadersContext(ThreadContext threadContext, Map<String, List<String>> responseHeaders) {
    responseHeaders.forEach((name, values) -> values.forEach(value -> threadContext.addResponseHeader(name, value)));
}
private static
|
AsyncTaskIndexService
|
java
|
quarkusio__quarkus
|
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/interceptors/producer/ProducerWithFinalInterceptedClassTest.java
|
{
"start": 1964,
"end": 2133
}
|
class ____ {
@Produces
MyNonbean produce(InterceptionProxy<MyNonbean> proxy) {
return proxy.create(new MyNonbean());
}
}
}
|
MyProducer
|
java
|
resilience4j__resilience4j
|
resilience4j-micrometer/src/main/java/io/github/resilience4j/micrometer/tagged/TaggedTimeLimiterMetrics.java
|
{
"start": 1013,
"end": 3109
}
|
class ____ extends AbstractTimeLimiterMetrics implements MeterBinder {
private final TimeLimiterRegistry timeLimiterRegistry;
private TaggedTimeLimiterMetrics(TimeLimiterMetricNames names, TimeLimiterRegistry timeLimiterRegistry) {
super(names);
this.timeLimiterRegistry = requireNonNull(timeLimiterRegistry);
}
/**
* Creates a new binder that uses given {@code registry} as source of time limiters.
*
* @param timeLimiterRegistry the source of time limiters
* @return The {@link TaggedTimeLimiterMetrics} instance.
*/
public static TaggedTimeLimiterMetrics ofTimeLimiterRegistry(
TimeLimiterRegistry timeLimiterRegistry) {
return new TaggedTimeLimiterMetrics(TimeLimiterMetricNames.ofDefaults(), timeLimiterRegistry);
}
/**
* Creates a new binder that uses given {@code registry} as source of time limiters.
*
* @param names custom metric names
* @param timeLimiterRegistry the source of time limiters
* @return The {@link TaggedTimeLimiterMetrics} instance.
*/
public static TaggedTimeLimiterMetrics ofTimeLimiterRegistry(TimeLimiterMetricNames names,
TimeLimiterRegistry timeLimiterRegistry) {
return new TaggedTimeLimiterMetrics(names, timeLimiterRegistry);
}
@Override
public void bindTo(MeterRegistry registry) {
for (TimeLimiter timeLimiter : timeLimiterRegistry.getAllTimeLimiters()) {
addMetrics(registry, timeLimiter);
}
timeLimiterRegistry.getEventPublisher()
.onEntryAdded(event -> addMetrics(registry, event.getAddedEntry()));
timeLimiterRegistry.getEventPublisher()
.onEntryRemoved(event -> removeMetrics(registry, event.getRemovedEntry().getName()));
timeLimiterRegistry.getEventPublisher().onEntryReplaced(event -> {
removeMetrics(registry, event.getOldEntry().getName());
addMetrics(registry, event.getNewEntry());
});
}
}
|
TaggedTimeLimiterMetrics
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/impl/LimitedPollingConsumerPollStrategyTest.java
|
{
"start": 1332,
"end": 8016
}
|
class ____ extends ContextTestSupport {
private LimitedPollingConsumerPollStrategy strategy;
@Test
public void testLimitedPollingConsumerPollStrategy() {
Exception expectedException = new Exception("Hello");
strategy = new LimitedPollingConsumerPollStrategy();
strategy.setLimit(3);
final Endpoint endpoint = getMockEndpoint("mock:foo");
MockScheduledPollConsumer consumer = new MockScheduledPollConsumer(endpoint, expectedException);
consumer.setPollStrategy(strategy);
consumer.start();
assertTrue(consumer.isStarted(), "Should be started");
consumer.run();
assertTrue(consumer.isStarted(), "Should still be started");
consumer.run();
consumer.run();
assertTrue(consumer.isSuspended(), "Should be suspended");
consumer.stop();
}
@Test
public void testLimitAtTwoLimitedPollingConsumerPollStrategy() {
Exception expectedException = new Exception("Hello");
strategy = new LimitedPollingConsumerPollStrategy();
strategy.setLimit(2);
final Endpoint endpoint = getMockEndpoint("mock:foo");
MockScheduledPollConsumer consumer = new MockScheduledPollConsumer(endpoint, expectedException);
consumer.setPollStrategy(strategy);
consumer.start();
assertTrue(consumer.isStarted(), "Should still be started");
consumer.run();
consumer.run();
assertTrue(consumer.isSuspended(), "Should be suspended");
consumer.stop();
}
@Test
public void testLimitedPollingConsumerPollStrategySuccess() {
Exception expectedException = new Exception("Hello");
strategy = new LimitedPollingConsumerPollStrategy();
strategy.setLimit(3);
final Endpoint endpoint = getMockEndpoint("mock:foo");
MockScheduledPollConsumer consumer = new MockScheduledPollConsumer(endpoint, expectedException);
consumer.setPollStrategy(strategy);
consumer.start();
assertTrue(consumer.isStarted(), "Should be started");
consumer.run();
consumer.run();
assertTrue(consumer.isStarted(), "Should still be started");
// now force success
consumer.setExceptionToThrowOnPoll(null);
consumer.run();
assertTrue(consumer.isStarted(), "Should still be started");
consumer.run();
assertTrue(consumer.isStarted(), "Should still be started");
consumer.stop();
}
@Test
public void testLimitedPollingConsumerPollStrategySuccessThenFail() {
Exception expectedException = new Exception("Hello");
strategy = new LimitedPollingConsumerPollStrategy();
strategy.setLimit(3);
final Endpoint endpoint = getMockEndpoint("mock:foo");
MockScheduledPollConsumer consumer = new MockScheduledPollConsumer(endpoint, expectedException);
consumer.setPollStrategy(strategy);
consumer.start();
// fail 2 times
consumer.run();
assertTrue(consumer.isStarted(), "Should still be started");
consumer.run();
assertTrue(consumer.isStarted(), "Should still be started");
// now force success 2 times
consumer.setExceptionToThrowOnPoll(null);
consumer.run();
assertTrue(consumer.isStarted(), "Should still be started");
consumer.run();
assertTrue(consumer.isStarted(), "Should still be started");
// now fail again, after hitting limit at 3
consumer.setExceptionToThrowOnPoll(expectedException);
consumer.run();
assertTrue(consumer.isStarted(), "Should still be started");
consumer.run();
consumer.run();
assertTrue(consumer.isSuspended(), "Should be suspended");
consumer.stop();
}
@Test
public void testTwoConsumersLimitedPollingConsumerPollStrategy() {
Exception expectedException = new Exception("Hello");
strategy = new LimitedPollingConsumerPollStrategy();
strategy.setLimit(3);
final Endpoint endpoint = getMockEndpoint("mock:foo");
MockScheduledPollConsumer consumer = new MockScheduledPollConsumer(endpoint, expectedException);
consumer.setPollStrategy(strategy);
MockScheduledPollConsumer consumer2 = new MockScheduledPollConsumer(endpoint, null);
consumer2.setPollStrategy(strategy);
consumer.start();
consumer2.start();
consumer.run();
consumer2.run();
assertTrue(consumer.isStarted(), "Should still be started");
assertTrue(consumer2.isStarted(), "Should still be started");
consumer.run();
consumer2.run();
assertTrue(consumer.isStarted(), "Should still be started");
assertTrue(consumer2.isStarted(), "Should still be started");
consumer.run();
consumer2.run();
assertTrue(consumer.isSuspended(), "Should be suspended");
assertTrue(consumer2.isStarted(), "Should still be started");
consumer.stop();
consumer2.stop();
}
@Test
public void testRestartManuallyLimitedPollingConsumerPollStrategy() {
Exception expectedException = new Exception("Hello");
strategy = new LimitedPollingConsumerPollStrategy();
strategy.setLimit(3);
final Endpoint endpoint = getMockEndpoint("mock:foo");
MockScheduledPollConsumer consumer = new MockScheduledPollConsumer(endpoint, expectedException);
consumer.setPollStrategy(strategy);
consumer.start();
consumer.run();
assertTrue(consumer.isStarted(), "Should still be started");
consumer.run();
consumer.run();
assertTrue(consumer.isSuspended(), "Should be suspended");
// now start the consumer again
ServiceHelper.resumeService(consumer);
consumer.run();
assertTrue(consumer.isStarted(), "Should still be started");
consumer.run();
consumer.run();
assertTrue(consumer.isSuspended(), "Should be suspended");
// now start the consumer again
ServiceHelper.resumeService(consumer);
// and let it succeed
consumer.setExceptionToThrowOnPoll(null);
consumer.run();
assertTrue(consumer.isStarted(), "Should still be started");
consumer.run();
assertTrue(consumer.isStarted(), "Should still be started");
consumer.run();
assertTrue(consumer.isStarted(), "Should still be started");
consumer.run();
assertTrue(consumer.isStarted(), "Should still be started");
consumer.stop();
}
}
|
LimitedPollingConsumerPollStrategyTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/type/LongNullTest.java
|
{
"start": 856,
"end": 1927
}
|
class ____ {
@AfterEach
void tearDown(SessionFactoryScope factoryScope) {
factoryScope.dropData();
}
@Test
public void testNull(SessionFactoryScope scope) {
final var expected = scope.fromTransaction( s -> {
final var foo = new Foo();
s.persist( foo );
return foo;
} );
final var actual = scope.fromSession( s -> s.find( Foo.class, expected.id ) );
assertEquals( expected.field, actual.field );
assertEquals( expected.nfield, actual.nfield );
assertArrayEquals( expected.bfield, actual.bfield );
}
@Test
public void testNonNull(SessionFactoryScope scope) {
final var expected = scope.fromTransaction( s -> {
final var foo = new Foo();
foo.bfield = "ABC".getBytes();
foo.field = "DEF";
foo.nfield = "GHI";
s.persist( foo );
return foo;
} );
final var actual = scope.fromSession( s -> s.find( Foo.class, expected.id ) );
assertEquals( expected.field, actual.field );
assertEquals( expected.nfield, actual.nfield );
assertArrayEquals( expected.bfield, actual.bfield );
}
@Entity
public static final
|
LongNullTest
|
java
|
alibaba__nacos
|
sys/src/main/java/com/alibaba/nacos/sys/file/FileWatcher.java
|
{
"start": 907,
"end": 1582
}
|
class ____ {
/**
* Triggered when a file change occurs.
*
* @param event {@link FileChangeEvent}
*/
public abstract void onChange(FileChangeEvent event);
/**
* WatchEvent context information.
*
* @param context {@link WatchEvent#context()}
* @return is this watcher interest context
*/
public abstract boolean interest(String context);
/**
* If the FileWatcher has its own thread pool, use this thread pool to execute, otherwise use the WatchFileManager
* thread.
*
* @return {@link Executor}
*/
public Executor executor() {
return null;
}
}
|
FileWatcher
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java
|
{
"start": 4400,
"end": 181591
}
|
class ____ extends ESTestCase {
private IndexNameExpressionResolver indexNameExpressionResolver;
private ThreadContext threadContext;
private long epochMillis;
private ThreadContext createThreadContext() {
return new ThreadContext(Settings.EMPTY);
}
protected IndexNameExpressionResolver createIndexNameExpressionResolver(ThreadContext threadContext) {
return TestIndexNameExpressionResolver.newInstance(threadContext);
}
@Override
public void setUp() throws Exception {
super.setUp();
threadContext = createThreadContext();
indexNameExpressionResolver = createIndexNameExpressionResolver(threadContext);
epochMillis = randomLongBetween(1580536800000L, 1583042400000L);
}
public void testConcreteIndexNamesStrictExpand() {
final ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId())
.put(indexBuilder("foo").putAlias(AliasMetadata.builder("foofoobar")))
.put(indexBuilder("foobar").putAlias(AliasMetadata.builder("foofoobar")))
.put(indexBuilder("foofoo-closed").state(State.CLOSE))
.put(indexBuilder("foofoo").putAlias(AliasMetadata.builder("barbaz")))
.build();
for (IndicesOptions options : List.of(IndicesOptions.strictExpandOpen(), IndicesOptions.strictExpand())) {
IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(
project,
options,
randomFrom(SystemIndexAccessLevel.values())
);
String[] results = indexNameExpressionResolver.concreteIndexNames(context, "foo");
assertEquals(1, results.length);
assertEquals("foo", results[0]);
{
IndexNotFoundException infe = expectThrows(
IndexNotFoundException.class,
() -> indexNameExpressionResolver.concreteIndexNames(context, "bar")
);
assertThat(infe.getIndex().getName(), equalTo("bar"));
}
results = indexNameExpressionResolver.concreteIndexNames(context, "foofoo", "foobar");
assertEquals(2, results.length);
assertThat(results, arrayContainingInAnyOrder("foofoo", "foobar"));
results = indexNameExpressionResolver.concreteIndexNames(context, "foofoobar");
assertEquals(new HashSet<>(Arrays.asList("foo", "foobar")), new HashSet<>(Arrays.asList(results)));
{
IndexNotFoundException infe = expectThrows(
IndexNotFoundException.class,
() -> indexNameExpressionResolver.concreteIndexNames(context, "foo", "bar")
);
assertThat(infe.getIndex().getName(), equalTo("bar"));
}
results = indexNameExpressionResolver.concreteIndexNames(context, "barbaz", "foobar");
assertEquals(2, results.length);
assertThat(results, arrayContainingInAnyOrder("foofoo", "foobar"));
{
IndexNotFoundException infe = expectThrows(
IndexNotFoundException.class,
() -> indexNameExpressionResolver.concreteIndexNames(context, "barbaz", "bar")
);
assertThat(infe.getIndex().getName(), equalTo("bar"));
}
results = indexNameExpressionResolver.concreteIndexNames(context, "baz*");
assertThat(results, emptyArray());
results = indexNameExpressionResolver.concreteIndexNames(context, "foo", "baz*");
assertEquals(1, results.length);
assertEquals("foo", results[0]);
results = indexNameExpressionResolver.concreteIndexNames(context, Strings.EMPTY_ARRAY);
Matcher<String[]> expectedResults = options == IndicesOptions.strictExpandOpen()
? arrayContainingInAnyOrder("foo", "foobar", "foofoo")
: arrayContainingInAnyOrder("foo", "foobar", "foofoo", "foofoo-closed");
assertThat(results, expectedResults);
results = indexNameExpressionResolver.concreteIndexNames(context, (String[]) null);
assertThat(results, expectedResults);
results = indexNameExpressionResolver.concreteIndexNames(context, "foofoo*");
assertThat(results, expectedResults);
}
}
public void testConcreteIndexNamesLenientExpand() {
final ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId())
.put(indexBuilder("foo").putAlias(AliasMetadata.builder("foofoobar")))
.put(indexBuilder("foobar").putAlias(AliasMetadata.builder("foofoobar")))
.put(indexBuilder("foofoo-closed").state(State.CLOSE))
.put(indexBuilder("foofoo").putAlias(AliasMetadata.builder("barbaz")))
.build();
for (IndicesOptions options : List.of(IndicesOptions.lenientExpandOpen(), IndicesOptions.lenientExpand())) {
IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(
project,
options,
randomFrom(SystemIndexAccessLevel.values())
);
String[] results = indexNameExpressionResolver.concreteIndexNames(context, "foo");
assertEquals(1, results.length);
assertEquals("foo", results[0]);
results = indexNameExpressionResolver.concreteIndexNames(context, "bar");
assertThat(results, emptyArray());
results = indexNameExpressionResolver.concreteIndexNames(context, "foofoo", "foobar");
assertEquals(2, results.length);
assertThat(results, arrayContainingInAnyOrder("foofoo", "foobar"));
results = indexNameExpressionResolver.concreteIndexNames(context, "foofoobar");
assertEquals(2, results.length);
assertEquals(new HashSet<>(Arrays.asList("foo", "foobar")), new HashSet<>(Arrays.asList(results)));
results = indexNameExpressionResolver.concreteIndexNames(context, "foo", "bar");
assertEquals(1, results.length);
assertThat(results, arrayContainingInAnyOrder("foo"));
results = indexNameExpressionResolver.concreteIndexNames(context, "barbaz", "foobar");
assertEquals(2, results.length);
assertThat(results, arrayContainingInAnyOrder("foofoo", "foobar"));
results = indexNameExpressionResolver.concreteIndexNames(context, "barbaz", "bar");
assertEquals(1, results.length);
assertThat(results, arrayContainingInAnyOrder("foofoo"));
results = indexNameExpressionResolver.concreteIndexNames(context, "baz*");
assertThat(results, emptyArray());
results = indexNameExpressionResolver.concreteIndexNames(context, "foo", "baz*");
assertEquals(1, results.length);
assertEquals("foo", results[0]);
Matcher<String[]> expectedResults = options == IndicesOptions.lenientExpandOpen()
? arrayContainingInAnyOrder("foo", "foobar", "foofoo")
: arrayContainingInAnyOrder("foo", "foobar", "foofoo", "foofoo-closed");
results = indexNameExpressionResolver.concreteIndexNames(context, Strings.EMPTY_ARRAY);
assertThat(results, expectedResults);
results = indexNameExpressionResolver.concreteIndexNames(context, "foofoo*");
assertThat(results, expectedResults);
}
}
public void testConcreteIndexNamesIgnoreUnavailableDisallowEmpty() {
ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId())
.put(indexBuilder("foo"))
.put(indexBuilder("foobar"))
.put(indexBuilder("foofoo-closed").state(IndexMetadata.State.CLOSE))
.put(indexBuilder("foofoo").putAlias(AliasMetadata.builder("barbaz")))
.build();
IndicesOptions expandOpen = IndicesOptions.fromOptions(true, false, true, false);
IndicesOptions expand = IndicesOptions.fromOptions(true, false, true, true);
for (IndicesOptions options : List.of(expandOpen, expand)) {
IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(
project,
options,
randomFrom(SystemIndexAccessLevel.values())
);
String[] results = indexNameExpressionResolver.concreteIndexNames(context, "foo");
assertEquals(1, results.length);
assertEquals("foo", results[0]);
{
IndexNotFoundException infe = expectThrows(
IndexNotFoundException.class,
() -> indexNameExpressionResolver.concreteIndexNames(context, "bar")
);
assertThat(infe.getIndex().getName(), equalTo("bar"));
}
{
IndexNotFoundException infe = expectThrows(
IndexNotFoundException.class,
() -> indexNameExpressionResolver.concreteIndexNames(context, "baz*")
);
assertThat(infe.getIndex().getName(), equalTo("baz*"));
}
{
IndexNotFoundException infe = expectThrows(
IndexNotFoundException.class,
() -> indexNameExpressionResolver.concreteIndexNames(context, "foo", "baz*")
);
assertThat(infe.getIndex().getName(), equalTo("baz*"));
}
Matcher<String[]> expectedResults = options == expandOpen
? arrayContainingInAnyOrder("foo", "foobar", "foofoo")
: arrayContainingInAnyOrder("foo", "foobar", "foofoo", "foofoo-closed");
results = indexNameExpressionResolver.concreteIndexNames(context, Strings.EMPTY_ARRAY);
assertThat(results, expectedResults);
results = indexNameExpressionResolver.concreteIndexNames(context, Strings.EMPTY_ARRAY);
assertThat(results, expectedResults);
}
}
public void testConcreteIndexNamesExpandWildcards() {
    // Exercises every combination of the expand-wildcards open/closed/hidden flags against a
    // project containing open, closed, hidden, and dot-prefixed hidden indices.
    ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId())
        .put(indexBuilder("foo").state(IndexMetadata.State.CLOSE))
        .put(indexBuilder("bar"))
        .put(indexBuilder("foobar").putAlias(AliasMetadata.builder("barbaz")))
        .put(indexBuilder("hidden", Settings.builder().put("index.hidden", true).build()))
        .put(indexBuilder(".hidden", Settings.builder().put("index.hidden", true).build()))
        .put(indexBuilder(".hidden-closed", Settings.builder().put("index.hidden", true).build()).state(IndexMetadata.State.CLOSE))
        .put(indexBuilder("hidden-closed", Settings.builder().put("index.hidden", true).build()).state(IndexMetadata.State.CLOSE))
        .build();
    // Only closed
    IndicesOptions options = IndicesOptions.fromOptions(false, true, false, true, false);
    IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(
        project,
        options,
        SystemIndexAccessLevel.NONE
    );
    String[] results = indexNameExpressionResolver.concreteIndexNames(context, Strings.EMPTY_ARRAY);
    assertEquals(1, results.length);
    assertEquals("foo", results[0]);
    results = indexNameExpressionResolver.concreteIndexNames(context, "foo*");
    assertEquals(1, results.length);
    assertEquals("foo", results[0]);
    // no wildcards, so wildcard expansion doesn't apply
    results = indexNameExpressionResolver.concreteIndexNames(context, "bar");
    assertEquals(1, results.length);
    assertEquals("bar", results[0]);
    // implicit hidden for dot indices based on wildcard starting with .
    results = indexNameExpressionResolver.concreteIndexNames(context, ".*");
    assertEquals(1, results.length);
    assertThat(results, arrayContainingInAnyOrder(".hidden-closed"));
    results = indexNameExpressionResolver.concreteIndexNames(context, ".hidd*");
    assertEquals(1, results.length);
    assertThat(results, arrayContainingInAnyOrder(".hidden-closed"));
    // Only open
    options = IndicesOptions.fromOptions(false, true, true, false, false);
    context = new IndexNameExpressionResolver.Context(project, options, SystemIndexAccessLevel.NONE);
    results = indexNameExpressionResolver.concreteIndexNames(context, Strings.EMPTY_ARRAY);
    assertEquals(2, results.length);
    assertThat(results, arrayContainingInAnyOrder("bar", "foobar"));
    results = indexNameExpressionResolver.concreteIndexNames(context, "foo*");
    assertEquals(1, results.length);
    assertEquals("foobar", results[0]);
    results = indexNameExpressionResolver.concreteIndexNames(context, "bar");
    assertEquals(1, results.length);
    assertEquals("bar", results[0]);
    // implicit hidden for dot indices based on wildcard starting with .
    results = indexNameExpressionResolver.concreteIndexNames(context, ".*");
    assertEquals(1, results.length);
    assertThat(results, arrayContainingInAnyOrder(".hidden"));
    results = indexNameExpressionResolver.concreteIndexNames(context, ".hidd*");
    assertEquals(1, results.length);
    assertThat(results, arrayContainingInAnyOrder(".hidden"));
    // Open and closed
    options = IndicesOptions.fromOptions(false, true, true, true, false);
    context = new IndexNameExpressionResolver.Context(project, options, SystemIndexAccessLevel.NONE);
    results = indexNameExpressionResolver.concreteIndexNames(context, Strings.EMPTY_ARRAY);
    assertEquals(3, results.length);
    assertThat(results, arrayContainingInAnyOrder("bar", "foobar", "foo"));
    results = indexNameExpressionResolver.concreteIndexNames(context, "foo*");
    assertEquals(2, results.length);
    assertThat(results, arrayContainingInAnyOrder("foobar", "foo"));
    results = indexNameExpressionResolver.concreteIndexNames(context, "bar");
    assertEquals(1, results.length);
    assertEquals("bar", results[0]);
    // "-expr" after a wildcard acts as an exclusion from the earlier matches.
    results = indexNameExpressionResolver.concreteIndexNames(context, "*", "-foo*");
    assertEquals(1, results.length);
    assertEquals("bar", results[0]);
    results = indexNameExpressionResolver.concreteIndexNames(context, "*", "-foo", "-foobar");
    assertEquals(1, results.length);
    assertEquals("bar", results[0]);
    // A trailing "*" re-adds everything excluded before it.
    results = indexNameExpressionResolver.concreteIndexNames(context, "*", "-foo", "*");
    assertEquals(3, results.length);
    assertThat(results, arrayContainingInAnyOrder("bar", "foobar", "foo"));
    results = indexNameExpressionResolver.concreteIndexNames(context, "-*");
    assertEquals(0, results.length);
    // implicit hidden for dot indices based on wildcard starting with .
    results = indexNameExpressionResolver.concreteIndexNames(context, ".*");
    assertEquals(2, results.length);
    assertThat(results, arrayContainingInAnyOrder(".hidden", ".hidden-closed"));
    results = indexNameExpressionResolver.concreteIndexNames(context, ".hidd*");
    assertEquals(2, results.length);
    assertThat(results, arrayContainingInAnyOrder(".hidden", ".hidden-closed"));
    // open closed and hidden
    options = IndicesOptions.fromOptions(false, true, true, true, true);
    context = new IndexNameExpressionResolver.Context(project, options, SystemIndexAccessLevel.NONE);
    results = indexNameExpressionResolver.concreteIndexNames(context, Strings.EMPTY_ARRAY);
    assertEquals(7, results.length);
    assertThat(results, arrayContainingInAnyOrder("bar", "foobar", "foo", "hidden", "hidden-closed", ".hidden", ".hidden-closed"));
    results = indexNameExpressionResolver.concreteIndexNames(context, "foo*");
    assertEquals(2, results.length);
    assertThat(results, arrayContainingInAnyOrder("foobar", "foo"));
    results = indexNameExpressionResolver.concreteIndexNames(context, "bar");
    assertEquals(1, results.length);
    assertEquals("bar", results[0]);
    results = indexNameExpressionResolver.concreteIndexNames(context, "*", "-foo*");
    assertEquals(5, results.length);
    assertThat(results, arrayContainingInAnyOrder("bar", "hidden", "hidden-closed", ".hidden", ".hidden-closed"));
    results = indexNameExpressionResolver.concreteIndexNames(context, "*", "-foo", "-foobar");
    assertEquals(5, results.length);
    assertThat(results, arrayContainingInAnyOrder("bar", "hidden", "hidden-closed", ".hidden", ".hidden-closed"));
    results = indexNameExpressionResolver.concreteIndexNames(context, "*", "-foo", "-foobar", "-hidden*");
    assertEquals(3, results.length);
    assertThat(results, arrayContainingInAnyOrder("bar", ".hidden", ".hidden-closed"));
    results = indexNameExpressionResolver.concreteIndexNames(context, "hidden*");
    assertEquals(2, results.length);
    assertThat(results, arrayContainingInAnyOrder("hidden", "hidden-closed"));
    results = indexNameExpressionResolver.concreteIndexNames(context, "hidden");
    assertEquals(1, results.length);
    assertThat(results, arrayContainingInAnyOrder("hidden"));
    results = indexNameExpressionResolver.concreteIndexNames(context, "hidden-closed");
    assertEquals(1, results.length);
    assertThat(results, arrayContainingInAnyOrder("hidden-closed"));
    results = indexNameExpressionResolver.concreteIndexNames(context, "-*");
    assertEquals(0, results.length);
    // With allowNoIndices=false an all-excluding expression must fail instead of returning nothing.
    options = IndicesOptions.fromOptions(false, false, true, true, true);
    IndexNameExpressionResolver.Context context2 = new IndexNameExpressionResolver.Context(
        project,
        options,
        SystemIndexAccessLevel.NONE
    );
    IndexNotFoundException infe = expectThrows(
        IndexNotFoundException.class,
        () -> indexNameExpressionResolver.concreteIndexNames(context2, "-*")
    );
    assertThat(infe.getResourceId().toString(), equalTo("[-*]"));
    infe = expectThrows(
        IndexNotFoundException.class,
        // throws error because "-foobar" was not covered by a wildcard that included it
        () -> indexNameExpressionResolver.concreteIndexNames(context2, "bar", "hidden", "-foobar")
    );
    assertThat(
        infe.getMessage(),
        containsString(
            "if you intended to exclude this index, ensure that you use wildcards that include it " + "before explicitly excluding it"
        )
    );
    assertThat(infe.getResourceId().toString(), equalTo("[-foobar]"));
    // open and hidden
    options = IndicesOptions.fromOptions(false, true, true, false, true);
    context = new IndexNameExpressionResolver.Context(project, options, SystemIndexAccessLevel.NONE);
    results = indexNameExpressionResolver.concreteIndexNames(context, Strings.EMPTY_ARRAY);
    assertEquals(4, results.length);
    assertThat(results, arrayContainingInAnyOrder("bar", "foobar", "hidden", ".hidden"));
    results = indexNameExpressionResolver.concreteIndexNames(context, "foo*");
    assertEquals(1, results.length);
    assertEquals("foobar", results[0]);
    results = indexNameExpressionResolver.concreteIndexNames(context, "bar");
    assertEquals(1, results.length);
    assertEquals("bar", results[0]);
    results = indexNameExpressionResolver.concreteIndexNames(context, "h*");
    assertEquals(1, results.length);
    assertEquals("hidden", results[0]);
    // closed and hidden
    options = IndicesOptions.fromOptions(false, true, false, true, true);
    context = new IndexNameExpressionResolver.Context(project, options, SystemIndexAccessLevel.NONE);
    results = indexNameExpressionResolver.concreteIndexNames(context, Strings.EMPTY_ARRAY);
    assertEquals(3, results.length);
    assertThat(results, arrayContainingInAnyOrder("foo", "hidden-closed", ".hidden-closed"));
    results = indexNameExpressionResolver.concreteIndexNames(context, "foo*");
    assertEquals(1, results.length);
    assertEquals("foo", results[0]);
    // concrete (non-wildcard) names resolve regardless of the expansion flags
    results = indexNameExpressionResolver.concreteIndexNames(context, "bar");
    assertEquals(1, results.length);
    assertEquals("bar", results[0]);
    results = indexNameExpressionResolver.concreteIndexNames(context, "h*");
    assertEquals(1, results.length);
    assertEquals("hidden-closed", results[0]);
    // only hidden
    options = IndicesOptions.fromOptions(false, true, false, false, true);
    context = new IndexNameExpressionResolver.Context(project, options, SystemIndexAccessLevel.NONE);
    results = indexNameExpressionResolver.concreteIndexNames(context, Strings.EMPTY_ARRAY);
    assertThat(results, emptyArray());
    IndexNameExpressionResolver.Context context3 = context;
    infe = expectThrows(IndexNotFoundException.class, () -> indexNameExpressionResolver.concreteIndexNames(context3, "h*"));
    assertThat(infe.getResourceId().toString(), equalTo("[h*]"));
    results = indexNameExpressionResolver.concreteIndexNames(context, "hidden");
    assertThat(results, arrayContainingInAnyOrder("hidden"));
    results = indexNameExpressionResolver.concreteIndexNames(context, "hidden-closed");
    assertThat(results, arrayContainingInAnyOrder("hidden-closed"));
}
public void testConcreteIndexNamesNoExpandWildcards() {
    // With wildcard expansion disabled, "baz*" is treated as a literal (non-matching) name;
    // the four sections cover each combination of ignoreUnavailable x allowNoIndices.
    ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId())
        .put(indexBuilder("foo").putAlias(AliasMetadata.builder("foofoobar")))
        .put(indexBuilder("foobar").putAlias(AliasMetadata.builder("foofoobar")))
        .put(indexBuilder("foofoo-closed").state(IndexMetadata.State.CLOSE))
        .put(indexBuilder("foofoo").putAlias(AliasMetadata.builder("barbaz")))
        .build();
    // ignore unavailable and allow no indices
    {
        IndicesOptions noExpandLenient = IndicesOptions.fromOptions(true, true, false, false, randomBoolean());
        IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(
            project,
            noExpandLenient,
            SystemIndexAccessLevel.NONE
        );
        String[] results = indexNameExpressionResolver.concreteIndexNames(context, "baz*");
        assertThat(results, emptyArray());
        results = indexNameExpressionResolver.concreteIndexNames(context, "foo", "baz*");
        assertEquals(1, results.length);
        assertEquals("foo", results[0]);
        // aliases still resolve to their backing indices even without wildcard expansion
        results = indexNameExpressionResolver.concreteIndexNames(context, "foofoobar");
        assertEquals(2, results.length);
        assertThat(results, arrayContainingInAnyOrder("foo", "foobar"));
        results = indexNameExpressionResolver.concreteIndexNames(context, (String[]) null);
        assertEquals(0, results.length);
        results = indexNameExpressionResolver.concreteIndexNames(context, Strings.EMPTY_ARRAY);
        assertEquals(0, results.length);
    }
    // ignore unavailable but don't allow no indices
    {
        IndicesOptions noExpandDisallowEmpty = IndicesOptions.fromOptions(true, false, false, false, randomBoolean());
        IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(
            project,
            noExpandDisallowEmpty,
            SystemIndexAccessLevel.NONE
        );
        {
            IndexNotFoundException infe = expectThrows(
                IndexNotFoundException.class,
                () -> indexNameExpressionResolver.concreteIndexNames(context, "baz*")
            );
            assertThat(infe.getIndex().getName(), equalTo("baz*"));
        }
        String[] results = indexNameExpressionResolver.concreteIndexNames(context, "foo", "baz*");
        assertEquals(1, results.length);
        assertEquals("foo", results[0]);
        results = indexNameExpressionResolver.concreteIndexNames(context, "foofoobar");
        assertEquals(2, results.length);
        assertThat(results, arrayContainingInAnyOrder("foo", "foobar"));
        {
            // unavailable indices are ignored but no indices are disallowed
            expectThrows(IndexNotFoundException.class, () -> indexNameExpressionResolver.concreteIndexNames(context, "bar", "baz"));
        }
    }
    // error on unavailable but allow no indices
    {
        IndicesOptions noExpandErrorUnavailable = IndicesOptions.fromOptions(false, true, false, false, randomBoolean());
        IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(
            project,
            noExpandErrorUnavailable,
            SystemIndexAccessLevel.NONE
        );
        {
            IndexNotFoundException infe = expectThrows(
                IndexNotFoundException.class,
                () -> indexNameExpressionResolver.concreteIndexNames(context, "baz*")
            );
            assertThat(infe.getIndex().getName(), equalTo("baz*"));
        }
        {
            IndexNotFoundException infe = expectThrows(
                IndexNotFoundException.class,
                () -> indexNameExpressionResolver.concreteIndexNames(context, "foo", "baz*")
            );
            assertThat(infe.getIndex().getName(), equalTo("baz*"));
        }
        {
            // unavailable indices are not ignored, hence the error on the first unavailable indices encountered
            IndexNotFoundException infe = expectThrows(
                IndexNotFoundException.class,
                () -> indexNameExpressionResolver.concreteIndexNames(context, "bar", "baz")
            );
            assertThat(infe.getIndex().getName(), equalTo("bar"));
        }
        {
            String[] results = indexNameExpressionResolver.concreteIndexNames(context, "foofoobar");
            assertEquals(2, results.length);
            assertThat(results, arrayContainingInAnyOrder("foo", "foobar"));
        }
    }
    // error on both unavailable and no indices
    {
        IndicesOptions noExpandStrict = IndicesOptions.fromOptions(false, false, false, false, randomBoolean());
        IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(
            project,
            noExpandStrict,
            SystemIndexAccessLevel.NONE
        );
        IndexNotFoundException infe = expectThrows(
            IndexNotFoundException.class,
            () -> indexNameExpressionResolver.concreteIndexNames(context, "baz*")
        );
        assertThat(infe.getIndex().getName(), equalTo("baz*"));
        IndexNotFoundException infe2 = expectThrows(
            IndexNotFoundException.class,
            () -> indexNameExpressionResolver.concreteIndexNames(context, "foo", "baz*")
        );
        assertThat(infe2.getIndex().getName(), equalTo("baz*"));
        String[] results = indexNameExpressionResolver.concreteIndexNames(context, "foofoobar");
        assertEquals(2, results.length);
        assertThat(results, arrayContainingInAnyOrder("foo", "foobar"));
    }
}
public void testIndexOptionsSingleIndexNoExpandWildcards() {
    // strictSingleIndexNoExpandForbidClosed: each expression must resolve to exactly one open
    // concrete index — multi-index aliases and closed indices are rejected.
    ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId())
        .put(indexBuilder("foo").putAlias(AliasMetadata.builder("foofoobar")))
        .put(indexBuilder("foobar").putAlias(AliasMetadata.builder("foofoobar")))
        .put(indexBuilder("foofoo-closed").state(IndexMetadata.State.CLOSE))
        .put(indexBuilder("foofoo").putAlias(AliasMetadata.builder("barbaz")))
        .build();
    // error on both unavailable and no indices + every alias needs to expand to a single index
    {
        IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(
            project,
            IndicesOptions.strictSingleIndexNoExpandForbidClosed(),
            SystemIndexAccessLevel.NONE
        );
        IndexNotFoundException infe = expectThrows(
            IndexNotFoundException.class,
            () -> indexNameExpressionResolver.concreteIndexNames(context, "baz*")
        );
        assertThat(infe.getIndex().getName(), equalTo("baz*"));
    }
    {
        IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(
            project,
            IndicesOptions.strictSingleIndexNoExpandForbidClosed(),
            SystemIndexAccessLevel.NONE
        );
        IndexNotFoundException infe = expectThrows(
            IndexNotFoundException.class,
            () -> indexNameExpressionResolver.concreteIndexNames(context, "foo", "baz*")
        );
        assertThat(infe.getIndex().getName(), equalTo("baz*"));
    }
    {
        // "foofoobar" is an alias over two indices, which single-index mode forbids
        IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(
            project,
            IndicesOptions.strictSingleIndexNoExpandForbidClosed(),
            SystemIndexAccessLevel.NONE
        );
        IllegalArgumentException e = expectThrows(
            IllegalArgumentException.class,
            () -> indexNameExpressionResolver.concreteIndexNames(context, "foofoobar")
        );
        assertThat(e.getMessage(), containsString("alias [foofoobar] has more than one index associated with it"));
    }
    {
        IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(
            project,
            IndicesOptions.strictSingleIndexNoExpandForbidClosed(),
            SystemIndexAccessLevel.NONE
        );
        IllegalArgumentException e = expectThrows(
            IllegalArgumentException.class,
            () -> indexNameExpressionResolver.concreteIndexNames(context, "foo", "foofoobar")
        );
        assertThat(e.getMessage(), containsString("alias [foofoobar] has more than one index associated with it"));
    }
    {
        // forbidClosed: naming the closed index directly raises IndexClosedException
        IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(
            project,
            IndicesOptions.strictSingleIndexNoExpandForbidClosed(),
            SystemIndexAccessLevel.NONE
        );
        IndexClosedException ince = expectThrows(
            IndexClosedException.class,
            () -> indexNameExpressionResolver.concreteIndexNames(context, "foofoo-closed", "foofoobar")
        );
        assertThat(ince.getMessage(), equalTo("closed"));
        assertEquals(ince.getIndex().getName(), "foofoo-closed");
    }
    // "barbaz" points at the single index "foofoo", so it is accepted in single-index mode
    IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(
        project,
        IndicesOptions.strictSingleIndexNoExpandForbidClosed(),
        SystemIndexAccessLevel.NONE
    );
    String[] results = indexNameExpressionResolver.concreteIndexNames(context, "foo", "barbaz");
    assertEquals(2, results.length);
    assertThat(results, arrayContainingInAnyOrder("foo", "foofoo"));
}
public void testIndexOptionsEmptyCluster() {
    // Resolution against a project with no indices at all, under strict, lenient,
    // disallow-empty, and no-wildcard-expansion option sets.
    ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()).build();
    IndicesOptions options = IndicesOptions.strictExpandOpen();
    final IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(
        project,
        options,
        SystemIndexAccessLevel.NONE
    );
    String[] results = indexNameExpressionResolver.concreteIndexNames(context, Strings.EMPTY_ARRAY);
    assertThat(results, emptyArray());
    {
        // strict: a missing concrete name is an error
        IndexNotFoundException infe = expectThrows(
            IndexNotFoundException.class,
            () -> indexNameExpressionResolver.concreteIndexNames(context, "foo")
        );
        assertThat(infe.getIndex().getName(), equalTo("foo"));
    }
    // strict still tolerates a wildcard that matches nothing
    results = indexNameExpressionResolver.concreteIndexNames(context, "foo*");
    assertThat(results, emptyArray());
    {
        IndexNotFoundException infe = expectThrows(
            IndexNotFoundException.class,
            () -> indexNameExpressionResolver.concreteIndexNames(context, "foo*", "bar")
        );
        assertThat(infe.getIndex().getName(), equalTo("bar"));
    }
    // lenient: everything resolves to empty without errors
    final IndexNameExpressionResolver.Context context2 = new IndexNameExpressionResolver.Context(
        project,
        IndicesOptions.lenientExpandOpen(),
        SystemIndexAccessLevel.NONE
    );
    results = indexNameExpressionResolver.concreteIndexNames(context2, Strings.EMPTY_ARRAY);
    assertThat(results, emptyArray());
    results = indexNameExpressionResolver.concreteIndexNames(context2, "foo");
    assertThat(results, emptyArray());
    results = indexNameExpressionResolver.concreteIndexNames(context2, "foo*");
    assertThat(results, emptyArray());
    results = indexNameExpressionResolver.concreteIndexNames(context2, "foo*", "bar");
    assertThat(results, emptyArray());
    // allowNoIndices=false: the implicit _all over an empty project must fail
    final IndexNameExpressionResolver.Context context3 = new IndexNameExpressionResolver.Context(
        project,
        IndicesOptions.fromOptions(true, false, true, false),
        SystemIndexAccessLevel.NONE
    );
    {
        IndexNotFoundException infe = expectThrows(
            IndexNotFoundException.class,
            () -> indexNameExpressionResolver.concreteIndexNames(context3, Strings.EMPTY_ARRAY)
        );
        assertThat(infe.getResourceId().toString(), equalTo("[_all]"));
    }
    // no wildcard expand
    final IndexNameExpressionResolver.Context context4 = new IndexNameExpressionResolver.Context(
        project,
        IndicesOptions.fromOptions(false, true, false, false),
        randomFrom(SystemIndexAccessLevel.values())
    );
    results = indexNameExpressionResolver.concreteIndexNames(context4, Strings.EMPTY_ARRAY);
    assertThat(results, emptyArray());
    {
        IndexNotFoundException infe = expectThrows(
            IndexNotFoundException.class,
            () -> indexNameExpressionResolver.concreteIndexNames(context4, "foo")
        );
        assertThat(infe.getIndex().getName(), equalTo("foo"));
    }
    {
        // without expansion "foo*" is treated as a literal, missing name
        IndexNotFoundException infe = expectThrows(
            IndexNotFoundException.class,
            () -> indexNameExpressionResolver.concreteIndexNames(context4, "foo*")
        );
        assertThat(infe.getIndex().getName(), equalTo("foo*"));
    }
    {
        IndexNotFoundException infe = expectThrows(
            IndexNotFoundException.class,
            () -> indexNameExpressionResolver.concreteIndexNames(context4, "bar", "foo*")
        );
        assertThat(infe.getIndex().getName(), equalTo("bar"));
    }
}
public void testConcreteIndicesIgnoreIndicesOneMissingIndex() {
    // Two concrete indices exist; "testZZZ" does not, and strict resolution must reject it.
    final ProjectMetadata projectMetadata = ProjectMetadata.builder(randomUniqueProjectId())
        .put(indexBuilder("testXXX"))
        .put(indexBuilder("kuku"))
        .build();
    final IndexNameExpressionResolver.Context strictContext = new IndexNameExpressionResolver.Context(
        projectMetadata,
        IndicesOptions.strictExpandOpen(),
        SystemIndexAccessLevel.NONE
    );
    IndexNotFoundException notFound = expectThrows(
        IndexNotFoundException.class,
        () -> indexNameExpressionResolver.concreteIndexNames(strictContext, "testZZZ")
    );
    assertThat(notFound.getMessage(), is("no such index [testZZZ]"));
    // The same failure is expected when wildcard expansion is disabled entirely.
    final IndexNameExpressionResolver.Context noExpandContext = new IndexNameExpressionResolver.Context(
        projectMetadata,
        IndicesOptions.builder().wildcardOptions(doNotExpandWildcards()).build(),
        randomFrom(SystemIndexAccessLevel.values())
    );
    notFound = expectThrows(
        IndexNotFoundException.class,
        () -> indexNameExpressionResolver.concreteIndexNames(noExpandContext, "testZZZ")
    );
    assertThat(notFound.getMessage(), is("no such index [testZZZ]"));
}
public void testConcreteIndicesIgnoreIndicesOneMissingIndexOtherFound() {
    // Lenient options silently drop the unknown "testZZZ" and keep the existing "testXXX".
    final ProjectMetadata projectMetadata = ProjectMetadata.builder(randomUniqueProjectId())
        .put(indexBuilder("testXXX"))
        .put(indexBuilder("kuku"))
        .build();
    final IndexNameExpressionResolver.Context lenientContext = new IndexNameExpressionResolver.Context(
        projectMetadata,
        IndicesOptions.lenientExpandOpen(),
        SystemIndexAccessLevel.NONE
    );
    String[] resolved = indexNameExpressionResolver.concreteIndexNames(lenientContext, "testXXX", "testZZZ");
    assertThat(newHashSet(resolved), equalTo(newHashSet("testXXX")));
}
public void testConcreteIndicesIgnoreIndicesAllMissing() {
    // Every requested name is unknown; strict resolution fails on the first one ("testMo").
    final ProjectMetadata projectMetadata = ProjectMetadata.builder(randomUniqueProjectId())
        .put(indexBuilder("testXXX"))
        .put(indexBuilder("kuku"))
        .build();
    final IndexNameExpressionResolver.Context strictContext = new IndexNameExpressionResolver.Context(
        projectMetadata,
        IndicesOptions.strictExpandOpen(),
        SystemIndexAccessLevel.NONE
    );
    IndexNotFoundException notFound = expectThrows(
        IndexNotFoundException.class,
        () -> indexNameExpressionResolver.concreteIndexNames(strictContext, "testMo", "testMahdy")
    );
    assertThat(notFound.getMessage(), is("no such index [testMo]"));
    // Disabling wildcard expansion does not change the outcome for concrete names.
    final IndexNameExpressionResolver.Context noExpandContext = new IndexNameExpressionResolver.Context(
        projectMetadata,
        IndicesOptions.builder().wildcardOptions(doNotExpandWildcards()).build(),
        randomFrom(SystemIndexAccessLevel.values())
    );
    notFound = expectThrows(
        IndexNotFoundException.class,
        () -> indexNameExpressionResolver.concreteIndexNames(noExpandContext, "testMo", "testMahdy")
    );
    assertThat(notFound.getMessage(), is("no such index [testMo]"));
}
public void testConcreteIndicesIgnoreIndicesEmptyRequest() {
    // With lenient options and no expressions, every index in the project is returned.
    final ProjectMetadata projectMetadata = ProjectMetadata.builder(randomUniqueProjectId())
        .put(indexBuilder("testXXX"))
        .put(indexBuilder("kuku"))
        .build();
    final IndexNameExpressionResolver.Context lenientContext = new IndexNameExpressionResolver.Context(
        projectMetadata,
        IndicesOptions.lenientExpandOpen(),
        SystemIndexAccessLevel.NONE
    );
    String[] resolved = indexNameExpressionResolver.concreteIndexNames(lenientContext);
    assertThat(newHashSet(resolved), equalTo(newHashSet("testXXX", "kuku")));
}
public void testConcreteIndicesNoIndicesErrorMessage() {
    // Empty project with allowNoIndices=false: resolving the implicit _all must fail
    // with a message that explains no indices exist.
    final ProjectMetadata emptyProject = ProjectMetadata.builder(randomUniqueProjectId()).build();
    final IndexNameExpressionResolver.Context resolverContext = new IndexNameExpressionResolver.Context(
        emptyProject,
        IndicesOptions.fromOptions(false, false, true, true),
        SystemIndexAccessLevel.NONE
    );
    IndexNotFoundException failure = expectThrows(
        IndexNotFoundException.class,
        () -> indexNameExpressionResolver.concreteIndices(resolverContext, new String[] {})
    );
    assertThat(failure.getMessage(), is("no such index [_all] and no indices exist"));
}
public void testConcreteIndicesNoIndicesErrorMessageNoExpand() {
    // Same as the non-NoExpand variant, but with wildcard expansion disabled:
    // the error message for the implicit _all is unchanged.
    final ProjectMetadata emptyProject = ProjectMetadata.builder(randomUniqueProjectId()).build();
    final IndexNameExpressionResolver.Context resolverContext = new IndexNameExpressionResolver.Context(
        emptyProject,
        IndicesOptions.fromOptions(false, false, false, false),
        SystemIndexAccessLevel.NONE
    );
    IndexNotFoundException failure = expectThrows(
        IndexNotFoundException.class,
        () -> indexNameExpressionResolver.concreteIndices(resolverContext, new String[] {})
    );
    assertThat(failure.getMessage(), is("no such index [_all] and no indices exist"));
}
public void testConcreteIndicesWildcardExpansion() {
    // "testX*" matches two open indices (testXXX, testXXY) and one closed one (testXYY);
    // which of them are returned depends on the open/closed expansion flags.
    final ProjectMetadata projectMetadata = ProjectMetadata.builder(randomUniqueProjectId())
        .put(indexBuilder("testXXX").state(State.OPEN))
        .put(indexBuilder("testXXY").state(State.OPEN))
        .put(indexBuilder("testXYY").state(State.CLOSE))
        .put(indexBuilder("testYYY").state(State.OPEN))
        .put(indexBuilder("testYYX").state(State.OPEN))
        .build();
    // Expand to neither open nor closed: the wildcard matches nothing.
    final IndexNameExpressionResolver.Context noExpansion = new IndexNameExpressionResolver.Context(
        projectMetadata,
        IndicesOptions.fromOptions(true, true, false, false),
        SystemIndexAccessLevel.NONE
    );
    assertThat(newHashSet(indexNameExpressionResolver.concreteIndexNames(noExpansion, "testX*")), equalTo(new HashSet<String>()));
    // Expand to open indices only.
    final IndexNameExpressionResolver.Context openOnly = new IndexNameExpressionResolver.Context(
        projectMetadata,
        IndicesOptions.fromOptions(true, true, true, false),
        SystemIndexAccessLevel.NONE
    );
    assertThat(
        newHashSet(indexNameExpressionResolver.concreteIndexNames(openOnly, "testX*")),
        equalTo(newHashSet("testXXX", "testXXY"))
    );
    // Expand to closed indices only.
    final IndexNameExpressionResolver.Context closedOnly = new IndexNameExpressionResolver.Context(
        projectMetadata,
        IndicesOptions.fromOptions(true, true, false, true),
        SystemIndexAccessLevel.NONE
    );
    assertThat(newHashSet(indexNameExpressionResolver.concreteIndexNames(closedOnly, "testX*")), equalTo(newHashSet("testXYY")));
    // Expand to both open and closed indices.
    final IndexNameExpressionResolver.Context openAndClosed = new IndexNameExpressionResolver.Context(
        projectMetadata,
        IndicesOptions.fromOptions(true, true, true, true),
        SystemIndexAccessLevel.NONE
    );
    assertThat(
        newHashSet(indexNameExpressionResolver.concreteIndexNames(openAndClosed, "testX*")),
        equalTo(newHashSet("testXXX", "testXXY", "testXYY"))
    );
}
public void testConcreteIndicesWildcardWithNegation() {
    // "-expr" only acts as an exclusion when it follows an expression that included something;
    // otherwise it is treated as a literal index name — and some indices here really are
    // named with a leading "-".
    ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId())
        .put(indexBuilder("testXXX").state(State.OPEN))
        .put(indexBuilder("testXXY").state(State.OPEN))
        .put(indexBuilder("testXYY").state(State.OPEN))
        .put(indexBuilder("-testXYZ").state(State.OPEN))
        .put(indexBuilder("-testXZZ").state(State.OPEN))
        .put(indexBuilder("-testYYY").state(State.OPEN))
        .put(indexBuilder("testYYY").state(State.OPEN))
        .put(indexBuilder("testYYX").state(State.OPEN))
        .build();
    IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(
        project,
        IndicesOptions.fromOptions(true, true, true, true),
        SystemIndexAccessLevel.NONE
    );
    assertThat(
        newHashSet(indexNameExpressionResolver.concreteIndexNames(context, "testX*")),
        equalTo(newHashSet("testXXX", "testXXY", "testXYY"))
    );
    // "-testX*" after "test*" excludes the testX-prefixed matches
    assertThat(
        newHashSet(indexNameExpressionResolver.concreteIndexNames(context, "test*", "-testX*")),
        equalTo(newHashSet("testYYY", "testYYX"))
    );
    // leading "-testX*" has nothing to exclude from, so it matches the literal "-testX..." indices
    assertThat(
        newHashSet(indexNameExpressionResolver.concreteIndexNames(context, "-testX*")),
        equalTo(newHashSet("-testXYZ", "-testXZZ"))
    );
    assertThat(
        newHashSet(indexNameExpressionResolver.concreteIndexNames(context, "testXXY", "-testX*")),
        equalTo(newHashSet("testXXY", "-testXYZ", "-testXZZ"))
    );
    // "--testX*" excludes the "-testX..." indices from "*"
    assertThat(
        newHashSet(indexNameExpressionResolver.concreteIndexNames(context, "*", "--testX*")),
        equalTo(newHashSet("testXXX", "testXXY", "testXYY", "testYYX", "testYYY", "-testYYY"))
    );
    assertThat(
        newHashSet(indexNameExpressionResolver.concreteIndexNames(context, "-testXXX", "test*")),
        equalTo(newHashSet("testYYX", "testXXX", "testXYY", "testYYY", "testXXY"))
    );
    assertThat(
        newHashSet(indexNameExpressionResolver.concreteIndexNames(context, "test*", "-testXXX")),
        equalTo(newHashSet("testYYX", "testXYY", "testYYY", "testXXY"))
    );
    assertThat(
        newHashSet(indexNameExpressionResolver.concreteIndexNames(context, "testXXX", "testXXY", "testYYY", "-testYYY")),
        equalTo(newHashSet("testXXX", "testXXY", "testYYY", "-testYYY"))
    );
    assertThat(
        newHashSet(indexNameExpressionResolver.concreteIndexNames(context, "testYYY", "testYYX", "testX*", "-testXXX")),
        equalTo(newHashSet("testYYY", "testYYX", "testXXY", "testXYY"))
    );
    assertThat(
        newHashSet(indexNameExpressionResolver.concreteIndexNames(context, "-testXXX", "*testY*", "-testYYY")),
        equalTo(newHashSet("testYYX", "-testYYY"))
    );
    String[] indexNames = indexNameExpressionResolver.concreteIndexNames(project, IndicesOptions.lenientExpandOpen(), "-doesnotexist");
    assertEquals(0, indexNames.length);
    assertThat(
        newHashSet(indexNameExpressionResolver.concreteIndexNames(project, IndicesOptions.lenientExpandOpen(), "-*")),
        equalTo(newHashSet("-testXYZ", "-testXZZ", "-testYYY"))
    );
    assertThat(
        newHashSet(
            indexNameExpressionResolver.concreteIndexNames(
                project,
                IndicesOptions.lenientExpandOpen(),
                "testXXX",
                "testXXY",
                "testXYY",
                "-testXXY"
            )
        ),
        equalTo(newHashSet("testXXX", "testXYY", "testXXY"))
    );
    indexNames = indexNameExpressionResolver.concreteIndexNames(project, IndicesOptions.lenientExpandOpen(), "*", "-*");
    assertEquals(0, indexNames.length);
}
public void testConcreteIndicesWildcardAndAliases() {
    // "foo" is both a wildcard-matchable prefix and an alias over both indices; the
    // ignoreAliases flag controls whether expressions may resolve through the alias.
    ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId())
        .put(indexBuilder("foo_foo").state(State.OPEN).putAlias(AliasMetadata.builder("foo")))
        .put(indexBuilder("bar_bar").state(State.OPEN).putAlias(AliasMetadata.builder("foo")))
        .build();
    // when ignoreAliases option is set, concreteIndexNames resolves the provided expressions
    // only against the defined indices
    IndicesOptions ignoreAliasesOptions = IndicesOptions.fromOptions(false, randomBoolean(), true, false, true, false, true, false);
    String[] indexNamesIndexWildcard = indexNameExpressionResolver.concreteIndexNames(project, ignoreAliasesOptions, "foo*");
    assertEquals(1, indexNamesIndexWildcard.length);
    assertEquals("foo_foo", indexNamesIndexWildcard[0]);
    indexNamesIndexWildcard = indexNameExpressionResolver.concreteIndexNames(project, ignoreAliasesOptions, "*o");
    assertEquals(1, indexNamesIndexWildcard.length);
    assertEquals("foo_foo", indexNamesIndexWildcard[0]);
    indexNamesIndexWildcard = indexNameExpressionResolver.concreteIndexNames(project, ignoreAliasesOptions, "f*o");
    assertEquals(1, indexNamesIndexWildcard.length);
    assertEquals("foo_foo", indexNamesIndexWildcard[0]);
    // a concrete name that is only an alias is rejected outright when aliases are ignored
    IllegalArgumentException iae = expectThrows(
        IllegalArgumentException.class,
        () -> indexNameExpressionResolver.concreteIndexNames(project, ignoreAliasesOptions, "foo")
    );
    assertEquals(
        "The provided expression [foo] matches an alias, specify the corresponding concrete indices instead.",
        iae.getMessage()
    );
    // same as above, but DO NOT expand wildcards
    iae = expectThrows(
        IllegalArgumentException.class,
        () -> indexNameExpressionResolver.concreteIndexNames(
            project,
            IndicesOptions.fromOptions(false, randomBoolean(), false, false, true, false, true, false),
            "foo"
        )
    );
    assertEquals(
        "The provided expression [foo] matches an alias, specify the corresponding concrete indices instead.",
        iae.getMessage()
    );
    // when ignoreAliases option is not set, concreteIndexNames resolves the provided
    // expressions against the defined indices and aliases
    IndicesOptions indicesAndAliasesOptions = IndicesOptions.fromOptions(
        false,
        randomBoolean(),
        true,
        false,
        true,
        false,
        false,
        false
    );
    List<String> indexNames = Arrays.asList(indexNameExpressionResolver.concreteIndexNames(project, indicesAndAliasesOptions, "foo*"));
    assertEquals(2, indexNames.size());
    assertTrue(indexNames.contains("foo_foo"));
    assertTrue(indexNames.contains("bar_bar"));
    indexNames = Arrays.asList(indexNameExpressionResolver.concreteIndexNames(project, indicesAndAliasesOptions, "*o"));
    assertEquals(2, indexNames.size());
    assertTrue(indexNames.contains("foo_foo"));
    assertTrue(indexNames.contains("bar_bar"));
    indexNames = Arrays.asList(indexNameExpressionResolver.concreteIndexNames(project, indicesAndAliasesOptions, "f*o"));
    assertEquals(2, indexNames.size());
    assertTrue(indexNames.contains("foo_foo"));
    assertTrue(indexNames.contains("bar_bar"));
    // the alias "foo" expands to both of its backing indices
    indexNames = Arrays.asList(indexNameExpressionResolver.concreteIndexNames(project, indicesAndAliasesOptions, "foo"));
    assertEquals(2, indexNames.size());
    assertTrue(indexNames.contains("foo_foo"));
    assertTrue(indexNames.contains("bar_bar"));
    // same as above, but DO NOT expand wildcards
    indicesAndAliasesOptions = IndicesOptions.fromOptions(false, randomBoolean(), false, false, true, false, false, false);
    indexNames = Arrays.asList(indexNameExpressionResolver.concreteIndexNames(project, indicesAndAliasesOptions, "foo"));
    assertEquals(2, indexNames.size());
    assertTrue(indexNames.contains("foo_foo"));
    assertTrue(indexNames.contains("bar_bar"));
}
public void testHiddenAliasAndHiddenIndexResolution() {
final String visibleIndex = "visible_index";
final String hiddenIndex = "hidden_index";
final String visibleAlias = "visible_alias";
final String hiddenAlias = "hidden_alias";
final String dottedHiddenAlias = ".hidden_alias";
final String dottedHiddenIndex = ".hidden_index";
IndicesOptions excludeHiddenOptions = IndicesOptions.fromOptions(false, true, true, false, false, true, false, false, false);
IndicesOptions includeHiddenOptions = IndicesOptions.fromOptions(false, true, true, false, true, true, false, false, false);
{
// A visible index with a visible alias and a hidden index with a hidden alias
ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId())
.put(indexBuilder(visibleIndex).state(State.OPEN).putAlias(AliasMetadata.builder(visibleAlias)))
.put(
indexBuilder(hiddenIndex, Settings.builder().put(INDEX_HIDDEN_SETTING.getKey(), true).build()).state(State.OPEN)
.putAlias(AliasMetadata.builder(hiddenAlias).isHidden(true))
)
.build();
// A total wildcard should only be resolved to visible indices
String[] indexNames;
indexNames = indexNameExpressionResolver.concreteIndexNames(project, excludeHiddenOptions, "*");
assertThat(Arrays.asList(indexNames), containsInAnyOrder(visibleIndex));
// Unless hidden is specified in the options
indexNames = indexNameExpressionResolver.concreteIndexNames(project, includeHiddenOptions, "*");
assertThat(Arrays.asList(indexNames), containsInAnyOrder(visibleIndex, hiddenIndex));
// Both hidden indices and hidden aliases should not be included in wildcard resolution
indexNames = indexNameExpressionResolver.concreteIndexNames(project, excludeHiddenOptions, "hidden*", "visible*");
assertThat(Arrays.asList(indexNames), containsInAnyOrder(visibleIndex));
// unless it's specified in the options
indexNames = indexNameExpressionResolver.concreteIndexNames(project, includeHiddenOptions, "hidden*", "visible*");
assertThat(Arrays.asList(indexNames), containsInAnyOrder(visibleIndex, hiddenIndex));
// Only visible aliases should be included in wildcard resolution
indexNames = indexNameExpressionResolver.concreteIndexNames(project, excludeHiddenOptions, "*_alias");
assertThat(Arrays.asList(indexNames), containsInAnyOrder(visibleIndex));
// unless, again, it's specified in the options
indexNames = indexNameExpressionResolver.concreteIndexNames(project, includeHiddenOptions, "*_alias");
assertThat(Arrays.asList(indexNames), containsInAnyOrder(visibleIndex, hiddenIndex));
// If we specify a hidden alias by name, the options shouldn't matter.
indexNames = indexNameExpressionResolver.concreteIndexNames(project, includeHiddenOptions, hiddenAlias);
assertThat(Arrays.asList(indexNames), containsInAnyOrder(hiddenIndex));
indexNames = indexNameExpressionResolver.concreteIndexNames(project, excludeHiddenOptions, hiddenAlias);
assertThat(Arrays.asList(indexNames), containsInAnyOrder(hiddenIndex));
}
{
// A visible alias that points to one hidden and one visible index
ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId())
.put(indexBuilder(visibleIndex).state(State.OPEN).putAlias(AliasMetadata.builder(visibleAlias)))
.put(
indexBuilder(hiddenIndex, Settings.builder().put(INDEX_HIDDEN_SETTING.getKey(), true).build()).state(State.OPEN)
.putAlias(AliasMetadata.builder(visibleAlias))
)
.build();
// If the alias is resolved to concrete indices, it should resolve to all the indices it points to, hidden or not.
String[] indexNames;
indexNames = indexNameExpressionResolver.concreteIndexNames(project, excludeHiddenOptions, "*_alias");
assertThat(Arrays.asList(indexNames), containsInAnyOrder(visibleIndex, hiddenIndex));
indexNames = indexNameExpressionResolver.concreteIndexNames(project, includeHiddenOptions, "*_alias");
assertThat(Arrays.asList(indexNames), containsInAnyOrder(visibleIndex, hiddenIndex));
indexNames = indexNameExpressionResolver.concreteIndexNames(project, includeHiddenOptions, visibleAlias);
assertThat(Arrays.asList(indexNames), containsInAnyOrder(visibleIndex, hiddenIndex));
indexNames = indexNameExpressionResolver.concreteIndexNames(project, includeHiddenOptions, visibleAlias);
assertThat(Arrays.asList(indexNames), containsInAnyOrder(visibleIndex, hiddenIndex));
// total wildcards should also resolve both visible and hidden indices if there is a visible alias
indexNames = indexNameExpressionResolver.concreteIndexNames(project, excludeHiddenOptions, "*");
assertThat(Arrays.asList(indexNames), containsInAnyOrder(visibleIndex, hiddenIndex));
}
{
// A hidden alias that points to one hidden and one visible index
ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId())
.put(indexBuilder(visibleIndex).state(State.OPEN).putAlias(AliasMetadata.builder(hiddenAlias).isHidden(true)))
.put(
indexBuilder(hiddenIndex, Settings.builder().put(INDEX_HIDDEN_SETTING.getKey(), true).build()).state(State.OPEN)
.putAlias(AliasMetadata.builder(hiddenAlias).isHidden(true))
)
.build();
String[] indexNames;
indexNames = indexNameExpressionResolver.concreteIndexNames(project, excludeHiddenOptions, "*");
assertThat(Arrays.asList(indexNames), containsInAnyOrder(visibleIndex));
indexNames = indexNameExpressionResolver.concreteIndexNames(project, includeHiddenOptions, "*");
assertThat(Arrays.asList(indexNames), containsInAnyOrder(visibleIndex, hiddenIndex));
// A query that only matches the hidden resolves to no indices
indexNames = indexNameExpressionResolver.concreteIndexNames(project, excludeHiddenOptions, "*_alias");
assertThat(Arrays.asList(indexNames), empty());
// But if we include hidden it should be resolved to both indices
indexNames = indexNameExpressionResolver.concreteIndexNames(project, includeHiddenOptions, "*_alias");
assertThat(Arrays.asList(indexNames), containsInAnyOrder(visibleIndex, hiddenIndex));
// If we specify the alias by name it should resolve to both indices, regardless of if the options specify hidden
indexNames = indexNameExpressionResolver.concreteIndexNames(project, excludeHiddenOptions, hiddenAlias);
assertThat(Arrays.asList(indexNames), containsInAnyOrder(visibleIndex, hiddenIndex));
indexNames = indexNameExpressionResolver.concreteIndexNames(project, includeHiddenOptions, hiddenAlias);
assertThat(Arrays.asList(indexNames), containsInAnyOrder(visibleIndex, hiddenIndex));
}
{
// A hidden alias with a dot-prefixed name that points to one hidden index with a dot prefix, and one hidden index without
ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId())
.put(
indexBuilder(dottedHiddenIndex, Settings.builder().put(INDEX_HIDDEN_SETTING.getKey(), true).build()).state(State.OPEN)
.putAlias(AliasMetadata.builder(dottedHiddenAlias).isHidden(true))
)
.put(
indexBuilder(hiddenIndex, Settings.builder().put(INDEX_HIDDEN_SETTING.getKey(), true).build()).state(State.OPEN)
.putAlias(AliasMetadata.builder(dottedHiddenAlias).isHidden(true))
)
.build();
String[] indexNames;
// A dot-prefixed pattern that includes only the hidden alias should resolve to both, regardless of the options
indexNames = indexNameExpressionResolver.concreteIndexNames(project, includeHiddenOptions, ".hidden_a*");
assertThat(Arrays.asList(indexNames), containsInAnyOrder(dottedHiddenIndex, hiddenIndex));
indexNames = indexNameExpressionResolver.concreteIndexNames(project, excludeHiddenOptions, ".hidden_a*");
assertThat(Arrays.asList(indexNames), containsInAnyOrder(dottedHiddenIndex, hiddenIndex));
// A query that doesn't include the dot resolves to no indices
indexNames = indexNameExpressionResolver.concreteIndexNames(project, excludeHiddenOptions, "*_alias");
assertThat(Arrays.asList(indexNames), empty());
// But should include both indices if the options do include hidden
indexNames = indexNameExpressionResolver.concreteIndexNames(project, includeHiddenOptions, "*_alias");
assertThat(Arrays.asList(indexNames), containsInAnyOrder(dottedHiddenIndex, hiddenIndex));
}
}
    /**
     * A hidden index carrying both a hidden alias and a visible alias, where index and alias
     * names share a common prefix: checks wildcard matching and wildcard exclusions ("-...")
     * with and without hidden expansion in the options.
     */
    public void testHiddenIndexWithVisibleAliasOverlappingNameResolution() {
        final String hiddenIndex = "my-hidden-index";
        final String hiddenAlias = "my-hidden-alias";
        final String visibleAlias = "my-visible-alias";

        IndicesOptions excludeHiddenOptions = IndicesOptions.fromOptions(false, true, true, false, false, true, false, false, false);
        IndicesOptions includeHiddenOptions = IndicesOptions.fromOptions(false, true, true, false, true, true, false, false, false);

        ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId())
            .put(
                indexBuilder(hiddenIndex, Settings.builder().put(INDEX_HIDDEN_SETTING.getKey(), true).build()).state(State.OPEN)
                    .putAlias(AliasMetadata.builder(hiddenAlias).isHidden(true))
                    .putAlias(AliasMetadata.builder(visibleAlias).build())
            )
            .build();

        String[] indexNames;
        // the visible alias exposes the hidden index to the broad wildcard
        indexNames = indexNameExpressionResolver.concreteIndexNames(project, excludeHiddenOptions, "my-*");
        assertThat(Arrays.asList(indexNames), containsInAnyOrder(hiddenIndex));
        // a pattern matching only hidden names resolves to nothing without hidden expansion
        indexNames = indexNameExpressionResolver.concreteIndexNames(project, excludeHiddenOptions, "my-hidden*");
        assertThat(Arrays.asList(indexNames), empty());
        // excluding the visible alias removes the only visible route to the hidden index
        indexNames = indexNameExpressionResolver.concreteIndexNames(project, excludeHiddenOptions, "my-*", "-my-visible*");
        assertThat(Arrays.asList(indexNames), empty());
        // with hidden expansion, excluding the hidden alias likewise leaves nothing
        indexNames = indexNameExpressionResolver.concreteIndexNames(project, includeHiddenOptions, "my-hidden*", "-my-hidden-a*");
        assertThat(Arrays.asList(indexNames), empty());
    }
    /**
     * Tests resolving the "_all" pattern (expressed as null, an empty array, or the literal
     * "_all") under randomized {@link IndicesOptions}, both against an empty project and
     * against a project with open and closed indices.
     */
    public void testConcreteIndicesAllPatternRandom() {
        for (int i = 0; i < 10; i++) {
            // the three equivalent spellings of "all indices"
            final String[] allIndices = switch (randomIntBetween(0, 2)) {
                case 0 -> null;
                case 1 -> new String[0];
                case 2 -> new String[] { Metadata.ALL };
                default -> throw new UnsupportedOperationException();
            };
            final IndicesOptions indicesOptions = IndicesOptions.fromOptions(
                randomBoolean(),
                randomBoolean(),
                randomBoolean(),
                randomBoolean()
            );

            {
                final ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()).build();
                IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(
                    project,
                    indicesOptions,
                    SystemIndexAccessLevel.NONE
                );

                // with no indices, asking for all indices should return empty list or exception, depending on indices options
                if (indicesOptions.allowNoIndices()) {
                    String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(context, allIndices);
                    assertThat(concreteIndices, notNullValue());
                    assertThat(concreteIndices.length, equalTo(0));
                } else {
                    expectThrows(IndexNotFoundException.class, () -> indexNameExpressionResolver.concreteIndexNames(context, allIndices));
                }
            }

            {
                // with existing indices, asking for all indices should return all open/closed indices depending on options
                ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId())
                    .put(indexBuilder("aaa").state(State.OPEN).putAlias(AliasMetadata.builder("aaa_alias1")))
                    .put(indexBuilder("bbb").state(State.OPEN).putAlias(AliasMetadata.builder("bbb_alias1")))
                    .put(indexBuilder("ccc").state(State.CLOSE).putAlias(AliasMetadata.builder("ccc_alias1")))
                    .build();
                IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(
                    project,
                    indicesOptions,
                    SystemIndexAccessLevel.NONE
                );
                if (indicesOptions.expandWildcardsOpen() || indicesOptions.expandWildcardsClosed() || indicesOptions.allowNoIndices()) {
                    String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(context, allIndices);
                    assertThat(concreteIndices, notNullValue());
                    // expected count: 2 open indices and/or 1 closed index, per the expansion flags
                    int expectedNumberOfIndices = 0;
                    if (indicesOptions.expandWildcardsOpen()) {
                        expectedNumberOfIndices += 2;
                    }
                    if (indicesOptions.expandWildcardsClosed()) {
                        expectedNumberOfIndices += 1;
                    }
                    assertThat(concreteIndices.length, equalTo(expectedNumberOfIndices));
                } else {
                    expectThrows(IndexNotFoundException.class, () -> indexNameExpressionResolver.concreteIndexNames(context, allIndices));
                }
            }
        }
    }
    /**
     * Tests resolving a wildcard pattern that matches no index or alias, under randomized
     * {@link IndicesOptions}: either an {@link IndexNotFoundException} or an empty result,
     * depending on the allowNoIndices / wildcard-expansion / ignoreUnavailable flags.
     */
    public void testConcreteIndicesWildcardNoMatch() {
        for (int i = 0; i < 10; i++) {
            IndicesOptions indicesOptions = IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean());
            ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId())
                .put(indexBuilder("aaa").state(State.OPEN).putAlias(AliasMetadata.builder("aaa_alias1")))
                .put(indexBuilder("bbb").state(State.OPEN).putAlias(AliasMetadata.builder("bbb_alias1")))
                .put(indexBuilder("ccc").state(State.CLOSE).putAlias(AliasMetadata.builder("ccc_alias1")))
                .build();
            IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(
                project,
                indicesOptions,
                SystemIndexAccessLevel.NONE
            );

            // "Foo*" matches nothing: strict options throw, lenient ones yield an empty result
            if (indicesOptions.allowNoIndices() == false
                || indicesOptions.expandWildcardExpressions() == false && indicesOptions.ignoreUnavailable() == false) {
                expectThrows(IndexNotFoundException.class, () -> indexNameExpressionResolver.concreteIndexNames(context, "Foo*"));
            } else {
                // asking for non existing wildcard pattern should return empty list or exception
                String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(context, "Foo*");
                assertThat(concreteIndices, notNullValue());
                assertThat(concreteIndices.length, equalTo(0));
            }
        }
    }
public void testIsAllIndicesNull() {
assertThat(IndexNameExpressionResolver.isAllIndices(null), equalTo(true));
}
public void testIsAllIndicesEmpty() {
assertThat(IndexNameExpressionResolver.isAllIndices(List.of()), equalTo(true));
}
public void testIsAllIndicesExplicitAll() {
assertThat(IndexNameExpressionResolver.isAllIndices(List.of("_all")), equalTo(true));
}
public void testIsAllIndicesExplicitAllPlusOther() {
assertThat(IndexNameExpressionResolver.isAllIndices(List.of("_all", "other")), equalTo(false));
}
public void testIsNoneIndices() {
assertThat(IndexNameExpressionResolver.isNoneExpression(new String[] { "*", "-*" }), equalTo(true));
}
public void testIsAllIndicesNormalIndexes() {
assertThat(IndexNameExpressionResolver.isAllIndices(List.of("index1", "index2", "index3")), equalTo(false));
}
public void testIsAllIndicesWildcard() {
assertThat(IndexNameExpressionResolver.isAllIndices(List.of("*")), equalTo(false));
}
public void testIsExplicitAllIndicesNull() {
assertThat(IndexNameExpressionResolver.isExplicitAllPattern(null), equalTo(false));
}
public void testIsExplicitAllIndicesEmpty() {
assertThat(IndexNameExpressionResolver.isExplicitAllPattern(List.of()), equalTo(false));
}
public void testIsExplicitAllIndicesExplicitAll() {
assertThat(IndexNameExpressionResolver.isExplicitAllPattern(List.of("_all")), equalTo(true));
}
public void testIsExplicitAllIndicesExplicitAllPlusOther() {
assertThat(IndexNameExpressionResolver.isExplicitAllPattern(List.of("_all", "other")), equalTo(false));
}
public void testIsExplicitAllIndicesNormalIndexes() {
assertThat(IndexNameExpressionResolver.isExplicitAllPattern(List.of("index1", "index2", "index3")), equalTo(false));
}
public void testIsExplicitAllIndicesWildcard() {
assertThat(IndexNameExpressionResolver.isExplicitAllPattern(List.of("*")), equalTo(false));
}
public void testIndexOptionsFailClosedIndicesAndAliases() {
ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId())
.put(
indexBuilder("foo1-closed").state(IndexMetadata.State.CLOSE)
.putAlias(AliasMetadata.builder("foobar1-closed"))
.putAlias(AliasMetadata.builder("foobar2-closed"))
)
.put(indexBuilder("foo2-closed").state(IndexMetadata.State.CLOSE).putAlias(AliasMetadata.builder("foobar2-closed")))
.put(indexBuilder("foo3").putAlias(AliasMetadata.builder("foobar2-closed")))
.build();
IndexNameExpressionResolver.Context contextICE = new IndexNameExpressionResolver.Context(
project,
IndicesOptions.strictExpandOpenAndForbidClosed(),
SystemIndexAccessLevel.NONE
);
expectThrows(IndexClosedException.class, () -> indexNameExpressionResolver.concreteIndexNames(contextICE, "foo1-closed"));
expectThrows(IndexClosedException.class, () -> indexNameExpressionResolver.concreteIndexNames(contextICE, "foobar1-closed"));
IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(
project,
IndicesOptions.fromOptions(
true,
contextICE.getOptions().allowNoIndices(),
contextICE.getOptions().expandWildcardsOpen(),
contextICE.getOptions().expandWildcardsClosed(),
contextICE.getOptions()
),
SystemIndexAccessLevel.NONE
);
String[] results = indexNameExpressionResolver.concreteIndexNames(context, "foo1-closed");
assertThat(results, emptyArray());
results = indexNameExpressionResolver.concreteIndexNames(context, "foobar1-closed");
assertThat(results, emptyArray());
context = new IndexNameExpressionResolver.Context(project, IndicesOptions.lenientExpandOpen(), SystemIndexAccessLevel.NONE);
results = indexNameExpressionResolver.concreteIndexNames(context, "foo1-closed");
assertThat(results, arrayWithSize(1));
assertThat(results, arrayContaining("foo1-closed"));
results = indexNameExpressionResolver.concreteIndexNames(context, "foobar1-closed");
assertThat(results, arrayWithSize(1));
assertThat(results, arrayContaining("foo1-closed"));
// testing an alias pointing to three indices:
context = new IndexNameExpressionResolver.Context(
project,
IndicesOptions.strictExpandOpenAndForbidClosed(),
SystemIndexAccessLevel.NONE
);
try {
indexNameExpressionResolver.concreteIndexNames(context, "foobar2-closed");
fail("foo2-closed should be closed, but it is open");
} catch (IndexClosedException e) {
// expected
}
context = new IndexNameExpressionResolver.Context(
project,
IndicesOptions.fromOptions(
true,
context.getOptions().allowNoIndices(),
context.getOptions().expandWildcardsOpen(),
context.getOptions().expandWildcardsClosed(),
context.getOptions()
),
SystemIndexAccessLevel.NONE
);
results = indexNameExpressionResolver.concreteIndexNames(context, "foobar2-closed");
assertThat(results, arrayWithSize(1));
assertThat(results, arrayContaining("foo3"));
context = new IndexNameExpressionResolver.Context(project, IndicesOptions.lenientExpandOpen(), SystemIndexAccessLevel.NONE);
results = indexNameExpressionResolver.concreteIndexNames(context, "foobar2-closed");
assertThat(results, arrayWithSize(3));
assertThat(results, arrayContainingInAnyOrder("foo1-closed", "foo2-closed", "foo3"));
}
public void testDedupConcreteIndices() {
ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId())
.put(indexBuilder("index1").putAlias(AliasMetadata.builder("alias1")))
.build();
IndicesOptions[] indicesOptions = new IndicesOptions[] {
IndicesOptions.strictExpandOpen(),
IndicesOptions.strictExpand(),
IndicesOptions.lenientExpandOpen(),
IndicesOptions.strictExpandOpenAndForbidClosed() };
for (IndicesOptions options : indicesOptions) {
IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(
project,
options,
SystemIndexAccessLevel.NONE
);
String[] results = indexNameExpressionResolver.concreteIndexNames(context, "index1", "index1", "alias1");
assertThat(results, equalTo(new String[] { "index1" }));
}
}
public void testFilterClosedIndicesOnAliases() {
ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId())
.put(indexBuilder("test-0").state(State.OPEN).putAlias(AliasMetadata.builder("alias-0")))
.put(indexBuilder("test-1").state(IndexMetadata.State.CLOSE).putAlias(AliasMetadata.builder("alias-1")))
.build();
IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(
project,
IndicesOptions.lenientExpandOpen(),
SystemIndexAccessLevel.NONE
);
String[] strings = indexNameExpressionResolver.concreteIndexNames(context, "alias-*");
assertArrayEquals(new String[] { "test-0" }, strings);
context = new IndexNameExpressionResolver.Context(project, IndicesOptions.strictExpandOpen(), SystemIndexAccessLevel.NONE);
strings = indexNameExpressionResolver.concreteIndexNames(context, "alias-*");
assertArrayEquals(new String[] { "test-0" }, strings);
}
    /**
     * Checks that {@code resolveExpressions} expands wildcard patterns into the matching
     * concrete index and alias names (as data-component {@code ResolvedExpression}s) without
     * flattening aliases into their backing indices.
     */
    public void testResolveExpressions() {
        ProjectMetadata project = ProjectMetadata.builder(Metadata.DEFAULT_PROJECT_ID)
            .put(indexBuilder("test-0").state(State.OPEN).putAlias(AliasMetadata.builder("alias-0").filter("{ \"term\": \"foo\"}")))
            .put(indexBuilder("test-1").state(State.OPEN).putAlias(AliasMetadata.builder("alias-1")))
            .build();

        // alias wildcard: only the aliases, not their backing indices
        assertEquals(
            Set.of(new ResolvedExpression("alias-0", DATA), new ResolvedExpression("alias-1", DATA)),
            indexNameExpressionResolver.resolveExpressions(project, "alias-*")
        );
        // explicit index plus alias wildcard
        assertEquals(
            Set.of(
                new ResolvedExpression("test-0", DATA),
                new ResolvedExpression("alias-0", DATA),
                new ResolvedExpression("alias-1", DATA)
            ),
            indexNameExpressionResolver.resolveExpressions(project, "test-0", "alias-*")
        );
        // both wildcards together cover everything
        assertEquals(
            Set.of(
                new ResolvedExpression("test-0", DATA),
                new ResolvedExpression("test-1", DATA),
                new ResolvedExpression("alias-0", DATA),
                new ResolvedExpression("alias-1", DATA)
            ),
            indexNameExpressionResolver.resolveExpressions(project, "test-*", "alias-*")
        );
        // a suffix wildcard matches index and alias names alike
        assertEquals(
            Set.of(new ResolvedExpression("test-1", DATA), new ResolvedExpression("alias-1", DATA)),
            indexNameExpressionResolver.resolveExpressions(project, "*-1")
        );
    }
    /**
     * Checks {@code filteringAliases}: a filtered alias targeting the index is returned,
     * but any resolved expression naming the concrete index itself supersedes alias
     * filtering (null is returned, meaning "no filter applies").
     */
    public void testFilteringAliases() {
        ProjectMetadata project = ProjectMetadata.builder(Metadata.DEFAULT_PROJECT_ID)
            .put(indexBuilder("test-0").state(State.OPEN).putAlias(AliasMetadata.builder("alias-0").filter("{ \"term\": \"foo\"}")))
            .put(indexBuilder("test-1").state(State.OPEN).putAlias(AliasMetadata.builder("alias-1")))
            .build();

        // only the filtered alias pointing at test-0 qualifies
        Set<ResolvedExpression> resolvedExpressions = resolvedExpressionsSet("alias-0", "alias-1");
        String[] strings = indexNameExpressionResolver.filteringAliases(project, "test-0", resolvedExpressions);
        assertArrayEquals(new String[] { "alias-0" }, strings);

        // concrete index supersedes filtering alias
        resolvedExpressions = Set.of(
            new ResolvedExpression("test-0"),
            new ResolvedExpression("alias-0"),
            new ResolvedExpression("alias-1")
        );
        strings = indexNameExpressionResolver.filteringAliases(project, "test-0", resolvedExpressions);
        assertNull(strings);
        resolvedExpressions = Set.of(
            new ResolvedExpression("test-0"),
            new ResolvedExpression("test-1"),
            new ResolvedExpression("alias-0"),
            new ResolvedExpression("alias-1")
        );
        strings = indexNameExpressionResolver.filteringAliases(project, "test-0", resolvedExpressions);
        assertNull(strings);
    }
public void testIndexAliases() {
ProjectMetadata project = ProjectMetadata.builder(Metadata.DEFAULT_PROJECT_ID)
.put(
indexBuilder("test-0").state(State.OPEN)
.putAlias(AliasMetadata.builder("test-alias-0").filter("{ \"term\": \"foo\"}"))
.putAlias(AliasMetadata.builder("test-alias-1").filter("{ \"term\": \"foo\"}"))
.putAlias(AliasMetadata.builder("test-alias-non-filtering"))
)
.build();
Set<ResolvedExpression> resolvedExpressions = indexNameExpressionResolver.resolveExpressions(project, "test-*");
String[] strings = indexNameExpressionResolver.allIndexAliases(project, "test-0", resolvedExpressions);
Arrays.sort(strings);
assertArrayEquals(new String[] { "test-alias-0", "test-alias-1", "test-alias-non-filtering" }, strings);
strings = indexNameExpressionResolver.indexAliases(
project,
"test-0",
x -> x.alias().equals("test-alias-1"),
(x, y) -> randomBoolean(),
true,
resolvedExpressions
);
assertArrayEquals(null, strings);
}
    /**
     * Checks alias resolution through data stream aliases: which aliases are reported for a
     * backing index, how the skip-identity flag and unfiltered aliases affect filtering-alias
     * computation, and that a filtered data stream alias can be returned when identity is skipped.
     */
    public void testIndexAliasesDataStreamAliases() {
        final String dataStreamName1 = "logs-foobar";
        final String dataStreamName2 = "logs-barbaz";
        IndexMetadata backingIndex1 = createBackingIndex(dataStreamName1, 1).build();
        IndexMetadata backingIndex2 = createBackingIndex(dataStreamName2, 1).build();
        ProjectMetadata.Builder projectBuilder = ProjectMetadata.builder(Metadata.DEFAULT_PROJECT_ID)
            .put(backingIndex1, false)
            .put(backingIndex2, false)
            .put(newInstance(dataStreamName1, List.of(backingIndex1.getIndex())))
            .put(newInstance(dataStreamName2, List.of(backingIndex2.getIndex())));
        // data stream aliases: some filtered (JSON filter), some unfiltered (null filter)
        projectBuilder.put("logs_foo", dataStreamName1, null, "{ \"term\": \"foo\"}");
        projectBuilder.put("logs", dataStreamName1, null, "{ \"term\": \"logs\"}");
        projectBuilder.put("logs_bar", dataStreamName1, null, null);
        projectBuilder.put("logs_baz", dataStreamName2, null, "{ \"term\": \"logs\"}");
        projectBuilder.put("logs_baz2", dataStreamName2, null, null);
        ProjectMetadata project = projectBuilder.build();
        {
            // Only resolve aliases that refer to dataStreamName1
            Set<ResolvedExpression> resolvedExpressions = indexNameExpressionResolver.resolveExpressions(project, "l*");
            String index = backingIndex1.getIndex().getName();
            String[] result = indexNameExpressionResolver.allIndexAliases(project, index, resolvedExpressions);
            assertThat(result, arrayContainingInAnyOrder("logs_foo", "logs", "logs_bar"));
        }
        {
            // Only resolve aliases that refer to dataStreamName2
            Set<ResolvedExpression> resolvedExpressions = indexNameExpressionResolver.resolveExpressions(project, "l*");
            String index = backingIndex2.getIndex().getName();
            String[] result = indexNameExpressionResolver.allIndexAliases(project, index, resolvedExpressions);
            assertThat(result, arrayContainingInAnyOrder("logs_baz", "logs_baz2"));
        }
        {
            // Null is returned, because skipping identity check and resolvedExpressions contains the backing index name
            Set<ResolvedExpression> resolvedExpressions = indexNameExpressionResolver.resolveExpressions(project, "l*");
            String index = backingIndex2.getIndex().getName();
            String[] result = indexNameExpressionResolver.indexAliases(
                project,
                index,
                x -> randomBoolean(),
                (x, y) -> true,
                false,
                resolvedExpressions
            );
            assertThat(result, nullValue());
        }
        {
            // Null is returned, because the wildcard expands to a list of aliases containing an unfiltered alias for dataStreamName1
            Set<ResolvedExpression> resolvedExpressions = indexNameExpressionResolver.resolveExpressions(project, "l*");
            String index = backingIndex1.getIndex().getName();
            String[] result = indexNameExpressionResolver.filteringAliases(project, index, resolvedExpressions);
            assertThat(result, nullValue());
        }
        {
            // Null is returned, because an unfiltered alias is targeting the same data stream
            Set<ResolvedExpression> resolvedExpressions = indexNameExpressionResolver.resolveExpressions(project, "logs_bar", "logs");
            String index = backingIndex1.getIndex().getName();
            String[] result = indexNameExpressionResolver.filteringAliases(project, index, resolvedExpressions);
            assertThat(result, nullValue());
        }
        {
            // Null is returned because we target the data stream name and skipIdentity is false
            Set<ResolvedExpression> resolvedExpressions = indexNameExpressionResolver.resolveExpressions(project, dataStreamName1, "logs");
            String index = backingIndex1.getIndex().getName();
            String[] result = indexNameExpressionResolver.filteringAliases(project, index, resolvedExpressions);
            assertThat(result, nullValue());
        }
        {
            // The filtered alias is returned because although we target the data stream name, skipIdentity is true
            Set<ResolvedExpression> resolvedExpressions = indexNameExpressionResolver.resolveExpressions(project, dataStreamName1, "logs");
            String index = backingIndex1.getIndex().getName();
            String[] result = indexNameExpressionResolver.indexAliases(
                project,
                index,
                x -> true,
                (alias, isData) -> alias.filteringRequired() && isData,
                true,
                resolvedExpressions
            );
            assertThat(result, arrayContainingInAnyOrder("logs"));
        }
    }
    /**
     * Same as {@link #testIndexAliasesDataStreamAliases()} but targeting the failure-store
     * component ("::failures" selector): aliases resolve only against failure indices, and
     * backing indices see no aliases from failure-store expressions.
     */
    public void testIndexAliasesDataStreamFailureStoreAndAliases() {
        final String dataStreamName1 = "logs-foobar";
        final String dataStreamName2 = "logs-barbaz";
        IndexMetadata backingIndex1 = createBackingIndex(dataStreamName1, 1).build();
        IndexMetadata failureIndex1 = createFailureStore(dataStreamName1, 2).build();
        IndexMetadata backingIndex2 = createBackingIndex(dataStreamName2, 1).build();
        ProjectMetadata.Builder projectBuilder = ProjectMetadata.builder(Metadata.DEFAULT_PROJECT_ID)
            .put(backingIndex1, false)
            .put(backingIndex2, false)
            .put(failureIndex1, false)
            .put(newInstance(dataStreamName1, List.of(backingIndex1.getIndex()), List.of(failureIndex1.getIndex())))
            .put(newInstance(dataStreamName2, List.of(backingIndex2.getIndex())));
        // data stream aliases: some filtered, some unfiltered (null filter)
        projectBuilder.put("logs_foo", dataStreamName1, null, "{ \"term\": \"foo\"}");
        projectBuilder.put("logs", dataStreamName1, null, "{ \"term\": \"logs\"}");
        projectBuilder.put("logs_bar", dataStreamName1, null, null);
        projectBuilder.put("logs_baz", dataStreamName2, null, "{ \"term\": \"logs\"}");
        projectBuilder.put("logs_baz2", dataStreamName2, null, null);
        ProjectMetadata project = projectBuilder.build();
        {
            // Resolving the failure component with a backing index should return null
            Set<ResolvedExpression> resolvedExpressions = indexNameExpressionResolver.resolveExpressions(project, "l*::failures");
            String index = randomBoolean() ? backingIndex1.getIndex().getName() : backingIndex2.getIndex().getName();
            String[] result = indexNameExpressionResolver.allIndexAliases(project, index, resolvedExpressions);
            assertThat(result, nullValue());
        }
        {
            // Only resolve aliases that refer to dataStreamName1 failure store
            Set<ResolvedExpression> resolvedExpressions = indexNameExpressionResolver.resolveExpressions(project, "l*::failures");
            String index = failureIndex1.getIndex().getName();
            String[] result = indexNameExpressionResolver.allIndexAliases(project, index, resolvedExpressions);
            assertThat(result, arrayContainingInAnyOrder("logs_foo::failures", "logs::failures", "logs_bar::failures"));
        }
        {
            // Null is returned, because we perform the identity check and resolvedExpressions contains the failure index name
            Set<ResolvedExpression> resolvedExpressions = indexNameExpressionResolver.resolveExpressions(project, "l*::failures");
            String index = failureIndex1.getIndex().getName();
            String[] result = indexNameExpressionResolver.indexAliases(
                project,
                index,
                x -> true,
                (x, y) -> true,
                false,
                resolvedExpressions
            );
            assertThat(result, nullValue());
        }
        {
            // Null is returned, because the wildcard expands to a list of aliases containing an unfiltered alias for dataStreamName1
            Set<ResolvedExpression> resolvedExpressions = indexNameExpressionResolver.resolveExpressions(project, "l*::failures");
            String index = failureIndex1.getIndex().getName();
            String[] result = indexNameExpressionResolver.filteringAliases(project, index, resolvedExpressions);
            assertThat(result, nullValue());
        }
        {
            // Null is returned because we target the failure store of the data stream
            Set<ResolvedExpression> resolvedExpressions = indexNameExpressionResolver.resolveExpressions(project, "logs::failures");
            String index = failureIndex1.getIndex().getName();
            String[] result = indexNameExpressionResolver.filteringAliases(project, index, resolvedExpressions);
            assertThat(result, nullValue());
        }
    }
public void testIndexAliasesSkipIdentity() {
ProjectMetadata project = ProjectMetadata.builder(Metadata.DEFAULT_PROJECT_ID)
.put(
indexBuilder("test-0").state(State.OPEN)
.putAlias(AliasMetadata.builder("test-alias"))
.putAlias(AliasMetadata.builder("other-alias"))
)
.build();
Set<ResolvedExpression> resolvedExpressions = resolvedExpressionsSet("test-0", "test-alias");
String[] aliases = indexNameExpressionResolver.indexAliases(
project,
"test-0",
x -> true,
(x, y) -> true,
false,
resolvedExpressions
);
assertNull(aliases);
aliases = indexNameExpressionResolver.indexAliases(project, "test-0", x -> true, (x, y) -> true, true, resolvedExpressions);
assertArrayEquals(new String[] { "test-alias" }, aliases);
resolvedExpressions = Collections.singleton(new ResolvedExpression("other-alias"));
aliases = indexNameExpressionResolver.indexAliases(project, "test-0", x -> true, (x, y) -> true, false, resolvedExpressions);
assertArrayEquals(new String[] { "other-alias" }, aliases);
aliases = indexNameExpressionResolver.indexAliases(project, "test-0", x -> true, (x, y) -> true, true, resolvedExpressions);
assertArrayEquals(new String[] { "other-alias" }, aliases);
}
    /**
     * Resolving the concrete write index through an alias: with a single index behind the alias the
     * write index is that index (whether or not {@code is_write_index} is explicitly set); with two
     * indices, the one carrying the explicit {@code is_write_index=true} flag wins.
     */
    public void testConcreteWriteIndexSuccessful() {
        // randomly exercise both "explicit true" and "implicit via single index" paths for test-0
        boolean testZeroWriteIndex = randomBoolean();
        ProjectMetadata project = ProjectMetadata.builder(Metadata.DEFAULT_PROJECT_ID)
            .put(
                indexBuilder("test-0").state(State.OPEN)
                    .putAlias(AliasMetadata.builder("test-alias").writeIndex(testZeroWriteIndex ? true : null))
            )
            .build();
        String[] strings = indexNameExpressionResolver.allIndexAliases(project, "test-0", resolvedExpressionsSet("test-0", "test-alias"));
        Arrays.sort(strings);
        assertArrayEquals(new String[] { "test-alias" }, strings);
        IndicesRequest request = new IndicesRequest() {
            @Override
            public String[] indices() {
                return new String[] { "test-alias" };
            }

            @Override
            public IndicesOptions indicesOptions() {
                return IndicesOptions.strictSingleIndexNoExpandForbidClosed();
            }
        };
        Index writeIndex = indexNameExpressionResolver.concreteWriteIndex(project, request);
        assertThat(writeIndex.getName(), equalTo("test-0"));
        // add a second index behind the alias; exactly one of the two ends up flagged as write index
        project = ProjectMetadata.builder(project)
            .put(
                indexBuilder("test-1").putAlias(
                    AliasMetadata.builder("test-alias").writeIndex(testZeroWriteIndex ? randomFrom(false, null) : true)
                )
            )
            .build();
        writeIndex = indexNameExpressionResolver.concreteWriteIndex(project, request);
        assertThat(writeIndex.getName(), equalTo(testZeroWriteIndex ? "test-0" : "test-1"));
    }
    /**
     * {@code concreteWriteIndex} requires exactly one index expression in the request: both a
     * {@code null} indices array and a multi-element one are rejected with the same message.
     */
    public void testConcreteWriteIndexWithInvalidIndicesRequest() {
        ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId())
            .put(indexBuilder("test-0").state(State.OPEN).putAlias(AliasMetadata.builder("test-alias")))
            .build();
        // factory for a single-index-only request with an arbitrary indices array
        Function<String[], IndicesRequest> requestGen = (indices) -> new IndicesRequest() {
            @Override
            public String[] indices() {
                return indices;
            }

            @Override
            public IndicesOptions indicesOptions() {
                return IndicesOptions.strictSingleIndexNoExpandForbidClosed();
            }
        };
        IllegalArgumentException exception = expectThrows(
            IllegalArgumentException.class,
            () -> indexNameExpressionResolver.concreteWriteIndex(project, requestGen.apply(null))
        );
        assertThat(exception.getMessage(), equalTo("indices request must specify a single index expression"));
        exception = expectThrows(
            IllegalArgumentException.class,
            () -> indexNameExpressionResolver.concreteWriteIndex(project, requestGen.apply(new String[] { "too", "many" }))
        );
        assertThat(exception.getMessage(), equalTo("indices request must specify a single index expression"));
    }
    /**
     * A wildcard expression that expands to multiple indices cannot identify a single write index,
     * even when one of the matched indices is flagged as the alias's write index.
     */
    public void testConcreteWriteIndexWithWildcardExpansion() {
        boolean testZeroWriteIndex = randomBoolean();
        ProjectMetadata project = ProjectMetadata.builder(Metadata.DEFAULT_PROJECT_ID)
            .put(
                indexBuilder("test-1").state(State.OPEN)
                    .putAlias(AliasMetadata.builder("test-alias").writeIndex(testZeroWriteIndex ? true : null))
            )
            .put(
                indexBuilder("test-0").state(State.OPEN)
                    .putAlias(AliasMetadata.builder("test-alias").writeIndex(testZeroWriteIndex ? randomFrom(false, null) : true))
            )
            .build();
        String[] strings = indexNameExpressionResolver.allIndexAliases(
            project,
            "test-0",
            resolvedExpressionsSet("test-0", "test-1", "test-alias")
        );
        Arrays.sort(strings);
        assertArrayEquals(new String[] { "test-alias" }, strings);
        // request uses a wildcard with options that DO expand wildcards
        IndicesRequest request = new IndicesRequest() {
            @Override
            public String[] indices() {
                return new String[] { "test-*" };
            }

            @Override
            public IndicesOptions indicesOptions() {
                return IndicesOptions.strictExpandOpenAndForbidClosed();
            }
        };
        IllegalArgumentException exception = expectThrows(
            IllegalArgumentException.class,
            () -> indexNameExpressionResolver.concreteWriteIndex(project, request)
        );
        assertThat(
            exception.getMessage(),
            equalTo("The index expression [test-*] and options provided did not point to a single write-index")
        );
    }
    /**
     * An alias whose only index has {@code is_write_index=false} explicitly set has no write index,
     * so write requests through the alias fail with a descriptive error.
     */
    public void testConcreteWriteIndexWithNoWriteIndexWithSingleIndex() {
        ProjectMetadata project = ProjectMetadata.builder(Metadata.DEFAULT_PROJECT_ID)
            .put(indexBuilder("test-0").state(State.OPEN).putAlias(AliasMetadata.builder("test-alias").writeIndex(false)))
            .build();
        String[] strings = indexNameExpressionResolver.allIndexAliases(project, "test-0", resolvedExpressionsSet("test-0", "test-alias"));
        Arrays.sort(strings);
        assertArrayEquals(new String[] { "test-alias" }, strings);
        // any of the single-document write request types should be rejected the same way
        DocWriteRequest<?> request = randomFrom(
            new IndexRequest("test-alias"),
            new UpdateRequest("test-alias", "_id"),
            new DeleteRequest("test-alias")
        );
        IllegalArgumentException exception = expectThrows(
            IllegalArgumentException.class,
            () -> indexNameExpressionResolver.concreteWriteIndex(project, request.indicesOptions(), request.indices()[0], false, false)
        );
        assertThat(
            exception.getMessage(),
            equalTo(
                "no write index is defined for alias [test-alias]."
                    + " The write index may be explicitly disabled using is_write_index=false or the alias points to multiple"
                    + " indices without one being designated as a write index"
            )
        );
    }
    /**
     * An alias pointing at multiple indices where none is designated {@code is_write_index=true}
     * has no write index; write requests through the alias fail with a descriptive error.
     */
    public void testConcreteWriteIndexWithNoWriteIndexWithMultipleIndices() {
        ProjectMetadata project = ProjectMetadata.builder(Metadata.DEFAULT_PROJECT_ID)
            .put(indexBuilder("test-0").state(State.OPEN).putAlias(AliasMetadata.builder("test-alias").writeIndex(randomFrom(false, null))))
            .put(indexBuilder("test-1").state(State.OPEN).putAlias(AliasMetadata.builder("test-alias").writeIndex(randomFrom(false, null))))
            .build();
        String[] strings = indexNameExpressionResolver.allIndexAliases(
            project,
            "test-0",
            Set.of(new ResolvedExpression("test-0"), new ResolvedExpression("test-1"), new ResolvedExpression("test-alias"))
        );
        Arrays.sort(strings);
        assertArrayEquals(new String[] { "test-alias" }, strings);
        // any of the single-document write request types should be rejected the same way
        DocWriteRequest<?> request = randomFrom(
            new IndexRequest("test-alias"),
            new UpdateRequest("test-alias", "_id"),
            new DeleteRequest("test-alias")
        );
        IllegalArgumentException exception = expectThrows(
            IllegalArgumentException.class,
            () -> indexNameExpressionResolver.concreteWriteIndex(project, request.indicesOptions(), request.indices()[0], false, false)
        );
        assertThat(
            exception.getMessage(),
            equalTo(
                "no write index is defined for alias [test-alias]."
                    + " The write index may be explicitly disabled using is_write_index=false or the alias points to multiple"
                    + " indices without one being designated as a write index"
            )
        );
    }
    /**
     * With single-index-only options, resolving an alias that points at two indices fails with a
     * "can't execute a single index op" error, regardless of which index (if any) is the write index.
     */
    public void testAliasResolutionNotAllowingMultipleIndices() {
        // at most one of the two indices ends up flagged as the write index
        boolean test0WriteIndex = randomBoolean();
        ProjectMetadata project = ProjectMetadata.builder(Metadata.DEFAULT_PROJECT_ID)
            .put(
                indexBuilder("test-0").state(State.OPEN)
                    .putAlias(AliasMetadata.builder("test-alias").writeIndex(randomFrom(test0WriteIndex, null)))
            )
            .put(
                indexBuilder("test-1").state(State.OPEN)
                    .putAlias(AliasMetadata.builder("test-alias").writeIndex(randomFrom(test0WriteIndex == false, null)))
            )
            .build();
        String[] strings = indexNameExpressionResolver.allIndexAliases(
            project,
            "test-0",
            resolvedExpressionsSet("test-0", "test-1", "test-alias")
        );
        Arrays.sort(strings);
        assertArrayEquals(new String[] { "test-alias" }, strings);
        IllegalArgumentException exception = expectThrows(
            IllegalArgumentException.class,
            () -> indexNameExpressionResolver.concreteIndexNames(
                project,
                IndicesOptions.strictSingleIndexNoExpandForbidClosed(),
                "test-alias"
            )
        );
        assertThat(exception.getMessage(), endsWith(", can't execute a single index op"));
    }
    /**
     * Delete-index requests must never resolve through aliases: a concrete alias name is rejected
     * (or yields nothing when ignore_aliases+allow_no_indices is set), while wildcards silently skip
     * aliases. Each scenario is checked both with and without wildcard expansion where relevant.
     */
    public void testDeleteIndexIgnoresAliases() {
        ProjectMetadata project = ProjectMetadata.builder(Metadata.DEFAULT_PROJECT_ID)
            .put(indexBuilder("test-index").state(State.OPEN).putAlias(AliasMetadata.builder("test-alias")))
            .put(indexBuilder("index").state(State.OPEN).putAlias(AliasMetadata.builder("test-alias2")))
            .build();
        // NOTE(review): state appears unused below — presumably kept for parity with other tests
        ClusterState state = ClusterState.builder(ClusterName.DEFAULT).putProjectMetadata(project).build();
        {
            // unknown concrete name -> IndexNotFoundException
            IndexNotFoundException infe = expectThrows(
                IndexNotFoundException.class,
                () -> indexNameExpressionResolver.concreteIndexNames(project, new DeleteIndexRequest("does_not_exist"))
            );
            assertEquals("does_not_exist", infe.getIndex().getName());
            assertEquals("no such index [does_not_exist]", infe.getMessage());
        }
        {
            // same delete request but with request options that DO NOT expand wildcards
            DeleteIndexRequest request = new DeleteIndexRequest("does_not_exist").indicesOptions(
                IndicesOptions.builder().wildcardOptions(doNotExpandWildcards()).build()
            );
            IndexNotFoundException infe = expectThrows(
                IndexNotFoundException.class,
                () -> indexNameExpressionResolver.concreteIndexNames(project, request)
            );
            assertEquals("does_not_exist", infe.getIndex().getName());
            assertEquals("no such index [does_not_exist]", infe.getMessage());
        }
        {
            // a concrete alias name is rejected outright
            IllegalArgumentException iae = expectThrows(
                IllegalArgumentException.class,
                () -> indexNameExpressionResolver.concreteIndexNames(project, new DeleteIndexRequest("test-alias"))
            );
            assertEquals(
                "The provided expression [test-alias] matches an alias, specify the corresponding concrete indices instead.",
                iae.getMessage()
            );
        }
        {
            // same delete request but with request options that DO NOT expand wildcards
            DeleteIndexRequest request = new DeleteIndexRequest("test-alias").indicesOptions(
                IndicesOptions.fromOptions(false, true, false, false, false, false, true, false)
            );
            IllegalArgumentException iae = expectThrows(
                IllegalArgumentException.class,
                () -> indexNameExpressionResolver.concreteIndexNames(project, request)
            );
            assertEquals(
                "The provided expression [test-alias] matches an alias, specify the corresponding concrete indices instead.",
                iae.getMessage()
            );
        }
        {
            // lenient options (ignore unavailable + allow no indices): the alias resolves to nothing
            DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest("test-alias");
            deleteIndexRequest.indicesOptions(IndicesOptions.fromOptions(true, true, true, true, false, false, true, false));
            String[] indices = indexNameExpressionResolver.concreteIndexNames(project, deleteIndexRequest);
            assertEquals(0, indices.length);
        }
        {
            // same request as above but with request options that DO NOT expand wildcards
            DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest("test-alias");
            deleteIndexRequest.indicesOptions(IndicesOptions.fromOptions(true, true, false, false, false, false, true, false));
            String[] indices = indexNameExpressionResolver.concreteIndexNames(project, deleteIndexRequest);
            assertEquals(0, indices.length);
        }
        {
            // strict options: a wildcard matching only aliases resolves to nothing and then errors
            DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest("test-a*");
            deleteIndexRequest.indicesOptions(IndicesOptions.fromOptions(randomBoolean(), false, true, true, false, false, true, false));
            IndexNotFoundException infe = expectThrows(
                IndexNotFoundException.class,
                () -> indexNameExpressionResolver.concreteIndexNames(project, deleteIndexRequest)
            );
            assertEquals(infe.getIndex().getName(), "test-a*");
        }
        {
            // default options: the alias-only wildcard quietly resolves to an empty result
            String[] indices = indexNameExpressionResolver.concreteIndexNames(project, new DeleteIndexRequest("test-a*"));
            assertEquals(0, indices.length);
        }
        {
            // a concrete index name resolves to itself
            String[] indices = indexNameExpressionResolver.concreteIndexNames(project, new DeleteIndexRequest("test-index"));
            assertEquals(1, indices.length);
            assertEquals("test-index", indices[0]);
        }
        {
            String[] indices = indexNameExpressionResolver.concreteIndexNames(
                project,
                new DeleteIndexRequest("test-index").indicesOptions(
                    IndicesOptions.fromOptions(false, true, false, false, false, false, false, false)
                )
            );
            assertEquals(1, indices.length);
            assertEquals("test-index", indices[0]);
        }
        {
            // wildcard matches the concrete index, skipping the alias
            String[] indices = indexNameExpressionResolver.concreteIndexNames(project, new DeleteIndexRequest("test-*"));
            assertEquals(1, indices.length);
            assertEquals("test-index", indices[0]);
        }
    }
    /**
     * Alias actions (add / remove / removeIndex) must resolve only to concrete indices: a concrete
     * alias name is rejected, an alias-only wildcard yields "no such index", and index names and
     * index-matching wildcards resolve normally. The same four cases are repeated per action type.
     */
    public void testIndicesAliasesRequestIgnoresAliases() {
        ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId())
            .put(indexBuilder("test-index").state(State.OPEN).putAlias(AliasMetadata.builder("test-alias")))
            .put(indexBuilder("index").state(State.OPEN).putAlias(AliasMetadata.builder("test-alias2")))
            .build();
        // --- add ---
        {
            IndicesAliasesRequest.AliasActions aliasActions = IndicesAliasesRequest.AliasActions.add().index("test-alias");
            IllegalArgumentException iae = expectThrows(
                IllegalArgumentException.class,
                () -> indexNameExpressionResolver.concreteIndexNames(project, aliasActions)
            );
            assertEquals(
                "The provided expression [test-alias] matches an alias, " + "specify the corresponding concrete indices instead.",
                iae.getMessage()
            );
        }
        {
            IndicesAliasesRequest.AliasActions aliasActions = IndicesAliasesRequest.AliasActions.add().index("test-a*");
            IndexNotFoundException infe = expectThrows(
                IndexNotFoundException.class,
                () -> indexNameExpressionResolver.concreteIndexNames(project, aliasActions)
            );
            assertEquals("test-a*", infe.getIndex().getName());
        }
        {
            IndicesAliasesRequest.AliasActions aliasActions = IndicesAliasesRequest.AliasActions.add().index("test-index");
            String[] indices = indexNameExpressionResolver.concreteIndexNames(project, aliasActions);
            assertEquals(1, indices.length);
            assertEquals("test-index", indices[0]);
        }
        {
            IndicesAliasesRequest.AliasActions aliasActions = IndicesAliasesRequest.AliasActions.add().index("test-*");
            String[] indices = indexNameExpressionResolver.concreteIndexNames(project, aliasActions);
            assertEquals(1, indices.length);
            assertEquals("test-index", indices[0]);
        }
        // --- remove ---
        {
            IndicesAliasesRequest.AliasActions aliasActions = IndicesAliasesRequest.AliasActions.remove().index("test-alias");
            IllegalArgumentException iae = expectThrows(
                IllegalArgumentException.class,
                () -> indexNameExpressionResolver.concreteIndexNames(project, aliasActions)
            );
            assertEquals(
                "The provided expression [test-alias] matches an alias, " + "specify the corresponding concrete indices instead.",
                iae.getMessage()
            );
        }
        {
            IndicesAliasesRequest.AliasActions aliasActions = IndicesAliasesRequest.AliasActions.remove().index("test-a*");
            IndexNotFoundException infe = expectThrows(
                IndexNotFoundException.class,
                () -> indexNameExpressionResolver.concreteIndexNames(project, aliasActions)
            );
            assertEquals("test-a*", infe.getIndex().getName());
        }
        {
            IndicesAliasesRequest.AliasActions aliasActions = IndicesAliasesRequest.AliasActions.remove().index("test-index");
            String[] indices = indexNameExpressionResolver.concreteIndexNames(project, aliasActions);
            assertEquals(1, indices.length);
            assertEquals("test-index", indices[0]);
        }
        {
            IndicesAliasesRequest.AliasActions aliasActions = IndicesAliasesRequest.AliasActions.remove().index("test-*");
            String[] indices = indexNameExpressionResolver.concreteIndexNames(project, aliasActions);
            assertEquals(1, indices.length);
            assertEquals("test-index", indices[0]);
        }
        // --- removeIndex ---
        {
            IndicesAliasesRequest.AliasActions aliasActions = IndicesAliasesRequest.AliasActions.removeIndex().index("test-alias");
            IllegalArgumentException iae = expectThrows(
                IllegalArgumentException.class,
                () -> indexNameExpressionResolver.concreteIndexNames(project, aliasActions)
            );
            assertEquals(
                "The provided expression [test-alias] matches an alias, " + "specify the corresponding concrete indices instead.",
                iae.getMessage()
            );
        }
        {
            IndicesAliasesRequest.AliasActions aliasActions = IndicesAliasesRequest.AliasActions.removeIndex().index("test-a*");
            IndexNotFoundException infe = expectThrows(
                IndexNotFoundException.class,
                () -> indexNameExpressionResolver.concreteIndexNames(project, aliasActions)
            );
            assertEquals("test-a*", infe.getIndex().getName());
        }
        {
            IndicesAliasesRequest.AliasActions aliasActions = IndicesAliasesRequest.AliasActions.removeIndex().index("test-index");
            String[] indices = indexNameExpressionResolver.concreteIndexNames(project, aliasActions);
            assertEquals(1, indices.length);
            assertEquals("test-index", indices[0]);
        }
        {
            IndicesAliasesRequest.AliasActions aliasActions = IndicesAliasesRequest.AliasActions.removeIndex().index("test-*");
            String[] indices = indexNameExpressionResolver.concreteIndexNames(project, aliasActions);
            assertEquals(1, indices.length);
            assertEquals("test-index", indices[0]);
        }
    }
public void testIndicesAliasesRequestTargetDataStreams() {
final String dataStreamName = "my-data-stream";
IndexMetadata backingIndex = createBackingIndex(dataStreamName, 1).build();
ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId())
.put(backingIndex, false)
.put(newInstance(dataStreamName, List.of(backingIndex.getIndex())))
.build();
{
IndicesAliasesRequest.AliasActions aliasActions = IndicesAliasesRequest.AliasActions.add().index(dataStreamName);
assertThat(
indexNameExpressionResolver.concreteIndexNames(project, aliasActions),
arrayContaining(backingIndexEqualTo(dataStreamName, 1))
);
}
{
IndicesAliasesRequest.AliasActions aliasActions = IndicesAliasesRequest.AliasActions.add().index("my-data-*").alias("my-data");
assertThat(
indexNameExpressionResolver.concreteIndexNames(project, aliasActions),
arrayContaining(backingIndexEqualTo(dataStreamName, 1))
);
}
{
IndicesAliasesRequest.AliasActions aliasActions = IndicesAliasesRequest.AliasActions.add()
.index(dataStreamName)
.alias("my-data");
assertThat(
indexNameExpressionResolver.concreteIndexNames(project, aliasActions),
arrayContaining(backingIndexEqualTo(dataStreamName, 1))
);
}
}
public void testInvalidIndex() {
ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()).put(indexBuilder("test")).build();
IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(
project,
IndicesOptions.lenientExpandOpen(),
SystemIndexAccessLevel.NONE
);
InvalidIndexNameException iine = expectThrows(
InvalidIndexNameException.class,
() -> indexNameExpressionResolver.concreteIndexNames(context, "_foo")
);
assertEquals("Invalid index name [_foo], must not start with '_'.", iine.getMessage());
}
    /**
     * {@code ignore_throttled}: indices marked {@code index.frozen=true} or
     * {@code index.search.throttled=true} are filtered out when the option is set, both for direct
     * names, aliases, and wildcard expressions; without it (or when matching closed indices) they
     * are included.
     */
    public void testIgnoreThrottled() {
        ProjectMetadata project = ProjectMetadata.builder(Metadata.DEFAULT_PROJECT_ID)
            .put(
                indexBuilder("test-index", Settings.builder().put("index.frozen", true).build()).state(State.OPEN)
                    .putAlias(AliasMetadata.builder("test-alias"))
            )
            .put(
                indexBuilder("index", Settings.builder().put("index.search.throttled", true).build()).state(State.OPEN)
                    .putAlias(AliasMetadata.builder("test-alias2"))
            )
            .put(
                indexBuilder("index-closed", Settings.builder().put("index.frozen", true).build()).state(State.CLOSE)
                    .putAlias(AliasMetadata.builder("test-alias-closed"))
            )
            .build();
        {
            // "*" with ignore_throttled: frozen "test-index" is filtered, throttled "index" survives
            // NOTE(review): "index" with index.search.throttled=true surviving here reflects the
            // observed filter semantics — confirm against IndicesOptions docs if surprising
            Index[] indices = indexNameExpressionResolver.concreteIndices(
                project,
                IndicesOptions.STRICT_EXPAND_OPEN_FORBID_CLOSED_IGNORE_THROTTLED,
                "*"
            );
            assertEquals(1, indices.length);
            assertEquals("index", indices[0].getName());
        }
        {
            // without ignore_throttled, the alias resolves to the frozen index
            Index[] indices = indexNameExpressionResolver.concreteIndices(
                project,
                IndicesOptions.STRICT_EXPAND_OPEN_FORBID_CLOSED,
                "test-alias"
            );
            assertEquals(1, indices.length);
            assertEquals("test-index", indices[0].getName());
        }
        {
            // with ignore_throttled, the same alias resolves to nothing
            Index[] indices = indexNameExpressionResolver.concreteIndices(
                project,
                IndicesOptions.STRICT_EXPAND_OPEN_FORBID_CLOSED_IGNORE_THROTTLED,
                "test-alias"
            );
            assertEquals(0, indices.length);
        }
        {
            Index[] indices = indexNameExpressionResolver.concreteIndices(
                project,
                IndicesOptions.STRICT_EXPAND_OPEN_FORBID_CLOSED_IGNORE_THROTTLED,
                "test-*"
            );
            assertEquals(1, indices.length);
            assertEquals("index", indices[0].getName());
        }
        {
            // mixed wildcard + explicit name: the explicit frozen index is still filtered
            Index[] indices = indexNameExpressionResolver.concreteIndices(
                project,
                IndicesOptions.STRICT_EXPAND_OPEN_FORBID_CLOSED_IGNORE_THROTTLED,
                "ind*",
                "test-index"
            );
            assertEquals(1, indices.length);
            Arrays.sort(indices, Index.COMPARE_BY_NAME);
            assertEquals("index", indices[0].getName());
        }
        {
            // equivalent options built explicitly via the IndicesOptions record constructor
            Index[] indices = indexNameExpressionResolver.concreteIndices(
                project,
                new IndicesOptions(
                    IndicesOptions.ConcreteTargetOptions.ERROR_WHEN_UNAVAILABLE_TARGETS,
                    IndicesOptions.WildcardOptions.DEFAULT,
                    IndicesOptions.GatekeeperOptions.builder().ignoreThrottled(true).build(),
                    IndicesOptions.CrossProjectModeOptions.DEFAULT
                ),
                "ind*",
                "test-index"
            );
            assertEquals(1, indices.length);
            Arrays.sort(indices, Index.COMPARE_BY_NAME);
            assertEquals("index", indices[0].getName());
        }
        {
            // matching closed indices and not ignoring throttled returns everything
            Index[] indices = indexNameExpressionResolver.concreteIndices(
                project,
                IndicesOptions.builder().wildcardOptions(IndicesOptions.WildcardOptions.builder().matchClosed(true)).build(),
                "ind*",
                "test-index"
            );
            assertEquals(3, indices.length);
            Arrays.sort(indices, Index.COMPARE_BY_NAME);
            assertEquals("index", indices[0].getName());
            assertEquals("index-closed", indices[1].getName());
            assertEquals("test-index", indices[2].getName());
        }
    }
public void testFullWildcardSystemIndexResolutionWithExpandHiddenAllowed() {
ProjectMetadata state = systemIndexTestClusterState();
SearchRequest request = new SearchRequest(randomFrom("*", "_all"));
request.indicesOptions(IndicesOptions.strictExpandHidden());
String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request);
assertThat(indexNames, arrayContainingInAnyOrder("some-other-index", ".ml-stuff", ".ml-meta", ".watches"));
}
public void testWildcardSystemIndexResolutionMultipleMatchesAllowed() {
ProjectMetadata state = systemIndexTestClusterState();
SearchRequest request = new SearchRequest(".w*");
String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request);
assertThat(indexNames, arrayContainingInAnyOrder(".watches"));
}
public void testWildcardSystemIndexResolutionSingleMatchAllowed() {
ProjectMetadata state = systemIndexTestClusterState();
SearchRequest request = new SearchRequest(".ml-*");
String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request);
assertThat(indexNames, arrayContainingInAnyOrder(".ml-meta", ".ml-stuff"));
}
public void testSingleSystemIndexResolutionAllowed() {
ProjectMetadata state = systemIndexTestClusterState();
SearchRequest request = new SearchRequest(".ml-meta");
String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request);
assertThat(indexNames, arrayContainingInAnyOrder(".ml-meta"));
}
public void testFullWildcardSystemIndicesAreHidden() {
ProjectMetadata state = systemIndexTestClusterState();
SearchRequest request = new SearchRequest(randomFrom("*", "_all"));
String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request);
assertThat(indexNames, arrayContaining("some-other-index"));
}
public void testFullWildcardSystemIndexResolutionDeprecated() {
threadContext.putHeader(SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, Boolean.FALSE.toString());
ProjectMetadata state = systemIndexTestClusterState();
SearchRequest request = new SearchRequest(randomFrom("*", "_all"));
request.indicesOptions(IndicesOptions.strictExpandHidden());
String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request);
assertThat(indexNames, arrayContainingInAnyOrder("some-other-index", ".ml-stuff", ".ml-meta", ".watches"));
assertWarnings(
true,
new DeprecationWarning(
Level.WARN,
"this request accesses system indices: [.ml-meta, .ml-stuff, .watches], "
+ "but in a future major version, direct access to system indices will be prevented by default"
)
);
}
public void testSingleSystemIndexResolutionDeprecated() {
threadContext.putHeader(SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, Boolean.FALSE.toString());
ProjectMetadata state = systemIndexTestClusterState();
SearchRequest request = new SearchRequest(".ml-meta");
String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request);
assertThat(indexNames, arrayContaining(".ml-meta"));
assertWarnings(
true,
new DeprecationWarning(
Level.WARN,
"this request accesses system indices: [.ml-meta], "
+ "but in a future major version, direct access to system indices will be prevented by default"
)
);
}
public void testWildcardSystemIndexResolutionSingleMatchDeprecated() {
threadContext.putHeader(SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, Boolean.FALSE.toString());
ProjectMetadata state = systemIndexTestClusterState();
SearchRequest request = new SearchRequest(".w*");
String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request);
assertThat(indexNames, arrayContainingInAnyOrder(".watches"));
assertWarnings(
true,
new DeprecationWarning(
Level.WARN,
"this request accesses system indices: [.watches], "
+ "but in a future major version, direct access to system indices will be prevented by default"
)
);
}
public void testWildcardSystemIndexResolutionMultipleMatchesDeprecated() {
threadContext.putHeader(SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, Boolean.FALSE.toString());
ProjectMetadata state = systemIndexTestClusterState();
SearchRequest request = new SearchRequest(".ml-*");
String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request);
assertThat(indexNames, arrayContainingInAnyOrder(".ml-meta", ".ml-stuff"));
assertWarnings(
true,
new DeprecationWarning(
Level.WARN,
"this request accesses system indices: [.ml-meta, .ml-stuff], "
+ "but in a future major version, direct access to system indices will be prevented by default"
)
);
}
    /**
     * External (EXTERNAL_UNMANAGED) system indices: without a product-origin header, access works
     * but emits a deprecation warning; with an allowed product origin ("stack-component" or
     * "other"), access is clean with no warning. Each case stashes the thread context so headers
     * don't leak between scenarios.
     */
    public void testExternalSystemIndexAccess() {
        final ProjectMetadata prev = systemIndexTestClusterState();
        ProjectMetadata state = ProjectMetadata.builder(prev)
            .put(indexBuilder(".external-sys-idx", Settings.EMPTY).state(State.OPEN).system(true))
            .build();
        // rebuild the resolver with a feature set that declares .external-sys-idx as external,
        // allowing the "stack-component" and "other" product origins
        SystemIndices systemIndices = new SystemIndices(
            List.of(
                new Feature(
                    "ml",
                    "ml indices",
                    List.of(
                        SystemIndexDescriptorUtils.createUnmanaged(".ml-meta*", "ml meta"),
                        SystemIndexDescriptorUtils.createUnmanaged(".ml-stuff*", "other ml")
                    )
                ),
                new Feature(
                    "watcher",
                    "watcher indices",
                    List.of(SystemIndexDescriptorUtils.createUnmanaged(".watches*", "watches index"))
                ),
                new Feature(
                    "stack-component",
                    "stack component",
                    List.of(
                        SystemIndexDescriptor.builder()
                            .setIndexPattern(".external-sys-idx*")
                            .setDescription("external")
                            .setType(Type.EXTERNAL_UNMANAGED)
                            .setAllowedElasticProductOrigins(List.of("stack-component", "other"))
                            .build()
                    )
                )
            )
        );
        indexNameExpressionResolver = TestIndexNameExpressionResolver.newInstance(threadContext, systemIndices);
        // no product origin: wildcard access resolves but is deprecated
        {
            try (ThreadContext.StoredContext ignore = threadContext.stashContext()) {
                threadContext.putHeader(SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, Boolean.FALSE.toString());
                SearchRequest request = new SearchRequest(".external-*");
                String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request);
                assertThat(indexNames, arrayContaining(".external-sys-idx"));
                assertWarnings(
                    true,
                    new DeprecationWarning(
                        Level.WARN,
                        "this request accesses system indices: [.external-sys-idx], "
                            + "but in a future major version, direct access to system indices will be prevented by default"
                    )
                );
            }
        }
        // no product origin: direct access resolves but is deprecated
        {
            try (ThreadContext.StoredContext ignore = threadContext.stashContext()) {
                threadContext.putHeader(SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, Boolean.FALSE.toString());
                SearchRequest request = new SearchRequest(".external-sys-idx");
                String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request);
                assertThat(indexNames, arrayContaining(".external-sys-idx"));
                assertWarnings(
                    true,
                    new DeprecationWarning(
                        Level.WARN,
                        "this request accesses system indices: [.external-sys-idx], "
                            + "but in a future major version, direct access to system indices will be prevented by default"
                    )
                );
            }
        }
        // product origin = stack-component
        {
            try (ThreadContext.StoredContext ignore = threadContext.stashContext()) {
                threadContext.putHeader(SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, Boolean.TRUE.toString());
                threadContext.putHeader(EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, "stack-component");
                SearchRequest request = new SearchRequest(".external-*");
                String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request);
                assertThat(indexNames, arrayContaining(".external-sys-idx"));
                assertWarnings(); // allowed origin: no deprecation warning
            }
        }
        {
            try (ThreadContext.StoredContext ignore = threadContext.stashContext()) {
                threadContext.putHeader(SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, Boolean.TRUE.toString());
                threadContext.putHeader(EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, "stack-component");
                SearchRequest request = new SearchRequest(".external-sys-idx");
                String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request);
                assertThat(indexNames, arrayContaining(".external-sys-idx"));
                assertWarnings();
            }
        }
        // product origin = other
        {
            try (ThreadContext.StoredContext ignore = threadContext.stashContext()) {
                threadContext.putHeader(SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, Boolean.TRUE.toString());
                threadContext.putHeader(EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, "other");
                SearchRequest request = new SearchRequest(".external-*");
                String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request);
                assertThat(indexNames, arrayContaining(".external-sys-idx"));
                assertWarnings();
            }
        }
        {
            try (ThreadContext.StoredContext ignore = threadContext.stashContext()) {
                threadContext.putHeader(SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, Boolean.TRUE.toString());
                threadContext.putHeader(EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, "other");
                SearchRequest request = new SearchRequest(".external-sys-idx");
                String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request);
                assertThat(indexNames, arrayContaining(".external-sys-idx"));
                assertWarnings();
            }
        }
    }
public void testConcreteIndicesPreservesOrdering() {
epochMillis = 1582761600L; // set to a date known to fail without #65027
final String dataStreamName = "my-data-stream";
IndexMetadata index1 = createBackingIndex(dataStreamName, 1, epochMillis).build();
IndexMetadata index2 = createBackingIndex(dataStreamName, 2, epochMillis).build();
ProjectMetadata project = ProjectMetadata.builder(Metadata.DEFAULT_PROJECT_ID)
.put(index1, false)
.put(index2, false)
.put(newInstance(dataStreamName, List.of(index1.getIndex(), index2.getIndex())))
.build();
{
IndicesOptions indicesOptions = IndicesOptions.STRICT_EXPAND_OPEN;
Index[] result = indexNameExpressionResolver.concreteIndices(project, indicesOptions, true, dataStreamName);
assertThat(result.length, equalTo(2));
assertThat(result[0].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStreamName, 1, epochMillis)));
assertThat(result[1].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStreamName, 2, epochMillis)));
}
}
/**
 * Resolution of a concrete data stream name to its backing indices (via
 * {@code concreteIndices}) and to its write index (via {@code concreteWriteIndex}),
 * toggling the {@code includeDataStreams} flag and various {@link IndicesOptions}
 * combinations. When data streams are excluded, the name must either throw
 * {@code IndexNotFoundException} or resolve to nothing, depending on leniency.
 */
public void testDataStreams() {
    final String dataStreamName = "my-data-stream";
    IndexMetadata index1 = createBackingIndex(dataStreamName, 1, epochMillis).build();
    IndexMetadata index2 = createBackingIndex(dataStreamName, 2, epochMillis).build();
    ProjectMetadata project = ProjectMetadata.builder(Metadata.DEFAULT_PROJECT_ID)
        .put(index1, false)
        .put(index2, false)
        .put(newInstance(dataStreamName, List.of(index1.getIndex(), index2.getIndex())))
        .build();
    {
        // Data streams included: the stream name resolves to both backing indices, oldest generation first
        IndicesOptions indicesOptions = IndicesOptions.STRICT_EXPAND_OPEN;
        Index[] result = indexNameExpressionResolver.concreteIndices(project, indicesOptions, true, "my-data-stream");
        assertThat(result.length, equalTo(2));
        assertThat(result[0].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStreamName, 1, epochMillis)));
        assertThat(result[1].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStreamName, 2, epochMillis)));
    }
    {
        // Ignore data streams, allow no indices and expand wildcards
        IndicesOptions indicesOptions = IndicesOptions.STRICT_EXPAND_OPEN;
        Exception e = expectThrows(
            IndexNotFoundException.class,
            () -> indexNameExpressionResolver.concreteIndices(project, indicesOptions, false, "my-data-stream")
        );
        assertThat(e.getMessage(), equalTo("no such index [my-data-stream]"));
    }
    {
        // Ignore data streams and DO NOT expand wildcards
        IndicesOptions indicesOptions = IndicesOptions.builder().wildcardOptions(doNotExpandWildcards()).build();
        Exception e = expectThrows(
            IndexNotFoundException.class,
            () -> indexNameExpressionResolver.concreteIndices(project, indicesOptions, false, "my-data-stream")
        );
        assertThat(e.getMessage(), equalTo("no such index [my-data-stream]"));
    }
    {
        // Ignore data streams, allow no indices and ignore unavailable
        IndicesOptions indicesOptions = IndicesOptions.LENIENT_EXPAND_OPEN;
        Index[] result = indexNameExpressionResolver.concreteIndices(project, indicesOptions, false, "my-data-stream");
        assertThat(result.length, equalTo(0));
    }
    {
        // Ignore data streams, allow no indices, ignore unavailable and DO NOT expand wildcards
        IndicesOptions indicesOptions = IndicesOptions.builder()
            .concreteTargetOptions(IndicesOptions.ConcreteTargetOptions.ALLOW_UNAVAILABLE_TARGETS)
            .wildcardOptions(doNotExpandWildcards())
            .build();
        Index[] result = indexNameExpressionResolver.concreteIndices(project, indicesOptions, false, "my-data-stream");
        assertThat(result.length, equalTo(0));
    }
    {
        // Write index of the data stream is the most recent generation (generation 2)
        IndicesOptions indicesOptions = IndicesOptions.STRICT_EXPAND_OPEN;
        Index result = indexNameExpressionResolver.concreteWriteIndex(project, indicesOptions, "my-data-stream", false, true);
        assertThat(result.getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStreamName, 2, epochMillis)));
    }
    {
        // same as above but don't expand wildcards
        IndicesOptions indicesOptions = IndicesOptions.builder().wildcardOptions(doNotExpandWildcards()).build();
        Index result = indexNameExpressionResolver.concreteWriteIndex(project, indicesOptions, "my-data-stream", false, true);
        assertThat(result.getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStreamName, 2, epochMillis)));
    }
    {
        // Ignore data streams
        IndicesOptions indicesOptions = IndicesOptions.builder()
            .wildcardOptions(IndicesOptions.WildcardOptions.builder().allowEmptyExpressions(false).build())
            .build();
        Exception e = expectThrows(
            IndexNotFoundException.class,
            () -> indexNameExpressionResolver.concreteWriteIndex(project, indicesOptions, "my-data-stream", true, false)
        );
        assertThat(e.getMessage(), equalTo("no such index [my-data-stream]"));
    }
    {
        // same as above but don't expand wildcards
        IndicesOptions indicesOptions = IndicesOptions.builder().wildcardOptions(doNotExpandWildcards(false)).build();
        Exception e = expectThrows(
            IndexNotFoundException.class,
            () -> indexNameExpressionResolver.concreteWriteIndex(project, indicesOptions, "my-data-stream", true, false)
        );
        assertThat(e.getMessage(), equalTo("no such index [my-data-stream]"));
    }
    {
        // Ignore data streams and allow no indices
        IndicesOptions indicesOptions = IndicesOptions.STRICT_EXPAND_OPEN;
        Exception e = expectThrows(
            IndexNotFoundException.class,
            () -> indexNameExpressionResolver.concreteWriteIndex(project, indicesOptions, "my-data-stream", false, false)
        );
        assertThat(e.getMessage(), equalTo("no such index [my-data-stream]"));
    }
    {
        // same as above but don't expand wildcards
        IndicesOptions indicesOptions = IndicesOptions.builder().wildcardOptions(doNotExpandWildcards()).build();
        Exception e = expectThrows(
            IndexNotFoundException.class,
            () -> indexNameExpressionResolver.concreteWriteIndex(project, indicesOptions, "my-data-stream", false, false)
        );
        assertThat(e.getMessage(), equalTo("no such index [my-data-stream]"));
    }
    {
        // Ignore data streams, allow no indices and ignore unavailable
        IndicesOptions indicesOptions = IndicesOptions.builder()
            .concreteTargetOptions(IndicesOptions.ConcreteTargetOptions.ALLOW_UNAVAILABLE_TARGETS)
            .build();
        Exception e = expectThrows(
            IndexNotFoundException.class,
            () -> indexNameExpressionResolver.concreteWriteIndex(project, indicesOptions, "my-data-stream", false, false)
        );
        assertThat(e.getMessage(), equalTo("no such index [my-data-stream]"));
    }
    {
        // same as above but don't expand wildcards
        IndicesOptions indicesOptions = IndicesOptions.builder()
            .concreteTargetOptions(IndicesOptions.ConcreteTargetOptions.ALLOW_UNAVAILABLE_TARGETS)
            .wildcardOptions(doNotExpandWildcards())
            .build();
        Exception e = expectThrows(
            IndexNotFoundException.class,
            () -> indexNameExpressionResolver.concreteWriteIndex(project, indicesOptions, "my-data-stream", false, false)
        );
        assertThat(e.getMessage(), equalTo("no such index [my-data-stream]"));
    }
}
/**
 * Resolution against a data stream that has both backing indices and failure-store
 * indices. Covers the {@code ::data} / {@code ::failures} selector syntax (exact names
 * and wildcards), options that include failure indices implicitly, and the error raised
 * when a selector is supplied while selectors are disallowed.
 */
public void testDataStreamsWithFailureStore() {
    final String dataStreamName = "my-data-stream";
    IndexMetadata index1 = createBackingIndex(dataStreamName, 1, epochMillis).build();
    IndexMetadata index2 = createBackingIndex(dataStreamName, 2, epochMillis).build();
    IndexMetadata failureIndex1 = createFailureStore(dataStreamName, 1, epochMillis).build();
    IndexMetadata failureIndex2 = createFailureStore(dataStreamName, 2, epochMillis).build();
    IndexMetadata otherIndex = indexBuilder("my-other-index", Settings.EMPTY).state(State.OPEN).build();
    ProjectMetadata project = ProjectMetadata.builder(Metadata.DEFAULT_PROJECT_ID)
        .put(index1, false)
        .put(index2, false)
        .put(failureIndex1, false)
        .put(failureIndex2, false)
        .put(otherIndex, false)
        .put(
            newInstance(
                dataStreamName,
                List.of(index1.getIndex(), index2.getIndex()),
                List.of(failureIndex1.getIndex(), failureIndex2.getIndex())
            )
        )
        .build();
    // Test default with an exact data stream name
    {
        IndicesOptions indicesOptions = IndicesOptions.STRICT_EXPAND_OPEN;
        Index[] result = indexNameExpressionResolver.concreteIndices(project, indicesOptions, true, "my-data-stream");
        assertThat(result.length, equalTo(2));
        assertThat(result[0].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStreamName, 1, epochMillis)));
        assertThat(result[1].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStreamName, 2, epochMillis)));
    }
    // Test default with an exact data stream name and include failures true
    {
        IndicesOptions indicesOptions = IndicesOptions.STRICT_EXPAND_OPEN_CLOSED_HIDDEN_FAILURE_NO_SELECTORS;
        Index[] result = indexNameExpressionResolver.concreteIndices(project, indicesOptions, true, "my-data-stream");
        assertThat(result.length, equalTo(4));
        assertThat(result[0].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStreamName, 1, epochMillis)));
        assertThat(result[1].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStreamName, 2, epochMillis)));
        assertThat(result[2].getName(), equalTo(DataStream.getDefaultFailureStoreName(dataStreamName, 1, epochMillis)));
        assertThat(result[3].getName(), equalTo(DataStream.getDefaultFailureStoreName(dataStreamName, 2, epochMillis)));
    }
    // Test explicit include failure store with an exact data stream name
    {
        IndicesOptions indicesOptions = IndicesOptions.STRICT_EXPAND_OPEN;
        Index[] result = indexNameExpressionResolver.concreteIndices(
            project,
            indicesOptions,
            true,
            "my-data-stream::data",
            "my-data-stream::failures"
        );
        assertThat(result.length, equalTo(4));
        assertThat(result[0].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStreamName, 1, epochMillis)));
        assertThat(result[1].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStreamName, 2, epochMillis)));
        assertThat(result[2].getName(), equalTo(DataStream.getDefaultFailureStoreName(dataStreamName, 1, epochMillis)));
        assertThat(result[3].getName(), equalTo(DataStream.getDefaultFailureStoreName(dataStreamName, 2, epochMillis)));
    }
    // Test explicit include failure store while not allowing selectors
    // We expect an error because selectors are disabled and one was provided
    {
        IndicesOptions indicesOptions = IndicesOptions.builder(IndicesOptions.STRICT_EXPAND_OPEN)
            .gatekeeperOptions(IndicesOptions.GatekeeperOptions.builder().allowSelectors(false).build())
            .build();
        expectThrows(
            IllegalArgumentException.class,
            () -> indexNameExpressionResolver.concreteIndices(project, indicesOptions, true, "my-data-stream::failures")
        );
    }
    // Test explicitly selecting only failure store with an exact data stream name
    {
        IndicesOptions indicesOptions = IndicesOptions.STRICT_EXPAND_OPEN;
        Index[] result = indexNameExpressionResolver.concreteIndices(project, indicesOptions, true, "my-data-stream::failures");
        assertThat(result.length, equalTo(2));
        assertThat(result[0].getName(), equalTo(DataStream.getDefaultFailureStoreName(dataStreamName, 1, epochMillis)));
        assertThat(result[1].getName(), equalTo(DataStream.getDefaultFailureStoreName(dataStreamName, 2, epochMillis)));
    }
    // Test default without any expressions
    {
        IndicesOptions indicesOptions = IndicesOptions.STRICT_EXPAND_OPEN;
        Index[] result = indexNameExpressionResolver.concreteIndices(project, indicesOptions, true);
        assertThat(result.length, equalTo(3));
        List<String> indexNames = Arrays.stream(result).map(Index::getName).toList();
        assertThat(
            indexNames,
            containsInAnyOrder(
                DataStream.getDefaultBackingIndexName(dataStreamName, 2, epochMillis),
                DataStream.getDefaultBackingIndexName(dataStreamName, 1, epochMillis),
                otherIndex.getIndex().getName()
            )
        );
    }
    // Test default without any expressions and include failures
    {
        IndicesOptions indicesOptions = IndicesOptions.builder()
            .gatekeeperOptions(IndicesOptions.GatekeeperOptions.builder().allowSelectors(false).includeFailureIndices(true).build())
            .build();
        Index[] result = indexNameExpressionResolver.concreteIndices(project, indicesOptions, true);
        assertThat(result.length, equalTo(5));
        List<String> indexNames = Arrays.stream(result).map(Index::getName).toList();
        assertThat(
            indexNames,
            containsInAnyOrder(
                DataStream.getDefaultBackingIndexName(dataStreamName, 2, epochMillis),
                DataStream.getDefaultBackingIndexName(dataStreamName, 1, epochMillis),
                DataStream.getDefaultFailureStoreName(dataStreamName, 1, epochMillis),
                DataStream.getDefaultFailureStoreName(dataStreamName, 2, epochMillis),
                otherIndex.getIndex().getName()
            )
        );
    }
    // Test default with wildcard expression
    {
        IndicesOptions indicesOptions = IndicesOptions.STRICT_EXPAND_OPEN;
        Index[] result = indexNameExpressionResolver.concreteIndices(project, indicesOptions, true, "my-*");
        assertThat(result.length, equalTo(3));
        List<String> indexNames = Arrays.stream(result).map(Index::getName).toList();
        assertThat(
            indexNames,
            containsInAnyOrder(
                DataStream.getDefaultBackingIndexName(dataStreamName, 2, epochMillis),
                DataStream.getDefaultBackingIndexName(dataStreamName, 1, epochMillis),
                otherIndex.getIndex().getName()
            )
        );
    }
    // Test explicit include failure store with wildcard expression
    {
        IndicesOptions indicesOptions = IndicesOptions.STRICT_EXPAND_OPEN;
        Index[] result = indexNameExpressionResolver.concreteIndices(project, indicesOptions, true, "my-*::data", "my-*::failures");
        assertThat(result.length, equalTo(5));
        List<String> indexNames = Arrays.stream(result).map(Index::getName).toList();
        assertThat(
            indexNames,
            containsInAnyOrder(
                DataStream.getDefaultBackingIndexName(dataStreamName, 2, epochMillis),
                DataStream.getDefaultBackingIndexName(dataStreamName, 1, epochMillis),
                DataStream.getDefaultFailureStoreName(dataStreamName, 2, epochMillis),
                DataStream.getDefaultFailureStoreName(dataStreamName, 1, epochMillis),
                otherIndex.getIndex().getName()
            )
        );
    }
    // Test explicit only failure store with wildcard expression
    {
        IndicesOptions indicesOptions = IndicesOptions.STRICT_EXPAND_OPEN;
        Index[] result = indexNameExpressionResolver.concreteIndices(project, indicesOptions, true, "my-*::failures");
        assertThat(result.length, equalTo(2));
        List<String> indexNames = Arrays.stream(result).map(Index::getName).toList();
        assertThat(
            indexNames,
            containsInAnyOrder(
                DataStream.getDefaultFailureStoreName(dataStreamName, 2, epochMillis),
                DataStream.getDefaultFailureStoreName(dataStreamName, 1, epochMillis)
            )
        );
    }
}
/**
 * Resolution of data stream aliases: alias1 points at streams 1 and 2 (stream 2 is
 * flagged via the boolean parameter — presumably as the alias' write data stream,
 * confirmed by the write-index assertions below), alias2 points at stream 2 only, and
 * alias3 points at stream 3 with a filter. Aliases resolve to backing indices only when
 * {@code includeDataStreams} is {@code true}; write-index resolution follows alias1 to
 * the latest generation of stream 2.
 */
public void testDataStreamAliases() {
    String dataStream1 = "my-data-stream-1";
    IndexMetadata index1 = createBackingIndex(dataStream1, 1, epochMillis).build();
    IndexMetadata index2 = createBackingIndex(dataStream1, 2, epochMillis).build();
    String dataStream2 = "my-data-stream-2";
    IndexMetadata index3 = createBackingIndex(dataStream2, 1, epochMillis).build();
    IndexMetadata index4 = createBackingIndex(dataStream2, 2, epochMillis).build();
    String dataStream3 = "my-data-stream-3";
    IndexMetadata index5 = createBackingIndex(dataStream3, 1, epochMillis).build();
    IndexMetadata index6 = createBackingIndex(dataStream3, 2, epochMillis).build();
    String dataStreamAlias1 = "my-alias1";
    String dataStreamAlias2 = "my-alias2";
    String dataStreamAlias3 = "my-alias3";
    ProjectMetadata.Builder projectBuilder = ProjectMetadata.builder(Metadata.DEFAULT_PROJECT_ID)
        .put(index1, false)
        .put(index2, false)
        .put(index3, false)
        .put(index4, false)
        .put(index5, false)
        .put(index6, false)
        .put(newInstance(dataStream1, List.of(index1.getIndex(), index2.getIndex())))
        .put(newInstance(dataStream2, List.of(index3.getIndex(), index4.getIndex())))
        .put(newInstance(dataStream3, List.of(index5.getIndex(), index6.getIndex())));
    projectBuilder.put(dataStreamAlias1, dataStream1, null, null);
    projectBuilder.put(dataStreamAlias1, dataStream2, true, null);
    projectBuilder.put(dataStreamAlias2, dataStream2, null, null);
    projectBuilder.put(dataStreamAlias3, dataStream3, null, "{\"term\":{\"year\":2021}}");
    ProjectMetadata project = projectBuilder.build();
    {
        // alias1 spans streams 1 and 2, so it resolves to all four of their backing indices
        IndicesOptions indicesOptions = IndicesOptions.builder()
            .wildcardOptions(IndicesOptions.WildcardOptions.builder().matchOpen(randomBoolean()))
            .build();
        Index[] result = indexNameExpressionResolver.concreteIndices(project, indicesOptions, true, dataStreamAlias1)
        assertThat(result, arrayContainingInAnyOrder(index1.getIndex(), index2.getIndex(), index3.getIndex(), index4.getIndex()));
    }
    {
        // alias2 spans only stream 2
        IndicesOptions indicesOptions = IndicesOptions.builder()
            .wildcardOptions(IndicesOptions.WildcardOptions.builder().matchOpen(randomBoolean()))
            .build();
        Index[] result = indexNameExpressionResolver.concreteIndices(project, indicesOptions, true, dataStreamAlias2);
        assertThat(result, arrayContainingInAnyOrder(index3.getIndex(), index4.getIndex()));
    }
    {
        // alias3 (filtered) still resolves to all backing indices of stream 3
        IndicesOptions indicesOptions = IndicesOptions.builder()
            .wildcardOptions(IndicesOptions.WildcardOptions.builder().matchOpen(randomBoolean()))
            .build();
        Index[] result = indexNameExpressionResolver.concreteIndices(project, indicesOptions, true, dataStreamAlias3);
        assertThat(result, arrayContainingInAnyOrder(index5.getIndex(), index6.getIndex()));
    }
    {
        // With includeDataStreams == false a data stream alias is treated as missing
        IndicesOptions indicesOptions = IndicesOptions.STRICT_EXPAND_OPEN;
        Exception e = expectThrows(
            IndexNotFoundException.class,
            () -> indexNameExpressionResolver.concreteIndices(project, indicesOptions, false, dataStreamAlias1)
        );
        assertThat(e.getMessage(), equalTo("no such index [" + dataStreamAlias1 + "]"));
    }
    {
        // same as above but DO NOT expand wildcards
        IndicesOptions indicesOptions = IndicesOptions.builder().wildcardOptions(doNotExpandWildcards()).build();
        Exception e = expectThrows(
            IndexNotFoundException.class,
            () -> indexNameExpressionResolver.concreteIndices(project, indicesOptions, false, dataStreamAlias1)
        );
        assertThat(e.getMessage(), equalTo("no such index [" + dataStreamAlias1 + "]"));
    }
    {
        IndicesOptions indicesOptions = IndicesOptions.STRICT_EXPAND_OPEN;
        Exception e = expectThrows(
            IndexNotFoundException.class,
            () -> indexNameExpressionResolver.concreteIndices(project, indicesOptions, false, dataStreamAlias2)
        );
        assertThat(e.getMessage(), equalTo("no such index [" + dataStreamAlias2 + "]"));
    }
    {
        // same as above but DO NOT expand wildcards
        IndicesOptions indicesOptions = IndicesOptions.builder().wildcardOptions(doNotExpandWildcards()).build();
        Exception e = expectThrows(
            IndexNotFoundException.class,
            () -> indexNameExpressionResolver.concreteIndices(project, indicesOptions, false, dataStreamAlias2)
        );
        assertThat(e.getMessage(), equalTo("no such index [" + dataStreamAlias2 + "]"));
    }
    {
        IndicesOptions indicesOptions = IndicesOptions.STRICT_EXPAND_OPEN;
        Exception e = expectThrows(
            IndexNotFoundException.class,
            () -> indexNameExpressionResolver.concreteIndices(project, indicesOptions, false, dataStreamAlias3)
        );
        assertThat(e.getMessage(), equalTo("no such index [" + dataStreamAlias3 + "]"));
    }
    {
        // same as above but DO NOT expand wildcards
        IndicesOptions indicesOptions = IndicesOptions.builder().wildcardOptions(doNotExpandWildcards()).build();
        Exception e = expectThrows(
            IndexNotFoundException.class,
            () -> indexNameExpressionResolver.concreteIndices(project, indicesOptions, false, dataStreamAlias3)
        );
        assertThat(e.getMessage(), equalTo("no such index [" + dataStreamAlias3 + "]"));
    }
    {
        // Wildcard over all three aliases with data streams included resolves every backing index
        IndicesOptions indicesOptions = IndicesOptions.STRICT_EXPAND_OPEN;
        Index[] result = indexNameExpressionResolver.concreteIndices(project, indicesOptions, true, "my-alias*");
        assertThat(
            result,
            arrayContainingInAnyOrder(
                index1.getIndex(),
                index2.getIndex(),
                index3.getIndex(),
                index4.getIndex(),
                index5.getIndex(),
                index6.getIndex()
            )
        );
    }
    {
        // Same wildcard with data streams excluded silently matches nothing
        IndicesOptions indicesOptions = IndicesOptions.STRICT_EXPAND_OPEN;
        Index[] result = indexNameExpressionResolver.concreteIndices(project, indicesOptions, false, "my-alias*");
        assertThat(result, arrayWithSize(0));
    }
    {
        // Write resolution through alias1 lands on the latest generation of stream 2
        IndicesOptions indicesOptions = IndicesOptions.STRICT_EXPAND_OPEN;
        Index result = indexNameExpressionResolver.concreteWriteIndex(project, indicesOptions, dataStreamAlias1, false, true);
        assertThat(result, notNullValue());
        assertThat(result.getName(), backingIndexEqualTo(dataStream2, 2));
    }
    {
        // same as above but DO NOT expand wildcards
        IndicesOptions indicesOptions = IndicesOptions.builder().wildcardOptions(doNotExpandWildcards()).build();
        Index result = indexNameExpressionResolver.concreteWriteIndex(project, indicesOptions, dataStreamAlias1, false, true);
        assertThat(result, notNullValue());
        assertThat(result.getName(), backingIndexEqualTo(dataStream2, 2));
    }
}
/**
 * Wildcard (and {@code _all} / empty) expressions resolve to data stream backing indices
 * only when {@code includeDataStreams} is set; the overload without the flag skips data
 * streams entirely.
 */
public void testDataStreamsWithWildcardExpression() {
    final String mysqlDataStream = "logs-mysql";
    final String redisDataStream = "logs-redis";
    IndexMetadata mysqlBacking1 = createBackingIndex(mysqlDataStream, 1, epochMillis).build();
    IndexMetadata mysqlBacking2 = createBackingIndex(mysqlDataStream, 2, epochMillis).build();
    IndexMetadata redisBacking1 = createBackingIndex(redisDataStream, 1, epochMillis).build();
    IndexMetadata redisBacking2 = createBackingIndex(redisDataStream, 2, epochMillis).build();
    ProjectMetadata project = ProjectMetadata.builder(Metadata.DEFAULT_PROJECT_ID)
        .put(mysqlBacking1, false)
        .put(mysqlBacking2, false)
        .put(redisBacking1, false)
        .put(redisBacking2, false)
        .put(newInstance(mysqlDataStream, List.of(mysqlBacking1.getIndex(), mysqlBacking2.getIndex())))
        .put(newInstance(redisDataStream, List.of(redisBacking1.getIndex(), redisBacking2.getIndex())))
        .build();
    // Every case below uses the same strict, open-expanding options.
    final IndicesOptions options = IndicesOptions.STRICT_EXPAND_OPEN;
    {
        // "logs-*" with data streams included matches the backing indices of both streams
        Index[] resolved = indexNameExpressionResolver.concreteIndices(project, options, true, "logs-*");
        Arrays.sort(resolved, Index.COMPARE_BY_NAME);
        assertThat(resolved.length, equalTo(4));
        assertThat(resolved[0].getName(), equalTo(DataStream.getDefaultBackingIndexName(mysqlDataStream, 1, epochMillis)));
        assertThat(resolved[1].getName(), equalTo(DataStream.getDefaultBackingIndexName(mysqlDataStream, 2, epochMillis)));
        assertThat(resolved[2].getName(), equalTo(DataStream.getDefaultBackingIndexName(redisDataStream, 1, epochMillis)));
        assertThat(resolved[3].getName(), equalTo(DataStream.getDefaultBackingIndexName(redisDataStream, 2, epochMillis)));
    }
    {
        // "*", "_all" and an empty expression list all behave the same
        Index[] resolved = indexNameExpressionResolver.concreteIndices(
            project,
            options,
            true,
            randomFrom(new String[] { "*" }, new String[] { "_all" }, new String[0])
        );
        Arrays.sort(resolved, Index.COMPARE_BY_NAME);
        assertThat(resolved.length, equalTo(4));
        assertThat(resolved[0].getName(), equalTo(DataStream.getDefaultBackingIndexName(mysqlDataStream, 1, epochMillis)));
        assertThat(resolved[1].getName(), equalTo(DataStream.getDefaultBackingIndexName(mysqlDataStream, 2, epochMillis)));
        assertThat(resolved[2].getName(), equalTo(DataStream.getDefaultBackingIndexName(redisDataStream, 1, epochMillis)));
        assertThat(resolved[3].getName(), equalTo(DataStream.getDefaultBackingIndexName(redisDataStream, 2, epochMillis)));
    }
    {
        // A narrower wildcard only matches the mysql stream
        Index[] resolved = indexNameExpressionResolver.concreteIndices(project, options, true, "logs-m*");
        Arrays.sort(resolved, Index.COMPARE_BY_NAME);
        assertThat(resolved.length, equalTo(2));
        assertThat(resolved[0].getName(), equalTo(DataStream.getDefaultBackingIndexName(mysqlDataStream, 1, epochMillis)));
        assertThat(resolved[1].getName(), equalTo(DataStream.getDefaultBackingIndexName(mysqlDataStream, 2, epochMillis)));
    }
    {
        // The overload without includeDataStreams resolves no backing indices at all
        Index[] resolved = indexNameExpressionResolver.concreteIndices(project, options, "logs-*");
        assertThat(resolved.length, equalTo(0));
    }
}
/**
 * With {@code STRICT_EXPAND_OPEN}, wildcard resolution over data streams skips closed
 * backing indices and returns only the open ones.
 */
public void testDataStreamsWithClosedBackingIndicesAndWildcardExpressions() {
    final String mysqlDataStream = "logs-mysql";
    final String redisDataStream = "logs-redis";
    IndexMetadata closedMysqlBacking = createBackingIndex(mysqlDataStream, 1, epochMillis).state(State.CLOSE).build();
    IndexMetadata openMysqlBacking = createBackingIndex(mysqlDataStream, 2, epochMillis).build();
    IndexMetadata closedRedisBacking = createBackingIndex(redisDataStream, 1, epochMillis).state(State.CLOSE).build();
    IndexMetadata openRedisBacking = createBackingIndex(redisDataStream, 2, epochMillis).build();
    ProjectMetadata project = ProjectMetadata.builder(Metadata.DEFAULT_PROJECT_ID)
        .put(closedMysqlBacking, false)
        .put(openMysqlBacking, false)
        .put(closedRedisBacking, false)
        .put(openRedisBacking, false)
        .put(newInstance(mysqlDataStream, List.of(closedMysqlBacking.getIndex(), openMysqlBacking.getIndex())))
        .put(newInstance(redisDataStream, List.of(closedRedisBacking.getIndex(), openRedisBacking.getIndex())))
        .build();
    // Both a prefix wildcard and the match-all wildcard must omit the closed generation-1 indices
    for (String expression : List.of("logs-*", "*")) {
        Index[] resolved = indexNameExpressionResolver.concreteIndices(project, IndicesOptions.STRICT_EXPAND_OPEN, true, expression);
        Arrays.sort(resolved, Index.COMPARE_BY_NAME);
        assertThat(resolved.length, equalTo(2));
        assertThat(resolved[0].getName(), equalTo(DataStream.getDefaultBackingIndexName(mysqlDataStream, 2, epochMillis)));
        assertThat(resolved[1].getName(), equalTo(DataStream.getDefaultBackingIndexName(redisDataStream, 2, epochMillis)));
    }
}
/**
 * A wildcard matching both a data stream and a regular aliased index resolves to the
 * stream's backing indices plus the concrete index (the alias itself contributes no
 * extra entry).
 */
public void testDataStreamsWithRegularIndexAndAlias() {
    final String dataStreamName = "logs-foobar";
    IndexMetadata backing1 = createBackingIndex(dataStreamName, 1, epochMillis).build();
    IndexMetadata backing2 = createBackingIndex(dataStreamName, 2, epochMillis).build();
    IndexMetadata regularIndex = IndexMetadata.builder("logs-foobarbaz-0")
        .settings(settings(IndexVersion.current()))
        .numberOfShards(1)
        .numberOfReplicas(1)
        .putAlias(new AliasMetadata.Builder("logs-foobarbaz"))
        .build();
    ProjectMetadata project = ProjectMetadata.builder(Metadata.DEFAULT_PROJECT_ID)
        .put(backing1, false)
        .put(backing2, false)
        .put(regularIndex, false)
        .put(newInstance(dataStreamName, List.of(backing1.getIndex(), backing2.getIndex())))
        .build();
    Index[] resolved = indexNameExpressionResolver.concreteIndices(
        project,
        IndicesOptions.strictExpandOpenAndForbidClosedIgnoreThrottled(),
        true,
        "logs-*"
    );
    Arrays.sort(resolved, Index.COMPARE_BY_NAME);
    assertThat(resolved.length, equalTo(3));
    assertThat(resolved[0].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStreamName, 1, epochMillis)));
    assertThat(resolved[1].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStreamName, 2, epochMillis)));
    assertThat(resolved[2].getName(), equalTo("logs-foobarbaz-0"));
}
/**
 * A hidden data stream's backing indices are only returned when hidden resources are
 * expanded; with open-only expansion the wildcard matches just the regular index.
 */
public void testHiddenDataStreams() {
    final String hiddenStreamName = "logs-foobar";
    IndexMetadata backing1 = createBackingIndex(hiddenStreamName, 1, epochMillis).build();
    IndexMetadata backing2 = createBackingIndex(hiddenStreamName, 2, epochMillis).build();
    IndexMetadata visibleIndex = IndexMetadata.builder("logs-foobarbaz-0")
        .settings(settings(IndexVersion.current()))
        .numberOfShards(1)
        .numberOfReplicas(1)
        .build();
    ProjectMetadata project = ProjectMetadata.builder(Metadata.DEFAULT_PROJECT_ID)
        .put(backing1, false)
        .put(backing2, false)
        .put(visibleIndex, false)
        .put(
            DataStream.builder(hiddenStreamName, List.of(backing1.getIndex(), backing2.getIndex()))
                .setGeneration(2)
                .setMetadata(Map.of())
                .setHidden(true)
                .build()
        )
        .build();
    // Expanding hidden resources surfaces the hidden stream's backing indices as well
    Index[] withHidden = indexNameExpressionResolver.concreteIndices(project, IndicesOptions.strictExpandHidden(), true, "logs-*");
    assertThat(withHidden, arrayContainingInAnyOrder(backing1.getIndex(), backing2.getIndex(), visibleIndex.getIndex()));
    // Open-only expansion sees just the regular index
    Index[] withoutHidden = indexNameExpressionResolver.concreteIndices(project, IndicesOptions.strictExpandOpen(), true, "logs-*");
    assertThat(withoutHidden, arrayContaining(visibleIndex.getIndex()));
}
/**
 * {@code dataStreams} and {@code dataStreamNames}: exact names, wildcards, exclusions
 * ({@code -name}), and {@code ::data}/{@code ::failures} selectors. Backing index names
 * and plain index names never match as data streams; exclusions are applied after the
 * matching wildcard.
 */
public void testDataStreamsNames() {
    final String dataStream1 = "logs-foobar";
    final String dataStream2 = "other-foobar";
    IndexMetadata index1 = createBackingIndex(dataStream1, 1).build();
    IndexMetadata index2 = createBackingIndex(dataStream1, 2).build();
    IndexMetadata justAnIndex = IndexMetadata.builder("logs-foobarbaz-0")
        .settings(settings(IndexVersion.current()))
        .numberOfShards(1)
        .numberOfReplicas(1)
        .putAlias(new AliasMetadata.Builder("logs-foobarbaz"))
        .build();
    IndexMetadata index3 = createBackingIndex(dataStream2, 1).build();
    IndexMetadata index4 = createBackingIndex(dataStream2, 2).build();
    ProjectMetadata project = ProjectMetadata.builder(Metadata.DEFAULT_PROJECT_ID)
        .put(index1, false)
        .put(index2, false)
        .put(index3, false)
        .put(index4, false)
        .put(justAnIndex, false)
        .put(newInstance(dataStream1, List.of(index1.getIndex(), index2.getIndex())))
        .put(newInstance(dataStream2, List.of(index3.getIndex(), index4.getIndex())))
        .build();
    // Wildcard matching a single stream; "logs-foobarbaz-0" is a plain index and must not match
    List<ResolvedExpression> streams = indexNameExpressionResolver.dataStreams(project, IndicesOptions.lenientExpand(), "log*");
    List<String> names = indexNameExpressionResolver.dataStreamNames(project, IndicesOptions.lenientExpand(), "log*");
    assertEquals(Collections.singletonList(new ResolvedExpression(dataStream1, DATA)), streams);
    assertEquals(Collections.singletonList(dataStream1), names);
    // Exact data stream name
    streams = indexNameExpressionResolver.dataStreams(project, IndicesOptions.lenientExpand(), dataStream1);
    names = indexNameExpressionResolver.dataStreamNames(project, IndicesOptions.lenientExpand(), dataStream1);
    assertEquals(Collections.singletonList(new ResolvedExpression(dataStream1, DATA)), streams);
    assertEquals(Collections.singletonList(dataStream1), names);
    streams = indexNameExpressionResolver.dataStreams(project, IndicesOptions.lenientExpand(), "other*");
    names = indexNameExpressionResolver.dataStreamNames(project, IndicesOptions.lenientExpand(), "other*");
    assertEquals(Collections.singletonList(new ResolvedExpression(dataStream2, DATA)), streams);
    assertEquals(Collections.singletonList(dataStream2), names);
    // Suffix wildcard matches both streams
    streams = indexNameExpressionResolver.dataStreams(project, IndicesOptions.lenientExpand(), "*foobar");
    names = indexNameExpressionResolver.dataStreamNames(project, IndicesOptions.lenientExpand(), "*foobar");
    assertThat(streams, containsInAnyOrder(new ResolvedExpression(dataStream1, DATA), new ResolvedExpression(dataStream2, DATA)));
    assertThat(names, containsInAnyOrder(dataStream1, dataStream2));
    // Unknown name resolves to nothing under lenient options
    streams = indexNameExpressionResolver.dataStreams(project, IndicesOptions.lenientExpand(), "notmatched");
    names = indexNameExpressionResolver.dataStreamNames(project, IndicesOptions.lenientExpand(), "notmatched");
    assertThat(streams, empty());
    assertThat(names, empty());
    // A backing index name is not itself a data stream
    streams = indexNameExpressionResolver.dataStreams(project, IndicesOptions.lenientExpand(), index3.getIndex().getName());
    names = indexNameExpressionResolver.dataStreamNames(project, IndicesOptions.lenientExpand(), index3.getIndex().getName());
    assertThat(streams, empty());
    assertThat(names, empty());
    // Exclusion removes an already-matched stream
    streams = indexNameExpressionResolver.dataStreams(project, IndicesOptions.lenientExpand(), "*", "-logs-foobar");
    names = indexNameExpressionResolver.dataStreamNames(project, IndicesOptions.lenientExpand(), "*", "-logs-foobar");
    assertThat(streams, containsInAnyOrder(new ResolvedExpression(dataStream2, DATA)));
    assertThat(names, containsInAnyOrder(dataStream2));
    // Excluding everything yields an empty result
    streams = indexNameExpressionResolver.dataStreams(project, IndicesOptions.lenientExpand(), "*", "-*");
    names = indexNameExpressionResolver.dataStreamNames(project, IndicesOptions.lenientExpand(), "*", "-*");
    assertThat(streams, empty());
    assertThat(names, empty());
    streams = indexNameExpressionResolver.dataStreams(project, IndicesOptions.strictExpandOpenAndForbidClosed(), "*foobar");
    names = indexNameExpressionResolver.dataStreamNames(project, IndicesOptions.strictExpandOpenAndForbidClosed(), "*foobar");
    assertThat(streams, containsInAnyOrder(new ResolvedExpression(dataStream1, DATA), new ResolvedExpression(dataStream2, DATA)));
    assertThat(names, containsInAnyOrder(dataStream1, dataStream2));
    // Selectors produce one ResolvedExpression per (stream, selector) pair, but names stay deduplicated
    streams = indexNameExpressionResolver.dataStreams(project, IndicesOptions.lenientExpand(), "*foobar::data", "*foobar::failures");
    names = indexNameExpressionResolver.dataStreamNames(project, IndicesOptions.lenientExpand(), "*foobar::data", "*foobar::failures");
    assertThat(
        streams,
        containsInAnyOrder(
            new ResolvedExpression(dataStream1, DATA),
            new ResolvedExpression(dataStream1, FAILURES),
            new ResolvedExpression(dataStream2, DATA),
            new ResolvedExpression(dataStream2, FAILURES)
        )
    );
    assertThat(names, containsInAnyOrder(dataStream1, dataStream2));
}
/**
 * Literal names and date-math expressions may be mixed in a single request; the
 * resolved expressions come back in request order with the date-math placeholders
 * rendered against the context's "now".
 */
public void testDateMathMixedArray() {
    long now = System.currentTimeMillis();
    String dateMathIndex1 = ".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(now));
    String dateMathIndex2 = ".logstash-" + formatDate("uuuu.MM", dateFromMillis(now).withDayOfMonth(1));
    IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(
        ProjectMetadata.builder(randomProjectIdOrDefault())
            .put(indexBuilder("name1"))
            .put(indexBuilder("name2"))
            .put(indexBuilder(dateMathIndex1))
            .put(indexBuilder(dateMathIndex2))
            .build(),
        IndicesOptions.strictExpand(),
        now,
        SystemIndexAccessLevel.NONE,
        Predicates.never(),
        Predicates.never()
    );
    Collection<ResolvedExpression> resolved = IndexNameExpressionResolver.resolveExpressionsToResources(
        context,
        "name1",
        "<.marvel-{now/d}>",
        "name2",
        "<.logstash-{now/M{uuuu.MM}}>"
    );
    assertThat(resolved.size(), equalTo(4));
    assertThat(
        resolved,
        contains(
            new ResolvedExpression("name1", DATA),
            new ResolvedExpression(dateMathIndex1, DATA),
            new ResolvedExpression("name2", DATA),
            new ResolvedExpression(dateMathIndex2, DATA)
        )
    );
}
/**
 * Date math with a month-rounding expression: {@code <a-name-{now/M{yyyy-MM}}>}
 * evaluated at 2021-01-11T00:00Z renders as {@code a-name-2021-01}.
 */
public void testMathExpressionSupport() {
    Instant instant = LocalDate.of(2021, 1, 11).atStartOfDay().toInstant(ZoneOffset.UTC);
    String resolved = IndexNameExpressionResolver.resolveDateMathExpression("<a-name-{now/M{yyyy-MM}}>", instant.toEpochMilli());
    // assertEquals takes the expected value first; the original had the arguments reversed,
    // which produces a misleading "expected/actual" failure message
    assertEquals("a-name-2021-01", resolved);
}
/**
 * Same month-rounding date math as {@code testMathExpressionSupport}, but with an
 * instant from a previous year: 2020-12-02T00:00Z renders as {@code older-date-2020-12}.
 */
public void testMathExpressionSupportWithOlderDate() {
    Instant instant = LocalDate.of(2020, 12, 2).atStartOfDay().toInstant(ZoneOffset.UTC);
    final String indexName = "<older-date-{now/M{yyyy-MM}}>";
    String resolved = IndexNameExpressionResolver.resolveDateMathExpression(indexName, instant.toEpochMilli());
    // assertEquals takes the expected value first; the original had the arguments reversed
    assertEquals("older-date-2020-12", resolved);
}
/**
 * {@code cluster:index}-style expressions: rejected with an IllegalArgumentException
 * under one options combination, silently dropped under a more lenient one. A colon
 * inside a date-math expression must not be mistaken for a cross-cluster separator.
 */
public void testRemoteIndex() {
    ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()).build();
    // First flag false: the remote-looking expression is rejected outright
    IndicesOptions strictOptions = IndicesOptions.fromOptions(false, randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean());
    IndexNameExpressionResolver.Context strictContext = new IndexNameExpressionResolver.Context(
        project,
        strictOptions,
        SystemIndexAccessLevel.NONE
    );
    IllegalArgumentException iae = expectThrows(
        IllegalArgumentException.class,
        () -> indexNameExpressionResolver.concreteIndexNames(strictContext, "cluster:index", "local")
    );
    assertEquals(
        "Cross-cluster calls are not supported in this context but remote indices were requested: [cluster:index]",
        iae.getMessage()
    );
    // A colon inside date math does not trip the cross-cluster check; the expression simply
    // resolves to a missing index
    IndexNotFoundException infe = expectThrows(
        IndexNotFoundException.class,
        () -> indexNameExpressionResolver.concreteIndexNames(strictContext, "<datemath-{2001-01-01-13||+1h/h{yyyy-MM-dd-HH|-07:00}}>")
    );
    assertThat(infe.getMessage(), containsString("no such index [datemath-2001-01-01-14"));
    // With the first two flags true, the remote expression is silently skipped and nothing resolves
    IndicesOptions lenientOptions = IndicesOptions.fromOptions(true, true, randomBoolean(), randomBoolean(), randomBoolean());
    IndexNameExpressionResolver.Context lenientContext = new IndexNameExpressionResolver.Context(
        project,
        lenientOptions,
        SystemIndexAccessLevel.NONE
    );
    String[] resolvedNames = indexNameExpressionResolver.concreteIndexNames(lenientContext, "cluster:index", "local");
    assertEquals(0, resolvedNames.length);
}
public void testResolveWriteIndexAbstraction() {
ProjectMetadata project = DataStreamTestHelper.getProjectWithDataStreams(
List.of(new Tuple<>("logs-foobar", 1)),
List.of("my-index")
);
ProjectMetadata finalProject = ProjectMetadata.builder(project)
.put(IndexMetadata.builder(project.index("my-index")).putAlias(new AliasMetadata.Builder("my-alias")))
.build();
Function<String, List<DocWriteRequest<?>>> docWriteRequestsForName = (name) -> List.of(
new IndexRequest(name).opType(DocWriteRequest.OpType.INDEX),
new IndexRequest(name).opType(DocWriteRequest.OpType.CREATE),
new DeleteRequest(name),
new UpdateRequest(name, randomAlphaOfLength(8))
);
for (DocWriteRequest<?> request : docWriteRequestsForName.apply("logs-foobar")) {
if (request.opType() == DocWriteRequest.OpType.CREATE) {
IndexAbstraction result = indexNameExpressionResolver.resolveWriteIndexAbstraction(finalProject, request);
assertThat(result.getType(), equalTo(IndexAbstraction.Type.DATA_STREAM));
assertThat(result.getName(), equalTo("logs-foobar"));
} else {
IndexNotFoundException infe = expectThrows(
IndexNotFoundException.class,
() -> indexNameExpressionResolver.resolveWriteIndexAbstraction(finalProject, request)
);
assertThat(infe.toString(), containsString("logs-foobar"));
assertThat(infe.getMetadataKeys().contains(IndexNameExpressionResolver.EXCLUDED_DATA_STREAMS_KEY), is(true));
}
}
for (DocWriteRequest<?> request : docWriteRequestsForName.apply("my-index")) {
IndexAbstraction result = indexNameExpressionResolver.resolveWriteIndexAbstraction(finalProject, request);
assertThat(result.getName(), equalTo("my-index"));
assertThat(result.getType(), equalTo(IndexAbstraction.Type.CONCRETE_INDEX));
}
for (DocWriteRequest<?> request : docWriteRequestsForName.apply("my-alias")) {
IndexAbstraction result = indexNameExpressionResolver.resolveWriteIndexAbstraction(finalProject, request);
assertThat(result.getName(), equalTo("my-alias"));
assertThat(result.getType(), equalTo(IndexAbstraction.Type.ALIAS));
}
}
public void testResolveWriteIndexAbstractionNoWriteIndexForAlias() {
ProjectMetadata project1 = DataStreamTestHelper.getProjectWithDataStreams(
List.of(new Tuple<>("logs-foobar", 1)),
List.of("my-index", "my-index2")
);
ProjectMetadata project2 = ProjectMetadata.builder(project1)
.put(IndexMetadata.builder(project1.index("my-index")).putAlias(new AliasMetadata.Builder("my-alias")))
.put(IndexMetadata.builder(project1.index("my-index2")).putAlias(new AliasMetadata.Builder("my-alias")))
.build();
DocWriteRequest<?> request = new IndexRequest("my-alias");
var e = expectThrows(
IllegalArgumentException.class,
() -> indexNameExpressionResolver.resolveWriteIndexAbstraction(project2, request)
);
assertThat(
e.getMessage(),
equalTo(
"no write index is defined for alias [my-alias]. The write index may be explicitly disabled using is_write_index=false"
+ " or the alias points to multiple indices without one being designated as a write index"
)
);
}
public void testResolveWriteIndexAbstractionMissing() {
ProjectMetadata project = DataStreamTestHelper.getProjectWithDataStreams(
List.of(new Tuple<>("logs-foobar", 1)),
List.of("my-index")
);
DocWriteRequest<?> request = new IndexRequest("logs-my-index");
expectThrows(IndexNotFoundException.class, () -> indexNameExpressionResolver.resolveWriteIndexAbstraction(project, request));
}
public void testResolveWriteIndexAbstractionMultipleMatches() {
ProjectMetadata project = DataStreamTestHelper.getProjectWithDataStreams(List.of(), List.of("logs-foo", "logs-bar"));
DocWriteRequest<?> request = mock(DocWriteRequest.class);
when(request.index()).thenReturn("logs-*");
when(request.indicesOptions()).thenReturn(IndicesOptions.lenientExpandOpen());
when(request.opType()).thenReturn(DocWriteRequest.OpType.INDEX);
when(request.includeDataStreams()).thenReturn(true);
var e = expectThrows(
IllegalArgumentException.class,
() -> indexNameExpressionResolver.resolveWriteIndexAbstraction(project, request)
);
assertThat(
e.getMessage(),
equalTo("unable to return a single target as the provided expression and options got resolved to multiple targets")
);
}
public static IndexMetadata.Builder indexBuilder(String index) {
return indexBuilder(index, Settings.EMPTY);
}
private ProjectMetadata systemIndexTestClusterState() {
ProjectMetadata project = ProjectMetadata.builder(Metadata.DEFAULT_PROJECT_ID)
.put(indexBuilder(".ml-meta", SystemIndexDescriptor.DEFAULT_SETTINGS).state(State.OPEN).system(true))
.put(indexBuilder(".watches", SystemIndexDescriptor.DEFAULT_SETTINGS).state(State.OPEN).system(true))
.put(indexBuilder(".ml-stuff", SystemIndexDescriptor.DEFAULT_SETTINGS).state(State.OPEN).system(true))
.put(indexBuilder("some-other-index").state(State.OPEN))
.build();
SystemIndices systemIndices = new SystemIndices(
List.of(
new Feature(
"ml",
"ml indices",
List.of(
SystemIndexDescriptorUtils.createUnmanaged(".ml-meta*", "ml meta"),
SystemIndexDescriptorUtils.createUnmanaged(".ml-stuff*", "other ml")
)
),
new Feature("watcher", "watcher indices", List.of(SystemIndexDescriptorUtils.createUnmanaged(".watches*", "watches index")))
)
);
indexNameExpressionResolver = TestIndexNameExpressionResolver.newInstance(threadContext, systemIndices);
return project;
}
private static IndexMetadata.Builder indexBuilder(String index, Settings additionalSettings) {
return IndexMetadata.builder(index).settings(indexSettings(IndexVersion.current(), 1, 0).put(additionalSettings));
}
private static IndicesOptions.WildcardOptions doNotExpandWildcards() {
return doNotExpandWildcards(true);
}
private static IndicesOptions.WildcardOptions doNotExpandWildcards(boolean lenient) {
return IndicesOptions.WildcardOptions.builder()
.matchOpen(false)
.matchClosed(false)
.includeHidden(randomBoolean())
.allowEmptyExpressions(lenient)
.build();
}
private Set<ResolvedExpression> resolvedExpressionsSet(String... expressions) {
return Arrays.stream(expressions).map(ResolvedExpression::new).collect(Collectors.toSet());
}
}
|
IndexNameExpressionResolverTests
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/fastjson/deserializer/issues569/beans/Dept.java
|
{
"start": 128,
"end": 728
}
|
class ____ {
Long id;
String code;//部门编号
String name;//部门名称
String abbr;//简称
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getCode() {
return code;
}
public void setCode(String code) {
this.code = code;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getAbbr() {
return abbr;
}
public void setAbbr(String abbr) {
this.abbr = abbr;
}
}
|
Dept
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/manytomany/ManyToManyOrderByJoinedInheritanceTest.java
|
{
"start": 2398,
"end": 2544
}
|
class ____ extends AnimalBase {
private transient String unrelatedThing;
}
@Entity( name = "Dog" )
@Table( name = "dogs" )
public static
|
Animal
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/checkpoint/CheckpointSubsumeHelper.java
|
{
"start": 1993,
"end": 4967
}
|
class ____ {
private static final Logger LOG = LoggerFactory.getLogger(CheckpointSubsumeHelper.class);
public static Optional<CompletedCheckpoint> subsume(
Deque<CompletedCheckpoint> checkpoints, int numRetain, SubsumeAction subsumeAction)
throws Exception {
if (checkpoints.isEmpty() || checkpoints.size() <= numRetain) {
return Optional.empty();
}
CompletedCheckpoint latest = checkpoints.peekLast();
Optional<CompletedCheckpoint> lastSubsumedCheckpoint = Optional.empty();
Optional<CompletedCheckpoint> latestNotSavepoint = getLatestNotSavepoint(checkpoints);
Iterator<CompletedCheckpoint> iterator = checkpoints.iterator();
while (checkpoints.size() > numRetain && iterator.hasNext()) {
CompletedCheckpoint next = iterator.next();
if (canSubsume(next, latest, latestNotSavepoint)) {
// always return the subsumed checkpoint with larger checkpoint id.
if (!lastSubsumedCheckpoint.isPresent()
|| next.getCheckpointID()
> lastSubsumedCheckpoint.get().getCheckpointID()) {
lastSubsumedCheckpoint = Optional.of(next);
}
iterator.remove();
try {
subsumeAction.subsume(next);
} catch (Exception e) {
LOG.warn("Fail to subsume the old checkpoint.", e);
}
}
// Don't break out from the loop to subsume intermediate savepoints
}
return lastSubsumedCheckpoint;
}
private static Optional<CompletedCheckpoint> getLatestNotSavepoint(
Deque<CompletedCheckpoint> completed) {
Iterator<CompletedCheckpoint> descendingIterator = completed.descendingIterator();
while (descendingIterator.hasNext()) {
CompletedCheckpoint next = descendingIterator.next();
if (!next.getProperties().isSavepoint()) {
return Optional.of(next);
}
}
return Optional.empty();
}
private static boolean canSubsume(
CompletedCheckpoint next,
CompletedCheckpoint latest,
Optional<CompletedCheckpoint> latestNonSavepoint) {
if (next == latest) {
return false;
} else if (next.getProperties().isSavepoint()) {
return true;
} else if (latest.getProperties().isSynchronous()) {
// If the job has stopped with a savepoint then it's safe to subsume because no future
// snapshots will be taken during this run
return true;
} else {
// Don't remove the latest non-savepoint lest invalidate future incremental snapshots
return latestNonSavepoint.filter(checkpoint -> checkpoint != next).isPresent();
}
}
@FunctionalInterface
|
CheckpointSubsumeHelper
|
java
|
micronaut-projects__micronaut-core
|
websocket/src/main/java/io/micronaut/websocket/annotation/ClientWebSocket.java
|
{
"start": 1420,
"end": 1742
}
|
class ____ web socket frames.
*
* @author graemerocher
* @since 1.0
*/
@Documented
@Retention(RUNTIME)
@Target({ElementType.TYPE, ElementType.ANNOTATION_TYPE})
@WebSocketComponent
@Introduction(interfaces = WebSocketSessionAware.class)
@Type(ClientWebSocketInterceptor.class)
@DefaultScope(Prototype.class)
public @
|
handles
|
java
|
spring-projects__spring-boot
|
module/spring-boot-jersey/src/test/java/org/springframework/boot/jersey/autoconfigure/JerseyAutoConfigurationCustomObjectMapperProviderTests.java
|
{
"start": 2755,
"end": 3045
}
|
class ____ extends ResourceConfig {
Application() {
register(Application.class);
}
@GET
public Message message() {
return new Message("Jersey", null);
}
static void main(String[] args) {
SpringApplication.run(Application.class, args);
}
}
public static
|
Application
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/state/KeyedStateBackend.java
|
{
"start": 7206,
"end": 7340
}
|
interface ____<K> {
/** Callback when key context is switched. */
void keySelected(K newKey);
}
}
|
KeySelectionListener
|
java
|
spring-projects__spring-boot
|
core/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/condition/ConditionalOnClass.java
|
{
"start": 2410,
"end": 3016
}
|
interface ____ {
/**
* The classes that must be present. Since this annotation is parsed by loading class
* bytecode, it is safe to specify classes here that may ultimately not be on the
* classpath, only if this annotation is directly on the affected component and
* <b>not</b> if this annotation is used as a composed, meta-annotation. In order to
* use this annotation as a meta-annotation, only use the {@link #name} attribute.
* @return the classes that must be present
*/
Class<?>[] value() default {};
/**
* The classes names that must be present.
* @return the
|
ConditionalOnClass
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/GuardedByCheckerTest.java
|
{
"start": 27395,
"end": 27490
}
|
class ____ {
@GuardedBy("lock")
int x;
|
Inner
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/inotify/Event.java
|
{
"start": 14312,
"end": 15220
}
|
class ____ {
private String path;
private boolean newBlock;
public Builder path(String path) {
this.path = path;
return this;
}
public Builder newBlock(boolean newBlock) {
this.newBlock = newBlock;
return this;
}
public AppendEvent build() {
return new AppendEvent(this);
}
}
private AppendEvent(Builder b) {
super(EventType.APPEND);
this.path = b.path;
this.newBlock = b.newBlock;
}
public String getPath() {
return path;
}
public boolean toNewBlock() {
return newBlock;
}
@Override
@InterfaceStability.Unstable
public String toString() {
return "AppendEvent [path=" + path + ", newBlock=" + newBlock + "]";
}
}
/**
* Sent when a file, directory, or symlink is deleted.
*/
@InterfaceAudience.Public
public static
|
Builder
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/test/java/org/springframework/boot/logging/log4j2/StructuredLogLayoutTests.java
|
{
"start": 6830,
"end": 6995
}
|
class ____ implements StructuredLogFormatter {
@Override
public String format(Object event) {
return "";
}
}
}
|
CustomLog4j2StructuredLoggingFormatterRawType
|
java
|
apache__logging-log4j2
|
log4j-core/src/main/java/org/apache/logging/log4j/core/net/Priority.java
|
{
"start": 995,
"end": 2433
}
|
class ____ {
private final Facility facility;
private final Severity severity;
/**
* The Constructor.
* @param facility The Facility.
* @param severity The Severity.
*/
public Priority(final Facility facility, final Severity severity) {
this.facility = facility;
this.severity = severity;
}
/**
* Returns the priority value based on the Facility and Log Level.
* @param facility The Facility.
* @param level The Level.
* @return The integer value of the priority.
*/
public static int getPriority(final Facility facility, final Level level) {
return toPriority(facility, Severity.getSeverity(level));
}
private static int toPriority(final Facility aFacility, final Severity aSeverity) {
return (aFacility.getCode() << 3) + aSeverity.getCode();
}
/**
* Returns the Facility.
* @return the Facility.
*/
public Facility getFacility() {
return facility;
}
/**
* Returns the Severity.
* @return the Severity.
*/
public Severity getSeverity() {
return severity;
}
/**
* Returns the value of this Priority.
* @return the value of this Priority.
*/
public int getValue() {
return toPriority(facility, severity);
}
@Override
public String toString() {
return Integer.toString(getValue());
}
}
|
Priority
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/commit/ITestAbfsLoadManifestsStage.java
|
{
"start": 1479,
"end": 2436
}
|
class ____ extends TestLoadManifestsStage {
private final ABFSContractTestBinding binding;
public ITestAbfsLoadManifestsStage() throws Exception {
binding = new ABFSContractTestBinding();
}
@BeforeEach
@Override
public void setup() throws Exception {
binding.setup();
super.setup();
}
@Override
protected Configuration createConfiguration() {
return AbfsCommitTestHelper.prepareTestConfiguration(binding);
}
@Override
protected AbstractFSContract createContract(final Configuration conf) {
return new AbfsFileSystemContract(conf, binding.isSecureMode());
}
@Override
protected int getTestTimeoutMillis() {
return AzureTestConstants.SCALE_TEST_TIMEOUT_MILLIS;
}
/**
* @return a smaller number of TAs than the base test suite does.
*/
@Override
protected int numberOfTaskAttempts() {
return ManifestCommitterTestSupport.NUMBER_OF_TASK_ATTEMPTS_SMALL;
}
}
|
ITestAbfsLoadManifestsStage
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/ChatScriptEndpointBuilderFactory.java
|
{
"start": 5892,
"end": 8014
}
|
interface ____ {
/**
* ChatScript (camel-chatscript)
* Chat with a ChatScript Server.
*
* Category: ai,chat
* Since: 3.0
* Maven coordinates: org.apache.camel:camel-chatscript
*
* Syntax: <code>chatscript:host:port/botName</code>
*
* Path parameter: host (required)
* Hostname or IP of the server on which CS server is running
*
* Path parameter: port
* Port on which ChatScript is listening to
* Default value: 1024
*
* Path parameter: botName (required)
* Name of the Bot in CS to converse with
*
* @param path host:port/botName
* @return the dsl builder
*/
default ChatScriptEndpointBuilder chatscript(String path) {
return ChatScriptEndpointBuilderFactory.endpointBuilder("chatscript", path);
}
/**
* ChatScript (camel-chatscript)
* Chat with a ChatScript Server.
*
* Category: ai,chat
* Since: 3.0
* Maven coordinates: org.apache.camel:camel-chatscript
*
* Syntax: <code>chatscript:host:port/botName</code>
*
* Path parameter: host (required)
* Hostname or IP of the server on which CS server is running
*
* Path parameter: port
* Port on which ChatScript is listening to
* Default value: 1024
*
* Path parameter: botName (required)
* Name of the Bot in CS to converse with
*
* @param componentName to use a custom component name for the endpoint
* instead of the default name
* @param path host:port/botName
* @return the dsl builder
*/
default ChatScriptEndpointBuilder chatscript(String componentName, String path) {
return ChatScriptEndpointBuilderFactory.endpointBuilder(componentName, path);
}
}
static ChatScriptEndpointBuilder endpointBuilder(String componentName, String path) {
|
ChatScriptBuilders
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/RobotFrameworkEndpointBuilderFactory.java
|
{
"start": 88268,
"end": 90885
}
|
interface ____ extends EndpointProducerBuilder {
default RobotFrameworkEndpointProducerBuilder basic() {
return (RobotFrameworkEndpointProducerBuilder) this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedRobotFrameworkEndpointProducerBuilder lazyStartProducer(boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedRobotFrameworkEndpointProducerBuilder lazyStartProducer(String lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
}
/**
* Builder for endpoint for the Robot Framework component.
*/
public
|
AdvancedRobotFrameworkEndpointProducerBuilder
|
java
|
apache__camel
|
components/camel-netty-http/src/test/java/org/apache/camel/component/netty/http/NettyHttpSimpleBasicAuthTest.java
|
{
"start": 1537,
"end": 3828
}
|
class ____ extends BaseNettyTest {
@Override
public void doPreSetup() {
System.setProperty("java.security.auth.login.config", "src/test/resources/myjaas.config");
}
@Override
public void doPostTearDown() {
System.clearProperty("java.security.auth.login.config");
}
private void sendUnauthorizedRequest() {
CamelExecutionException exception = assertThrows(CamelExecutionException.class,
() -> template.requestBody("netty-http:http://localhost:{{port}}/foo", "Hello World", String.class),
"Should have thrown a CamelExecutionException");
NettyHttpOperationFailedException cause
= assertIsInstanceOf(NettyHttpOperationFailedException.class, exception.getCause());
assertEquals(UNAUTHORIZED.code(), cause.getStatusCode(), "Should have sent back HTTP status 401");
}
@Order(1)
@DisplayName("Tests whether it returns unauthorized (HTTP 401) for unauthorized access")
@Test
void testBasicAuth() {
sendUnauthorizedRequest();
}
@Order(2)
@DisplayName("Tests whether it authorized access succeeds")
@Test
void testWithAuth() throws InterruptedException {
getMockEndpoint("mock:input").expectedBodiesReceived("Hello World");
// username:password is scott:secret
String auth = "Basic c2NvdHQ6c2VjcmV0";
String out = template.requestBodyAndHeader("netty-http:http://localhost:{{port}}/foo", "Hello World", "Authorization",
auth, String.class);
assertEquals("Bye World", out);
MockEndpoint.assertIsSatisfied(context);
}
@Order(3)
@DisplayName("Tests whether it returns unauthorized (HTTP 401) for unauthorized access after a successful authorization")
@Test
void testBasicAuthPostAuth() {
sendUnauthorizedRequest();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("netty-http:http://0.0.0.0:{{port}}/foo?securityConfiguration.realm=karaf")
.to("mock:input")
.transform().constant("Bye World");
}
};
}
}
|
NettyHttpSimpleBasicAuthTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/ClearCcrRestoreSessionAction.java
|
{
"start": 4137,
"end": 4928
}
|
class ____ extends TransportDeleteCcrRestoreSessionAction {
@Inject
public TransportAction(
ActionFilters actionFilters,
TransportService transportService,
CcrRestoreSourceService ccrRestoreService,
NamedWriteableRegistry namedWriteableRegistry
) {
super(NAME, actionFilters, transportService, ccrRestoreService, namedWriteableRegistry);
}
@Override
protected void validate(ClearCcrRestoreSessionRequest request) {
final ShardId shardId = request.getShardId();
assert shardId != null : "shardId must be specified for the request";
ccrRestoreService.ensureSessionShardIdConsistency(request.getSessionUUID(), shardId);
}
}
}
|
TransportAction
|
java
|
apache__kafka
|
streams/src/main/java/org/apache/kafka/streams/kstream/Repartitioned.java
|
{
"start": 1193,
"end": 1410
}
|
class ____ used to provide the optional parameters for internal repartition topics.
*
* @param <K> key type
* @param <V> value type
* @see KStream#repartition()
* @see KStream#repartition(Repartitioned)
*/
public
|
is
|
java
|
spring-projects__spring-framework
|
spring-test/src/main/java/org/springframework/test/web/servlet/client/DefaultRestTestClient.java
|
{
"start": 15843,
"end": 16721
}
|
class ____ implements ClientHttpRequestInterceptor {
private final Map<String, byte[]> requestContentMap = new ConcurrentHashMap<>();
@Override
public ClientHttpResponse intercept(
HttpRequest request, byte[] body, ClientHttpRequestExecution execution) throws IOException {
String header = RestTestClient.RESTTESTCLIENT_REQUEST_ID;
String requestId = request.getHeaders().getFirst(header);
Assert.state(requestId != null, () -> "No \"" + header + "\" header");
this.requestContentMap.put(requestId, body);
return execution.execute(request, body);
}
public byte[] getRequestContent(String requestId) {
byte[] bytes = this.requestContentMap.remove(requestId);
Assert.state(bytes != null, () ->
"No match for %s=%s".formatted(RestTestClient.RESTTESTCLIENT_REQUEST_ID, requestId));
return bytes;
}
}
private static
|
WiretapInterceptor
|
java
|
spring-projects__spring-framework
|
spring-webflux/src/main/java/org/springframework/web/reactive/config/ViewResolverRegistry.java
|
{
"start": 6377,
"end": 6554
}
|
class ____ extends UrlBasedViewResolverRegistration {
public ScriptRegistration() {
super(new ScriptTemplateViewResolver());
getViewResolver();
}
}
}
|
ScriptRegistration
|
java
|
elastic__elasticsearch
|
x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotRetentionTaskTests.java
|
{
"start": 3035,
"end": 22659
}
|
class ____ extends ESTestCase {
public void testGetAllPoliciesWithRetentionEnabled() {
SnapshotLifecyclePolicy policyWithout = new SnapshotLifecyclePolicy(
"policyWithout",
"snap",
"1 * * * * ?",
"repo",
null,
SnapshotRetentionConfiguration.EMPTY
);
SnapshotLifecyclePolicy policyWithout2 = new SnapshotLifecyclePolicy(
"policyWithout2",
"snap",
"1 * * * * ?",
"repo",
null,
new SnapshotRetentionConfiguration(null, null, null)
);
SnapshotLifecyclePolicy policyWith = new SnapshotLifecyclePolicy(
"policyWith",
"snap",
"1 * * * * ?",
"repo",
null,
new SnapshotRetentionConfiguration(TimeValue.timeValueDays(30), null, null)
);
// Test with no SLM metadata
ClusterState state = ClusterState.builder(new ClusterName("cluster")).build();
assertThat(SnapshotRetentionTask.getAllPoliciesWithRetentionEnabled(state), equalTo(Collections.emptyMap()));
// Test with empty SLM metadata
Metadata metadata = Metadata.builder()
.putCustom(
SnapshotLifecycleMetadata.TYPE,
new SnapshotLifecycleMetadata(Collections.emptyMap(), OperationMode.RUNNING, new SnapshotLifecycleStats())
)
.build();
state = ClusterState.builder(new ClusterName("cluster")).metadata(metadata).build();
assertThat(SnapshotRetentionTask.getAllPoliciesWithRetentionEnabled(state), equalTo(Collections.emptyMap()));
// Test with metadata containing only a policy without retention
state = createState(policyWithout);
assertThat(SnapshotRetentionTask.getAllPoliciesWithRetentionEnabled(state), equalTo(Collections.emptyMap()));
// Test with metadata containing a couple of policies
state = createState(policyWithout, policyWithout2, policyWith);
Map<String, SnapshotLifecyclePolicy> policyMap = SnapshotRetentionTask.getAllPoliciesWithRetentionEnabled(state);
assertThat(policyMap.size(), equalTo(1));
assertThat(policyMap.get("policyWith"), equalTo(policyWith));
}
public void testRetentionTaskSuccess() throws Exception {
retentionTaskTest(true);
}
public void testRetentionTaskFailure() throws Exception {
retentionTaskTest(false);
}
private void retentionTaskTest(final boolean deletionSuccess) throws Exception {
ThreadPool threadPool = new TestThreadPool("slm-test");
ClusterSettings settings = new ClusterSettings(
Settings.EMPTY,
Sets.union(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS, Set.of(SLM_HISTORY_INDEX_ENABLED_SETTING))
);
try (
ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool, settings);
var clientThreadPool = createThreadPool()
) {
final var noOpClient = new NoOpClient(clientThreadPool);
final String policyId = "policy";
final String repoId = "repo";
SnapshotLifecyclePolicy policy = new SnapshotLifecyclePolicy(
policyId,
"snap",
"1 * * * * ?",
repoId,
null,
new SnapshotRetentionConfiguration(TimeValue.timeValueDays(30), null, null)
);
ClusterState state = createState(policy);
ClusterServiceUtils.setState(clusterService, state);
final SnapshotInfo eligibleSnapshot = new SnapshotInfo(
new Snapshot(repoId, new SnapshotId("name", "uuid")),
Collections.singletonList("index"),
Collections.emptyList(),
Collections.emptyList(),
null,
1L,
1,
Collections.emptyList(),
true,
Collections.singletonMap("policy", policyId),
0L,
Collections.emptyMap()
);
Set<SnapshotId> deleted = ConcurrentHashMap.newKeySet();
Set<String> deletedSnapshotsInHistory = ConcurrentHashMap.newKeySet();
CountDownLatch deletionLatch = new CountDownLatch(1);
CountDownLatch historyLatch = new CountDownLatch(1);
MockSnapshotRetentionTask retentionTask = new MockSnapshotRetentionTask(
noOpClient,
clusterService,
new SnapshotLifecycleTaskTests.VerifyingHistoryStore(noOpClient, clusterService, (historyItem) -> {
assertEquals(deletionSuccess, historyItem.isSuccess());
if (historyItem.isSuccess() == false) {
assertThat(historyItem.getErrorDetails(), containsString("deletion_failed"));
}
assertEquals(policyId, historyItem.getPolicyId());
assertEquals(repoId, historyItem.getRepository());
assertEquals(DELETE_OPERATION, historyItem.getOperation());
deletedSnapshotsInHistory.add(historyItem.getSnapshotName());
historyLatch.countDown();
}),
() -> {
final var result = Collections.singletonMap(repoId, List.of(Tuple.tuple(eligibleSnapshot.snapshotId(), policyId)));
logger.info("--> retrieving snapshots [{}]", result);
return result;
},
(deletionPolicyId, repo, snapId, slmStats, listener) -> {
logger.info("--> deleting {} from repo {}", snapId, repo);
deleted.add(snapId);
if (deletionSuccess) {
listener.onResponse(AcknowledgedResponse.TRUE);
} else {
listener.onFailure(new RuntimeException("deletion_failed"));
}
deletionLatch.countDown();
},
System::nanoTime
);
long time = System.currentTimeMillis();
retentionTask.triggered(new SchedulerEngine.Event(SnapshotRetentionService.SLM_RETENTION_JOB_ID, time, time));
safeAwait(deletionLatch);
assertThat("something should have been deleted", deleted, not(empty()));
assertThat("one snapshot should have been deleted", deleted, hasSize(1));
assertThat(deleted, contains(eligibleSnapshot.snapshotId()));
boolean historySuccess = historyLatch.await(10, TimeUnit.SECONDS);
assertThat("expected history entries for 1 snapshot deletions", historySuccess, equalTo(true));
assertThat(deletedSnapshotsInHistory, contains(eligibleSnapshot.snapshotId().getName()));
} finally {
threadPool.shutdownNow();
threadPool.awaitTermination(10, TimeUnit.SECONDS);
}
}
    /**
     * Verifies that when the listener's {@code onResponse} handler itself throws while
     * processing the eligible-snapshot retrieval result, the failure is routed to
     * {@code onFailure} instead of being swallowed.
     */
    public void testErrStillRunsFailureHandlerWhenRetrieving() throws Exception {
        ThreadPool threadPool = new TestThreadPool("slm-test");
        ClusterSettings settings = new ClusterSettings(
            Settings.EMPTY,
            Sets.union(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS, Set.of(SLM_HISTORY_INDEX_ENABLED_SETTING))
        );
        final String policyId = "policy";
        final String repoId = "repo";
        try (
            ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool, settings);
            var clientThreadPool = createThreadPool()
        ) {
            // Client that short-circuits the expired-snapshots action with an empty result.
            final var noOpClient = new NoOpClient(clientThreadPool) {
                @Override
                @SuppressWarnings("unchecked")
                protected <Request extends ActionRequest, Response extends ActionResponse> void doExecute(
                    ActionType<Response> action,
                    Request request,
                    ActionListener<Response> listener
                ) {
                    if (action == TransportSLMGetExpiredSnapshotsAction.INSTANCE) {
                        logger.info("--> called");
                        listener.onResponse((Response) new TransportSLMGetExpiredSnapshotsAction.Response(Map.of()));
                    } else {
                        super.doExecute(action, request, listener);
                    }
                }
            };
            SnapshotLifecyclePolicy policy = new SnapshotLifecyclePolicy(
                policyId,
                "snap",
                "1 * * * * ?",
                repoId,
                null,
                new SnapshotRetentionConfiguration(TimeValue.timeValueDays(30), null, null)
            );
            ClusterState state = createState(policy);
            ClusterServiceUtils.setState(clusterService, state);
            // History store fails the test if anything is ever written.
            SnapshotRetentionTask task = new SnapshotRetentionTask(
                noOpClient,
                clusterService,
                System::nanoTime,
                new SnapshotLifecycleTaskTests.VerifyingHistoryStore(
                    noOpClient,
                    clusterService,
                    (historyItem) -> fail("should never write history")
                )
            );
            AtomicReference<Exception> errHandlerCalled = new AtomicReference<>(null);
            task.getSnapshotsEligibleForDeletion(
                Collections.singleton(repoId),
                Map.of(policyId, new SnapshotLifecyclePolicy(policyId, "test", "* * * * *", repoId, null, null)),
                new ActionListener<>() {
                    @Override
                    public void onResponse(Map<String, List<Tuple<SnapshotId, String>>> snapshotsToBeDeleted) {
                        logger.info("--> forcing failure");
                        throw new ElasticsearchException("forced failure");
                    }

                    @Override
                    public void onFailure(Exception e) {
                        errHandlerCalled.set(e);
                    }
                }
            );
            // The forced exception from onResponse must surface via onFailure.
            assertNotNull(errHandlerCalled.get());
            assertThat(errHandlerCalled.get().getMessage(), equalTo("forced failure"));
        } finally {
            threadPool.shutdownNow();
            threadPool.awaitTermination(10, TimeUnit.SECONDS);
        }
    }
    /**
     * Verifies that when the listener's {@code onResponse} handler throws after a snapshot
     * deletion is acknowledged, the failure is routed to {@code onFailure} and the
     * per-policy deletion-failure stats are incremented.
     */
    public void testErrStillRunsFailureHandlerWhenDeleting() throws Exception {
        ThreadPool threadPool = new TestThreadPool("slm-test");
        ClusterSettings settings = new ClusterSettings(
            Settings.EMPTY,
            Sets.union(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS, Set.of(SLM_HISTORY_INDEX_ENABLED_SETTING))
        );
        try (
            ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool, settings);
            var clientThreadPool = createThreadPool()
        ) {
            // Client that acknowledges any DeleteSnapshotRequest without doing real work.
            final var noOpClient = new NoOpClient(clientThreadPool) {
                @Override
                @SuppressWarnings("unchecked")
                protected <Request extends ActionRequest, Response extends ActionResponse> void doExecute(
                    ActionType<Response> action,
                    Request request,
                    ActionListener<Response> listener
                ) {
                    if (request instanceof DeleteSnapshotRequest) {
                        logger.info("--> called");
                        listener.onResponse((Response) AcknowledgedResponse.TRUE);
                    } else {
                        super.doExecute(action, request, listener);
                    }
                }
            };
            final String policyId = "policy";
            final String repoId = "repo";
            SnapshotLifecyclePolicy policy = new SnapshotLifecyclePolicy(
                policyId,
                "snap",
                "1 * * * * ?",
                repoId,
                null,
                new SnapshotRetentionConfiguration(TimeValue.timeValueDays(30), null, null)
            );
            ClusterState state = createState(policy);
            ClusterServiceUtils.setState(clusterService, state);
            // History store fails the test if anything is ever written.
            SnapshotRetentionTask task = new SnapshotRetentionTask(
                noOpClient,
                clusterService,
                System::nanoTime,
                new SnapshotLifecycleTaskTests.VerifyingHistoryStore(
                    noOpClient,
                    clusterService,
                    (historyItem) -> fail("should never write history")
                )
            );
            AtomicReference<SnapshotLifecycleStats> slmStats = new AtomicReference<>(new SnapshotLifecycleStats());
            AtomicBoolean onFailureCalled = new AtomicBoolean(false);
            task.deleteSnapshot("policy", "foo", new SnapshotId("name", "uuid"), slmStats, new ActionListener<>() {
                @Override
                public void onResponse(AcknowledgedResponse acknowledgedResponse) {
                    logger.info("--> forcing failure");
                    throw new ElasticsearchException("forced failure");
                }

                @Override
                public void onFailure(Exception e) {
                    onFailureCalled.set(true);
                }
            });
            assertThat(onFailureCalled.get(), equalTo(true));
            // The forced failure counts as one attempted and one failed deletion for the policy.
            var expectedPolicyStats = Map.of(policyId, new SnapshotLifecycleStats.SnapshotPolicyStats(policyId, 0, 0, 1, 1));
            assertThat(slmStats.get(), equalTo(new SnapshotLifecycleStats(0, 0, 0, 0, expectedPolicyStats)));
        } finally {
            threadPool.shutdownNow();
            threadPool.awaitTermination(10, TimeUnit.SECONDS);
        }
    }
    /** Scheduled retention must be skipped while SLM is STOPPING. */
    public void testSkipWhileStopping() throws Exception {
        doTestSkipDuringMode(OperationMode.STOPPING);
    }
    /** Scheduled retention must be skipped while SLM is STOPPED. */
    public void testSkipWhileStopped() throws Exception {
        doTestSkipDuringMode(OperationMode.STOPPED);
    }
    /**
     * Asserts that a scheduled retention trigger is a no-op in the given operation mode:
     * snapshots are neither retrieved nor deleted, and no history is written.
     */
    private void doTestSkipDuringMode(OperationMode mode) throws Exception {
        ThreadPool threadPool = new TestThreadPool("slm-test");
        ClusterSettings settings = new ClusterSettings(
            Settings.EMPTY,
            Sets.union(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS, Set.of(SLM_HISTORY_INDEX_ENABLED_SETTING))
        );
        try (
            ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool, settings);
            var clientThreadPool = createThreadPool()
        ) {
            final var noOpClient = new NoOpClient(clientThreadPool);
            final String policyId = "policy";
            final String repoId = "repo";
            SnapshotLifecyclePolicy policy = new SnapshotLifecyclePolicy(
                policyId,
                "snap",
                "1 * * * * ?",
                repoId,
                null,
                new SnapshotRetentionConfiguration(TimeValue.timeValueDays(30), null, null)
            );
            ClusterState state = createState(mode, policy);
            ClusterServiceUtils.setState(clusterService, state);
            // Every callback fails the test: nothing may run in a stopped/stopping mode.
            SnapshotRetentionTask task = new MockSnapshotRetentionTask(
                noOpClient,
                clusterService,
                new SnapshotLifecycleTaskTests.VerifyingHistoryStore(
                    noOpClient,
                    clusterService,
                    (historyItem) -> fail("should never write history")
                ),
                () -> {
                    fail("should not retrieve snapshots");
                    return null;
                },
                (a, b, c, d, e) -> fail("should not delete snapshots"),
                System::nanoTime
            );
            long time = System.currentTimeMillis();
            task.triggered(new SchedulerEngine.Event(SnapshotRetentionService.SLM_RETENTION_JOB_ID, time, time));
        } finally {
            threadPool.shutdownNow();
            threadPool.awaitTermination(10, TimeUnit.SECONDS);
        }
    }
    /** A manually-triggered retention run must still execute while SLM is STOPPING. */
    public void testRunManuallyWhileStopping() throws Exception {
        doTestRunManuallyDuringMode(OperationMode.STOPPING);
    }
    /** A manually-triggered retention run must still execute while SLM is STOPPED. */
    public void testRunManuallyWhileStopped() throws Exception {
        doTestRunManuallyDuringMode(OperationMode.STOPPED);
    }
    /**
     * Asserts that a manually-triggered retention run (manual job id) executes even when
     * SLM is in the given stopping/stopped mode.
     */
    private void doTestRunManuallyDuringMode(OperationMode mode) throws Exception {
        ThreadPool threadPool = new TestThreadPool("slm-test");
        ClusterSettings settings = new ClusterSettings(
            Settings.EMPTY,
            Sets.union(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS, Set.of(SLM_HISTORY_INDEX_ENABLED_SETTING))
        );
        try (
            ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool, settings);
            var clientThreadPool = createThreadPool()
        ) {
            final var noOpClient = new NoOpClient(clientThreadPool);
            final String policyId = "policy";
            final String repoId = "repo";
            SnapshotLifecyclePolicy policy = new SnapshotLifecyclePolicy(
                policyId,
                "snap",
                "1 * * * * ?",
                repoId,
                null,
                new SnapshotRetentionConfiguration(TimeValue.timeValueDays(30), null, null)
            );
            ClusterState state = createState(mode, policy);
            ClusterServiceUtils.setState(clusterService, state);
            AtomicBoolean retentionWasRun = new AtomicBoolean(false);
            // Snapshot retrieval flips the flag; deletion is a no-op.
            MockSnapshotRetentionTask task = new MockSnapshotRetentionTask(
                noOpClient,
                clusterService,
                new SnapshotLifecycleTaskTests.VerifyingHistoryStore(noOpClient, clusterService, (historyItem) -> {}),
                () -> {
                    retentionWasRun.set(true);
                    return Collections.emptyMap();
                },
                (deletionPolicyId, repo, snapId, slmStats, listener) -> {},
                System::nanoTime
            );
            long time = System.currentTimeMillis();
            task.triggered(new SchedulerEngine.Event(SnapshotRetentionService.SLM_RETENTION_MANUAL_JOB_ID, time, time));
            assertTrue("retention should be run manually even if SLM is disabled", retentionWasRun.get());
        } finally {
            threadPool.shutdownNow();
            threadPool.awaitTermination(10, TimeUnit.SECONDS);
        }
    }
    /** Convenience overload: builds a cluster state with SLM in {@code OperationMode.RUNNING}. */
    public ClusterState createState(SnapshotLifecyclePolicy... policies) {
        return createState(OperationMode.RUNNING, policies);
    }
public ClusterState createState(OperationMode mode, SnapshotLifecyclePolicy... policies) {
Map<String, SnapshotLifecyclePolicyMetadata> policyMetadataMap = Arrays.stream(policies)
.map(
policy -> SnapshotLifecyclePolicyMetadata.builder()
.setPolicy(policy)
.setHeaders(Collections.emptyMap())
.setModifiedDate(randomNonNegativeLong())
.setVersion(randomNonNegativeLong())
.build()
)
.collect(Collectors.toMap(pm -> pm.getPolicy().getId(), pm -> pm));
Metadata metadata = Metadata.builder()
.putCustom(SnapshotLifecycleMetadata.TYPE, new SnapshotLifecycleMetadata(policyMetadataMap, mode, new SnapshotLifecycleStats()))
.build();
return ClusterState.builder(new ClusterName("cluster")).metadata(metadata).build();
}
private static
|
SnapshotRetentionTaskTests
|
java
|
apache__dubbo
|
dubbo-cluster/src/test/java/org/apache/dubbo/rpc/cluster/support/wrapper/MockClusterInvokerTest.java
|
{
"start": 2055,
"end": 35029
}
|
class ____ {
    private static final Logger logger = LoggerFactory.getLogger(MockClusterInvokerTest.class);

    // Invokers shared by each test; populated by the getClusterInvoker* helpers.
    List<Invoker<IHelloService>> invokers = new ArrayList<Invoker<IHelloService>>();

    /** Registers the metrics dispatcher bean and resets shared state before each test. */
    @BeforeEach
    public void beforeMethod() {
        ApplicationModel.defaultModel().getBeanFactory().registerBean(MetricsDispatcher.class);
        invokers.clear();
    }
    /**
     * Test if mock policy works fine: fail-mock.
     * The real invoker succeeds here, so the mock invoker is never consulted.
     */
    @Test
    void testMockInvokerInvoke_normal() {
        URL url = URL.valueOf("remote://1.2.3.4/" + IHelloService.class.getName());
        url = url.addParameter(
                REFER_KEY, URL.encode(PATH_KEY + "=" + IHelloService.class.getName() + "&" + "mock=fail"));
        Invoker<IHelloService> cluster = getClusterInvoker(url);
        URL mockUrl = URL.valueOf("mock://localhost/" + IHelloService.class.getName() + "?getSomething.mock=return aa");
        Protocol protocol = new MockProtocol();
        Invoker<IHelloService> mInvoker1 = protocol.refer(IHelloService.class, mockUrl);
        invokers.add(mInvoker1);
        // Configured with mock, but the successful real invocation wins.
        RpcInvocation invocation = new RpcInvocation();
        invocation.setMethodName("getSomething");
        Result ret = cluster.invoke(invocation);
        Assertions.assertEquals("something", ret.getValue());
        // If no mock was configured, return null directly
        invocation = new RpcInvocation();
        invocation.setMethodName("sayHello");
        ret = cluster.invoke(invocation);
        Assertions.assertNull(ret.getValue());
    }
    /**
     * Test if mock policy works fine: fail-mock.
     * The real invoker is forced to fail, so every call falls back to the mock invoker.
     */
    @Test
    void testMockInvokerInvoke_failmock() {
        URL url = URL.valueOf("remote://1.2.3.4/" + IHelloService.class.getName())
                .addParameter(
                        REFER_KEY,
                        URL.encode(PATH_KEY + "=" + IHelloService.class.getName() + "&" + "mock=fail:return null"))
                .addParameter("invoke_return_error", "true");
        URL mockUrl = URL.valueOf("mock://localhost/" + IHelloService.class.getName())
                .addParameter("mock", "fail:return null")
                .addParameter("getSomething.mock", "return aa")
                .addParameter(REFER_KEY, URL.encode(PATH_KEY + "=" + IHelloService.class.getName()))
                .addParameter("invoke_return_error", "true");
        Protocol protocol = new MockProtocol();
        Invoker<IHelloService> mInvoker1 = protocol.refer(IHelloService.class, mockUrl);
        Invoker<IHelloService> cluster = getClusterInvokerMock(url, mInvoker1);
        // Configured with mock
        RpcInvocation invocation = new RpcInvocation();
        invocation.setMethodName("getSomething");
        Result ret = cluster.invoke(invocation);
        Assertions.assertEquals("aa", ret.getValue());
        // No method-specific mock data: the default fail mock returns null.
        invocation = new RpcInvocation();
        invocation.setMethodName("getSomething2");
        ret = cluster.invoke(invocation);
        Assertions.assertNull(ret.getValue());
        // No method-specific mock data: the default fail mock returns null.
        invocation = new RpcInvocation();
        invocation.setMethodName("sayHello");
        ret = cluster.invoke(invocation);
        Assertions.assertNull(ret.getValue());
    }
    /**
     * Test if mock policy works fine: force-mock.
     * Every call is served by the mock invoker; the real invoker is bypassed.
     */
    @Test
    void testMockInvokerInvoke_forcemock() {
        URL url = URL.valueOf("remote://1.2.3.4/" + IHelloService.class.getName())
                .addParameter(
                        REFER_KEY,
                        URL.encode(PATH_KEY + "=" + IHelloService.class.getName() + "&" + "mock=force:return null"));
        URL mockUrl = URL.valueOf("mock://localhost/" + IHelloService.class.getName())
                .addParameter("mock", "force:return null")
                .addParameter("getSomething.mock", "return aa")
                .addParameter("getSomething3xx.mock", "return xx")
                .addParameter(REFER_KEY, URL.encode(PATH_KEY + "=" + IHelloService.class.getName()));
        Protocol protocol = new MockProtocol();
        Invoker<IHelloService> mInvoker1 = protocol.refer(IHelloService.class, mockUrl);
        Invoker<IHelloService> cluster = getClusterInvokerMock(url, mInvoker1);
        // Configured with mock
        RpcInvocation invocation = new RpcInvocation();
        invocation.setMethodName("getSomething");
        Result ret = cluster.invoke(invocation);
        Assertions.assertEquals("aa", ret.getValue());
        // No method-specific mock data: the default force mock returns null.
        invocation = new RpcInvocation();
        invocation.setMethodName("getSomething2");
        ret = cluster.invoke(invocation);
        Assertions.assertNull(ret.getValue());
        // No method-specific mock data: the default force mock returns null.
        invocation = new RpcInvocation();
        invocation.setMethodName("sayHello");
        ret = cluster.invoke(invocation);
        Assertions.assertNull(ret.getValue());
    }
    /**
     * Force-mock with a bare {@code sayHello.mock=return } (no value): the mock's
     * default return is null.
     */
    @Test
    void testMockInvokerInvoke_forcemock_defaultreturn() {
        URL url = URL.valueOf("remote://1.2.3.4/" + IHelloService.class.getName())
                .addParameter(
                        REFER_KEY, URL.encode(PATH_KEY + "=" + IHelloService.class.getName() + "&" + "mock=force"));
        Invoker<IHelloService> cluster = getClusterInvoker(url);
        URL mockUrl = URL.valueOf("mock://localhost/" + IHelloService.class.getName()
                        + "?getSomething.mock=return aa&getSomething3xx.mock=return xx&sayHello.mock=return ")
                .addParameters(url.getParameters());
        Protocol protocol = new MockProtocol();
        Invoker<IHelloService> mInvoker1 = protocol.refer(IHelloService.class, mockUrl);
        invokers.add(mInvoker1);
        RpcInvocation invocation = new RpcInvocation();
        invocation.setMethodName("sayHello");
        Result ret = cluster.invoke(invocation);
        Assertions.assertNull(ret.getValue());
    }
    /**
     * Per-method mock overrides without a class-level default: fail-mock only applies on
     * failure, force-mock always applies, and unconfigured methods hit the real invoker.
     */
    @Test
    void testMockInvokerFromOverride_Invoke_Fock_someMethods() {
        URL url = URL.valueOf("remote://1.2.3.4/" + IHelloService.class.getName())
                .addParameter(
                        REFER_KEY,
                        URL.encode(PATH_KEY + "=" + IHelloService.class.getName()
                                + "&" + "getSomething.mock=fail:return x"
                                + "&" + "getSomething2.mock=force:return y"));
        Invoker<IHelloService> cluster = getClusterInvoker(url);
        // fail-mock is bypassed because the real invocation succeeds.
        RpcInvocation invocation = new RpcInvocation();
        invocation.setMethodName("getSomething");
        Result ret = cluster.invoke(invocation);
        Assertions.assertEquals("something", ret.getValue());
        // force-mock always returns the configured mock value.
        invocation = new RpcInvocation();
        invocation.setMethodName("getSomething2");
        ret = cluster.invoke(invocation);
        Assertions.assertEquals("y", ret.getValue());
        // No mock configured: the real invoker serves the call.
        invocation = new RpcInvocation();
        invocation.setMethodName("getSomething3");
        ret = cluster.invoke(invocation);
        Assertions.assertEquals("something3", ret.getValue());
        // No mock configured: the real invoker serves the call (null result).
        invocation = new RpcInvocation();
        invocation.setMethodName("sayHello");
        ret = cluster.invoke(invocation);
        Assertions.assertNull(ret.getValue());
    }
    /**
     * Per-method fail-mocks with no class-level default: configured methods fall back to
     * their mock values, while unconfigured methods propagate the RpcException.
     */
    @Test
    void testMockInvokerFromOverride_Invoke_Fock_WithOutDefault() {
        URL url = URL.valueOf("remote://1.2.3.4/" + IHelloService.class.getName())
                .addParameter(
                        REFER_KEY,
                        URL.encode(PATH_KEY + "=" + IHelloService.class.getName()
                                + "&" + "getSomething.mock=fail:return x"
                                + "&" + "getSomething2.mock=fail:return y"))
                .addParameter("invoke_return_error", "true");
        Invoker<IHelloService> cluster = getClusterInvoker(url);
        // Configured with mock
        RpcInvocation invocation = new RpcInvocation();
        invocation.setMethodName("getSomething");
        Result ret = cluster.invoke(invocation);
        Assertions.assertEquals("x", ret.getValue());
        // Configured with mock
        invocation = new RpcInvocation();
        invocation.setMethodName("getSomething2");
        ret = cluster.invoke(invocation);
        Assertions.assertEquals("y", ret.getValue());
        // No mock and no default: the invocation failure surfaces as RpcException.
        invocation = new RpcInvocation();
        invocation.setMethodName("getSomething3");
        try {
            ret = cluster.invoke(invocation);
            Assertions.fail();
        } catch (RpcException e) {
            // expected: no mock fallback available
        }
    }
    /**
     * Per-method fail-mocks plus a class-level {@code fail:return null} default:
     * unconfigured methods fall back to the default mock and return null.
     */
    @Test
    void testMockInvokerFromOverride_Invoke_Fock_WithDefault() {
        URL url = URL.valueOf("remote://1.2.3.4/" + IHelloService.class.getName())
                .addParameter(
                        REFER_KEY,
                        URL.encode(PATH_KEY + "=" + IHelloService.class.getName()
                                + "&" + "mock" + "=" + "fail:return null"
                                + "&" + "getSomething.mock" + "=" + "fail:return x"
                                + "&" + "getSomething2.mock" + "=" + "fail:return y"))
                .addParameter("invoke_return_error", "true");
        Invoker<IHelloService> cluster = getClusterInvoker(url);
        // Configured with mock
        RpcInvocation invocation = new RpcInvocation();
        invocation.setMethodName("getSomething");
        Result ret = cluster.invoke(invocation);
        Assertions.assertEquals("x", ret.getValue());
        // Configured with mock
        invocation = new RpcInvocation();
        invocation.setMethodName("getSomething2");
        ret = cluster.invoke(invocation);
        Assertions.assertEquals("y", ret.getValue());
        // No method-specific mock: default mock returns null.
        invocation = new RpcInvocation();
        invocation.setMethodName("getSomething3");
        ret = cluster.invoke(invocation);
        Assertions.assertNull(ret.getValue());
        // No method-specific mock: default mock returns null.
        invocation = new RpcInvocation();
        invocation.setMethodName("sayHello");
        ret = cluster.invoke(invocation);
        Assertions.assertNull(ret.getValue());
    }
    /**
     * Per-method mocks plus a class-level {@code fail:return z} default: unconfigured
     * methods fall back to the default mock value "z" once the invocation fails.
     */
    @Test
    void testMockInvokerFromOverride_Invoke_Fock_WithFailDefault() {
        URL url = URL.valueOf("remote://1.2.3.4/" + IHelloService.class.getName())
                .addParameter(
                        REFER_KEY,
                        URL.encode(PATH_KEY + "=" + IHelloService.class.getName()
                                + "&" + "mock=fail:return z"
                                + "&" + "getSomething.mock=fail:return x"
                                + "&" + "getSomething2.mock=force:return y"))
                .addParameter("invoke_return_error", "true");
        Invoker<IHelloService> cluster = getClusterInvoker(url);
        // Configured with mock
        RpcInvocation invocation = new RpcInvocation();
        invocation.setMethodName("getSomething");
        Result ret = cluster.invoke(invocation);
        Assertions.assertEquals("x", ret.getValue());
        // Configured with force mock
        invocation = new RpcInvocation();
        invocation.setMethodName("getSomething2");
        ret = cluster.invoke(invocation);
        Assertions.assertEquals("y", ret.getValue());
        // No method-specific mock: default fail mock returns "z".
        invocation = new RpcInvocation();
        invocation.setMethodName("getSomething3");
        ret = cluster.invoke(invocation);
        Assertions.assertEquals("z", ret.getValue());
        // No method-specific mock: default fail mock returns "z".
        invocation = new RpcInvocation();
        invocation.setMethodName("sayHello");
        ret = cluster.invoke(invocation);
        Assertions.assertEquals("z", ret.getValue());
    }
    /**
     * Per-method mocks plus a class-level {@code force:return z} default: unconfigured
     * methods are served by the default force mock and return "z".
     */
    @Test
    void testMockInvokerFromOverride_Invoke_Fock_WithForceDefault() {
        URL url = URL.valueOf("remote://1.2.3.4/" + IHelloService.class.getName())
                .addParameter(
                        REFER_KEY,
                        URL.encode(PATH_KEY + "=" + IHelloService.class.getName()
                                + "&" + "mock=force:return z"
                                + "&" + "getSomething.mock=fail:return x"
                                + "&" + "getSomething2.mock=force:return y"))
                .addParameter("invoke_return_error", "true");
        Invoker<IHelloService> cluster = getClusterInvoker(url);
        // Configured with mock
        RpcInvocation invocation = new RpcInvocation();
        invocation.setMethodName("getSomething");
        Result ret = cluster.invoke(invocation);
        Assertions.assertEquals("x", ret.getValue());
        // Configured with force mock
        invocation = new RpcInvocation();
        invocation.setMethodName("getSomething2");
        ret = cluster.invoke(invocation);
        Assertions.assertEquals("y", ret.getValue());
        // No method-specific mock: default force mock returns "z".
        invocation = new RpcInvocation();
        invocation.setMethodName("getSomething3");
        ret = cluster.invoke(invocation);
        Assertions.assertEquals("z", ret.getValue());
        // No method-specific mock: default force mock returns "z".
        invocation = new RpcInvocation();
        invocation.setMethodName("sayHello");
        ret = cluster.invoke(invocation);
        Assertions.assertEquals("z", ret.getValue());
    }
    /**
     * Only a class-level {@code fail:return x} mock is configured: every failed
     * invocation falls back to the default mock value "x".
     */
    @Test
    void testMockInvokerFromOverride_Invoke_Fock_Default() {
        URL url = URL.valueOf("remote://1.2.3.4/" + IHelloService.class.getName())
                .addParameter(
                        REFER_KEY,
                        URL.encode(PATH_KEY + "=" + IHelloService.class.getName() + "&" + "mock=fail:return x"))
                .addParameter("invoke_return_error", "true");
        Invoker<IHelloService> cluster = getClusterInvoker(url);
        // Default mock applies to every method.
        RpcInvocation invocation = new RpcInvocation();
        invocation.setMethodName("getSomething");
        Result ret = cluster.invoke(invocation);
        Assertions.assertEquals("x", ret.getValue());
        invocation = new RpcInvocation();
        invocation.setMethodName("getSomething2");
        ret = cluster.invoke(invocation);
        Assertions.assertEquals("x", ret.getValue());
        invocation = new RpcInvocation();
        invocation.setMethodName("sayHello");
        ret = cluster.invoke(invocation);
        Assertions.assertEquals("x", ret.getValue());
    }
    /**
     * The legacy {@code return x} shorthand (no fail/force prefix) behaves as a fail-mock
     * for that method only; unconfigured methods propagate the RpcException.
     */
    @Test
    void testMockInvokerFromOverride_Invoke_checkCompatible_return() {
        URL url = URL.valueOf("remote://1.2.3.4/" + IHelloService.class.getName())
                .addParameter(
                        REFER_KEY,
                        URL.encode(PATH_KEY + "=" + IHelloService.class.getName() + "&" + "getSomething.mock=return x"))
                .addParameter("invoke_return_error", "true");
        Invoker<IHelloService> cluster = getClusterInvoker(url);
        // Configured with mock
        RpcInvocation invocation = new RpcInvocation();
        invocation.setMethodName("getSomething");
        Result ret = cluster.invoke(invocation);
        Assertions.assertEquals("x", ret.getValue());
        // No mock configured for getSomething3: the failure must surface.
        invocation = new RpcInvocation();
        invocation.setMethodName("getSomething3");
        try {
            ret = cluster.invoke(invocation);
            Assertions.fail("fail invoke");
        } catch (RpcException e) {
            // expected: no mock fallback available
        }
    }
    /**
     * {@code mock=true} resolves the default mock implementation class
     * (IHelloServiceMock convention) when the invocation fails.
     */
    @Test
    void testMockInvokerFromOverride_Invoke_checkCompatible_ImplMock() {
        URL url = URL.valueOf("remote://1.2.3.4/" + IHelloService.class.getName())
                .addParameter(
                        REFER_KEY,
                        URL.encode(
                                PATH_KEY + "=" + IHelloService.class.getName() + "&" + "mock=true" + "&" + "proxy=jdk"))
                .addParameter("invoke_return_error", "true");
        Invoker<IHelloService> cluster = getClusterInvoker(url);
        // Configured with mock
        RpcInvocation invocation = new RpcInvocation();
        invocation.setMethodName("getSomething");
        Result ret = cluster.invoke(invocation);
        Assertions.assertEquals("somethingmock", ret.getValue());
    }
    /**
     * {@code mock=fail} resolves the default mock implementation class after the forced
     * invocation failure.
     */
    @Test
    void testMockInvokerFromOverride_Invoke_checkCompatible_ImplMock2() {
        URL url = URL.valueOf("remote://1.2.3.4/" + IHelloService.class.getName())
                .addParameter(REFER_KEY, URL.encode(PATH_KEY + "=" + IHelloService.class.getName() + "&" + "mock=fail"))
                .addParameter("invoke_return_error", "true");
        Invoker<IHelloService> cluster = getClusterInvoker(url);
        // Configured with mock
        RpcInvocation invocation = new RpcInvocation();
        invocation.setMethodName("getSomething");
        Result ret = cluster.invoke(invocation);
        Assertions.assertEquals("somethingmock", ret.getValue());
    }
    /**
     * {@code mock=force} resolves the default mock implementation class without invoking
     * the real service at all.
     */
    @Test
    void testMockInvokerFromOverride_Invoke_checkCompatible_ImplMock3() {
        URL url = URL.valueOf("remote://1.2.3.4/" + IHelloService.class.getName())
                .addParameter(
                        REFER_KEY, URL.encode(PATH_KEY + "=" + IHelloService.class.getName() + "&" + "mock=force"));
        Invoker<IHelloService> cluster = getClusterInvoker(url);
        // Configured with mock
        RpcInvocation invocation = new RpcInvocation();
        invocation.setMethodName("getSomething");
        Result ret = cluster.invoke(invocation);
        Assertions.assertEquals("somethingmock", ret.getValue());
    }
    /** A force-mocked numeric literal for a String-returning method stays a String. */
    @Test
    void testMockInvokerFromOverride_Invoke_check_String() {
        URL url = URL.valueOf("remote://1.2.3.4/" + IHelloService.class.getName())
                .addParameter("getSomething.mock", "force:return 1688")
                .addParameter(
                        REFER_KEY,
                        URL.encode(PATH_KEY + "=" + IHelloService.class.getName() + "&"
                                + "getSomething.mock=force:return 1688"))
                .addParameter("invoke_return_error", "true");
        Invoker<IHelloService> cluster = getClusterInvoker(url);
        // Configured with mock
        RpcInvocation invocation = new RpcInvocation();
        invocation.setMethodName("getSomething");
        Result ret = cluster.invoke(invocation);
        Assertions.assertTrue(
                ret.getValue() instanceof String,
                "result type must be String but was : " + ret.getValue().getClass());
        Assertions.assertEquals("1688", ret.getValue());
    }
@Test
void testMockInvokerFromOverride_Invoke_check_int() {
URL url = URL.valueOf("remote://1.2.3.4/" + IHelloService.class.getName())
.addParameter(
REFER_KEY,
URL.encode(PATH_KEY + "=" + IHelloService.class.getName() + "&"
+ "getInt1.mock=force:return 1688"))
.addParameter("invoke_return_error", "true");
Invoker<IHelloService> cluster = getClusterInvoker(url);
// Configured with mock
RpcInvocation invocation = new RpcInvocation();
invocation.setMethodName("getInt1");
Result ret = cluster.invoke(invocation);
Assertions.assertTrue(
ret.getValue() instanceof Integer,
"result type must be integer but was : " + ret.getValue().getClass());
Assertions.assertEquals(new Integer(1688), (Integer) ret.getValue());
}
    /** A force-mocked {@code true} literal for a boolean-returning method boxes to Boolean. */
    @Test
    void testMockInvokerFromOverride_Invoke_check_boolean() {
        URL url = URL.valueOf("remote://1.2.3.4/" + IHelloService.class.getName())
                .addParameter(
                        REFER_KEY,
                        URL.encode(PATH_KEY + "=" + IHelloService.class.getName() + "&"
                                + "getBoolean1.mock=force:return true"))
                .addParameter("invoke_return_error", "true");
        Invoker<IHelloService> cluster = getClusterInvoker(url);
        // Configured with mock
        RpcInvocation invocation = new RpcInvocation();
        invocation.setMethodName("getBoolean1");
        Result ret = cluster.invoke(invocation);
        Assertions.assertTrue(
                ret.getValue() instanceof Boolean,
                "result type must be Boolean but was : " + ret.getValue().getClass());
        Assertions.assertTrue(Boolean.parseBoolean(ret.getValue().toString()));
    }
@Test
void testMockInvokerFromOverride_Invoke_check_Boolean() {
URL url = URL.valueOf("remote://1.2.3.4/" + IHelloService.class.getName())
.addParameter(
REFER_KEY,
URL.encode(PATH_KEY + "=" + IHelloService.class.getName() + "&"
+ "getBoolean2.mock=force:return true"))
.addParameter("invoke_return_error", "true");
Invoker<IHelloService> cluster = getClusterInvoker(url);
// Configured with mock
RpcInvocation invocation = new RpcInvocation();
invocation.setMethodName("getBoolean2");
Result ret = cluster.invoke(invocation);
Assertions.assertTrue(Boolean.parseBoolean(ret.getValue().toString()));
}
    /** The {@code return empty} shorthand yields an empty list for a List-returning method. */
    @SuppressWarnings("unchecked")
    @Test
    void testMockInvokerFromOverride_Invoke_check_ListString_empty() {
        URL url = URL.valueOf("remote://1.2.3.4/" + IHelloService.class.getName())
                .addParameter(
                        REFER_KEY,
                        URL.encode(PATH_KEY + "=" + IHelloService.class.getName() + "&"
                                + "getListString.mock=force:return empty"))
                .addParameter("invoke_return_error", "true");
        Invoker<IHelloService> cluster = getClusterInvoker(url);
        // Configured with mock
        RpcInvocation invocation = new RpcInvocation();
        invocation.setMethodName("getListString");
        Result ret = cluster.invoke(invocation);
        Assertions.assertEquals(0, ((List<String>) ret.getValue()).size());
    }
    /** A JSON array mock literal is deserialized into a List of Strings. */
    @SuppressWarnings("unchecked")
    @Test
    void testMockInvokerFromOverride_Invoke_check_ListString() {
        URL url = URL.valueOf("remote://1.2.3.4/" + IHelloService.class.getName())
                .addParameter(
                        REFER_KEY,
                        URL.encode(PATH_KEY + "=" + IHelloService.class.getName() + "&"
                                + "getListString.mock=force:return [\"hi\",\"hi2\"]"))
                .addParameter("invoke_return_error", "true");
        Invoker<IHelloService> cluster = getClusterInvoker(url);
        // Configured with mock
        RpcInvocation invocation = new RpcInvocation();
        invocation.setMethodName("getListString");
        Result ret = cluster.invoke(invocation);
        List<String> rl = (List<String>) ret.getValue();
        Assertions.assertEquals(2, rl.size());
        Assertions.assertEquals("hi", rl.get(0));
    }
    /** The {@code return empty} shorthand yields an empty list of POJOs. */
    @SuppressWarnings("unchecked")
    @Test
    void testMockInvokerFromOverride_Invoke_check_ListPojo_empty() {
        URL url = URL.valueOf("remote://1.2.3.4/" + IHelloService.class.getName())
                .addParameter(
                        REFER_KEY,
                        URL.encode(PATH_KEY + "=" + IHelloService.class.getName() + "&"
                                + "getUsers.mock=force:return empty"))
                .addParameter("invoke_return_error", "true");
        Invoker<IHelloService> cluster = getClusterInvoker(url);
        // Configured with mock
        RpcInvocation invocation = new RpcInvocation();
        invocation.setMethodName("getUsers");
        Result ret = cluster.invoke(invocation);
        Assertions.assertEquals(0, ((List<User>) ret.getValue()).size());
    }
@Test
void testMockInvokerFromOverride_Invoke_check_ListPojoAsync() throws ExecutionException, InterruptedException {
URL url = URL.valueOf("remote://1.2.3.4/" + IHelloService.class.getName())
.addParameter(
REFER_KEY,
URL.encode(PATH_KEY + "=" + IHelloService.class.getName() + "&" + "getUsersAsync.mock=force"))
.addParameter("invoke_return_error", "true");
Invoker<IHelloService> cluster = getClusterInvoker(url);
// Configured with mock
RpcInvocation invocation = new RpcInvocation();
invocation.setMethodName("getUsersAsync");
invocation.setReturnType(CompletableFuture.class);
Result ret = cluster.invoke(invocation);
CompletableFuture<List<User>> cf = null;
try {
cf = (CompletableFuture<List<User>>) ret.recreate();
} catch (Throwable e) {
e.printStackTrace();
}
Assertions.assertEquals(2, cf.get().size());
Assertions.assertEquals("Tommock", cf.get().get(0).getName());
}
    /** A JSON array of objects is deserialized into a List of User POJOs. */
    @SuppressWarnings("unchecked")
    @Test
    void testMockInvokerFromOverride_Invoke_check_ListPojo() {
        URL url = URL.valueOf("remote://1.2.3.4/" + IHelloService.class.getName())
                .addParameter(
                        REFER_KEY,
                        URL.encode(PATH_KEY + "=" + IHelloService.class.getName() + "&"
                                + "getUsers.mock=force:return [{id:1, name:\"hi1\"}, {id:2, name:\"hi2\"}]"))
                .addParameter("invoke_return_error", "true");
        Invoker<IHelloService> cluster = getClusterInvoker(url);
        // Configured with mock
        RpcInvocation invocation = new RpcInvocation();
        invocation.setMethodName("getUsers");
        Result ret = cluster.invoke(invocation);
        List<User> rl = (List<User>) ret.getValue();
        Assertions.assertEquals(2, rl.size());
        Assertions.assertEquals("hi1", rl.get(0).getName());
    }
@Test
void testMockInvokerFromOverride_Invoke_check_ListPojo_error() {
URL url = URL.valueOf("remote://1.2.3.4/" + IHelloService.class.getName())
.addParameter(
REFER_KEY,
URL.encode(PATH_KEY + "=" + IHelloService.class.getName() + "&"
+ "getUsers.mock=force:return [{id:x, name:\"hi1\"}]"))
.addParameter("invoke_return_error", "true");
Invoker<IHelloService> cluster = getClusterInvoker(url);
// Configured with mock
RpcInvocation invocation = new RpcInvocation();
invocation.setMethodName("getUsers");
try {
cluster.invoke(invocation);
} catch (RpcException e) {
}
}
    /**
     * A bare {@code force:throw} raises a non-business RpcException from the mock.
     */
    @Test
    void testMockInvokerFromOverride_Invoke_force_throw() {
        URL url = URL.valueOf("remote://1.2.3.4/" + IHelloService.class.getName())
                .addParameter(
                        REFER_KEY,
                        URL.encode(
                                PATH_KEY + "=" + IHelloService.class.getName() + "&" + "getBoolean2.mock=force:throw "))
                .addParameter("invoke_return_error", "true");
        Invoker<IHelloService> cluster = getClusterInvoker(url);
        // Configured with mock
        RpcInvocation invocation = new RpcInvocation();
        invocation.setMethodName("getBoolean2");
        try {
            cluster.invoke(invocation);
            Assertions.fail();
        } catch (RpcException e) {
            Assertions.assertFalse(e.isBiz(), "not custom exception");
        }
    }
    /**
     * {@code force:throw <FQCN>} instantiates and throws the named custom exception,
     * which surfaces to the caller via {@code Result.recreate()}.
     */
    @Test
    void testMockInvokerFromOverride_Invoke_force_throwCustemException() throws Throwable {
        URL url = URL.valueOf("remote://1.2.3.4/" + IHelloService.class.getName())
                .addParameter(
                        REFER_KEY,
                        URL.encode(
                                PATH_KEY + "=" + IHelloService.class.getName() + "&"
                                        + "getBoolean2.mock=force:throw org.apache.dubbo.rpc.cluster.support.wrapper.MyMockException"))
                .addParameter("invoke_return_error", "true");
        Invoker<IHelloService> cluster = getClusterInvoker(url);
        // Configured with mock
        RpcInvocation invocation = new RpcInvocation();
        invocation.setMethodName("getBoolean2");
        try {
            cluster.invoke(invocation).recreate();
            Assertions.fail();
        } catch (MyMockException e) {
            // expected: the configured custom exception type
        }
    }
    /**
     * {@code force:throw} with a non-existent exception class fails with an
     * IllegalStateException as the cause.
     */
    @Test
    void testMockInvokerFromOverride_Invoke_force_throwCustemExceptionNotFound() {
        URL url = URL.valueOf("remote://1.2.3.4/" + IHelloService.class.getName())
                .addParameter(
                        REFER_KEY,
                        URL.encode(PATH_KEY + "=" + IHelloService.class.getName() + "&"
                                + "getBoolean2.mock=force:throw java.lang.RuntimeException2"))
                .addParameter("invoke_return_error", "true");
        Invoker<IHelloService> cluster = getClusterInvoker(url);
        // Configured with mock
        RpcInvocation invocation = new RpcInvocation();
        invocation.setMethodName("getBoolean2");
        try {
            cluster.invoke(invocation);
            Assertions.fail();
        } catch (Exception e) {
            Assertions.assertTrue(e.getCause() instanceof IllegalStateException);
        }
    }
    /**
     * {@code mock=false} disables mocking entirely: the forced invocation failure
     * (a timeout RpcException) propagates to the caller.
     */
    @Test
    void testMockInvokerFromOverride_Invoke_mock_false() {
        URL url = URL.valueOf("remote://1.2.3.4/" + IHelloService.class.getName())
                .addParameter(
                        REFER_KEY, URL.encode(PATH_KEY + "=" + IHelloService.class.getName() + "&" + "mock=false"))
                .addParameter("invoke_return_error", "true");
        Invoker<IHelloService> cluster = getClusterInvoker(url);
        // Configured with mock
        RpcInvocation invocation = new RpcInvocation();
        invocation.setMethodName("getBoolean2");
        try {
            cluster.invoke(invocation);
            Assertions.fail();
        } catch (RpcException e) {
            Assertions.assertTrue(e.isTimeout());
        }
    }
    /**
     * Builds a MockClusterInvoker over a real HelloService invoker plus an optional mock
     * invoker. When the URL carries {@code invoke_return_error=true}, the underlying
     * cluster invoker throws a timeout RpcException so the mock fallback path is exercised.
     */
    private Invoker<IHelloService> getClusterInvokerMock(URL url, Invoker<IHelloService> mockInvoker) {
        // As `javassist` have a strict restriction of argument types, request will fail if Invocation do not contains
        // complete parameter type information
        final URL durl = url.addParameter("proxy", "jdk");
        invokers.clear();
        ProxyFactory proxy =
                ExtensionLoader.getExtensionLoader(ProxyFactory.class).getExtension("jdk");
        Invoker<IHelloService> invoker1 = proxy.getInvoker(new HelloService(), IHelloService.class, durl);
        invokers.add(invoker1);
        if (mockInvoker != null) {
            invokers.add(mockInvoker);
        }
        StaticDirectory<IHelloService> dic = new StaticDirectory<IHelloService>(durl, invokers, null);
        dic.buildRouterChain();
        // Raw-typed anonymous cluster invoker that either fails on demand or delegates to
        // the first (real) invoker.
        AbstractClusterInvoker<IHelloService> cluster = new AbstractClusterInvoker(dic) {
            @Override
            protected Result doInvoke(Invocation invocation, List invokers, LoadBalance loadbalance)
                    throws RpcException {
                if (durl.getParameter("invoke_return_error", false)) {
                    throw new RpcException(RpcException.TIMEOUT_EXCEPTION, "test rpc exception");
                } else {
                    return ((Invoker<?>) invokers.get(0)).invoke(invocation);
                }
            }
        };
        return new MockClusterInvoker<IHelloService>(dic, cluster);
    }
@SuppressWarnings({"unchecked", "rawtypes"})
private Invoker<IHelloService> getClusterInvoker(URL url) {
return getClusterInvokerMock(url, null);
}
public
|
MockClusterInvokerTest
|
java
|
grpc__grpc-java
|
examples/example-opentelemetry/src/main/java/io/grpc/example/opentelemetry/logging/LoggingOpenTelemetryServer.java
|
{
"start": 1488,
"end": 4814
}
|
class ____ {
private static final Logger logger = Logger.getLogger(LoggingOpenTelemetryServer.class.getName());
private Server gRPCServer;
private void start(int port) throws IOException {
gRPCServer = Grpc.newServerBuilderForPort(port, InsecureServerCredentials.create())
.addService(new GreeterImpl())
.build()
.start();
logger.info("Server started, listening on " + port);
}
private void stop() throws InterruptedException {
if (gRPCServer != null) {
gRPCServer.shutdown().awaitTermination(30, TimeUnit.SECONDS);
}
}
/**
* Await termination on the main thread since the grpc library uses daemon threads.
*/
private void blockUntilShutdown() throws InterruptedException {
if (gRPCServer != null) {
gRPCServer.awaitTermination();
}
}
/**
* Main launches the server from the command line.
*/
public static void main(String[] args) throws IOException, InterruptedException {
// The port on which the server should run.
int port = 50051;
// The port on which prometheus metrics are exposed.
int prometheusPort = 9464;
// The number of milliseconds between metric exports.
long metricExportInterval = 800L;
if (args.length > 0) {
if ("--help".equals(args[0])) {
System.err.println("Usage: [port]");
System.err.println("");
System.err.println(" port The port on which server will run. Defaults to " + port);
System.exit(1);
}
port = Integer.parseInt(args[0]);
}
// Create an instance of PeriodicMetricReader and configure it to export
// via a logging exporter to the SdkMeterProvider.
SdkMeterProvider sdkMeterProvider = SdkMeterProvider.builder()
.registerMetricReader(
PeriodicMetricReader.builder(LoggingMetricExporter.create())
.setInterval(Duration.ofMillis(metricExportInterval))
.build())
.build();
// Initialize OpenTelemetry SDK with MeterProvider configured with Logging metrics exporter
OpenTelemetrySdk openTelemetrySdk =
OpenTelemetrySdk.builder().setMeterProvider(sdkMeterProvider).build();
// Initialize gRPC OpenTelemetry.
// Following client metrics are enabled by default :
// 1. grpc.server.call.started
// 2. grpc.server.call.sent_total_compressed_message_size
// 3. grpc.server.call.rcvd_total_compressed_message_size
// 4. grpc.server.call.duration
GrpcOpenTelemetry grpcOpenTelmetry = GrpcOpenTelemetry.newBuilder()
.sdk(openTelemetrySdk)
.build();
// Registers gRPC OpenTelemetry globally.
grpcOpenTelmetry.registerGlobal();
final LoggingOpenTelemetryServer server = new LoggingOpenTelemetryServer();
server.start(port);
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
System.err.println("*** shutting down gRPC server since JVM is shutting down");
try {
server.stop();
} catch (InterruptedException e) {
e.printStackTrace(System.err);
}
// Shut down OpenTelemetry SDK.
openTelemetrySdk.close();
System.err.println("*** server shut down");
}
});
server.blockUntilShutdown();
}
static
|
LoggingOpenTelemetryServer
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/factories/Target.java
|
{
"start": 230,
"end": 1325
}
|
class ____ {
private Bar1 prop1;
private Bar2 prop2;
private Bar3 prop3;
private Bar4 prop4;
private CustomList<String> propList;
private CustomMap<String, String> propMap;
public Bar1 getProp1() {
return prop1;
}
public void setProp1(Bar1 prop1) {
this.prop1 = prop1;
}
public Bar2 getProp2() {
return prop2;
}
public void setProp2(Bar2 prop2) {
this.prop2 = prop2;
}
public Bar3 getProp3() {
return prop3;
}
public void setProp3(Bar3 prop3) {
this.prop3 = prop3;
}
public Bar4 getProp4() {
return prop4;
}
public void setProp4(Bar4 prop4) {
this.prop4 = prop4;
}
public CustomList<String> getPropList() {
return propList;
}
public void setPropList(CustomList<String> propList) {
this.propList = propList;
}
public CustomMap<String, String> getPropMap() {
return propMap;
}
public void setPropMap(CustomMap<String, String> propMap) {
this.propMap = propMap;
}
}
|
Target
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/checkreturnvalue/UsingJsr305CheckReturnValueTest.java
|
{
"start": 2697,
"end": 2961
}
|
class ____ {",
// NOTE: wildcard-imported annotations are not currently re-written
" @CheckReturnValue",
" public int getValue() {",
" return 42;",
" }",
"}")
.doTest();
}
}
|
Client
|
java
|
quarkusio__quarkus
|
integration-tests/mongodb-panache/src/main/java/io/quarkus/it/mongodb/panache/reactive/person/ReactivePersonRepository.java
|
{
"start": 268,
"end": 363
}
|
class ____ implements ReactivePanacheMongoRepositoryBase<Person, Long> {
}
|
ReactivePersonRepository
|
java
|
redisson__redisson
|
redisson/src/main/java/org/redisson/api/RMapRx.java
|
{
"start": 1433,
"end": 23749
}
|
interface ____<K, V> extends RExpirableRx {
/**
* Associates specified key with the given value if key isn't already associated with a value.
* Otherwise, replaces the associated value with the results of the given
* remapping function, or removes if the result is {@code null}.
*
* @param key - map key
* @param value - value to be merged with the existing value
* associated with the key or to be associated with the key,
* if no existing value
* @param remappingFunction - the function is invoked with the existing value to compute new value
* @return new value associated with the specified key or
* {@code null} if no value associated with the key
*/
Maybe<V> merge(K key, V value, BiFunction<? super V, ? super V, ? extends V> remappingFunction);
/**
* Computes a new mapping for the specified key and its current mapped value.
*
* @param key - map key
* @param remappingFunction - function to compute a value
* @return the new value associated with the specified key, or {@code null} if none
*/
Maybe<V> compute(K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction);
/**
* Computes a mapping for the specified key if it's not mapped before.
*
* @param key - map key
* @param mappingFunction - function to compute a value
* @return current or new computed value associated with
* the specified key, or {@code null} if the computed value is null
*/
Maybe<V> computeIfAbsent(K key, Function<? super K, ? extends V> mappingFunction);
/**
* Computes a mapping for the specified key only if it's already mapped.
*
* @param key - map key
* @param remappingFunction - function to compute a value
* @return the new value associated with the specified key, or null if none
*/
Maybe<V> computeIfPresent(K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction);
/**
* Loads all map entries to this Redis map using {@link org.redisson.api.map.MapLoader}.
*
* @param replaceExistingValues - <code>true</code> if existed values should be replaced, <code>false</code> otherwise.
* @param parallelism - parallelism level, used to increase speed of process execution
* @return void
*/
Completable loadAll(boolean replaceExistingValues, int parallelism);
/**
* Loads map entries using {@link org.redisson.api.map.MapLoader} whose keys are listed in defined <code>keys</code> parameter.
*
* @param keys - map keys
* @param replaceExistingValues - <code>true</code> if existed values should be replaced, <code>false</code> otherwise.
* @param parallelism - parallelism level, used to increase speed of process execution
* @return void
*/
Completable loadAll(Set<? extends K> keys, boolean replaceExistingValues, int parallelism);
/**
* Returns size of value mapped by key in bytes
*
* @param key - map key
* @return size of value
*/
Single<Integer> valueSize(K key);
/**
* Returns map slice contained the mappings with defined <code>keys</code>.
* <p>
* If map doesn't contain value/values for specified key/keys and {@link MapLoader} is defined
* then value/values will be loaded in read-through mode.
* <p>
* The returned map is <b>NOT</b> backed by the original map.
*
* @param keys - map keys
* @return Map slice
*/
Single<Map<K, V>> getAll(Set<K> keys);
/**
* Stores map entries specified in <code>map</code> object in batch mode.
* <p>
* If {@link MapWriter} is defined then map entries will be stored in write-through mode.
*
* @param map mappings to be stored in this map
* @return void
*/
Completable putAll(Map<? extends K, ? extends V> map);
/**
* Adds the given <code>delta</code> to the current value
* by mapped <code>key</code>.
* <p>
* Works only with codecs below
* <p>
* {@link org.redisson.codec.JsonJacksonCodec},
* <p>
* {@link org.redisson.client.codec.StringCodec},
* <p>
* {@link org.redisson.client.codec.IntegerCodec},
* <p>
* {@link org.redisson.client.codec.DoubleCodec}
* <p>
* {@link org.redisson.client.codec.LongCodec}
*
* @param key - map key
* @param delta the value to add
* @return the updated value
*/
Single<V> addAndGet(K key, Number delta);
/**
* Returns <code>true</code> if this map contains any map entry
* with specified <code>value</code>, otherwise <code>false</code>
*
* @param value - map value
* @return <code>true</code> if this map contains any map entry
* with specified <code>value</code>, otherwise <code>false</code>
*/
Single<Boolean> containsValue(Object value);
/**
* Returns <code>true</code> if this map contains map entry
* mapped by specified <code>key</code>, otherwise <code>false</code>
*
* @param key - map key
* @return <code>true</code> if this map contains map entry
* mapped by specified <code>key</code>, otherwise <code>false</code>
*/
Single<Boolean> containsKey(Object key);
/**
* Returns size of this map
*
* @return size
*/
Single<Integer> size();
/**
* Removes map entries mapped by specified <code>keys</code>.
* <p>
* Works faster than <code>{@link #remove(Object)}</code> but not returning
* the value.
* <p>
* If {@link MapWriter} is defined then <code>keys</code>are deleted in write-through mode.
*
* @param keys - map keys
* @return the number of keys that were removed from the hash, not including specified but non existing keys
*/
Single<Long> fastRemove(K... keys);
/**
* Stores the specified <code>value</code> mapped by specified <code>key</code>.
* <p>
* Works faster than <code>{@link #put(Object, Object)}</code> but not returning
* previous value.
* <p>
* Returns <code>true</code> if key is a new key in the hash and value was set or
* <code>false</code> if key already exists in the hash and the value was updated.
* <p>
* If {@link MapWriter} is defined then map entry is stored in write-through mode.
*
* @param key - map key
* @param value - map value
* @return <code>true</code> if key is a new key in the hash and value was set.
* <code>false</code> if key already exists in the hash and the value was updated.
*/
Single<Boolean> fastPut(K key, V value);
/**
* Stores the specified <code>value</code> mapped by specified <code>key</code>
* only if there is no value with specified<code>key</code> stored before.
* <p>
* Returns <code>true</code> if key is a new one in the hash and value was set or
* <code>false</code> if key already exists in the hash and change hasn't been made.
* <p>
* Works faster than <code>{@link #putIfAbsent(Object, Object)}</code> but not returning
* the previous value associated with <code>key</code>
* <p>
* If {@link MapWriter} is defined then new map entry is stored in write-through mode.
*
* @param key - map key
* @param value - map value
* @return <code>true</code> if key is a new one in the hash and value was set.
* <code>false</code> if key already exists in the hash and change hasn't been made.
*/
Single<Boolean> fastPutIfAbsent(K key, V value);
/**
* Read all keys at once
*
* @return keys
*/
Single<Set<K>> readAllKeySet();
/**
* Read all values at once
*
* @return values
*/
Single<Collection<V>> readAllValues();
/**
* Read all map entries at once
*
* @return entries
*/
Single<Set<Entry<K, V>>> readAllEntrySet();
/**
* Read all map as local instance at once
*
* @return map
*/
Single<Map<K, V>> readAllMap();
/**
* Returns the value mapped by defined <code>key</code> or {@code null} if value is absent.
* <p>
* If map doesn't contain value for specified key and {@link MapLoader} is defined
* then value will be loaded in read-through mode.
*
* @param key the key
* @return the value mapped by defined <code>key</code> or {@code null} if value is absent
*/
Maybe<V> get(K key);
/**
* Stores the specified <code>value</code> mapped by specified <code>key</code>.
* Returns previous value if map entry with specified <code>key</code> already existed.
* <p>
* If {@link MapWriter} is defined then map entry is stored in write-through mode.
*
* @param key - map key
* @param value - map value
* @return previous associated value
*/
Maybe<V> put(K key, V value);
/**
* Removes map entry by specified <code>key</code> and returns value.
* <p>
* If {@link MapWriter} is defined then <code>key</code>is deleted in write-through mode.
*
* @param key - map key
* @return deleted value, <code>null</code> if map entry doesn't exist
*/
Maybe<V> remove(K key);
/**
* Replaces previous value with a new <code>value</code> mapped by specified <code>key</code>.
* Returns <code>null</code> if there is no map entry stored before and doesn't store new map entry.
* <p>
* If {@link MapWriter} is defined then new <code>value</code>is written in write-through mode.
*
* @param key - map key
* @param value - map value
* @return previous associated value
* or <code>null</code> if there is no map entry stored before and doesn't store new map entry
*/
Maybe<V> replace(K key, V value);
/**
* Replaces previous <code>oldValue</code> with a <code>newValue</code> mapped by specified <code>key</code>.
* Returns <code>false</code> if previous value doesn't exist or equal to <code>oldValue</code>.
* <p>
* If {@link MapWriter} is defined then <code>newValue</code>is written in write-through mode.
*
* @param key - map key
* @param oldValue - map old value
* @param newValue - map new value
* @return <code>true</code> if value has been replaced otherwise <code>false</code>.
*/
Single<Boolean> replace(K key, V oldValue, V newValue);
/**
* Removes map entry only if it exists with specified <code>key</code> and <code>value</code>.
* <p>
* If {@link MapWriter} is defined then <code>key</code>is deleted in write-through mode.
*
* @param key - map key
* @param value - map value
* @return <code>true</code> if map entry has been removed otherwise <code>false</code>.
*/
Single<Boolean> remove(Object key, Object value);
/**
* Stores the specified <code>value</code> mapped by specified <code>key</code>
* only if there is no value with specified<code>key</code> stored before.
* <p>
* If {@link MapWriter} is defined then new map entry is stored in write-through mode.
*
* @param key - map key
* @param value - map value
* @return <code>null</code> if key is a new one in the hash and value was set.
* Previous value if key already exists in the hash and change hasn't been made.
*/
Maybe<V> putIfAbsent(K key, V value);
/**
* Stores the specified <code>value</code> mapped by <code>key</code>
* only if mapping already exists.
* <p>
* If {@link MapWriter} is defined then new map entry is stored in write-through mode.
*
* @param key - map key
* @param value - map value
* @return <code>null</code> if key doesn't exist in the hash and value hasn't been set.
* Previous value if key already exists in the hash and new value has been stored.
*/
Maybe<V> putIfExists(K key, V value);
/**
* Returns random keys from this map limited by <code>count</code>
*
* @param count - keys amount to return
* @return random keys
*/
Single<Set<K>> randomKeys(int count);
/**
* Returns random map entries from this map limited by <code>count</code>
*
* @param count - entries amount to return
* @return random entries
*/
Single<Map<K, V>> randomEntries(int count);
/**
* Stores the specified <code>value</code> mapped by <code>key</code>
* only if mapping already exists.
* <p>
* Returns <code>true</code> if key is a new one in the hash and value was set or
* <code>false</code> if key already exists in the hash and change hasn't been made.
* <p>
* Works faster than <code>{@link #putIfExists(Object, Object)}</code> but doesn't return
* previous value associated with <code>key</code>
* <p>
* If {@link MapWriter} is defined then new map entry is stored in write-through mode.
*
* @param key - map key
* @param value - map value
* @return <code>true</code> if key already exists in the hash and new value has been stored.
* <code>false</code> if key doesn't exist in the hash and value hasn't been set.
*/
Single<Boolean> fastPutIfExists(K key, V value);
/**
* Returns iterator over map entries collection.
* Map entries are loaded in batch. Batch size is <code>10</code>.
*
* @see #readAllEntrySet()
*
* @return iterator
*/
Flowable<Map.Entry<K, V>> entryIterator();
/**
* Returns iterator over map entries collection.
* Map entries are loaded in batch. Batch size is defined by <code>count</code> param.
*
* @see #readAllEntrySet()
*
* @param count - size of entries batch
* @return iterator
*/
Flowable<Map.Entry<K, V>> entryIterator(int count);
/**
* Returns iterator over map entries collection.
* Map entries are loaded in batch. Batch size is <code>10</code>.
* If <code>keyPattern</code> is not null then only entries mapped by matched keys of this pattern are loaded.
*
* Supported glob-style patterns:
* <p>
* h?llo subscribes to hello, hallo and hxllo
* <p>
* h*llo subscribes to hllo and heeeello
* <p>
* h[ae]llo subscribes to hello and hallo, but not hillo
*
* @see #readAllEntrySet()
*
* @param pattern - key pattern
* @return iterator
*/
Flowable<Map.Entry<K, V>> entryIterator(String pattern);
/**
* Returns iterator over map entries collection.
* Map entries are loaded in batch. Batch size is defined by <code>count</code> param.
* If <code>keyPattern</code> is not null then only entries mapped by matched keys of this pattern are loaded.
*
* Supported glob-style patterns:
* <p>
* h?llo subscribes to hello, hallo and hxllo
* <p>
* h*llo subscribes to hllo and heeeello
* <p>
* h[ae]llo subscribes to hello and hallo, but not hillo
*
* @see #readAllEntrySet()
*
* @param pattern - key pattern
* @param count - size of entries batch
* @return iterator
*/
Flowable<Map.Entry<K, V>> entryIterator(String pattern, int count);
/**
* Returns iterator over values collection of this map.
* Values are loaded in batch. Batch size is <code>10</code>.
*
* @see #readAllValues()
*
* @return iterator
*/
Flowable<V> valueIterator();
/**
* Returns iterator over values collection of this map.
* Values are loaded in batch. Batch size is defined by <code>count</code> param.
*
* @see #readAllValues()
*
* @param count - size of values batch
* @return iterator
*/
Flowable<V> valueIterator(int count);
/**
* Returns iterator over values collection of this map.
* Values are loaded in batch. Batch size is <code>10</code>.
* If <code>keyPattern</code> is not null then only values mapped by matched keys of this pattern are loaded.
* <p>
* Use <code>org.redisson.client.codec.StringCodec</code> for Map keys.
* <p>
* Usage example:
* <pre>
* Codec valueCodec = ...
* RMapRx<String, MyObject> map = redissonClient.getMap("simpleMap", new CompositeCodec(StringCodec.INSTANCE, valueCodec, valueCodec));
*
* // or
*
* RMapRx<String, String> map = redissonClient.getMap("simpleMap", StringCodec.INSTANCE);
* </pre>
* <pre>
* Supported glob-style patterns:
* h?llo subscribes to hello, hallo and hxllo
* h*llo subscribes to hllo and heeeello
* h[ae]llo subscribes to hello and hallo, but not hillo
* </pre>
* @see #readAllValues()
*
* @param pattern - key pattern
* @return iterator
*/
Flowable<V> valueIterator(String pattern);
/**
* Returns iterator over values collection of this map.
* Values are loaded in batch. Batch size is defined by <code>count</code> param.
* If <code>keyPattern</code> is not null then only values mapped by matched keys of this pattern are loaded.
* <p>
* Use <code>org.redisson.client.codec.StringCodec</code> for Map keys.
* <p>
* Usage example:
* <pre>
* Codec valueCodec = ...
* RMapRx<String, MyObject> map = redissonClient.getMap("simpleMap", new CompositeCodec(StringCodec.INSTANCE, valueCodec, valueCodec));
*
* // or
*
* RMapRx<String, String> map = redissonClient.getMap("simpleMap", StringCodec.INSTANCE);
* </pre>
* <pre>
* Supported glob-style patterns:
* h?llo subscribes to hello, hallo and hxllo
* h*llo subscribes to hllo and heeeello
* h[ae]llo subscribes to hello and hallo, but not hillo
* </pre>
* @see #readAllValues()
*
* @param pattern - key pattern
* @param count - size of values batch
* @return iterator
*/
Flowable<V> valueIterator(String pattern, int count);
/**
* Returns iterator over key set of this map.
* Keys are loaded in batch. Batch size is <code>10</code>.
*
* @see #readAllKeySet()
*
* @return iterator
*/
Flowable<K> keyIterator();
/**
* Returns iterator over key set of this map.
* Keys are loaded in batch. Batch size is defined by <code>count</code> param.
*
* @see #readAllKeySet()
*
* @param count - size of keys batch
* @return iterator
*/
Flowable<K> keyIterator(int count);
/**
* Returns iterator over key set of this map.
* If <code>pattern</code> is not null then only keys match this pattern are loaded.
* <p>
* Use <code>org.redisson.client.codec.StringCodec</code> for Map keys.
* <p>
* Usage example:
* <pre>
* Codec valueCodec = ...
* RMapRx<String, MyObject> map = redissonClient.getMap("simpleMap", new CompositeCodec(StringCodec.INSTANCE, valueCodec, valueCodec));
*
* // or
*
* RMapRx<String, String> map = redissonClient.getMap("simpleMap", StringCodec.INSTANCE);
* </pre>
* <pre>
* Supported glob-style patterns:
* h?llo subscribes to hello, hallo and hxllo
* h*llo subscribes to hllo and heeeello
* h[ae]llo subscribes to hello and hallo, but not hillo
* </pre>
* @see #readAllKeySet()
*
* @param pattern key pattern
* @return iterator
*/
Flowable<K> keyIterator(String pattern);
/**
* Returns iterator over key set of this map.
* If <code>pattern</code> is not null then only keys match this pattern are loaded.
* Keys are loaded in batch. Batch size is defined by <code>count</code> param.
* <p>
* Use <code>org.redisson.client.codec.StringCodec</code> for Map keys.
* <p>
* Usage example:
* <pre>
* Codec valueCodec = ...
* RMapRx<String, MyObject> map = redissonClient.getMap("simpleMap", new CompositeCodec(StringCodec.INSTANCE, valueCodec, valueCodec));
*
* // or
*
* RMapRx<String, String> map = redissonClient.getMap("simpleMap", StringCodec.INSTANCE);
* </pre>
* <pre>
* Supported glob-style patterns:
* h?llo subscribes to hello, hallo and hxllo
* h*llo subscribes to hllo and heeeello
* h[ae]llo subscribes to hello and hallo, but not hillo
* </pre>
* @see #readAllKeySet()
*
* @param pattern key pattern
* @param count size of keys batch
* @return iterator
*/
Flowable<K> keyIterator(String pattern, int count);
/**
* Returns <code>RPermitExpirableSemaphore</code> instance associated with key
*
* @param key - map key
* @return permitExpirableSemaphore
*/
RPermitExpirableSemaphoreRx getPermitExpirableSemaphore(K key);
/**
* Returns <code>RSemaphore</code> instance associated with key
*
* @param key - map key
* @return semaphore
*/
RSemaphoreRx getSemaphore(K key);
/**
* Returns <code>RLock</code> instance associated with key
*
* @param key - map key
* @return fairLock
*/
RLockRx getFairLock(K key);
/**
* Returns <code>RReadWriteLock</code> instance associated with key
*
* @param key - map key
* @return readWriteLock
*/
RReadWriteLockRx getReadWriteLock(K key);
/**
* Returns <code>RLock</code> instance associated with key
*
* @param key - map key
* @return lock
*/
RLockRx getLock(K key);
/**
* Adds object event listener
*
* @see org.redisson.api.listener.TrackingListener
* @see org.redisson.api.listener.MapPutListener
* @see org.redisson.api.listener.MapRemoveListener
* @see org.redisson.api.ExpiredObjectListener
* @see org.redisson.api.DeletedObjectListener
*
* @param listener object event listener
* @return listener id
*/
Single<Integer> addListener(ObjectListener listener);
}
|
RMapRx
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.