language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/embeddables/GenericEmbeddableWthSubclassTest.java | {
"start": 4495,
"end": 4862
} | class ____ extends Edition<String> {
@Column(name = "YEAR_COLUMN")
private Integer year;
public PopularEdition() {
}
public PopularEdition(String editorName, String code, Integer year) {
super( editorName, code );
this.year = year;
}
}
@Entity(name = "Base")
@Inheritance(strategy = InheritanceType.TABLE_PER_CLASS)
public static | PopularEdition |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/internal/util/reflection/GenericMetadataSupport.java | {
"start": 13684,
"end": 15098
} | class ____ which the {@link GenericMetadataSupport} should be built.
* @return The new {@link GenericMetadataSupport}.
* @throws MockitoException Raised if type is not a {@link Class} or a {@link ParameterizedType}.
*/
public static GenericMetadataSupport inferFrom(Type type) {
Checks.checkNotNull(type, "type");
if (type instanceof Class) {
return new FromClassGenericMetadataSupport((Class<?>) type);
}
if (type instanceof ParameterizedType) {
return new FromParameterizedTypeGenericMetadataSupport((ParameterizedType) type);
}
throw new MockitoException(
"Type meta-data for this Type ("
+ type.getClass().getCanonicalName()
+ ") is not supported : "
+ type);
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//// Below are specializations of GenericMetadataSupport that could handle retrieval of possible
// Types
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/**
* Generic metadata implementation for {@link Class}.
* <p>
* Offer support to retrieve generic metadata on a {@link Class} by reading type parameters and type variables on
* the | from |
java | elastic__elasticsearch | x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/analyze/RepositoryAnalyzeAction.java | {
"start": 46118,
"end": 55175
} | class ____ extends LegacyActionRequest {
private final String repositoryName;
private int blobCount = 100;
private int concurrency = 10;
private int registerOperationCount = 10;
private int readNodeCount = 10;
private int earlyReadNodeCount = 2;
private long seed = 0L;
private double rareActionProbability = 0.02;
private TimeValue timeout = TimeValue.timeValueSeconds(30);
private ByteSizeValue maxBlobSize = ByteSizeValue.ofMb(10);
private ByteSizeValue maxTotalDataSize = ByteSizeValue.ofGb(1);
private boolean detailed = false;
private DiscoveryNode reroutedFrom = null;
private boolean abortWritePermitted = true;
public Request(String repositoryName) {
this.repositoryName = repositoryName;
}
public Request(StreamInput in) throws IOException {
super(in);
repositoryName = in.readString();
seed = in.readLong();
rareActionProbability = in.readDouble();
blobCount = in.readVInt();
concurrency = in.readVInt();
if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) {
registerOperationCount = in.readVInt();
} else {
registerOperationCount = concurrency;
}
readNodeCount = in.readVInt();
earlyReadNodeCount = in.readVInt();
timeout = in.readTimeValue();
maxBlobSize = ByteSizeValue.readFrom(in);
maxTotalDataSize = ByteSizeValue.readFrom(in);
detailed = in.readBoolean();
reroutedFrom = in.readOptionalWriteable(DiscoveryNode::new);
abortWritePermitted = in.readBoolean();
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(repositoryName);
out.writeLong(seed);
out.writeDouble(rareActionProbability);
out.writeVInt(blobCount);
out.writeVInt(concurrency);
if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) {
out.writeVInt(registerOperationCount);
} else if (registerOperationCount != concurrency) {
throw new IllegalArgumentException(
"cannot send request with registerOperationCount != concurrency to version ["
+ out.getTransportVersion().toReleaseVersion()
+ "]"
);
}
out.writeVInt(readNodeCount);
out.writeVInt(earlyReadNodeCount);
out.writeTimeValue(timeout);
maxBlobSize.writeTo(out);
maxTotalDataSize.writeTo(out);
out.writeBoolean(detailed);
out.writeOptionalWriteable(reroutedFrom);
out.writeBoolean(abortWritePermitted);
}
@Override
public Task createTask(long id, String type, String action, TaskId parentTaskId, Map<String, String> headers) {
return new CancellableTask(id, type, action, getDescription(), parentTaskId, headers);
}
public void blobCount(int blobCount) {
if (blobCount <= 0) {
throw new IllegalArgumentException("blobCount must be >0, but was [" + blobCount + "]");
}
if (blobCount > 100000) {
// Coordination work is O(blobCount) but is supposed to be lightweight, so limit the blob count.
throw new IllegalArgumentException("blobCount must be <= 100000, but was [" + blobCount + "]");
}
this.blobCount = blobCount;
}
public void concurrency(int concurrency) {
if (concurrency <= 0) {
throw new IllegalArgumentException("concurrency must be >0, but was [" + concurrency + "]");
}
this.concurrency = concurrency;
}
public void registerOperationCount(int registerOperationCount) {
if (registerOperationCount <= 0) {
throw new IllegalArgumentException("registerOperationCount must be >0, but was [" + registerOperationCount + "]");
}
this.registerOperationCount = registerOperationCount;
}
public void seed(long seed) {
this.seed = seed;
}
public void timeout(TimeValue timeout) {
this.timeout = timeout;
}
public void maxBlobSize(ByteSizeValue maxBlobSize) {
if (maxBlobSize.getBytes() <= 0) {
throw new IllegalArgumentException("maxBlobSize must be >0, but was [" + maxBlobSize + "]");
}
this.maxBlobSize = maxBlobSize;
}
public void maxTotalDataSize(ByteSizeValue maxTotalDataSize) {
if (maxTotalDataSize.getBytes() <= 0) {
throw new IllegalArgumentException("maxTotalDataSize must be >0, but was [" + maxTotalDataSize + "]");
}
this.maxTotalDataSize = maxTotalDataSize;
}
public void detailed(boolean detailed) {
this.detailed = detailed;
}
public int getBlobCount() {
return blobCount;
}
public int getConcurrency() {
return concurrency;
}
public int getRegisterOperationCount() {
return registerOperationCount;
}
public String getRepositoryName() {
return repositoryName;
}
public TimeValue getTimeout() {
return timeout;
}
public long getSeed() {
return seed;
}
public ByteSizeValue getMaxBlobSize() {
return maxBlobSize;
}
public ByteSizeValue getMaxTotalDataSize() {
return maxTotalDataSize;
}
public boolean getDetailed() {
return detailed;
}
public DiscoveryNode getReroutedFrom() {
return reroutedFrom;
}
public void reroutedFrom(DiscoveryNode discoveryNode) {
reroutedFrom = discoveryNode;
}
public void readNodeCount(int readNodeCount) {
if (readNodeCount <= 0) {
throw new IllegalArgumentException("readNodeCount must be >0, but was [" + readNodeCount + "]");
}
this.readNodeCount = readNodeCount;
}
public int getReadNodeCount() {
return readNodeCount;
}
public void earlyReadNodeCount(int earlyReadNodeCount) {
if (earlyReadNodeCount < 0) {
throw new IllegalArgumentException("earlyReadNodeCount must be >=0, but was [" + earlyReadNodeCount + "]");
}
this.earlyReadNodeCount = earlyReadNodeCount;
}
public int getEarlyReadNodeCount() {
return earlyReadNodeCount;
}
public void rareActionProbability(double rareActionProbability) {
if (rareActionProbability < 0. || rareActionProbability > 1.) {
throw new IllegalArgumentException(
"rareActionProbability must be between 0 and 1, but was [" + rareActionProbability + "]"
);
}
this.rareActionProbability = rareActionProbability;
}
public double getRareActionProbability() {
return rareActionProbability;
}
public void abortWritePermitted(boolean abortWritePermitted) {
this.abortWritePermitted = abortWritePermitted;
}
public boolean isAbortWritePermitted() {
return abortWritePermitted;
}
@Override
public String toString() {
return "Request{" + getDescription() + '}';
}
@Override
public String getDescription() {
return "analysis [repository="
+ repositoryName
+ ", blobCount="
+ blobCount
+ ", concurrency="
+ concurrency
+ ", readNodeCount="
+ readNodeCount
+ ", earlyReadNodeCount="
+ earlyReadNodeCount
+ ", seed="
+ seed
+ ", rareActionProbability="
+ rareActionProbability
+ ", timeout="
+ timeout
+ ", maxBlobSize="
+ maxBlobSize
+ ", maxTotalDataSize="
+ maxTotalDataSize
+ ", detailed="
+ detailed
+ ", abortWritePermitted="
+ abortWritePermitted
+ "]";
}
public void reseed(long newSeed) {
if (seed == 0L) {
seed = newSeed;
}
}
}
public static | Request |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/resolver/order/DestinationOrder.java | {
"start": 1131,
"end": 1716
} | enum ____ {
HASH, // Follow consistent hashing in the first folder level
LOCAL, // Local first
RANDOM, // Random order
HASH_ALL, // Follow consistent hashing
SPACE, // Available space based order
LEADER_FOLLOWER; // Try leader sub-cluster first, if failed, try followers
/** Approaches that write folders in all subclusters. */
public static final EnumSet<DestinationOrder> FOLDER_ALL = EnumSet.of(
HASH_ALL,
RANDOM,
SPACE,
// leader-follower mode should make sure all directory exists in case of switching
LEADER_FOLLOWER);
} | DestinationOrder |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/introspect/AnnotationBundlesTest.java | {
"start": 587,
"end": 741
} | class ____ extends DatabindTestUtil
{
@Retention(RetentionPolicy.RUNTIME)
@JacksonAnnotationsInside
@JsonIgnore
private @ | AnnotationBundlesTest |
java | apache__camel | components/camel-azure/camel-azure-eventgrid/src/main/java/org/apache/camel/component/azure/eventgrid/EventGridConfiguration.java | {
"start": 1240,
"end": 4212
} | class ____ implements Cloneable {
@UriPath
@Metadata(required = true)
private String topicEndpoint;
@UriParam(label = "security", secret = true)
private String accessKey;
@UriParam(label = "security", secret = true)
@Metadata(autowired = true)
private AzureKeyCredential azureKeyCredential;
@UriParam(label = "security", secret = true)
@Metadata(autowired = true)
private TokenCredential tokenCredential;
@UriParam(label = "security", enums = "ACCESS_KEY,AZURE_IDENTITY,TOKEN_CREDENTIAL", defaultValue = "ACCESS_KEY")
private CredentialType credentialType = CredentialType.ACCESS_KEY;
@UriParam(label = "producer")
@Metadata(autowired = true)
private EventGridPublisherClient<com.azure.core.models.CloudEvent> publisherClient;
/**
* The topic endpoint URL where events will be published.
*/
public String getTopicEndpoint() {
return topicEndpoint;
}
public void setTopicEndpoint(String topicEndpoint) {
this.topicEndpoint = topicEndpoint;
}
/**
* The access key for the Event Grid topic. Required when using ACCESS_KEY credential type.
*/
public String getAccessKey() {
return accessKey;
}
public void setAccessKey(String accessKey) {
this.accessKey = accessKey;
}
/**
* The Azure Key Credential for authentication. This is automatically created from the accessKey if not provided.
*/
public AzureKeyCredential getAzureKeyCredential() {
return azureKeyCredential;
}
public void setAzureKeyCredential(AzureKeyCredential azureKeyCredential) {
this.azureKeyCredential = azureKeyCredential;
}
/**
* Provide custom authentication credentials using an implementation of {@link TokenCredential}.
*/
public TokenCredential getTokenCredential() {
return tokenCredential;
}
public void setTokenCredential(TokenCredential tokenCredential) {
this.tokenCredential = tokenCredential;
}
/**
* Determines the credential strategy to adopt
*/
public CredentialType getCredentialType() {
return credentialType;
}
public void setCredentialType(CredentialType credentialType) {
this.credentialType = credentialType;
}
/**
* The EventGrid publisher client. If provided, it will be used instead of creating a new one.
*/
public EventGridPublisherClient<com.azure.core.models.CloudEvent> getPublisherClient() {
return publisherClient;
}
public void setPublisherClient(EventGridPublisherClient<com.azure.core.models.CloudEvent> publisherClient) {
this.publisherClient = publisherClient;
}
public EventGridConfiguration copy() {
try {
return (EventGridConfiguration) super.clone();
} catch (CloneNotSupportedException e) {
throw new RuntimeCamelException(e);
}
}
}
| EventGridConfiguration |
java | google__dagger | javatests/dagger/internal/codegen/ComponentProcessorTest.java | {
"start": 21149,
"end": 21474
} | class ____ {",
" @Provides AClass aClass() {",
" return new AClass();",
" }",
"}");
Source component = CompilerTests.javaSource("SomeComponent",
"import dagger.Component;",
"",
"@Component(modules = AModule.class)",
" | AModule |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/asm/ClassReader.java | {
"start": 39785,
"end": 47774
} | class ____ parsed.
* @param recordComponentOffset the offset of the current record component.
* @return the offset of the first byte following the record component.
*/
private int readRecordComponent(
final ClassVisitor classVisitor, final Context context, final int recordComponentOffset) {
char[] charBuffer = context.charBuffer;
int currentOffset = recordComponentOffset;
String name = readUTF8(currentOffset, charBuffer);
String descriptor = readUTF8(currentOffset + 2, charBuffer);
currentOffset += 4;
// Read the record component attributes (the variables are ordered as in Section 4.7 of the
// JVMS).
// Attribute offsets exclude the attribute_name_index and attribute_length fields.
// - The string corresponding to the Signature attribute, or null.
String signature = null;
// - The offset of the RuntimeVisibleAnnotations attribute, or 0.
int runtimeVisibleAnnotationsOffset = 0;
// - The offset of the RuntimeInvisibleAnnotations attribute, or 0.
int runtimeInvisibleAnnotationsOffset = 0;
// - The offset of the RuntimeVisibleTypeAnnotations attribute, or 0.
int runtimeVisibleTypeAnnotationsOffset = 0;
// - The offset of the RuntimeInvisibleTypeAnnotations attribute, or 0.
int runtimeInvisibleTypeAnnotationsOffset = 0;
// - The non standard attributes (linked with their {@link Attribute#nextAttribute} field).
// This list in the <i>reverse order</i> or their order in the ClassFile structure.
Attribute attributes = null;
int attributesCount = readUnsignedShort(currentOffset);
currentOffset += 2;
while (attributesCount-- > 0) {
// Read the attribute_info's attribute_name and attribute_length fields.
String attributeName = readUTF8(currentOffset, charBuffer);
int attributeLength = readInt(currentOffset + 2);
currentOffset += 6;
// The tests are sorted in decreasing frequency order (based on frequencies observed on
// typical classes).
if (Constants.SIGNATURE.equals(attributeName)) {
signature = readUTF8(currentOffset, charBuffer);
} else if (Constants.RUNTIME_VISIBLE_ANNOTATIONS.equals(attributeName)) {
runtimeVisibleAnnotationsOffset = currentOffset;
} else if (Constants.RUNTIME_VISIBLE_TYPE_ANNOTATIONS.equals(attributeName)) {
runtimeVisibleTypeAnnotationsOffset = currentOffset;
} else if (Constants.RUNTIME_INVISIBLE_ANNOTATIONS.equals(attributeName)) {
runtimeInvisibleAnnotationsOffset = currentOffset;
} else if (Constants.RUNTIME_INVISIBLE_TYPE_ANNOTATIONS.equals(attributeName)) {
runtimeInvisibleTypeAnnotationsOffset = currentOffset;
} else {
Attribute attribute =
readAttribute(
context.attributePrototypes,
attributeName,
currentOffset,
attributeLength,
charBuffer,
-1,
null);
attribute.nextAttribute = attributes;
attributes = attribute;
}
currentOffset += attributeLength;
}
RecordComponentVisitor recordComponentVisitor =
classVisitor.visitRecordComponent(name, descriptor, signature);
if (recordComponentVisitor == null) {
return currentOffset;
}
// Visit the RuntimeVisibleAnnotations attribute.
if (runtimeVisibleAnnotationsOffset != 0) {
int numAnnotations = readUnsignedShort(runtimeVisibleAnnotationsOffset);
int currentAnnotationOffset = runtimeVisibleAnnotationsOffset + 2;
while (numAnnotations-- > 0) {
// Parse the type_index field.
String annotationDescriptor = readUTF8(currentAnnotationOffset, charBuffer);
currentAnnotationOffset += 2;
// Parse num_element_value_pairs and element_value_pairs and visit these values.
currentAnnotationOffset =
readElementValues(
recordComponentVisitor.visitAnnotation(annotationDescriptor, /* visible= */ true),
currentAnnotationOffset,
/* named= */ true,
charBuffer);
}
}
// Visit the RuntimeInvisibleAnnotations attribute.
if (runtimeInvisibleAnnotationsOffset != 0) {
int numAnnotations = readUnsignedShort(runtimeInvisibleAnnotationsOffset);
int currentAnnotationOffset = runtimeInvisibleAnnotationsOffset + 2;
while (numAnnotations-- > 0) {
// Parse the type_index field.
String annotationDescriptor = readUTF8(currentAnnotationOffset, charBuffer);
currentAnnotationOffset += 2;
// Parse num_element_value_pairs and element_value_pairs and visit these values.
currentAnnotationOffset =
readElementValues(
recordComponentVisitor.visitAnnotation(annotationDescriptor, /* visible= */ false),
currentAnnotationOffset,
/* named= */ true,
charBuffer);
}
}
// Visit the RuntimeVisibleTypeAnnotations attribute.
if (runtimeVisibleTypeAnnotationsOffset != 0) {
int numAnnotations = readUnsignedShort(runtimeVisibleTypeAnnotationsOffset);
int currentAnnotationOffset = runtimeVisibleTypeAnnotationsOffset + 2;
while (numAnnotations-- > 0) {
// Parse the target_type, target_info and target_path fields.
currentAnnotationOffset = readTypeAnnotationTarget(context, currentAnnotationOffset);
// Parse the type_index field.
String annotationDescriptor = readUTF8(currentAnnotationOffset, charBuffer);
currentAnnotationOffset += 2;
// Parse num_element_value_pairs and element_value_pairs and visit these values.
currentAnnotationOffset =
readElementValues(
recordComponentVisitor.visitTypeAnnotation(
context.currentTypeAnnotationTarget,
context.currentTypeAnnotationTargetPath,
annotationDescriptor,
/* visible= */ true),
currentAnnotationOffset,
/* named= */ true,
charBuffer);
}
}
// Visit the RuntimeInvisibleTypeAnnotations attribute.
if (runtimeInvisibleTypeAnnotationsOffset != 0) {
int numAnnotations = readUnsignedShort(runtimeInvisibleTypeAnnotationsOffset);
int currentAnnotationOffset = runtimeInvisibleTypeAnnotationsOffset + 2;
while (numAnnotations-- > 0) {
// Parse the target_type, target_info and target_path fields.
currentAnnotationOffset = readTypeAnnotationTarget(context, currentAnnotationOffset);
// Parse the type_index field.
String annotationDescriptor = readUTF8(currentAnnotationOffset, charBuffer);
currentAnnotationOffset += 2;
// Parse num_element_value_pairs and element_value_pairs and visit these values.
currentAnnotationOffset =
readElementValues(
recordComponentVisitor.visitTypeAnnotation(
context.currentTypeAnnotationTarget,
context.currentTypeAnnotationTargetPath,
annotationDescriptor,
/* visible= */ false),
currentAnnotationOffset,
/* named= */ true,
charBuffer);
}
}
// Visit the non standard attributes.
while (attributes != null) {
// Copy and reset the nextAttribute field so that it can also be used in FieldWriter.
Attribute nextAttribute = attributes.nextAttribute;
attributes.nextAttribute = null;
recordComponentVisitor.visitAttribute(attributes);
attributes = nextAttribute;
}
// Visit the end of the field.
recordComponentVisitor.visitEnd();
return currentOffset;
}
/**
* Reads a JVMS field_info structure and makes the given visitor visit it.
*
* @param classVisitor the visitor that must visit the field.
* @param context information about the | being |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/TestCheckRemoveZKNodeRMStateStore.java | {
"start": 4548,
"end": 16428
} | class ____ extends ZKRMStateStore {
private ResourceManager resourceManager;
private ZKCuratorManager zkCuratorManager;
TestZKRMStateStoreInternal(Configuration conf, String workingZnode)
throws Exception {
resourceManager = mock(ResourceManager.class);
zkCuratorManager = mock(ZKCuratorManager.class, RETURNS_DEEP_STUBS);
when(resourceManager.getZKManager()).thenReturn(zkCuratorManager);
when(resourceManager.createAndStartZKManager(conf)).thenReturn(zkCuratorManager);
when(zkCuratorManager.exists(getAppNode("application_1708333280_0001")))
.thenReturn(true);
when(zkCuratorManager.exists(getAppNode("application_1708334188_0001")))
.thenReturn(true).thenReturn(false);
when(zkCuratorManager.exists(getDelegationTokenNode(0, 0)))
.thenReturn(true).thenReturn(false);
when(zkCuratorManager.exists(getAppNode("application_1709705779_0001")))
.thenReturn(true);
when(zkCuratorManager.exists(getAttemptNode("application_1709705779_0001",
"appattempt_1709705779_0001_000001")))
.thenReturn(true);
doThrow(new KeeperException.NoNodeException()).when(zkCuratorManager)
.safeDelete(anyString(), anyList(), anyString());
setResourceManager(resourceManager);
init(conf);
dispatcher.disableExitOnDispatchException();
start();
assertTrue(znodeWorkingPath.equals(workingZnode));
}
private String getVersionNode() {
return znodeWorkingPath + "/" + ROOT_ZNODE_NAME + "/" + VERSION_NODE;
}
@Override
public Version getCurrentVersion() {
return CURRENT_VERSION_INFO;
}
private String getAppNode(String appId, int splitIdx) {
String rootPath = workingZnode + "/" + ROOT_ZNODE_NAME + "/" +
RM_APP_ROOT;
String appPath = appId;
if (splitIdx != 0) {
int idx = appId.length() - splitIdx;
appPath = appId.substring(0, idx) + "/" + appId.substring(idx);
return rootPath + "/" + RM_APP_ROOT_HIERARCHIES + "/" +
Integer.toString(splitIdx) + "/" + appPath;
}
return rootPath + "/" + appPath;
}
private String getAppNode(String appId) {
return getAppNode(appId, 0);
}
private String getAttemptNode(String appId, String attemptId) {
return getAppNode(appId) + "/" + attemptId;
}
private String getDelegationTokenNode(int rmDTSequenceNumber, int splitIdx) {
String rootPath = workingZnode + "/" + ROOT_ZNODE_NAME + "/" +
RM_DT_SECRET_MANAGER_ROOT + "/" +
RMStateStore.RM_DELEGATION_TOKENS_ROOT_ZNODE_NAME;
String nodeName = DELEGATION_TOKEN_PREFIX;
if (splitIdx == 0) {
nodeName += rmDTSequenceNumber;
} else {
nodeName += String.format("%04d", rmDTSequenceNumber);
}
String path = nodeName;
if (splitIdx != 0) {
int idx = nodeName.length() - splitIdx;
path = splitIdx + "/" + nodeName.substring(0, idx) + "/"
+ nodeName.substring(idx);
}
return rootPath + "/" + path;
}
}
private RMStateStore createStore(Configuration conf) throws Exception {
workingZnode = "/jira/issue/11626/rmstore";
conf.set(CommonConfigurationKeys.ZK_ADDRESS,
curatorTestingServer.getConnectString());
conf.set(YarnConfiguration.ZK_RM_STATE_STORE_PARENT_PATH, workingZnode);
conf.setLong(YarnConfiguration.RM_EPOCH, epoch);
conf.setLong(YarnConfiguration.RM_EPOCH_RANGE, getEpochRange());
this.store = new TestZKRMStateStoreInternal(conf, workingZnode);
return this.store;
}
public RMStateStore getRMStateStore(Configuration conf) throws Exception {
return createStore(conf);
}
@Override
public RMStateStore getRMStateStore() throws Exception {
YarnConfiguration conf = new YarnConfiguration();
return createStore(conf);
}
@Override
public boolean isFinalStateValid() throws Exception {
return 1 ==
curatorFramework.getChildren().forPath(store.znodeWorkingPath).size();
}
@Override
public void writeVersion(Version version) throws Exception {
curatorFramework.setData().withVersion(-1)
.forPath(store.getVersionNode(),
((VersionPBImpl) version).getProto().toByteArray());
}
@Override
public Version getCurrentVersion() throws Exception {
return store.getCurrentVersion();
}
@Override
public boolean appExists(RMApp app) throws Exception {
String appIdPath = app.getApplicationId().toString();
int split =
store.getConfig().getInt(YarnConfiguration.ZK_APPID_NODE_SPLIT_INDEX,
YarnConfiguration.DEFAULT_ZK_APPID_NODE_SPLIT_INDEX);
return null != curatorFramework.checkExists()
.forPath(store.getAppNode(appIdPath, split));
}
@Override
public boolean attemptExists(RMAppAttempt attempt) throws Exception {
ApplicationAttemptId attemptId = attempt.getAppAttemptId();
return null != curatorFramework.checkExists()
.forPath(store.getAttemptNode(
attemptId.getApplicationId().toString(), attemptId.toString()));
}
}
@Test
@Timeout(value = 60)
public void testSafeDeleteZKNode() throws Exception {
TestZKRMStateStoreTester zkTester = new TestZKRMStateStoreTester();
testRemoveAttempt(zkTester);
testRemoveApplication(zkTester);
testRemoveRMDelegationToken(zkTester);
testRemoveRMDTMasterKeyState(zkTester);
testRemoveReservationState(zkTester);
testTransitionedToStandbyAfterCheckNode(zkTester);
}
public void testRemoveAttempt(RMStateStoreHelper stateStoreHelper) throws Exception {
RMStateStore store = stateStoreHelper.getRMStateStore();
TestDispatcher dispatcher = new TestDispatcher();
store.setRMDispatcher(dispatcher);
ApplicationId appIdRemoved = ApplicationId.newInstance(1708333280, 1);
storeApp(store, appIdRemoved, 123456, 654321);
ApplicationAttemptId attemptIdRemoved = ApplicationAttemptId.newInstance(appIdRemoved, 1);
storeAttempt(store, attemptIdRemoved,
ContainerId.newContainerId(attemptIdRemoved, 1).toString(), null, null, dispatcher);
try {
store.removeApplicationAttemptInternal(attemptIdRemoved);
} catch (KeeperException.NoNodeException nne) {
fail("NoNodeException should not happen.");
}
// The verification method safeDelete is called once.
verify(store.resourceManager.getZKManager(), times(1))
.safeDelete(anyString(), anyList(), anyString());
store.close();
}
public void testRemoveApplication(RMStateStoreHelper stateStoreHelper) throws Exception {
RMStateStore store = stateStoreHelper.getRMStateStore();
TestDispatcher dispatcher = new TestDispatcher();
store.setRMDispatcher(dispatcher);
ApplicationId appIdRemoved = ApplicationId.newInstance(1708334188, 1);
storeApp(store, appIdRemoved, 123456, 654321);
ApplicationAttemptId attemptIdRemoved = ApplicationAttemptId.newInstance(appIdRemoved, 1);
storeAttempt(store, attemptIdRemoved,
ContainerId.newContainerId(attemptIdRemoved, 1).toString(), null, null, dispatcher);
ApplicationSubmissionContext context = new ApplicationSubmissionContextPBImpl();
context.setApplicationId(appIdRemoved);
ApplicationStateData appStateRemoved =
ApplicationStateData.newInstance(
123456, 654321, context, "user1");
appStateRemoved.attempts.put(attemptIdRemoved, null);
try {
// The occurrence of NoNodeException is induced by calling the safeDelete method.
store.removeApplicationStateInternal(appStateRemoved);
} catch (KeeperException.NoNodeException nne) {
fail("NoNodeException should not happen.");
}
store.close();
}
public void testRemoveRMDelegationToken(RMStateStoreHelper stateStoreHelper) throws Exception{
RMStateStore store = stateStoreHelper.getRMStateStore();
TestDispatcher dispatcher = new TestDispatcher();
store.setRMDispatcher(dispatcher);
RMDelegationTokenIdentifier tokenIdRemoved = new RMDelegationTokenIdentifier();
try {
store.removeRMDelegationTokenState(tokenIdRemoved);
} catch (KeeperException.NoNodeException nne) {
fail("NoNodeException should not happen.");
}
// The verification method safeDelete is called once.
verify(store.resourceManager.getZKManager(), times(1))
.safeDelete(anyString(), anyList(), anyString());
store.close();
}
public void testRemoveRMDTMasterKeyState(RMStateStoreHelper stateStoreHelper) throws Exception{
RMStateStore store = stateStoreHelper.getRMStateStore();
TestDispatcher dispatcher = new TestDispatcher();
store.setRMDispatcher(dispatcher);
DelegationKey keyRemoved = new DelegationKey();
try {
store.removeRMDTMasterKeyState(keyRemoved);
} catch (KeeperException.NoNodeException nne) {
fail("NoNodeException should not happen.");
}
// The verification method safeDelete is called once.
verify(store.resourceManager.getZKManager(), times(1))
.safeDelete(anyString(), anyList(), anyString());
store.close();
}
public void testRemoveReservationState(RMStateStoreHelper stateStoreHelper) throws Exception{
RMStateStore store = stateStoreHelper.getRMStateStore();
TestDispatcher dispatcher = new TestDispatcher();
store.setRMDispatcher(dispatcher);
String planName = "test-reservation";
ReservationId reservationIdRemoved = ReservationId.newInstance(1708414427, 1);
try {
store.removeReservationState(planName, reservationIdRemoved.toString());
} catch (KeeperException.NoNodeException nne) {
fail("NoNodeException should not happen.");
}
// The verification method safeDelete is called once.
verify(store.resourceManager.getZKManager(), times(1))
.safeDelete(anyString(), anyList(), anyString());
store.close();
}
public void testTransitionedToStandbyAfterCheckNode(RMStateStoreHelper stateStoreHelper)
throws Exception {
RMStateStore store = stateStoreHelper.getRMStateStore();
HAServiceProtocol.StateChangeRequestInfo req = new HAServiceProtocol.StateChangeRequestInfo(
HAServiceProtocol.RequestSource.REQUEST_BY_USER);
Configuration conf = new YarnConfiguration();
ResourceManager rm = new MockRM(conf, store);
rm.init(conf);
rm.start();
// Transition to active.
rm.getRMContext().getRMAdminService().transitionToActive(req);
assertEquals(Service.STATE.STARTED, rm.getServiceState(),
"RM with ZKStore didn't start");
assertEquals(HAServiceProtocol.HAServiceState.ACTIVE,
rm.getRMContext().getRMAdminService().getServiceStatus().getState(),
"RM should be Active");
// Simulate throw NodeExistsException
ZKRMStateStore zKStore = (ZKRMStateStore) rm.getRMContext().getStateStore();
TestDispatcher dispatcher = new TestDispatcher();
zKStore.setRMDispatcher(dispatcher);
ApplicationId appIdRemoved = ApplicationId.newInstance(1709705779, 1);
storeApp(zKStore, appIdRemoved, 123456, 654321);
ApplicationAttemptId attemptIdRemoved = ApplicationAttemptId.newInstance(appIdRemoved, 1);
storeAttempt(zKStore, attemptIdRemoved,
ContainerId.newContainerId(attemptIdRemoved, 1).toString(), null, null, dispatcher);
try {
zKStore.removeApplicationAttemptInternal(attemptIdRemoved);
} catch (Exception e) {
assertTrue(e instanceof KeeperException.NodeExistsException);
}
rm.close();
}
} | TestZKRMStateStoreInternal |
java | apache__logging-log4j2 | log4j-api/src/main/java/org/apache/logging/log4j/util/LoaderUtil.java | {
"start": 10021,
"end": 10265
} | class ____ by the {@code Class} object.
* @param clazz The class.
* @return new instance of the class.
* @throws NoSuchMethodException if no zero-arg constructor exists
* @throws SecurityException if this | modeled |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableMergeDelayErrorTest.java | {
"start": 19806,
"end": 27394
} | class ____ implements Publisher<String> {
Thread t;
@Override
public void subscribe(final Subscriber<? super String> subscriber) {
subscriber.onSubscribe(new BooleanSubscription());
t = new Thread(new Runnable() {
@Override
public void run() {
try {
Thread.sleep(100);
} catch (InterruptedException e) {
subscriber.onError(e);
}
subscriber.onNext("hello");
subscriber.onComplete();
}
});
t.start();
}
}
@Test
public void delayErrorMaxConcurrent() {
final List<Long> requests = new ArrayList<>();
Flowable<Integer> source = Flowable.mergeDelayError(Flowable.just(
Flowable.just(1).hide(),
Flowable.<Integer>error(new TestException()))
.doOnRequest(new LongConsumer() {
@Override
public void accept(long t1) {
requests.add(t1);
}
}), 1);
TestSubscriberEx<Integer> ts = new TestSubscriberEx<>();
source.subscribe(ts);
ts.assertValue(1);
ts.assertTerminated();
ts.assertError(TestException.class);
assertEquals(Arrays.asList(1L, 1L, 1L), requests);
}
// This is pretty much a clone of testMergeList but with the overloaded MergeDelayError for Iterables
@Test
public void mergeIterable() {
final Flowable<String> f1 = Flowable.unsafeCreate(new TestSynchronousFlowable());
final Flowable<String> f2 = Flowable.unsafeCreate(new TestSynchronousFlowable());
List<Flowable<String>> listOfFlowables = new ArrayList<>();
listOfFlowables.add(f1);
listOfFlowables.add(f2);
Flowable<String> m = Flowable.mergeDelayError(listOfFlowables);
m.subscribe(stringSubscriber);
verify(stringSubscriber, never()).onError(any(Throwable.class));
verify(stringSubscriber, times(1)).onComplete();
verify(stringSubscriber, times(2)).onNext("hello");
}
@Test
public void iterableMaxConcurrent() {
TestSubscriber<Integer> ts = TestSubscriber.create();
PublishProcessor<Integer> pp1 = PublishProcessor.create();
PublishProcessor<Integer> pp2 = PublishProcessor.create();
Flowable.mergeDelayError(Arrays.asList(pp1, pp2), 1).subscribe(ts);
assertTrue("ps1 has no subscribers?!", pp1.hasSubscribers());
assertFalse("ps2 has subscribers?!", pp2.hasSubscribers());
pp1.onNext(1);
pp1.onComplete();
assertFalse("ps1 has subscribers?!", pp1.hasSubscribers());
assertTrue("ps2 has no subscribers?!", pp2.hasSubscribers());
pp2.onNext(2);
pp2.onComplete();
ts.assertValues(1, 2);
ts.assertNoErrors();
ts.assertComplete();
}
@Test
public void iterableMaxConcurrentError() {
TestSubscriberEx<Integer> ts = new TestSubscriberEx<>();
PublishProcessor<Integer> pp1 = PublishProcessor.create();
PublishProcessor<Integer> pp2 = PublishProcessor.create();
Flowable.mergeDelayError(Arrays.asList(pp1, pp2), 1).subscribe(ts);
assertTrue("ps1 has no subscribers?!", pp1.hasSubscribers());
assertFalse("ps2 has subscribers?!", pp2.hasSubscribers());
pp1.onNext(1);
pp1.onError(new TestException());
assertFalse("ps1 has subscribers?!", pp1.hasSubscribers());
assertTrue("ps2 has no subscribers?!", pp2.hasSubscribers());
pp2.onNext(2);
pp2.onError(new TestException());
ts.assertValues(1, 2);
ts.assertError(CompositeException.class);
ts.assertNotComplete();
CompositeException ce = (CompositeException)ts.errors().get(0);
assertEquals(2, ce.getExceptions().size());
}
static <T> Flowable<T> withError(Flowable<T> source) {
return source.concatWith(Flowable.<T>error(new TestException()));
}
@Test
public void array() {
for (int i = 1; i < 100; i++) {
@SuppressWarnings("unchecked")
Flowable<Integer>[] sources = new Flowable[i];
Arrays.fill(sources, Flowable.just(1));
Integer[] expected = new Integer[i];
for (int j = 0; j < i; j++) {
expected[j] = 1;
}
Flowable.mergeArrayDelayError(sources)
.test()
.assertResult(expected);
}
}
@Test
public void mergeArrayDelayError() {
Flowable.mergeArrayDelayError(Flowable.just(1), Flowable.just(2))
.test()
.assertResult(1, 2);
}
@Test
public void mergeIterableDelayErrorWithError() {
Flowable.mergeDelayError(
Arrays.asList(Flowable.just(1).concatWith(Flowable.<Integer>error(new TestException())),
Flowable.just(2)))
.test()
.assertFailure(TestException.class, 1, 2);
}
@Test
public void mergeDelayError() {
Flowable.mergeDelayError(
Flowable.just(Flowable.just(1),
Flowable.just(2)))
.test()
.assertResult(1, 2);
}
@Test
public void mergeDelayErrorWithError() {
Flowable.mergeDelayError(
Flowable.just(Flowable.just(1).concatWith(Flowable.<Integer>error(new TestException())),
Flowable.just(2)))
.test()
.assertFailure(TestException.class, 1, 2);
}
@Test
public void mergeDelayErrorMaxConcurrency() {
Flowable.mergeDelayError(
Flowable.just(Flowable.just(1),
Flowable.just(2)), 1)
.test()
.assertResult(1, 2);
}
@Test
public void mergeDelayErrorWithErrorMaxConcurrency() {
Flowable.mergeDelayError(
Flowable.just(Flowable.just(1).concatWith(Flowable.<Integer>error(new TestException())),
Flowable.just(2)), 1)
.test()
.assertFailure(TestException.class, 1, 2);
}
@Test
public void mergeIterableDelayErrorMaxConcurrency() {
Flowable.mergeDelayError(
Arrays.asList(Flowable.just(1),
Flowable.just(2)), 1)
.test()
.assertResult(1, 2);
}
@Test
public void mergeIterableDelayErrorWithErrorMaxConcurrency() {
Flowable.mergeDelayError(
Arrays.asList(Flowable.just(1).concatWith(Flowable.<Integer>error(new TestException())),
Flowable.just(2)), 1)
.test()
.assertFailure(TestException.class, 1, 2);
}
@Test
public void mergeDelayError3() {
Flowable.mergeDelayError(
Flowable.just(1),
Flowable.just(2),
Flowable.just(3)
)
.test()
.assertResult(1, 2, 3);
}
@Test
public void mergeDelayError3WithError() {
Flowable.mergeDelayError(
Flowable.just(1),
Flowable.just(2).concatWith(Flowable.<Integer>error(new TestException())),
Flowable.just(3)
)
.test()
.assertFailure(TestException.class, 1, 2, 3);
}
@Test
public void mergeIterableDelayError() {
Flowable.mergeDelayError(Arrays.asList(Flowable.just(1), Flowable.just(2)))
.test()
.assertResult(1, 2);
}
}
| TestASynchronous1sDelayedFlowable |
java | apache__camel | components/camel-zipfile/src/test/java/org/apache/camel/dataformat/zipfile/ZipFileSplitIteratorEmptyTest.java | {
"start": 1112,
"end": 2047
} | class ____ extends CamelTestSupport {
@Test
public void testZipFileUnmarshal() throws Exception {
getMockEndpoint("mock:end").expectedMessageCount(0);
getMockEndpoint("mock:end").setSleepForEmptyTest(1000);
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected RoutesBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
ZipFileDataFormat zf = new ZipFileDataFormat();
zf.setUsingIterator(true);
from("file://src/test/resources?delay=10&fileName=empty.zip&noop=true")
.unmarshal(zf)
.split(bodyAs(Iterator.class)).streaming()
.convertBodyTo(String.class)
.to("mock:end")
.end();
}
};
}
}
| ZipFileSplitIteratorEmptyTest |
java | apache__flink | flink-python/src/main/java/org/apache/beam/runners/fnexecution/control/DefaultJobBundleFactory.java | {
"start": 23489,
"end": 25318
} | class ____ {
private BundleProcessor processor;
private ExecutableProcessBundleDescriptor processBundleDescriptor;
private WrappedSdkHarnessClient wrappedClient;
}
private PreparedClient prepare(
WrappedSdkHarnessClient wrappedClient, ExecutableStage executableStage) {
PreparedClient preparedClient = new PreparedClient();
try {
preparedClient.wrappedClient = wrappedClient;
preparedClient.processBundleDescriptor =
ProcessBundleDescriptors.fromExecutableStage(
stageIdGenerator.getId(),
executableStage,
wrappedClient.getServerInfo().getDataServer().getApiServiceDescriptor(),
wrappedClient
.getServerInfo()
.getStateServer()
.getApiServiceDescriptor());
} catch (IOException e) {
throw new RuntimeException("Failed to create ProcessBundleDescriptor.", e);
}
preparedClient.processor =
wrappedClient
.getClient()
.getProcessor(
preparedClient.processBundleDescriptor.getProcessBundleDescriptor(),
preparedClient.processBundleDescriptor.getRemoteInputDestinations(),
wrappedClient.getServerInfo().getStateServer().getService(),
preparedClient.processBundleDescriptor.getTimerSpecs());
return preparedClient;
}
/**
* A {@link StageBundleFactory} for remotely processing bundles that supports environment
* expiration.
*/
private | PreparedClient |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/UnusedStateRemover.java | {
"start": 7778,
"end": 8522
} | class ____ {
private static final List<Function<String, String>> extractors = Arrays.asList(
ModelState::extractJobId,
Quantiles::extractJobId,
CategorizerState::extractJobId,
Classification::extractJobIdFromStateDoc,
Regression::extractJobIdFromStateDoc,
StoredProgress::extractJobIdFromDocId
);
private static String extractJobId(String docId) {
String jobId;
for (Function<String, String> extractor : extractors) {
jobId = extractor.apply(docId);
if (jobId != null) {
return jobId;
}
}
return null;
}
}
}
| JobIdExtractor |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksRequestTests.java | {
"start": 886,
"end": 2983
} | class ____ extends ESTestCase {
public void testValidation() {
ActionRequestValidationException ex = new ListTasksRequest().setDescriptions("foo*").validate();
assertThat(ex, is(not(nullValue())));
ex = new ListTasksRequest().setDescriptions("foo*").setDetailed(true).validate();
assertThat(ex, is(nullValue()));
}
public void testMatch() {
ListTasksRequest filterOnDescription = new ListTasksRequest().setDescriptions("foo*", "*bar", "absolute").setActions("my_action*");
assertTrue(filterOnDescription.match(taskWithActionDescription("my_action_foo", "foo_action")));
assertTrue(filterOnDescription.match(taskWithActionDescription("my_action_bar", "absolute")));
assertTrue(filterOnDescription.match(taskWithActionDescription("my_action_baz", "action_bar")));
assertFalse(filterOnDescription.match(taskWithActionDescription("my_action_foo", "not_wanted")));
assertFalse(filterOnDescription.match(taskWithActionDescription("not_wanted_action", "foo_action")));
ListTasksRequest notFilterOnDescription = new ListTasksRequest().setActions("my_action*");
assertTrue(notFilterOnDescription.match(taskWithActionDescription("my_action_foo", "foo_action")));
assertTrue(notFilterOnDescription.match(taskWithActionDescription("my_action_bar", "absolute")));
assertTrue(notFilterOnDescription.match(taskWithActionDescription("my_action_baz", "action_bar")));
assertTrue(notFilterOnDescription.match(taskWithActionDescription("my_action_baz", randomAlphaOfLength(10))));
assertFalse(notFilterOnDescription.match(taskWithActionDescription("not_wanted_action", randomAlphaOfLength(10))));
}
private static Task taskWithActionDescription(String action, String description) {
return new Task(
randomNonNegativeLong(),
randomAlphaOfLength(10),
action,
description,
new TaskId("test_node", randomNonNegativeLong()),
Collections.emptyMap()
);
}
}
| ListTasksRequestTests |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/runtime/io/StreamTaskInput.java | {
"start": 1177,
"end": 1581
} | interface ____<T> extends PushingAsyncDataInput<T>, Closeable {
int UNSPECIFIED = -1;
/** Returns the input index of this input. */
int getInputIndex();
/** Prepares to spill the in-flight input buffers as checkpoint snapshot. */
CompletableFuture<Void> prepareSnapshot(
ChannelStateWriter channelStateWriter, long checkpointId) throws CheckpointException;
}
| StreamTaskInput |
java | spring-projects__spring-boot | module/spring-boot-jdbc/src/test/java/org/springframework/boot/jdbc/autoconfigure/health/DataSourceHealthContributorAutoConfigurationTests.java | {
"start": 11965,
"end": 12639
} | class ____ {
@Bean
AbstractRoutingDataSource routingDataSource() throws SQLException {
Map<Object, DataSource> dataSources = new HashMap<>();
dataSources.put("one", mock(DataSource.class));
dataSources.put("two", mock(DataSource.class));
AbstractRoutingDataSource routingDataSource = mock(AbstractRoutingDataSource.class);
given(routingDataSource.isWrapperFor(AbstractRoutingDataSource.class)).willReturn(true);
given(routingDataSource.unwrap(AbstractRoutingDataSource.class)).willReturn(routingDataSource);
given(routingDataSource.getResolvedDataSources()).willReturn(dataSources);
return routingDataSource;
}
}
static | RoutingDataSourceConfig |
java | apache__kafka | examples/src/main/java/kafka/examples/Consumer.java | {
"start": 2108,
"end": 3457
} | class ____ extends Thread implements ConsumerRebalanceListener {
private final String bootstrapServers;
private final String topic;
private final String groupId;
private final Optional<String> instanceId;
private final boolean readCommitted;
private final int numRecords;
private final CountDownLatch latch;
private volatile boolean closed;
private int remainingRecords;
public Consumer(String threadName,
String bootstrapServers,
String topic,
String groupId,
Optional<String> instanceId,
boolean readCommitted,
int numRecords,
CountDownLatch latch) {
super(threadName);
this.bootstrapServers = bootstrapServers;
this.topic = topic;
this.groupId = groupId;
this.instanceId = instanceId;
this.readCommitted = readCommitted;
this.numRecords = numRecords;
this.remainingRecords = numRecords;
this.latch = latch;
}
@Override
public void run() {
// the consumer instance is NOT thread safe
try (KafkaConsumer<Integer, String> consumer = createKafkaConsumer()) {
// subscribes to a list of topics to get dynamically assigned partitions
// this | Consumer |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/query/ComponentQueryTest.java | {
"start": 2059,
"end": 2417
} | class ____ {
@Id
@GeneratedValue
private Integer id;
private String name;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
@Entity(name = "Asset")
@Audited
public static | SymbolType |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpc.java | {
"start": 4817,
"end": 16468
} | class ____ implements TestRpcService2Legacy {
@Override
public TestProtosLegacy.EmptyResponseProto ping2(
com.google.protobuf.RpcController unused,
TestProtosLegacy.EmptyRequestProto request)
throws com.google.protobuf.ServiceException {
return TestProtosLegacy.EmptyResponseProto.newBuilder().build();
}
@Override
public TestProtosLegacy.EchoResponseProto echo2(
com.google.protobuf.RpcController unused,
TestProtosLegacy.EchoRequestProto request)
throws com.google.protobuf.ServiceException {
return TestProtosLegacy.EchoResponseProto.newBuilder()
.setMessage(request.getMessage()).build();
}
@Override
public TestProtosLegacy.SleepResponseProto sleep(
com.google.protobuf.RpcController controller,
TestProtosLegacy.SleepRequestProto request)
throws com.google.protobuf.ServiceException {
try {
Thread.sleep(request.getMilliSeconds());
} catch (InterruptedException ex) {
}
return TestProtosLegacy.SleepResponseProto.newBuilder().build();
}
}
public static Collection<Object[]> params() {
Collection<Object[]> params = new ArrayList<Object[]>();
params.add(new Object[] {Boolean.TRUE, Boolean.TRUE });
params.add(new Object[] {Boolean.TRUE, Boolean.FALSE });
params.add(new Object[] {Boolean.FALSE, Boolean.FALSE });
return params;
}
@SuppressWarnings("deprecation")
public void setUp() throws IOException { // Setup server for both protocols
conf = new Configuration();
conf.setInt(CommonConfigurationKeys.IPC_MAXIMUM_DATA_LENGTH, 1024);
conf.setBoolean(CommonConfigurationKeys.IPC_SERVER_LOG_SLOW_RPC, true);
// Set RPC engine to protobuf RPC engine
if (testWithLegacy) {
RPC.setProtocolEngine(conf, TestRpcService2Legacy.class,
ProtobufRpcEngine.class);
}
RPC.setProtocolEngine(conf, TestRpcService.class, ProtobufRpcEngine2.class);
RPC.setProtocolEngine(conf, TestRpcService2.class,
ProtobufRpcEngine2.class);
// Create server side implementation
PBServerImpl serverImpl = new PBServerImpl();
BlockingService service = TestProtobufRpcProto
.newReflectiveBlockingService(serverImpl);
if (testWithLegacy && testWithLegacyFirst) {
PBServer2ImplLegacy server2ImplLegacy = new PBServer2ImplLegacy();
com.google.protobuf.BlockingService legacyService =
TestRpcServiceProtosLegacy.TestProtobufRpc2Proto
.newReflectiveBlockingService(server2ImplLegacy);
server = new RPC.Builder(conf).setProtocol(TestRpcService2Legacy.class)
.setInstance(legacyService).setBindAddress(ADDRESS).setPort(PORT)
.build();
server.addProtocol(RpcKind.RPC_PROTOCOL_BUFFER, TestRpcService.class,
service);
} else {
// Get RPC server for server side implementation
server = new RPC.Builder(conf).setProtocol(TestRpcService.class)
.setInstance(service).setBindAddress(ADDRESS).setPort(PORT).build();
}
addr = NetUtils.getConnectAddress(server);
// now the second protocol
PBServer2Impl server2Impl = new PBServer2Impl();
BlockingService service2 = TestProtobufRpc2Proto
.newReflectiveBlockingService(server2Impl);
server.addProtocol(RPC.RpcKind.RPC_PROTOCOL_BUFFER, TestRpcService2.class,
service2);
if (testWithLegacy && !testWithLegacyFirst) {
PBServer2ImplLegacy server2ImplLegacy = new PBServer2ImplLegacy();
com.google.protobuf.BlockingService legacyService =
TestRpcServiceProtosLegacy.TestProtobufRpc2Proto
.newReflectiveBlockingService(server2ImplLegacy);
server
.addProtocol(RpcKind.RPC_PROTOCOL_BUFFER, TestRpcService2Legacy.class,
legacyService);
}
server.start();
}
@AfterEach
public void tearDown() throws Exception {
server.stop();
}
private TestRpcService2 getClient2() throws IOException {
return RPC.getProxy(TestRpcService2.class, 0, addr, conf);
}
private TestRpcService2Legacy getClientLegacy() throws IOException {
return RPC.getProxy(TestRpcService2Legacy.class, 0, addr, conf);
}
@ParameterizedTest
@Timeout(value = 5)
@MethodSource("params")
public void testProtoBufRpc(boolean pTestWithLegacy,
boolean pTestWithLegacyFirst) throws Exception {
initTestProtoBufRpc(pTestWithLegacy, pTestWithLegacyFirst);
TestRpcService client = getClient(addr, conf);
testProtoBufRpc(client);
}
// separated test out so that other tests can call it.
public static void testProtoBufRpc(TestRpcService client) throws Exception {
// Test ping method
client.ping(null, newEmptyRequest());
// Test echo method
EchoRequestProto echoRequest = EchoRequestProto.newBuilder()
.setMessage("hello").build();
EchoResponseProto echoResponse = client.echo(null, echoRequest);
assertThat(echoResponse.getMessage()).isEqualTo("hello");
// Test error method - error should be thrown as RemoteException
try {
client.error(null, newEmptyRequest());
fail("Expected exception is not thrown");
} catch (ServiceException e) {
RemoteException re = (RemoteException)e.getCause();
RpcServerException rse = (RpcServerException) re
.unwrapRemoteException(RpcServerException.class);
assertThat(rse).isNotNull();
assertThat(re.getErrorCode())
.isEqualTo(RpcErrorCodeProto.ERROR_RPC_SERVER);
}
}
@ParameterizedTest
// @Timeout(value = 5)
@MethodSource("params")
public void testProtoBufRpc2(boolean pTestWithLegacy,
boolean pTestWithLegacyFirst) throws Exception {
initTestProtoBufRpc(pTestWithLegacy, pTestWithLegacyFirst);
TestRpcService2 client = getClient2();
// Test ping method
client.ping2(null, newEmptyRequest());
// Test echo method
EchoResponseProto echoResponse = client.echo2(null,
newEchoRequest("hello"));
assertThat(echoResponse.getMessage()).isEqualTo("hello");
// Ensure RPC metrics are updated
MetricsRecordBuilder rpcMetrics = getMetrics(server.getRpcMetrics().name());
assertCounterGt("RpcQueueTimeNumOps", 0L, rpcMetrics);
assertCounterGt("RpcProcessingTimeNumOps", 0L, rpcMetrics);
MetricsRecordBuilder rpcDetailedMetrics =
getMetrics(server.getRpcDetailedMetrics().name());
assertCounterGt("Echo2NumOps", 0L, rpcDetailedMetrics);
if (testWithLegacy) {
testProtobufLegacy();
}
}
private void testProtobufLegacy()
throws IOException, com.google.protobuf.ServiceException {
TestRpcService2Legacy client = getClientLegacy();
// Test ping method
client.ping2(null, TestProtosLegacy.EmptyRequestProto.newBuilder().build());
// Test echo method
TestProtosLegacy.EchoResponseProto echoResponse = client.echo2(null,
TestProtosLegacy.EchoRequestProto.newBuilder().setMessage("hello")
.build());
assertThat(echoResponse.getMessage()).isEqualTo("hello");
// Ensure RPC metrics are updated
MetricsRecordBuilder rpcMetrics = getMetrics(server.getRpcMetrics().name());
assertCounterGt("RpcQueueTimeNumOps", 0L, rpcMetrics);
assertCounterGt("RpcProcessingTimeNumOps", 0L, rpcMetrics);
MetricsRecordBuilder rpcDetailedMetrics =
getMetrics(server.getRpcDetailedMetrics().name());
assertCounterGt("Echo2NumOps", 0L, rpcDetailedMetrics);
}
@ParameterizedTest
@Timeout(value = 5)
@MethodSource("params")
public void testProtoBufRandomException(boolean pTestWithLegacy,
boolean pTestWithLegacyFirst) throws Exception {
initTestProtoBufRpc(pTestWithLegacy, pTestWithLegacyFirst);
//No test with legacy
assumeFalse(testWithLegacy);
TestRpcService client = getClient(addr, conf);
try {
client.error2(null, newEmptyRequest());
} catch (ServiceException se) {
assertThat(se.getCause()).isInstanceOf(RemoteException.class);
RemoteException re = (RemoteException) se.getCause();
assertThat(re.getClassName())
.isEqualTo(URISyntaxException.class.getName());
assertThat(re.getMessage()).contains("testException");
assertThat(re.getErrorCode())
.isEqualTo(RpcErrorCodeProto.ERROR_APPLICATION);
}
}
@ParameterizedTest
@Timeout(value = 6)
@MethodSource("params")
public void testExtraLongRpc(boolean pTestWithLegacy,
boolean pTestWithLegacyFirst) throws Exception {
initTestProtoBufRpc(pTestWithLegacy, pTestWithLegacyFirst);
//No test with legacy
assumeFalse(testWithLegacy);
TestRpcService2 client = getClient2();
final String shortString = StringUtils.repeat("X", 4);
// short message goes through
EchoResponseProto echoResponse = client.echo2(null,
newEchoRequest(shortString));
assertThat(echoResponse.getMessage()).isEqualTo(shortString);
final String longString = StringUtils.repeat("X", 4096);
try {
client.echo2(null, newEchoRequest(longString));
fail("expected extra-long RPC to fail");
} catch (ServiceException se) {
// expected
}
}
@ParameterizedTest
@Timeout(value = 12)
@MethodSource("params")
public void testLogSlowRPC(boolean pTestWithLegacy,
boolean pTestWithLegacyFirst) throws IOException, ServiceException,
TimeoutException, InterruptedException {
initTestProtoBufRpc(pTestWithLegacy, pTestWithLegacyFirst);
//No test with legacy
assumeFalse(testWithLegacy);
server.setLogSlowRPCThresholdTime(SLEEP_DURATION);
TestRpcService2 client = getClient2();
// make 10 K fast calls
for (int x = 0; x < 10000; x++) {
try {
client.ping2(null, newEmptyRequest());
} catch (Exception ex) {
throw ex;
}
}
// Ensure RPC metrics are updated
RpcMetrics rpcMetrics = server.getRpcMetrics();
assertThat(rpcMetrics.getProcessingSampleCount()).isGreaterThan(999L);
long before = rpcMetrics.getRpcSlowCalls();
// Sleep sleeps for 500ms(less than `logSlowRPCThresholdTime`),
// make sure we never called into Log slow RPC routine.
client.sleep(null, newSleepRequest(SLEEP_DURATION / 2));
long after = rpcMetrics.getRpcSlowCalls();
assertThat(before).isEqualTo(after);
// Make a really slow call. Sleep sleeps for 3000ms.
client.sleep(null, newSleepRequest(SLEEP_DURATION * 3));
// Ensure slow call is logged.
GenericTestUtils.waitFor(()
-> rpcMetrics.getRpcSlowCalls() == before + 1L, 10, 1000);
}
@ParameterizedTest
@Timeout(value = 12)
@MethodSource("params")
public void testEnsureNoLogIfDisabled(boolean pTestWithLegacy,
boolean pTestWithLegacyFirst) throws IOException, ServiceException {
initTestProtoBufRpc(pTestWithLegacy, pTestWithLegacyFirst);
//No test with legacy
assumeFalse(testWithLegacy);
// disable slow RPC logging
server.setLogSlowRPC(false);
TestRpcService2 client = getClient2();
// make 10 K fast calls
for (int x = 0; x < 10000; x++) {
client.ping2(null, newEmptyRequest());
}
// Ensure RPC metrics are updated
RpcMetrics rpcMetrics = server.getRpcMetrics();
assertThat(rpcMetrics.getProcessingSampleCount()).isGreaterThan(999L);
long before = rpcMetrics.getRpcSlowCalls();
// make a really slow call. Sleep sleeps for 1000ms
client.sleep(null, newSleepRequest(SLEEP_DURATION));
long after = rpcMetrics.getRpcSlowCalls();
// make sure we never called into Log slow RPC routine.
assertThat(before).isEqualTo(after);
}
}
| PBServer2ImplLegacy |
java | apache__avro | lang/java/avro/src/main/java/org/apache/avro/file/XZCodec.java | {
"start": 1263,
"end": 1409
} | class ____ extends Codec {
public final static int DEFAULT_COMPRESSION = 6;
private static final int DEFAULT_BUFFER_SIZE = 8192;
static | XZCodec |
java | google__dagger | hilt-compiler/main/java/dagger/hilt/processor/internal/root/Root.java | {
"start": 2873,
"end": 2994
} | class ____ of the root element. */
ClassName classname() {
return element().getClassName();
}
/** Returns the | name |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/AnnotationConfigApplicationContextTests.java | {
"start": 27430,
"end": 27596
} | class ____ {
@Bean TestBean tb1() {
return new TestBean();
}
@Bean TestBean tb2() {
return new TestBean();
}
}
@Configuration
static | TwoTestBeanConfig |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRChildTask.java | {
"start": 3072,
"end": 3280
} | class ____ checks whether temp directory exists
* and check the value of java.io.tmpdir
* Creates a tempfile and checks whether that is created in
* temp directory specified.
*/
public static | which |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java | {
"start": 127869,
"end": 137107
} | class ____ {
final String directory;
final long usedSpace; // size of space used by HDFS
final long freeSpace; // size of free space excluding reserved space
final long reservedSpace; // size of space reserved for non-HDFS
final long reservedSpaceForReplicas; // size of space reserved RBW or
// re-replication
final long numBlocks;
final StorageType storageType;
VolumeInfo(FsVolumeImpl v, long usedSpace, long freeSpace) {
this.directory = v.toString();
this.usedSpace = usedSpace;
this.freeSpace = freeSpace;
this.reservedSpace = v.getReserved();
this.reservedSpaceForReplicas = v.getReservedForReplicas();
this.numBlocks = v.getNumBlocks();
this.storageType = v.getStorageType();
}
}
private Collection<VolumeInfo> getVolumeInfo() {
Collection<VolumeInfo> info = new ArrayList<VolumeInfo>();
for (FsVolumeImpl volume : volumes.getVolumes()) {
long used = 0;
long free = 0;
try (FsVolumeReference ref = volume.obtainReference()) {
used = volume.getDfsUsed();
free = volume.getAvailable();
} catch (ClosedChannelException e) {
continue;
} catch (IOException e) {
LOG.warn(e.getMessage());
used = 0;
free = 0;
}
info.add(new VolumeInfo(volume, used, free));
}
return info;
}
@Override
public Map<String, Object> getVolumeInfoMap() {
final Map<String, Object> info = new HashMap<String, Object>();
Collection<VolumeInfo> volumes = getVolumeInfo();
for (VolumeInfo v : volumes) {
final Map<String, Object> innerInfo = new HashMap<String, Object>();
innerInfo.put("usedSpace", v.usedSpace);
innerInfo.put("freeSpace", v.freeSpace);
innerInfo.put("reservedSpace", v.reservedSpace);
innerInfo.put("reservedSpaceForReplicas", v.reservedSpaceForReplicas);
innerInfo.put("numBlocks", v.numBlocks);
innerInfo.put("storageType", v.storageType);
info.put(v.directory, innerInfo);
}
return info;
}
@Override //FsDatasetSpi
public void deleteBlockPool(String bpid, boolean force)
throws IOException {
try (AutoCloseableLock lock = lockManager.writeLock(LockLevel.BLOCK_POOl, bpid)) {
List<FsVolumeImpl> curVolumes = volumes.getVolumes();
if (!force) {
for (FsVolumeImpl volume : curVolumes) {
try (FsVolumeReference ref = volume.obtainReference()) {
if (!volume.isBPDirEmpty(bpid)) {
LOG.warn(bpid
+ " has some block files, cannot delete unless forced");
throw new IOException("Cannot delete block pool, "
+ "it contains some block files");
}
} catch (ClosedChannelException e) {
// ignore.
}
}
}
for (FsVolumeImpl volume : curVolumes) {
try (FsVolumeReference ref = volume.obtainReference()) {
volume.deleteBPDirectories(bpid, force);
} catch (ClosedChannelException e) {
// ignore.
}
}
}
}
@Override // FsDatasetSpi
public BlockLocalPathInfo getBlockLocalPathInfo(ExtendedBlock block)
throws IOException {
try (AutoCloseableLock lock = lockManager.readLock(LockLevel.DIR,
block.getBlockPoolId(), getStorageUuidForLock(block),
datasetSubLockStrategy.blockIdToSubLock(block.getBlockId()))) {
final Replica replica = volumeMap.get(block.getBlockPoolId(),
block.getBlockId());
if (replica == null) {
throw new ReplicaNotFoundException(block);
}
synchronized(replica) {
if (replica.getGenerationStamp() < block.getGenerationStamp()) {
throw new IOException(
"Replica generation stamp < block generation stamp, block="
+ block + ", replica=" + replica);
} else if (replica.getGenerationStamp() > block.getGenerationStamp()) {
block.setGenerationStamp(replica.getGenerationStamp());
}
}
}
ReplicaInfo r = getBlockReplica(block);
File blockFile = new File(r.getBlockURI());
File metaFile = new File(r.getMetadataURI());
BlockLocalPathInfo info = new BlockLocalPathInfo(block,
blockFile.getAbsolutePath(), metaFile.toString());
return info;
}
@Override
public void enableTrash(String bpid) {
dataStorage.enableTrash(bpid);
}
@Override
public void clearTrash(String bpid) {
dataStorage.clearTrash(bpid);
}
@Override
public boolean trashEnabled(String bpid) {
return dataStorage.trashEnabled(bpid);
}
@Override
public void setRollingUpgradeMarker(String bpid) throws IOException {
dataStorage.setRollingUpgradeMarker(bpid);
}
@Override
public void clearRollingUpgradeMarker(String bpid) throws IOException {
dataStorage.clearRollingUpgradeMarker(bpid);
}
@Override
public void onCompleteLazyPersist(String bpId, long blockId,
long creationTime, File[] savedFiles, FsVolumeImpl targetVolume) {
try (AutoCloseableLock lock = lockManager.writeLock(LockLevel.BLOCK_POOl, bpId)) {
ramDiskReplicaTracker.recordEndLazyPersist(bpId, blockId, savedFiles);
targetVolume.incDfsUsedAndNumBlocks(bpId, savedFiles[0].length()
+ savedFiles[1].length());
// Update metrics (ignore the metadata file size)
datanode.getMetrics().incrRamDiskBlocksLazyPersisted();
datanode.getMetrics().incrRamDiskBytesLazyPersisted(savedFiles[1].length());
datanode.getMetrics().addRamDiskBlocksLazyPersistWindowMs(
Time.monotonicNow() - creationTime);
if (LOG.isDebugEnabled()) {
LOG.debug("LazyWriter: Finish persisting RamDisk block: "
+ " block pool Id: " + bpId + " block id: " + blockId
+ " to block file " + savedFiles[1] + " and meta file " + savedFiles[0]
+ " on target volume " + targetVolume);
}
}
}
@Override
public void onFailLazyPersist(String bpId, long blockId) {
RamDiskReplica block = null;
block = ramDiskReplicaTracker.getReplica(bpId, blockId);
if (block != null) {
LOG.warn("Failed to save replica " + block + ". re-enqueueing it.");
ramDiskReplicaTracker.reenqueueReplicaNotPersisted(block);
}
}
@Override
public void submitBackgroundSyncFileRangeRequest(ExtendedBlock block,
ReplicaOutputStreams outs, long offset, long nbytes, int flags) {
FsVolumeImpl fsVolumeImpl = this.getVolume(block);
asyncDiskService.submitSyncFileRangeRequest(fsVolumeImpl, outs, offset,
nbytes, flags);
}
private boolean ramDiskConfigured() {
for (FsVolumeImpl v: volumes.getVolumes()){
if (v.isTransientStorage()) {
return true;
}
}
return false;
}
// Add/Remove per DISK volume async lazy persist thread when RamDisk volume is
// added or removed.
// This should only be called when the FsDataSetImpl#volumes list is finalized.
private void setupAsyncLazyPersistThreads() {
for (FsVolumeImpl v: volumes.getVolumes()){
setupAsyncLazyPersistThread(v);
}
}
private void setupAsyncLazyPersistThread(final FsVolumeImpl v) {
// Skip transient volumes
if (v.isTransientStorage()) {
return;
}
boolean ramDiskConfigured = ramDiskConfigured();
// Add thread for DISK volume if RamDisk is configured
if (ramDiskConfigured &&
asyncLazyPersistService != null &&
!asyncLazyPersistService.queryVolume(v)) {
asyncLazyPersistService.addVolume(v);
}
// Remove thread for DISK volume if RamDisk is not configured
if (!ramDiskConfigured &&
asyncLazyPersistService != null &&
asyncLazyPersistService.queryVolume(v)) {
asyncLazyPersistService.removeVolume(v);
}
}
/**
* Cleanup the old replica and notifies the NN about new replica.
*
* @param replicaInfo - Old replica to be deleted
* @param newReplicaInfo - New replica object
* @param bpid - block pool id
*/
private void removeOldReplica(ReplicaInfo replicaInfo,
ReplicaInfo newReplicaInfo, final String bpid) {
// Before deleting the files from old storage we must notify the
// NN that the files are on the new storage. Else a blockReport from
// the transient storage might cause the NN to think the blocks are lost.
// Replicas must be evicted from client short-circuit caches, because the
// storage will no longer be same, and thus will require validating
// checksum. This also stops a client from holding file descriptors,
// which would prevent the OS from reclaiming the memory.
ExtendedBlock extendedBlock =
new ExtendedBlock(bpid, newReplicaInfo);
datanode.getShortCircuitRegistry().processBlockInvalidation(
ExtendedBlockId.fromExtendedBlock(extendedBlock));
datanode.notifyNamenodeReceivedBlock(
extendedBlock, null, newReplicaInfo.getStorageUuid(),
newReplicaInfo.isOnTransientStorage());
// Remove the old replicas
cleanupReplica(bpid, replicaInfo);
// If deletion failed then the directory scanner will cleanup the blocks
// eventually.
}
| VolumeInfo |
java | apache__thrift | lib/javame/src/org/apache/thrift/protocol/TBase64Utils.java | {
"start": 1409,
"end": 5360
} | class ____ {
private static final String ENCODE_TABLE =
"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
/**
* Encode len bytes of data in src at offset srcOff, storing the result into
* dst at offset dstOff. len must be 1, 2, or 3. dst must have at least len+1
* bytes of space at dstOff. src and dst should not be the same object. This
* method does no validation of the input values in the interest of
* performance.
*
* @param src the source of bytes to encode
* @param srcOff the offset into the source to read the unencoded bytes
* @param len the number of bytes to encode (must be 1, 2, or 3).
* @param dst the destination for the encoding
* @param dstOff the offset into the destination to place the encoded bytes
*/
static final void encode(byte[] src, int srcOff, int len, byte[] dst,
int dstOff) {
dst[dstOff] = (byte)ENCODE_TABLE.charAt((src[srcOff] >> 2) & 0x3F);
if (len == 3) {
dst[dstOff + 1] =
(byte)ENCODE_TABLE.charAt(
((src[srcOff] << 4) & 0x30) | ((src[srcOff+1] >> 4) & 0x0F));
dst[dstOff + 2] =
(byte)ENCODE_TABLE.charAt(
((src[srcOff+1] << 2) & 0x3C) | ((src[srcOff+2] >> 6) & 0x03));
dst[dstOff + 3] =
(byte)ENCODE_TABLE.charAt(src[srcOff+2] & 0x3F);
}
else if (len == 2) {
dst[dstOff+1] =
(byte)ENCODE_TABLE.charAt(
((src[srcOff] << 4) & 0x30) | ((src[srcOff+1] >> 4) & 0x0F));
dst[dstOff + 2] =
(byte)ENCODE_TABLE.charAt((src[srcOff+1] << 2) & 0x3C);
}
else { // len == 1) {
dst[dstOff + 1] =
(byte)ENCODE_TABLE.charAt((src[srcOff] << 4) & 0x30);
}
}
private static final byte[] DECODE_TABLE = {
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,62,-1,-1,-1,63,
52,53,54,55,56,57,58,59,60,61,-1,-1,-1,-1,-1,-1,
-1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,
15,16,17,18,19,20,21,22,23,24,25,-1,-1,-1,-1,-1,
-1,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,
41,42,43,44,45,46,47,48,49,50,51,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,
};
/**
* Decode len bytes of data in src at offset srcOff, storing the result into
* dst at offset dstOff. len must be 2, 3, or 4. dst must have at least len-1
* bytes of space at dstOff. src and dst may be the same object as long as
* dstoff <= srcOff. This method does no validation of the input values in
* the interest of performance.
*
* @param src the source of bytes to decode
* @param srcOff the offset into the source to read the encoded bytes
* @param len the number of bytes to decode (must be 2, 3, or 4)
* @param dst the destination for the decoding
* @param dstOff the offset into the destination to place the decoded bytes
*/
static final void decode(byte[] src, int srcOff, int len, byte[] dst,
int dstOff) {
dst[dstOff] = (byte)
((DECODE_TABLE[src[srcOff] & 0x0FF] << 2) |
(DECODE_TABLE[src[srcOff+1] & 0x0FF] >> 4));
if (len > 2) {
dst[dstOff+1] = (byte)
(((DECODE_TABLE[src[srcOff+1] & 0x0FF] << 4) & 0xF0) |
(DECODE_TABLE[src[srcOff+2] & 0x0FF] >> 2));
if (len > 3) {
dst[dstOff+2] = (byte)
(((DECODE_TABLE[src[srcOff+2] & 0x0FF] << 6) & 0xC0) |
DECODE_TABLE[src[srcOff+3] & 0x0FF]);
}
}
}
}
| TBase64Utils |
java | apache__hadoop | hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/commit/ITestAbfsRenameStageFailure.java | {
"start": 1544,
"end": 2751
} | class ____ extends TestRenameStageFailure {
/**
* How many files to create.
*/
private static final int FILES_TO_CREATE = 20;
private final ABFSContractTestBinding binding;
public ITestAbfsRenameStageFailure() throws Exception {
binding = new ABFSContractTestBinding();
}
protected boolean isNamespaceEnabled() throws AzureBlobFileSystemException {
AzureBlobFileSystem fs = (AzureBlobFileSystem) getFileSystem();
return fs.getAbfsStore().getIsNamespaceEnabled(AbstractAbfsIntegrationTest.getSampleTracingContext(fs, false));
}
@BeforeEach
@Override
public void setup() throws Exception {
binding.setup();
super.setup();
}
@Override
protected Configuration createConfiguration() {
return AbfsCommitTestHelper.prepareTestConfiguration(binding);
}
@Override
protected AbstractFSContract createContract(final Configuration conf) {
return new AbfsFileSystemContract(conf, binding.isSecureMode());
}
@Override
protected boolean requireRenameResilience() throws AzureBlobFileSystemException {
return isNamespaceEnabled();
}
@Override
protected int filesToCreate() {
return FILES_TO_CREATE;
}
}
| ITestAbfsRenameStageFailure |
java | apache__logging-log4j2 | log4j-core-test/src/test/java/org/apache/logging/log4j/core/config/plugins/validation/ValidatingPlugin.java | {
"start": 1894,
"end": 2357
} | class ____ implements org.apache.logging.log4j.core.util.Builder<ValidatingPlugin> {
@PluginBuilderAttribute
@Required(message = "The name given by the builder is null")
private String name;
public Builder setName(final String name) {
this.name = name;
return this;
}
@Override
public ValidatingPlugin build() {
return new ValidatingPlugin(name);
}
}
}
| Builder |
java | redisson__redisson | redisson/src/test/java/org/redisson/RedissonSetReactiveTest.java | {
"start": 479,
"end": 9026
} | class ____ implements Serializable {
private Long lng;
public Long getLng() {
return lng;
}
public void setLng(Long lng) {
this.lng = lng;
}
}
@Test
public void testAddAllReactive() {
RSetReactive<Integer> list = redisson.getSet("set");
sync(list.add(1));
sync(list.add(2));
sync(list.add(3));
sync(list.add(4));
sync(list.add(5));
RSetReactive<Integer> list2 = redisson.getSet("set2");
Assertions.assertEquals(true, sync(list2.addAll(list.iterator())));
Assertions.assertEquals(5, sync(list2.size()).intValue());
}
@Test
public void testRemoveRandom() {
RSetReactive<Integer> set = redisson.getSet("simple");
sync(set.add(1));
sync(set.add(2));
sync(set.add(3));
assertThat(sync(set.removeRandom())).isIn(1, 2, 3);
assertThat(sync(set.removeRandom())).isIn(1, 2, 3);
assertThat(sync(set.removeRandom())).isIn(1, 2, 3);
Assertions.assertNull(sync(set.removeRandom()));
}
@Test
public void testRandom() {
RSetReactive<Integer> set = redisson.getSet("simple");
sync(set.add(1));
sync(set.add(2));
sync(set.add(3));
assertThat(sync(set.random())).isIn(1, 2, 3);
assertThat(sync(set.random())).isIn(1, 2, 3);
assertThat(sync(set.random())).isIn(1, 2, 3);
assertThat(sync(set)).containsExactlyInAnyOrder(1, 2, 3);
}
@Test
public void testAddBean() throws InterruptedException, ExecutionException {
SimpleBean sb = new SimpleBean();
sb.setLng(1L);
RSetReactive<SimpleBean> set = redisson.getSet("simple");
sync(set.add(sb));
Assertions.assertEquals(sb.getLng(), toIterator(set.iterator()).next().getLng());
}
@Test
public void testAddLong() throws InterruptedException, ExecutionException {
Long sb = 1l;
RSetReactive<Long> set = redisson.getSet("simple_longs");
sync(set.add(sb));
for (Long l : sync(set)) {
Assertions.assertEquals(sb.getClass(), l.getClass());
}
}
@Test
public void testRemove() throws InterruptedException, ExecutionException {
RSetReactive<Integer> set = redisson.getSet("simple");
sync(set.add(1));
sync(set.add(3));
sync(set.add(7));
Assertions.assertTrue(sync(set.remove(1)));
Assertions.assertFalse(sync(set.contains(1)));
assertThat(sync(set)).containsExactlyInAnyOrder(3, 7);
Assertions.assertFalse(sync(set.remove(1)));
assertThat(sync(set)).containsExactlyInAnyOrder(3, 7);
sync(set.remove(3));
Assertions.assertFalse(sync(set.contains(3)));
assertThat(sync(set)).containsExactlyInAnyOrder(7);
}
@Test
public void testIteratorSequence() {
RSetReactive<Long> set = redisson.getSet("set");
for (int i = 0; i < 1000; i++) {
sync(set.add(Long.valueOf(i)));
}
Set<Long> setCopy = new HashSet<Long>();
for (int i = 0; i < 1000; i++) {
setCopy.add(Long.valueOf(i));
}
checkIterator(set, setCopy);
}
private void checkIterator(RSetReactive<Long> set, Set<Long> setCopy) {
for (Iterator<Long> iterator = toIterator(set.iterator()); iterator.hasNext(); ) {
Long value = iterator.next();
if (!setCopy.remove(value)) {
Assertions.fail();
}
}
Assertions.assertEquals(0, setCopy.size());
}
@Test
public void testLong() {
RSetReactive<Long> set = redisson.getSet("set");
sync(set.add(1L));
sync(set.add(2L));
assertThat(sync(set)).containsOnly(1L, 2L);
}
@Test
public void testRetainAll() {
RSetReactive<Integer> set = redisson.getSet("set");
for (int i = 0; i < 20000; i++) {
sync(set.add(i));
}
Assertions.assertTrue(sync(set.retainAll(Arrays.asList(1, 2))));
assertThat(sync(set)).containsExactlyInAnyOrder(1, 2);
Assertions.assertEquals(2, sync(set.size()).intValue());
}
@Test
public void testContainsAll() {
RSetReactive<Integer> set = redisson.getSet("set");
for (int i = 0; i < 200; i++) {
sync(set.add(i));
}
Assertions.assertTrue(sync(set.containsAll(Collections.emptyList())));
Assertions.assertTrue(sync(set.containsAll(Arrays.asList(30, 11))));
Assertions.assertFalse(sync(set.containsAll(Arrays.asList(30, 711, 11))));
}
@Test
public void testContains() {
RSetReactive<TestObject> set = redisson.getSet("set");
sync(set.add(new TestObject("1", "2")));
sync(set.add(new TestObject("1", "2")));
sync(set.add(new TestObject("2", "3")));
sync(set.add(new TestObject("3", "4")));
sync(set.add(new TestObject("5", "6")));
Assertions.assertTrue(sync(set.contains(new TestObject("2", "3"))));
Assertions.assertTrue(sync(set.contains(new TestObject("1", "2"))));
Assertions.assertFalse(sync(set.contains(new TestObject("1", "9"))));
}
@Test
public void testDuplicates() {
RSetReactive<TestObject> set = redisson.getSet("set");
sync(set.add(new TestObject("1", "2")));
sync(set.add(new TestObject("1", "2")));
sync(set.add(new TestObject("2", "3")));
sync(set.add(new TestObject("3", "4")));
sync(set.add(new TestObject("5", "6")));
Assertions.assertEquals(4, sync(set.size()).intValue());
}
@Test
public void testSize() {
RSetReactive<Integer> set = redisson.getSet("set");
sync(set.add(1));
sync(set.add(2));
sync(set.add(3));
sync(set.add(3));
sync(set.add(4));
sync(set.add(5));
sync(set.add(5));
Assertions.assertEquals(5, sync(set.size()).intValue());
}
@Test
public void testRetainAllEmpty() {
RSetReactive<Integer> set = redisson.getSet("set");
sync(set.add(1));
sync(set.add(2));
sync(set.add(3));
sync(set.add(4));
sync(set.add(5));
Assertions.assertTrue(sync(set.retainAll(Collections.<Integer>emptyList())));
Assertions.assertEquals(0, sync(set.size()).intValue());
}
@Test
public void testRetainAllNoModify() {
RSetReactive<Integer> set = redisson.getSet("set");
sync(set.add(1));
sync(set.add(2));
Assertions.assertFalse(sync(set.retainAll(Arrays.asList(1, 2)))); // nothing changed
assertThat(sync(set)).containsExactly(1, 2);
}
@Test
public void testMove() throws Exception {
RSetReactive<Integer> set = redisson.getSet("set");
RSetReactive<Integer> otherSet = redisson.getSet("otherSet");
sync(set.add(1));
sync(set.add(2));
Assertions.assertTrue(sync(set.move("otherSet", 1)));
Assertions.assertEquals(1, sync(set.size()).intValue());
assertThat(sync(set)).containsExactly(2);
Assertions.assertEquals(1, sync(otherSet.size()).intValue());
assertThat(sync(otherSet)).containsExactly(1);
}
@Test
public void testMoveNoMember() throws Exception {
RSetReactive<Integer> set = redisson.getSet("set");
RSetReactive<Integer> otherSet = redisson.getSet("otherSet");
sync(set.add(1));
Assertions.assertFalse(sync(set.move("otherSet", 2)));
Assertions.assertEquals(1, sync(set.size()).intValue());
Assertions.assertEquals(0, sync(otherSet.size()).intValue());
}
@Test
public void testIntersection() {
final String firstSetName = "firstSet";
RSetReactive<Integer> firstSet = redisson.getSet(firstSetName);
sync(firstSet.add(1));
sync(firstSet.add(2));
sync(firstSet.add(3));
final String secondSetName = "secondSet";
RSetReactive<Integer> secondSet = redisson.getSet(secondSetName);
sync(secondSet.add(3));
sync(secondSet.add(4));
sync(secondSet.add(1));
final RSetReactive<Object> tmp = redisson.getSet("tmp");
final Integer count = sync(tmp.intersection(firstSetName, secondSetName));
Assertions.assertEquals(2, count.intValue());
Assertions.assertTrue(sync(tmp.contains(1)));
Assertions.assertTrue(sync(tmp.contains(3)));
}
}
| SimpleBean |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/AnnotationPositionTest.java | {
"start": 18422,
"end": 18755
} | class ____ {
/** Javadoc! */
@NonTypeUse int x;
}
""")
.doTest(TEXT_MATCH);
}
@Test
public void variable_genericType_modifiers() {
refactoringHelper
.addInputLines(
"Test.java",
"""
import java.util.List;
| Test |
java | google__guava | android/guava/src/com/google/common/collect/LinkedListMultimap.java | {
"start": 22548,
"end": 23727
} | class ____ extends Sets.ImprovedAbstractSet<K> {
@Override
public int size() {
return keyToKeyList.size();
}
@Override
public Iterator<K> iterator() {
return new DistinctKeyIterator();
}
@Override
public boolean contains(@Nullable Object key) { // for performance
return containsKey(key);
}
@Override
public boolean remove(@Nullable Object o) { // for performance
return !LinkedListMultimap.this.removeAll(o).isEmpty();
}
}
return new KeySetImpl();
}
@Override
Multiset<K> createKeys() {
return new Multimaps.Keys<K, V>(this);
}
/**
* {@inheritDoc}
*
* <p>The iterator generated by the returned collection traverses the values in the order they
* were added to the multimap. Because the values may have duplicates and follow the insertion
* ordering, this method returns a {@link List}, instead of the {@link Collection} specified in
* the {@link ListMultimap} interface.
*/
@Override
public List<V> values() {
return (List<V>) super.values();
}
@Override
List<V> createValues() {
@WeakOuter
final | KeySetImpl |
java | FasterXML__jackson-core | src/test/java/tools/jackson/core/unittest/jsonptr/JsonPointerTest.java | {
"start": 211,
"end": 10336
} | class ____ extends JacksonCoreTestBase
{
private final JsonPointer EMPTY_PTR = JsonPointer.empty();
@Test
void simplePath() throws Exception
{
final String INPUT = "/Image/15/name";
JsonPointer ptr = JsonPointer.compile(INPUT);
assertFalse(ptr.matches());
assertEquals(-1, ptr.getMatchingIndex());
assertEquals("Image", ptr.getMatchingProperty());
assertEquals("/Image/15", ptr.head().toString());
assertEquals(INPUT, ptr.toString());
ptr = ptr.tail();
assertNotNull(ptr);
assertFalse(ptr.matches());
assertEquals(15, ptr.getMatchingIndex());
assertEquals("15", ptr.getMatchingProperty());
assertEquals("/15/name", ptr.toString());
assertEquals("/15", ptr.head().toString());
assertEquals("", ptr.head().head().toString());
assertNull(ptr.head().head().head());
ptr = ptr.tail();
assertNotNull(ptr);
assertFalse(ptr.matches());
assertEquals(-1, ptr.getMatchingIndex());
assertEquals("name", ptr.getMatchingProperty());
assertEquals("/name", ptr.toString());
assertEquals("", ptr.head().toString());
assertSame(EMPTY_PTR, ptr.head());
// done!
ptr = ptr.tail();
assertTrue(ptr.matches());
assertNull(ptr.tail());
assertNull(ptr.head());
assertNull(ptr.getMatchingProperty());
assertEquals(-1, ptr.getMatchingIndex());
}
@Test
void simplePathLonger() throws Exception
{
final String INPUT = "/a/b/c/d/e/f/0";
JsonPointer ptr = JsonPointer.compile(INPUT);
assertFalse(ptr.matches());
assertEquals(-1, ptr.getMatchingIndex());
assertEquals("a", ptr.getMatchingProperty());
assertEquals("/a/b/c/d/e/f", ptr.head().toString());
assertEquals("/b/c/d/e/f/0", ptr.tail().toString());
assertEquals("/0", ptr.last().toString());
assertEquals(INPUT, ptr.toString());
}
@Test
void simpleTail() throws Exception
{
final String INPUT = "/root/leaf";
JsonPointer ptr = JsonPointer.compile(INPUT);
assertEquals("/leaf", ptr.tail().toString());
assertEquals("", ptr.tail().tail().toString());
}
@Test
void wonkyNumber173() throws Exception
{
JsonPointer ptr = JsonPointer.compile("/1e0");
assertFalse(ptr.matches());
}
// [core#176]: do not allow leading zeroes
@Test
void iZeroIndex() throws Exception
{
JsonPointer ptr = JsonPointer.compile("/0");
assertEquals(0, ptr.getMatchingIndex());
ptr = JsonPointer.compile("/00");
assertEquals(-1, ptr.getMatchingIndex());
}
@Test
void last()
{
String INPUT = "/Image/name";
JsonPointer ptr = JsonPointer.compile(INPUT);
JsonPointer leaf = ptr.last();
assertEquals("/name", leaf.toString());
assertEquals("name", leaf.getMatchingProperty());
INPUT = "/Image/15/name";
ptr = JsonPointer.compile(INPUT);
leaf = ptr.last();
assertEquals("/name", leaf.toString());
assertEquals("name", leaf.getMatchingProperty());
}
@Test
void emptyPointer()
{
assertSame(EMPTY_PTR, JsonPointer.compile(""));
assertEquals("", EMPTY_PTR.toString());
// As per [core#788], should NOT match Property with "empty String"
assertFalse(EMPTY_PTR.mayMatchProperty());
assertFalse(EMPTY_PTR.mayMatchElement());
assertEquals(-1, EMPTY_PTR.getMatchingIndex());
assertNull(EMPTY_PTR.getMatchingProperty());
}
@Test
void pointerWithEmptyPropertyName()
{
// note: this is acceptable, to match property in '{"":3}', for example
// and NOT same as what empty point, "", is.
JsonPointer ptr = JsonPointer.compile("/");
assertNotNull(ptr);
assertNotSame(EMPTY_PTR, ptr);
assertEquals("/", ptr.toString());
assertTrue(ptr.mayMatchProperty());
assertFalse(ptr.mayMatchElement());
assertEquals(-1, ptr.getMatchingIndex());
assertEquals("", ptr.getMatchingProperty());
assertTrue(ptr.matchesProperty(""));
assertFalse(ptr.matchesElement(0));
assertFalse(ptr.matchesElement(-1));
assertFalse(ptr.matchesProperty("1"));
}
// mostly for test coverage, really...
@Test
void equality() {
assertNotEquals(JsonPointer.empty(), JsonPointer.compile("/"));
assertEquals(JsonPointer.compile("/foo/3"), JsonPointer.compile("/foo/3"));
assertNotEquals(JsonPointer.empty(), JsonPointer.compile("/12"));
assertNotEquals(JsonPointer.compile("/12"), JsonPointer.empty());
assertEquals(JsonPointer.compile("/a/b/c").tail(),
JsonPointer.compile("/foo/b/c").tail());
JsonPointer abcDef = JsonPointer.compile("/abc/def");
JsonPointer def = JsonPointer.compile("/def");
assertEquals(abcDef.tail(), def);
assertEquals(def, abcDef.tail());
// expr != String
assertNotEquals("/", JsonPointer.empty());
}
@Test
void properties() {
assertTrue(JsonPointer.compile("/foo").mayMatchProperty());
assertFalse(JsonPointer.compile("/foo").mayMatchElement());
assertTrue(JsonPointer.compile("/12").mayMatchElement());
// Interestingly enough, since Json Pointer is just String, could
// ALSO match property with name "12"
assertTrue(JsonPointer.compile("/12").mayMatchProperty());
}
@Test
void append()
{
final String INPUT = "/Image/15/name";
final String APPEND = "/extension";
JsonPointer ptr = JsonPointer.compile(INPUT);
JsonPointer apd = JsonPointer.compile(APPEND);
JsonPointer appended = ptr.append(apd);
assertEquals("extension", appended.last().getMatchingProperty());
assertEquals("/Image/15/name/extension", appended.toString());
}
@Test
void appendWithFinalSlash()
{
final String INPUT = "/Image/15/name/";
final String APPEND = "/extension";
JsonPointer ptr = JsonPointer.compile(INPUT);
// 14-Dec-2023, tatu: Not sure WTH was slash being removed for...
assertEquals("/Image/15/name/", ptr.toString());
JsonPointer apd = JsonPointer.compile(APPEND);
JsonPointer appended = ptr.append(apd);
assertEquals("extension", appended.last().getMatchingProperty());
assertEquals("/Image/15/name//extension", appended.toString());
}
@Test
void appendProperty()
{
final String INPUT = "/Image/15/name";
final String APPEND_NO_SLASH = "extension";
final String APPEND_WITH_SLASH = "/extension~";
JsonPointer ptr = JsonPointer.compile(INPUT);
JsonPointer appendedNoSlash = ptr.appendProperty(APPEND_NO_SLASH);
JsonPointer appendedWithSlash = ptr.appendProperty(APPEND_WITH_SLASH);
assertEquals(APPEND_NO_SLASH, appendedNoSlash.last().getMatchingProperty());
assertEquals("/Image/15/name/extension", appendedNoSlash.toString());
assertEquals(APPEND_WITH_SLASH, appendedWithSlash.last().getMatchingProperty());
assertEquals("/Image/15/name/~1extension~0", appendedWithSlash.toString());
}
// [core#1145]: Escape property
@Test
void appendPropertyEmpty()
{
final String BASE = "/Image/72/src";
JsonPointer basePtr = JsonPointer.compile(BASE);
// Same as before 2.17
assertSame(basePtr, basePtr.appendProperty(null));
// but this is different:
JsonPointer sub = basePtr.appendProperty("");
assertNotSame(basePtr, sub);
assertEquals(BASE+"/", sub.toString());
}
@Test
void appendIndex()
{
final String INPUT = "/Image/15/name";
final int INDEX = 12;
JsonPointer ptr = JsonPointer.compile(INPUT);
JsonPointer appended = ptr.appendIndex(INDEX);
assertEquals(12, appended.last().getMatchingIndex());
}
@Test
void quotedPath() throws Exception
{
final String INPUT = "/w~1out/til~0de/~1ab";
JsonPointer ptr = JsonPointer.compile(INPUT);
assertFalse(ptr.matches());
assertEquals(-1, ptr.getMatchingIndex());
assertEquals("w/out", ptr.getMatchingProperty());
assertEquals("/w~1out/til~0de", ptr.head().toString());
assertEquals(INPUT, ptr.toString());
ptr = ptr.tail();
assertNotNull(ptr);
assertFalse(ptr.matches());
assertEquals(-1, ptr.getMatchingIndex());
assertEquals("til~de", ptr.getMatchingProperty());
assertEquals("/til~0de", ptr.head().toString());
assertEquals("/til~0de/~1ab", ptr.toString());
ptr = ptr.tail();
assertNotNull(ptr);
assertFalse(ptr.matches());
assertEquals(-1, ptr.getMatchingIndex());
assertEquals("/ab", ptr.getMatchingProperty());
assertEquals("/~1ab", ptr.toString());
assertEquals("", ptr.head().toString());
// done!
ptr = ptr.tail();
assertTrue(ptr.matches());
assertNull(ptr.tail());
}
// [core#133]
@Test
void longNumbers() throws Exception
{
final long LONG_ID = (Integer.MAX_VALUE) + 1L;
final String INPUT = "/User/"+LONG_ID;
JsonPointer ptr = JsonPointer.compile(INPUT);
assertEquals("User", ptr.getMatchingProperty());
assertEquals(INPUT, ptr.toString());
ptr = ptr.tail();
assertNotNull(ptr);
assertFalse(ptr.matches());
/* 14-Mar-2014, tatu: We do not support array indexes beyond 32-bit
* range; can still match textually of course.
*/
assertEquals(-1, ptr.getMatchingIndex());
assertEquals(String.valueOf(LONG_ID), ptr.getMatchingProperty());
// done!
ptr = ptr.tail();
assertTrue(ptr.matches());
assertNull(ptr.tail());
}
}
| JsonPointerTest |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TimeSyncConfig.java | {
"start": 1184,
"end": 5091
} | class ____ implements SyncConfig {
public static final TimeValue DEFAULT_DELAY = TimeValue.timeValueSeconds(60);
private static final String NAME = "data_frame_transform_pivot_sync_time";
private final String field;
private final TimeValue delay;
private static final ConstructingObjectParser<TimeSyncConfig, Void> STRICT_PARSER = createParser(false);
private static final ConstructingObjectParser<TimeSyncConfig, Void> LENIENT_PARSER = createParser(true);
private static ConstructingObjectParser<TimeSyncConfig, Void> createParser(boolean lenient) {
ConstructingObjectParser<TimeSyncConfig, Void> parser = new ConstructingObjectParser<>(NAME, lenient, args -> {
String field = (String) args[0];
TimeValue delay = (TimeValue) args[1];
return new TimeSyncConfig(field, delay);
});
parser.declareString(constructorArg(), TransformField.FIELD);
parser.declareField(
optionalConstructorArg(),
(p, c) -> TimeValue.parseTimeValue(p.text(), DEFAULT_DELAY, TransformField.DELAY.getPreferredName()),
TransformField.DELAY,
ObjectParser.ValueType.STRING
);
return parser;
}
public TimeSyncConfig() {
this(null, null);
}
public TimeSyncConfig(final String field, final TimeValue delay) {
this.field = ExceptionsHelper.requireNonNull(field, TransformField.FIELD.getPreferredName());
this.delay = delay == null ? DEFAULT_DELAY : delay;
}
public TimeSyncConfig(StreamInput in) throws IOException {
this.field = in.readString();
this.delay = in.readTimeValue();
}
@Override
public String getField() {
return field;
}
public TimeValue getDelay() {
return delay;
}
@Override
public void writeTo(final StreamOutput out) throws IOException {
out.writeString(field);
out.writeTimeValue(delay);
}
@Override
public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException {
builder.startObject();
builder.field(TransformField.FIELD.getPreferredName(), field);
builder.field(TransformField.DELAY.getPreferredName(), delay.getStringRep());
builder.endObject();
return builder;
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
final TimeSyncConfig that = (TimeSyncConfig) other;
return Objects.equals(this.field, that.field) && Objects.equals(this.delay, that.delay);
}
@Override
public int hashCode() {
return Objects.hash(field, delay);
}
@Override
public String toString() {
return Strings.toString(this, true, true);
}
public static TimeSyncConfig parse(final XContentParser parser) {
return LENIENT_PARSER.apply(parser, null);
}
public static TimeSyncConfig fromXContent(final XContentParser parser, boolean lenient) throws IOException {
return lenient ? LENIENT_PARSER.apply(parser, null) : STRICT_PARSER.apply(parser, null);
}
@Override
public String getWriteableName() {
return TransformField.TIME.getPreferredName();
}
@Override
public QueryBuilder getRangeQuery(TransformCheckpoint newCheckpoint) {
return new RangeQueryBuilder(field).lt(newCheckpoint.getTimeUpperBound()).format("epoch_millis");
}
@Override
public QueryBuilder getRangeQuery(TransformCheckpoint oldCheckpoint, TransformCheckpoint newCheckpoint) {
return new RangeQueryBuilder(field).gte(oldCheckpoint.getTimeUpperBound())
.lt(newCheckpoint.getTimeUpperBound())
.format("epoch_millis");
}
}
| TimeSyncConfig |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/ErrorHandler.java | {
"start": 949,
"end": 1234
} | interface ____ simplified, then error handling could just be done by wrapping
* a nested appender. (RG) Please look at DefaultErrorHandler. It's purpose is to make sure the console
* or error log isn't flooded with messages. I'm still considering the Appender refactoring.
*/
public | is |
java | google__auto | factory/src/test/resources/good/OnlyPrimitives.java | {
"start": 701,
"end": 761
} | class ____ {
OnlyPrimitives(int i, long l) {}
}
| OnlyPrimitives |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestGenericJournalConf.java | {
"start": 7155,
"end": 7307
} | class ____ extends DummyJournalManager {
public BadConstructorJournalManager() {
super(null, null, null);
}
}
}
| BadConstructorJournalManager |
java | spring-projects__spring-framework | spring-websocket/src/test/java/org/springframework/web/socket/config/annotation/WebSocketMessageBrokerConfigurationSupportTests.java | {
"start": 15485,
"end": 16131
} | class ____ extends WebSocketMessageBrokerConfigurationSupport {
@Override
protected void registerStompEndpoints(StompEndpointRegistry registry) {
registry.addEndpoint("/test");
}
@Override
protected void configureWebSocketTransport(WebSocketTransportRegistration registry) {
registry.addDecoratorFactory(handler -> new WebSocketHandlerDecorator(handler) {
@Override
public void afterConnectionEstablished(WebSocketSession session) throws Exception {
session.getAttributes().put("decorated", true);
super.afterConnectionEstablished(session);
}
});
}
}
private static | WebSocketHandlerDecoratorConfig |
java | spring-projects__spring-boot | module/spring-boot-jdbc/src/test/java/org/springframework/boot/jdbc/autoconfigure/DataSourceJmxConfigurationTests.java | {
"start": 8530,
"end": 8934
} | class ____ {
@Bean
static DataSourceBeanPostProcessor dataSourceBeanPostProcessor() {
return new DataSourceBeanPostProcessor() {
@Override
public Object postProcessAfterInitialization(Object bean, String beanName) {
return (bean instanceof javax.sql.DataSource)
? new DelegatingDataSource((javax.sql.DataSource) bean) : bean;
}
};
}
}
}
| DataSourceDelegateConfiguration |
java | quarkusio__quarkus | extensions/panache/hibernate-orm-panache/runtime/src/main/java/io/quarkus/hibernate/orm/panache/PanacheRepositoryBase.java | {
"start": 1253,
"end": 1577
} | class ____ extra operations (eg. CriteriaQueries)
*
* @return the {@link EntityManager} for the <Entity> entity class
*/
@GenerateBridge
default EntityManager getEntityManager() {
throw implementationInjectionMissing();
}
/**
* Returns the {@link Session} for the <Entity> entity | for |
java | apache__flink | flink-core/src/test/java/org/apache/flink/api/java/typeutils/runtime/AbstractGenericTypeComparatorTest.java | {
"start": 11201,
"end": 12704
} | class ____ implements Comparable<BookAuthor> {
private long authorId;
private List<String> bookTitles;
private String authorName;
public BookAuthor() {}
public BookAuthor(long authorId, List<String> bookTitles, String authorName) {
this.authorId = authorId;
this.bookTitles = bookTitles;
this.authorName = authorName;
}
@Override
public boolean equals(Object obj) {
if (obj.getClass() == BookAuthor.class) {
BookAuthor other = (BookAuthor) obj;
return other.authorName.equals(this.authorName)
&& other.authorId == this.authorId
&& other.bookTitles.equals(this.bookTitles);
} else {
return false;
}
}
@Override
public int compareTo(BookAuthor o) {
int cmp = (this.authorId < o.authorId ? -1 : (this.authorId == o.authorId ? 0 : 1));
if (cmp != 0) return cmp;
int size = this.bookTitles.size();
int oSize = o.bookTitles.size();
cmp = (size < oSize ? -1 : (size == oSize ? 0 : 1));
if (cmp != 0) return cmp;
for (int i = 0; i < size; i++) {
cmp = this.bookTitles.get(i).compareTo(o.bookTitles.get(i));
if (cmp != 0) return cmp;
}
return this.authorName.compareTo(o.authorName);
}
}
}
| BookAuthor |
java | google__dagger | javatests/dagger/internal/codegen/DuplicateBindingsValidationTest.java | {
"start": 25959,
"end": 26394
} | class ____");
}
});
}
@Test
public void childBindingConflictsWithParent() {
Source aComponent =
CompilerTests.javaSource(
"test.A",
"package test;",
"",
"import dagger.Component;",
"import dagger.Module;",
"import dagger.Provides;",
"",
"@Component(modules = A.AModule.class)",
" | Modules |
java | google__error-prone | core/src/test/java/com/google/errorprone/scanner/ScannerSupplierTest.java | {
"start": 26703,
"end": 27197
} | class ____ extends BugChecker {
public MissingInject(ErrorProneFlags flags) {}
}
@Test
public void missingInject_stillProvisioned() {
ScannerSupplier ss1 = ScannerSupplier.fromBugCheckerClasses(MissingInject.class);
// We're only testing that this doesn't fail.
var unused = ss1.get();
}
private static ScannerSupplierSubject assertScanner(ScannerSupplier scannerSupplier) {
return assertAbout(ScannerSupplierSubject::new).that(scannerSupplier);
}
}
| MissingInject |
java | apache__camel | components/camel-debezium/camel-debezium-common/camel-debezium-common-component/src/main/java/org/apache/camel/component/debezium/configuration/EmbeddedDebeziumConfiguration.java | {
"start": 17014,
"end": 18086
} | class ____ should be used to serialize and deserialize value data for offsets. The default is JSON
* converter.
*/
public String getInternalValueConverter() {
return internalValueConverter;
}
public void setInternalValueConverter(String internalValueConverter) {
this.internalValueConverter = internalValueConverter;
}
/**
* Sets additional properties for debezium components in case they can't be set directly on the camel configurations
* (e.g: setting Kafka Connect properties needed by Debezium engine, for example, setting KafkaOffsetBackingStore),
* the properties have to be prefixed with `additionalProperties.`. E.g.:
* `additionalProperties.transactional.id=12345&additionalProperties.schema.registry.url=http://localhost:8811/avro`
*/
public void setAdditionalProperties(Map<String, Object> additionalProperties) {
this.additionalProperties = additionalProperties;
}
public Map<String, Object> getAdditionalProperties() {
return additionalProperties;
}
}
| that |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/log/LogComponentCustomizeTest.java | {
"start": 1057,
"end": 2025
} | class ____ extends ContextTestSupport {
private final LogCustomFormatterTest.TestExchangeFormatter formatter = new LogCustomFormatterTest.TestExchangeFormatter();
@Test
public void testCustomize() throws Exception {
Assertions.assertEquals(0, formatter.getCounter());
template.sendBody("direct:start", "Hello World");
Assertions.assertEquals(1, formatter.getCounter());
}
@Override
protected RoutesBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
@Override
public void configure() throws Exception {
// customize the log component using java lambda style (using the default name)
customize(LogComponent.class, l -> {
l.setExchangeFormatter(formatter);
});
from("direct:start")
.to("log:foo");
}
};
}
}
| LogComponentCustomizeTest |
java | spring-projects__spring-boot | core/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/condition/AbstractNestedConditionTests.java | {
"start": 3236,
"end": 3579
} | class ____ extends AbstractNestedCondition {
InvalidNestedCondition() {
super(ConfigurationPhase.PARSE_CONFIGURATION);
}
@Override
protected ConditionOutcome getFinalMatchOutcome(MemberMatchOutcomes memberOutcomes) {
return ConditionOutcome.match();
}
@ConditionalOnMissingBean(name = "myBean")
static | InvalidNestedCondition |
java | spring-projects__spring-framework | spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/RequestMappingHandlerAdapterTests.java | {
"start": 15827,
"end": 15999
} | class ____ {
public String handle(Model model) {
model.addAttribute("someAttr", "someAttrValue");
return "redirect:/path";
}
}
static | RedirectAttributeController |
java | spring-projects__spring-framework | spring-jdbc/src/main/java/org/springframework/jdbc/datasource/embedded/HsqlEmbeddedDatabaseConfigurer.java | {
"start": 1034,
"end": 2194
} | class ____ extends AbstractEmbeddedDatabaseConfigurer {
private static @Nullable HsqlEmbeddedDatabaseConfigurer instance;
private final Class<? extends Driver> driverClass;
/**
* Get the singleton {@link HsqlEmbeddedDatabaseConfigurer} instance.
* @return the configurer instance
* @throws ClassNotFoundException if HSQL is not on the classpath
*/
@SuppressWarnings("unchecked")
public static synchronized HsqlEmbeddedDatabaseConfigurer getInstance() throws ClassNotFoundException {
if (instance == null) {
instance = new HsqlEmbeddedDatabaseConfigurer( (Class<? extends Driver>)
ClassUtils.forName("org.hsqldb.jdbcDriver", HsqlEmbeddedDatabaseConfigurer.class.getClassLoader()));
}
return instance;
}
private HsqlEmbeddedDatabaseConfigurer(Class<? extends Driver> driverClass) {
this.driverClass = driverClass;
}
@Override
public void configureConnectionProperties(ConnectionProperties properties, String databaseName) {
properties.setDriverClass(this.driverClass);
properties.setUrl("jdbc:hsqldb:mem:" + databaseName);
properties.setUsername("sa");
properties.setPassword("");
}
}
| HsqlEmbeddedDatabaseConfigurer |
java | elastic__elasticsearch | x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CopyLifecycleIndexMetadataTransportAction.java | {
"start": 1873,
"end": 5920
} | class ____ extends TransportMasterNodeAction<
CopyLifecycleIndexMetadataAction.Request,
AcknowledgedResponse> {
private final ClusterStateTaskExecutor<UpdateIndexMetadataTask> executor;
private final MasterServiceTaskQueue<UpdateIndexMetadataTask> taskQueue;
private final ProjectResolver projectResolver;
@Inject
public CopyLifecycleIndexMetadataTransportAction(
TransportService transportService,
ClusterService clusterService,
ThreadPool threadPool,
ActionFilters actionFilters,
ProjectResolver projectResolver
) {
super(
CopyLifecycleIndexMetadataAction.NAME,
transportService,
clusterService,
threadPool,
actionFilters,
CopyLifecycleIndexMetadataAction.Request::new,
AcknowledgedResponse::readFrom,
EsExecutors.DIRECT_EXECUTOR_SERVICE
);
this.executor = new SimpleBatchedAckListenerTaskExecutor<>() {
@Override
public Tuple<ClusterState, ClusterStateAckListener> executeTask(UpdateIndexMetadataTask task, ClusterState state) {
var projectMetadata = state.metadata().getProject(task.projectId);
var updatedMetadata = applyUpdate(projectMetadata, task);
return new Tuple<>(ClusterState.builder(state).putProjectMetadata(updatedMetadata).build(), task);
}
};
this.taskQueue = clusterService.createTaskQueue("migrate-copy-index-metadata", Priority.NORMAL, this.executor);
this.projectResolver = projectResolver;
}
@Override
protected void masterOperation(
Task task,
CopyLifecycleIndexMetadataAction.Request request,
ClusterState state,
ActionListener<AcknowledgedResponse> listener
) {
taskQueue.submitTask(
"migrate-copy-index-metadata",
new UpdateIndexMetadataTask(
projectResolver.getProjectId(),
request.sourceIndex(),
request.destIndex(),
request.ackTimeout(),
listener
),
request.masterNodeTimeout()
);
}
@Override
protected ClusterBlockException checkBlock(CopyLifecycleIndexMetadataAction.Request request, ClusterState state) {
return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
}
private static ProjectMetadata applyUpdate(ProjectMetadata projectMetadata, UpdateIndexMetadataTask updateTask) {
assert projectMetadata != null && updateTask != null;
assert Objects.equals(updateTask.projectId, projectMetadata.id());
IndexMetadata sourceMetadata = projectMetadata.index(updateTask.sourceIndex);
if (sourceMetadata == null) {
throw new IndexNotFoundException(updateTask.sourceIndex);
}
IndexMetadata destMetadata = projectMetadata.index(updateTask.destIndex);
if (destMetadata == null) {
throw new IndexNotFoundException(updateTask.destIndex);
}
IndexMetadata.Builder newDestMetadata = IndexMetadata.builder(destMetadata);
var sourceILM = sourceMetadata.getCustomData(LifecycleExecutionState.ILM_CUSTOM_METADATA_KEY);
if (sourceILM != null) {
newDestMetadata.putCustom(LifecycleExecutionState.ILM_CUSTOM_METADATA_KEY, sourceILM);
}
newDestMetadata.putRolloverInfos(sourceMetadata.getRolloverInfos())
// creation date is required for ILM to function
.creationDate(sourceMetadata.getCreationDate())
// creation date updates settings so must increment settings version
.settingsVersion(destMetadata.getSettingsVersion() + 1);
var indices = new HashMap<>(projectMetadata.indices());
indices.put(updateTask.destIndex, newDestMetadata.build());
return ProjectMetadata.builder(projectMetadata).indices(indices).build();
}
static | CopyLifecycleIndexMetadataTransportAction |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/frommap/FromMapMappingTest.java | {
"start": 1143,
"end": 4115
} | class ____ {
@ProcessorTest
void fromNullMap() {
assertThat( StringMapToBeanMapper.INSTANCE.fromMap( null ) ).isNull();
}
@ProcessorTest
void fromEmptyMap() {
StringMapToBeanMapper.Order order = StringMapToBeanMapper.INSTANCE.fromMap( Collections.emptyMap() );
assertThat( order ).isNotNull();
assertThat( order.getName() ).isNull();
assertThat( order.getPrice() ).isEqualTo( 0.0 );
assertThat( order.getOrderDate() ).isNull();
assertThat( order.getShipmentDate() ).isNull();
}
@ProcessorTest
void fromFullMap() {
Map<String, String> map = new HashMap<>();
map.put( "name", "Jacket" );
map.put( "price", "25.5" );
map.put( "shipmentDate", "2021-06-15" );
StringMapToBeanMapper.Order order = StringMapToBeanMapper.INSTANCE.fromMap( map );
assertThat( order ).isNotNull();
assertThat( order.getName() ).isEqualTo( "Jacket" );
assertThat( order.getPrice() ).isEqualTo( 25.5 );
assertThat( order.getOrderDate() ).isNull();
assertThat( order.getShipmentDate() ).isEqualTo( LocalDate.of( 2021, Month.JUNE, 15 ) );
}
@ProcessorTest
void fromMapWithEmptyValuesForString() {
Map<String, String> map = Collections.singletonMap( "name", "" );
StringMapToBeanMapper.Order order = StringMapToBeanMapper.INSTANCE.fromMap( map );
assertThat( order ).isNotNull();
assertThat( order.getName() ).isEqualTo( "" );
assertThat( order.getPrice() ).isEqualTo( 0 );
assertThat( order.getOrderDate() ).isNull();
assertThat( order.getShipmentDate() ).isNull();
}
@ProcessorTest
void fromMapWithEmptyValuesForDouble() {
Map<String, String> map = Collections.singletonMap( "price", "" );
assertThatThrownBy( () -> StringMapToBeanMapper.INSTANCE.fromMap( map ) )
.isInstanceOf( NumberFormatException.class );
}
@ProcessorTest
void fromMapWithEmptyValuesForDate() {
Map<String, String> map = Collections.singletonMap( "orderDate", "" );
assertThatThrownBy( () -> StringMapToBeanMapper.INSTANCE.fromMap( map ) )
.isInstanceOf( RuntimeException.class )
.getCause()
.isInstanceOf( ParseException.class );
}
@ProcessorTest
void fromMapWithEmptyValuesForLocalDate() {
Map<String, String> map = Collections.singletonMap( "shipmentDate", "" );
assertThatThrownBy( () -> StringMapToBeanMapper.INSTANCE.fromMap( map ) )
.isInstanceOf( DateTimeParseException.class );
}
}
@Nested
@WithClasses({
StringMapToBeanWithCustomPresenceCheckMapper.class
})
| StringMapToBeanTests |
java | google__guava | android/guava-tests/test/com/google/common/base/FinalizableReferenceQueueTest.java | {
"start": 1809,
"end": 2200
} | class ____ {
private @Nullable FinalizableReferenceQueue frq;
@After
public void tearDown() throws Exception {
frq = null;
}
@Test
public void testFinalizeReferentCalled() {
MockReference reference = new MockReference(frq = new FinalizableReferenceQueue());
GcFinalization.awaitDone(() -> reference.finalizeReferentCalled);
}
static | FinalizableReferenceQueueTest |
java | elastic__elasticsearch | x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoShapeScriptDocValuesIT.java | {
"start": 3090,
"end": 17067
} | class ____ extends MockScriptPlugin {
@Override
protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
Map<String, Function<Map<String, Object>, Object>> scripts = new HashMap<>();
scripts.put("lat", this::scriptLat);
scripts.put("lon", this::scriptLon);
scripts.put("height", this::scriptHeight);
scripts.put("width", this::scriptWidth);
scripts.put("label_lat", this::scriptLabelLat);
scripts.put("label_lon", this::scriptLabelLon);
return scripts;
}
private double scriptHeight(Map<String, Object> vars) {
Map<?, ?> doc = (Map<?, ?>) vars.get("doc");
LeafShapeFieldData.ShapeScriptValues<GeoPoint, GeoShapeValues.GeoShapeValue> geometry = assertGeometry(doc);
if (geometry.size() == 0) {
return Double.NaN;
} else {
BoundingBox<GeoPoint> boundingBox = geometry.getBoundingBox();
return boundingBox.topLeft().lat() - boundingBox.bottomRight().lat();
}
}
private double scriptWidth(Map<String, Object> vars) {
Map<?, ?> doc = (Map<?, ?>) vars.get("doc");
LeafShapeFieldData.ShapeScriptValues<GeoPoint, GeoShapeValues.GeoShapeValue> geometry = assertGeometry(doc);
if (geometry.size() == 0) {
return Double.NaN;
} else {
BoundingBox<GeoPoint> boundingBox = geometry.getBoundingBox();
return boundingBox.bottomRight().lon() - boundingBox.topLeft().lon();
}
}
private double scriptLat(Map<String, Object> vars) {
Map<?, ?> doc = (Map<?, ?>) vars.get("doc");
LeafShapeFieldData.ShapeScriptValues<GeoPoint, GeoShapeValues.GeoShapeValue> geometry = assertGeometry(doc);
return geometry.size() == 0 ? Double.NaN : geometry.getCentroid().lat();
}
private double scriptLon(Map<String, Object> vars) {
Map<?, ?> doc = (Map<?, ?>) vars.get("doc");
LeafShapeFieldData.ShapeScriptValues<GeoPoint, GeoShapeValues.GeoShapeValue> geometry = assertGeometry(doc);
return geometry.size() == 0 ? Double.NaN : geometry.getCentroid().lon();
}
private double scriptLabelLat(Map<String, Object> vars) {
Map<?, ?> doc = (Map<?, ?>) vars.get("doc");
LeafShapeFieldData.ShapeScriptValues<GeoPoint, GeoShapeValues.GeoShapeValue> geometry = assertGeometry(doc);
return geometry.size() == 0 ? Double.NaN : geometry.getLabelPosition().lat();
}
private double scriptLabelLon(Map<String, Object> vars) {
Map<?, ?> doc = (Map<?, ?>) vars.get("doc");
LeafShapeFieldData.ShapeScriptValues<GeoPoint, GeoShapeValues.GeoShapeValue> geometry = assertGeometry(doc);
return geometry.size() == 0 ? Double.NaN : geometry.getLabelPosition().lon();
}
private LeafShapeFieldData.ShapeScriptValues<GeoPoint, GeoShapeValues.GeoShapeValue> assertGeometry(Map<?, ?> doc) {
AbstractAtomicGeoShapeShapeFieldData.GeoShapeScriptValues geometry =
(AbstractAtomicGeoShapeShapeFieldData.GeoShapeScriptValues) doc.get("location");
if (geometry.size() == 0) {
assertThat(geometry.getBoundingBox(), Matchers.nullValue());
assertThat(geometry.getCentroid(), Matchers.nullValue());
assertThat(geometry.getLabelPosition(), Matchers.nullValue());
assertThat(geometry.getDimensionalType(), equalTo(-1));
} else {
assertThat(geometry.getBoundingBox(), Matchers.notNullValue());
assertThat(geometry.getCentroid(), Matchers.notNullValue());
assertThat(geometry.getLabelPosition(), Matchers.notNullValue());
assertThat(geometry.getDimensionalType(), greaterThanOrEqualTo(0));
assertThat(geometry.getDimensionalType(), lessThanOrEqualTo(2));
}
return geometry;
}
}
@Override
protected boolean forbidPrivateIndexSettings() {
return false;
}
@Before
public void setupTestIndex() throws IOException {
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder()
.startObject()
.startObject("_doc")
.startObject("properties")
.startObject("location")
.field("type", "geo_shape");
xContentBuilder.endObject().endObject().endObject().endObject();
assertAcked(indicesAdmin().prepareCreate("test").setMapping(xContentBuilder));
ensureGreen();
}
public void testRandomShape() throws Exception {
GeoShapeIndexer indexer = new GeoShapeIndexer(Orientation.CCW, "test");
Geometry geometry = randomValueOtherThanMany(g -> {
try {
indexer.indexShape(g);
return false;
} catch (Exception e) {
return true;
}
}, () -> GeometryTestUtils.randomGeometry(false));
doTestGeometry(geometry);
}
public void testPolygonFromYamlTests() throws IOException, ParseException {
// This is the geometry used in the tests in 70_script_doc_values.yml, and is easier to test and debug here
String wkt = "POLYGON(("
+ "24.04725 59.942,24.04825 59.94125,24.04875 59.94125,24.04875 59.94175,24.048 59.9425,"
+ "24.0475 59.94275,24.0465 59.94225,24.046 59.94225,24.04575 59.9425,24.04525 59.94225,24.04725 59.942"
+ "))";
Geometry polygon = WellKnownText.fromWKT(GeographyValidator.instance(true), true, wkt);
doTestGeometry(polygon, null);
}
public void testPolygonDateline() throws Exception {
Geometry geometry = new Polygon(new LinearRing(new double[] { 170, 190, 190, 170, 170 }, new double[] { -5, -5, 5, 5, -5 }));
doTestGeometry(geometry, null);
}
private MultiPoint pointsFromLine(Line line) {
ArrayList<Point> points = new ArrayList<>();
for (int i = 0; i < line.length(); i++) {
double x = line.getX(i);
double y = line.getY(i);
points.add(new Point(x, y));
}
return new MultiPoint(points);
}
public void testEvenLineString() throws Exception {
Line line = new Line(new double[] { -5, -1, 0, 1, 5 }, new double[] { 0, 0, 0, 0, 0 });
doTestGeometry(line, GeoTestUtils.geoShapeValue(new Point(-0.5, 0)));
doTestGeometry(pointsFromLine(line), GeoTestUtils.geoShapeValue(new Point(0, 0)));
}
public void testOddLineString() throws Exception {
Line line = new Line(new double[] { -5, -1, 1, 5 }, new double[] { 0, 0, 0, 0 });
doTestGeometry(line, GeoTestUtils.geoShapeValue(new Point(0, 0)));
doTestGeometry(pointsFromLine(line), GeoTestUtils.geoShapeValue(new Point(-1, 0)));
}
public void testUnbalancedEvenLineString() throws Exception {
Line line = new Line(new double[] { -5, -4, -3, -2, -1, 0, 5 }, new double[] { 0, 0, 0, 0, 0, 0, 0 });
doTestGeometry(line, GeoTestUtils.geoShapeValue(new Point(-2.5, 0)));
doTestGeometry(pointsFromLine(line), GeoTestUtils.geoShapeValue(new Point(-2, 0)));
}
public void testUnbalancedOddLineString() throws Exception {
Line line = new Line(new double[] { -5, -4, -3, -2, -1, 5 }, new double[] { 0, 0, 0, 0, 0, 0 });
doTestGeometry(line, GeoTestUtils.geoShapeValue(new Point(-2.5, 0)));
doTestGeometry(pointsFromLine(line), GeoTestUtils.geoShapeValue(new Point(-3, 0)));
}
public void testVerticalLineString() throws Exception {
// Data with no x-range is not well sorted and odd choices occur for the first triangle tree node
Line line = new Line(new double[] { 0, 0, 0, 0, 0 }, new double[] { -5, -1, 0, 1, 5 });
doTestGeometry(line, GeoTestUtils.geoShapeValue(new Point(0, 3)));
doTestGeometry(pointsFromLine(line), GeoTestUtils.geoShapeValue(new Point(0, 1)));
}
public void testOffVerticalLineString() throws Exception {
// Even a very small x-range results in reasonable sorting for the label position
Line line = new Line(new double[] { -0.0005, -0.0001, 0, 0.0001, 0.0005 }, new double[] { -5, -1, 0, 1, 5 });
doTestGeometry(line, GeoTestUtils.geoShapeValue(new Point(-0.00005, -0.5)));
doTestGeometry(pointsFromLine(line), GeoTestUtils.geoShapeValue(new Point(0, 0)));
}
private void doTestGeometry(Geometry geometry) throws IOException {
doTestGeometry(geometry, null, false);
}
private void doTestGeometry(Geometry geometry, GeoShapeValues.GeoShapeValue expectedLabelPosition) throws IOException {
doTestGeometry(geometry, expectedLabelPosition, true);
}
private void doTestGeometry(Geometry geometry, GeoShapeValues.GeoShapeValue expectedLabelPosition, boolean fallbackToCentroid)
throws IOException {
prepareIndex("test").setId("1")
.setSource(
jsonBuilder().startObject().field("name", "TestPosition").field("location", WellKnownText.toWKT(geometry)).endObject()
)
.get();
indicesAdmin().prepareRefresh("test").get();
GeoShapeValues.GeoShapeValue value = GeoTestUtils.geoShapeValue(geometry);
SearchRequestBuilder searchRequest = client().prepareSearch()
.addStoredField("_source")
.addScriptField("lat", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "lat", Collections.emptyMap()))
.addScriptField("lon", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "lon", Collections.emptyMap()))
.addScriptField("height", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "height", Collections.emptyMap()))
.addScriptField("width", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "width", Collections.emptyMap()))
.addScriptField("label_lat", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "label_lat", Collections.emptyMap()))
.addScriptField("label_lon", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "label_lon", Collections.emptyMap()));
assertCheckedResponse(searchRequest, response -> {
Map<String, DocumentField> fields = response.getHits().getHits()[0].getFields();
assertThat(fields.get("lat").getValue(), equalTo(value.getY()));
assertThat(fields.get("lon").getValue(), equalTo(value.getX()));
assertThat(fields.get("height").getValue(), equalTo(value.boundingBox().maxY() - value.boundingBox().minY()));
assertThat(fields.get("width").getValue(), equalTo(value.boundingBox().maxX() - value.boundingBox().minX()));
// Check label position is in the geometry, but with a tolerance constructed as a circle of 1m radius to handle quantization
Point labelPosition = new Point(fields.get("label_lon").getValue(), fields.get("label_lat").getValue());
Circle tolerance = new Circle(labelPosition.getY(), labelPosition.getX(), 1);
assertTrue(
"Expect label position " + labelPosition + " to intersect geometry " + geometry,
value.relate(tolerance) != GeoRelation.QUERY_DISJOINT
);
// Check that the label position is the expected one, or the centroid in certain polygon cases
if (expectedLabelPosition != null) {
doTestLabelPosition(fields, expectedLabelPosition);
} else if (fallbackToCentroid && value.dimensionalShapeType() == DimensionalShapeType.POLYGON) {
// Use the centroid for all polygons, unless overwritten for specific cases
doTestLabelPosition(fields, GeoTestUtils.geoShapeValue(new Point(value.getX(), value.getY())));
}
});
}
private void doTestLabelPosition(Map<String, DocumentField> fields, GeoShapeValues.GeoShapeValue expectedLabelPosition)
throws IOException {
assertEquals(
"Unexpected latitude for label position,",
expectedLabelPosition.getY(),
fields.get("label_lat").getValue(),
0.0000001
);
assertEquals(
"Unexpected longitude for label position,",
expectedLabelPosition.getX(),
fields.get("label_lon").getValue(),
0.0000001
);
}
public void testNullShape() throws Exception {
prepareIndex("test").setId("1")
.setSource(jsonBuilder().startObject().field("name", "TestPosition").nullField("location").endObject())
.get();
indicesAdmin().prepareRefresh("test").get();
SearchRequestBuilder searchRequestBuilder = client().prepareSearch()
.addStoredField("_source")
.addScriptField("lat", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "lat", Collections.emptyMap()))
.addScriptField("lon", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "lon", Collections.emptyMap()))
.addScriptField("height", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "height", Collections.emptyMap()))
.addScriptField("width", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "width", Collections.emptyMap()));
assertNoFailuresAndResponse(searchRequestBuilder, response -> {
Map<String, DocumentField> fields = response.getHits().getHits()[0].getFields();
assertThat(fields.get("lat").getValue(), equalTo(Double.NaN));
assertThat(fields.get("lon").getValue(), equalTo(Double.NaN));
assertThat(fields.get("height").getValue(), equalTo(Double.NaN));
assertThat(fields.get("width").getValue(), equalTo(Double.NaN));
});
}
}
| CustomScriptPlugin |
java | alibaba__nacos | common/src/main/java/com/alibaba/nacos/common/packagescan/util/NestedExceptionUtils.java | {
"start": 1093,
"end": 2951
} | class ____ {
/**
* Build a message for the given base message and root cause.
*
* @param message the base message
* @param cause the root cause
* @return the full exception message
*/
public static String buildMessage(String message, Throwable cause) {
if (cause == null) {
return message;
}
StringBuilder sb = new StringBuilder(64);
if (message != null) {
sb.append(message).append("; ");
}
sb.append("nested exception is ").append(cause);
return sb.toString();
}
/**
* Retrieve the innermost cause of the given exception, if any.
*
* @param original the original exception to introspect
* @return the innermost exception, or {@code null} if none
* @since 4.3.9
*/
public static Throwable getRootCause(Throwable original) {
if (original == null) {
return null;
}
Throwable rootCause = null;
Throwable cause = original.getCause();
while (cause != null && cause != rootCause) {
rootCause = cause;
cause = cause.getCause();
}
return rootCause;
}
/**
* Retrieve the most specific cause of the given exception, that is,
* either the innermost cause (root cause) or the exception itself.
*
* <p>Differs from {@link #getRootCause} in that it falls back
* to the original exception if there is no root cause.
*
* @param original the original exception to introspect
* @return the most specific cause (never {@code null})
* @since 4.3.9
*/
public static Throwable getMostSpecificCause(Throwable original) {
Throwable rootCause = getRootCause(original);
return (rootCause != null ? rootCause : original);
}
}
| NestedExceptionUtils |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/DelimitedImageVisitor.java | {
"start": 1768,
"end": 6049
} | class ____ extends TextWriterImageVisitor {
private static final String defaultDelimiter = "\t";
final private LinkedList<ImageElement> elemQ = new LinkedList<ImageElement>();
private long fileSize = 0l;
// Elements of fsimage we're interested in tracking
private final Collection<ImageElement> elementsToTrack;
// Values for each of the elements in elementsToTrack
private final AbstractMap<ImageElement, String> elements =
new HashMap<ImageElement, String>();
private final String delimiter;
{
elementsToTrack = new ArrayList<ImageElement>();
// This collection determines what elements are tracked and the order
// in which they are output
Collections.addAll(elementsToTrack, ImageElement.INODE_PATH,
ImageElement.REPLICATION,
ImageElement.MODIFICATION_TIME,
ImageElement.ACCESS_TIME,
ImageElement.BLOCK_SIZE,
ImageElement.NUM_BLOCKS,
ImageElement.NUM_BYTES,
ImageElement.NS_QUOTA,
ImageElement.DS_QUOTA,
ImageElement.PERMISSION_STRING,
ImageElement.USER_NAME,
ImageElement.GROUP_NAME);
}
public DelimitedImageVisitor(String filename) throws IOException {
this(filename, false);
}
public DelimitedImageVisitor(String outputFile, boolean printToScreen)
throws IOException {
this(outputFile, printToScreen, defaultDelimiter);
}
public DelimitedImageVisitor(String outputFile, boolean printToScreen,
String delimiter) throws IOException {
super(outputFile, printToScreen);
this.delimiter = delimiter;
reset();
}
/**
* Reset the values of the elements we're tracking in order to handle
* the next file
*/
private void reset() {
elements.clear();
for(ImageElement e : elementsToTrack)
elements.put(e, null);
fileSize = 0l;
}
@Override
void leaveEnclosingElement() throws IOException {
ImageElement elem = elemQ.pop();
// If we're done with an inode, write out our results and start over
if(elem == ImageElement.INODE ||
elem == ImageElement.INODE_UNDER_CONSTRUCTION) {
writeLine();
write("\n");
reset();
}
}
/**
* Iterate through all the elements we're tracking and, if a value was
* recorded for it, write it out.
*/
private void writeLine() throws IOException {
Iterator<ImageElement> it = elementsToTrack.iterator();
while(it.hasNext()) {
ImageElement e = it.next();
String v = null;
if(e == ImageElement.NUM_BYTES)
v = String.valueOf(fileSize);
else
v = elements.get(e);
if(v != null)
write(v);
if(it.hasNext())
write(delimiter);
}
}
@Override
void visit(ImageElement element, String value) throws IOException {
// Explicitly label the root path
if(element == ImageElement.INODE_PATH && value.equals(""))
value = "/";
// Special case of file size, which is sum of the num bytes in each block
if(element == ImageElement.NUM_BYTES)
fileSize += Long.parseLong(value);
if(elements.containsKey(element) && element != ImageElement.NUM_BYTES)
elements.put(element, value);
}
@Override
void visitEnclosingElement(ImageElement element) throws IOException {
elemQ.push(element);
}
@Override
void visitEnclosingElement(ImageElement element, ImageElement key,
String value) throws IOException {
// Special case as numBlocks is an attribute of the blocks element
if(key == ImageElement.NUM_BLOCKS
&& elements.containsKey(ImageElement.NUM_BLOCKS))
elements.put(key, value);
elemQ.push(element);
}
@Override
void start() throws IOException { /* Nothing to do */ }
}
| DelimitedImageVisitor |
java | apache__camel | components/camel-ai/camel-langchain4j-agent/src/main/java/org/apache/camel/component/langchain4j/agent/LangChain4jAgentEndpoint.java | {
"start": 1644,
"end": 2708
} | class ____ extends DefaultEndpoint {
@Metadata(required = true)
@UriPath(description = "The Agent id")
private final String agentId;
@UriParam
private LangChain4jAgentConfiguration configuration;
public LangChain4jAgentEndpoint(String endpointUri, Component component, String agentId,
LangChain4jAgentConfiguration configuration) {
super(endpointUri, component);
this.agentId = agentId;
this.configuration = configuration;
}
@Override
public Producer createProducer() throws Exception {
return new LangChain4jAgentProducer(this);
}
@Override
public Consumer createConsumer(Processor processor) throws Exception {
throw new UnsupportedOperationException("Consumer not supported");
}
/**
* Return the Agent ID
*
* @return
*/
public String getAgentId() {
return agentId;
}
public LangChain4jAgentConfiguration getConfiguration() {
return configuration;
}
}
| LangChain4jAgentEndpoint |
java | apache__logging-log4j2 | log4j-jpl/src/test/java/org/apache/logging/log4j/jpl/Log4jSystemLoggerTest.java | {
"start": 1596,
"end": 5618
} | class ____ {
public static final String LOGGER_NAME = "Test";
protected Logger logger;
protected ListAppender eventAppender;
protected ListAppender stringAppender;
@BeforeEach
void setUp() {
logger = System.getLogger(LOGGER_NAME);
assertThat(logger, instanceOf(Log4jSystemLogger.class));
eventAppender = ListAppender.getListAppender("TestAppender");
stringAppender = ListAppender.getListAppender("StringAppender");
assertNotNull(eventAppender);
assertNotNull(stringAppender);
}
@AfterEach
void tearDown() {
if (eventAppender != null) {
eventAppender.clear();
}
if (stringAppender != null) {
stringAppender.clear();
}
}
@Test
void testGetName() {
assertThat(logger.getName(), equalTo(LOGGER_NAME));
}
@Test
void testIsLoggable() {
assertThat(logger.isLoggable(Logger.Level.ERROR), equalTo(true));
}
@Test
void testLog() {
logger.log(Logger.Level.INFO, "Informative message here.");
final List<LogEvent> events = eventAppender.getEvents();
assertThat(events, hasSize(1));
final LogEvent event = events.get(0);
assertThat(event, instanceOf(Log4jLogEvent.class));
assertEquals(Level.INFO, event.getLevel());
assertEquals(LOGGER_NAME, event.getLoggerName());
assertEquals("Informative message here.", event.getMessage().getFormattedMessage());
assertEquals(Log4jSystemLogger.class.getName(), event.getLoggerFqcn());
}
@Test
void testParameterizedLogging() {
logger.log(Logger.Level.INFO, "Hello, {0}!", "World");
final List<LogEvent> events = eventAppender.getEvents();
assertThat(events, hasSize(1));
final LogEvent event = events.get(0);
assertThat(event, instanceOf(Log4jLogEvent.class));
assertEquals(Level.INFO, event.getLevel());
assertEquals(LOGGER_NAME, event.getLoggerName());
assertEquals("Hello, World!", event.getMessage().getFormattedMessage());
assertEquals(Log4jSystemLogger.class.getName(), event.getLoggerFqcn());
}
@Test
void testParameterizedLoggingWithThrowable() {
final Throwable throwable = new RuntimeException();
logger.log(Logger.Level.INFO, "Hello, {0}!", "World", throwable);
final List<LogEvent> events = eventAppender.getEvents();
assertThat(events, hasSize(1));
final LogEvent event = events.get(0);
assertThat(event, instanceOf(Log4jLogEvent.class));
assertEquals(Level.INFO, event.getLevel());
assertEquals(LOGGER_NAME, event.getLoggerName());
assertEquals("Hello, World!", event.getMessage().getFormattedMessage());
assertEquals(Log4jSystemLogger.class.getName(), event.getLoggerFqcn());
assertSame(throwable, event.getThrown());
}
@Test
void testLogWithCallingClass() {
final Logger log = System.getLogger("Test.CallerClass");
log.log(Logger.Level.INFO, "Calling from LoggerTest");
final List<String> messages = stringAppender.getMessages();
assertThat(messages, hasSize(1));
final String message = messages.get(0);
assertEquals(Log4jSystemLoggerTest.class.getName(), message);
}
@Test
void testCurlyBraces() {
testMessage("{message}");
}
@Test
void testPercent() {
testMessage("message%s");
}
@Test
void testPercentAndCurlyBraces() {
testMessage("message{%s}");
}
private void testMessage(final String string) {
logger.log(Logger.Level.INFO, "Test info " + string);
final List<LogEvent> events = eventAppender.getEvents();
assertThat(events, hasSize(1));
for (final LogEvent event : events) {
final String message = event.getMessage().getFormattedMessage();
assertThat(message, equalTo("Test info " + string));
}
}
}
| Log4jSystemLoggerTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/nullness/UnsafeWildcardTest.java | {
"start": 15771,
"end": 15858
} | class ____<U> extends AbstractList<WithBound<? super U>> {}
abstract | BadList |
java | spring-projects__spring-security | oauth2/oauth2-authorization-server/src/main/java/org/springframework/security/oauth2/server/authorization/jackson/OAuth2TokenFormatMixin.java | {
"start": 1389,
"end": 1504
} | class ____ {
@JsonCreator
OAuth2TokenFormatMixin(@JsonProperty("value") String value) {
}
}
| OAuth2TokenFormatMixin |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/observers/ParameterizedPayloadTest.java | {
"start": 2450,
"end": 2787
} | class ____ {
@Inject
Event<List<Integer>> intEvent;
@Inject
Event<Collection<String>> strEvent;
void produceInt(List<Integer> value) {
intEvent.fire(value);
}
void produceStr(Collection<String> value) {
strEvent.fire(value);
}
}
}
| ListProducer |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/deser/jdk/PrimitiveArrayDeserializers.java | {
"start": 23474,
"end": 26331
} | class ____
extends PrimitiveArrayDeserializers<int[]>
{
public final static IntDeser instance = new IntDeser();
public IntDeser() { super(int[].class); }
protected IntDeser(IntDeser base, NullValueProvider nuller, Boolean unwrapSingle) {
super(base, nuller, unwrapSingle);
}
@Override
protected PrimitiveArrayDeserializers<?> withResolved(NullValueProvider nuller,
Boolean unwrapSingle) {
return new IntDeser(this, nuller, unwrapSingle);
}
@Override
protected int[] _constructEmpty() {
return new int[0];
}
@Override
public int[] deserialize(JsonParser p, DeserializationContext ctxt) throws JacksonException
{
if (!p.isExpectedStartArrayToken()) {
return handleNonArray(p, ctxt);
}
ArrayBuilders.IntBuilder builder = ctxt.getArrayBuilders().getIntBuilder();
int[] chunk = builder.resetAndStart();
int ix = 0;
try {
JsonToken t;
while ((t = p.nextToken()) != JsonToken.END_ARRAY) {
int value;
if (t == JsonToken.VALUE_NUMBER_INT) {
value = p.getIntValue();
} else if (t == JsonToken.VALUE_NULL) {
if (_nuller != null) {
_nuller.getNullValue(ctxt);
continue;
}
_verifyNullForPrimitive(ctxt);
value = 0;
} else {
value = _parseIntPrimitive(p, ctxt);
}
if (ix >= chunk.length) {
chunk = builder.appendCompletedChunk(chunk, ix);
ix = 0;
}
chunk[ix++] = value;
}
} catch (Exception e) {
throw DatabindException.wrapWithPath(ctxt, e,
new JacksonException.Reference(chunk, builder.bufferedSize() + ix));
}
return builder.completeAndClearBuffer(chunk, ix);
}
@Override
protected int[] handleSingleElementUnwrapped(JsonParser p,
DeserializationContext ctxt) throws JacksonException {
return new int[] { _parseIntPrimitive(p, ctxt) };
}
@Override
protected int[] _concat(int[] oldValue, int[] newValue) {
int len1 = oldValue.length;
int len2 = newValue.length;
int[] result = Arrays.copyOf(oldValue, len1+len2);
System.arraycopy(newValue, 0, result, len1, len2);
return result;
}
}
@JacksonStdImpl
final static | IntDeser |
java | quarkusio__quarkus | extensions/vertx-http/runtime/src/main/java/io/quarkus/vertx/http/runtime/security/RoutingContextAwareSecurityIdentity.java | {
"start": 367,
"end": 2645
} | class ____ implements SecurityIdentity {
private static final String ROUTING_CONTEXT_KEY = RoutingContext.class.getName();
private final SecurityIdentity delegate;
private final RoutingContext routingContext;
private RoutingContextAwareSecurityIdentity(SecurityIdentity delegate, RoutingContext routingContext) {
this.delegate = delegate;
this.routingContext = routingContext;
}
static SecurityIdentity addRoutingCtxToIdentityIfMissing(SecurityIdentity delegate, RoutingContext routingContext) {
if (delegate != null && delegate.getAttribute(ROUTING_CONTEXT_KEY) == null) {
return new RoutingContextAwareSecurityIdentity(delegate, routingContext);
}
return delegate;
}
@Override
public Principal getPrincipal() {
return delegate.getPrincipal();
}
@Override
public boolean isAnonymous() {
return delegate.isAnonymous();
}
@Override
public Set<String> getRoles() {
return delegate.getRoles();
}
@Override
public boolean hasRole(String s) {
return delegate.hasRole(s);
}
@Override
public <T extends Credential> T getCredential(Class<T> aClass) {
return delegate.getCredential(aClass);
}
@Override
public Set<Credential> getCredentials() {
return delegate.getCredentials();
}
@SuppressWarnings("unchecked")
@Override
public <T> T getAttribute(String s) {
if (ROUTING_CONTEXT_KEY.equals(s)) {
return (T) routingContext;
}
return delegate.getAttribute(s);
}
@Override
public Map<String, Object> getAttributes() {
// we always recreate the map as it could have changed in the delegate
var delegateAttributes = delegate.getAttributes();
if (delegateAttributes == null || delegateAttributes.isEmpty()) {
return Map.of(ROUTING_CONTEXT_KEY, routingContext);
}
var result = new HashMap<>(delegateAttributes);
result.put(ROUTING_CONTEXT_KEY, routingContext);
return result;
}
@Override
public Uni<Boolean> checkPermission(Permission permission) {
return delegate.checkPermission(permission);
}
}
| RoutingContextAwareSecurityIdentity |
java | eclipse-vertx__vert.x | vertx-core/src/test/java/io/vertx/tests/security/KeyStoreHelperTest.java | {
"start": 1075,
"end": 3562
} | class ____ extends VertxTestBase {
/**
* Verifies that the key store helper can read a PKCS#8 encoded RSA private key
* from a PEM file.
*
* @throws Exception if the key cannot be read.
*/
@Test
public void testKeyStoreHelperSupportsRSAPrivateKeys() throws Exception {
PemKeyCertOptions options = new PemKeyCertOptions()
.addKeyPath("tls/server-key.pem")
.addCertPath("tls/server-cert.pem");
KeyStoreHelper helper = options.getHelper(vertx);
assertKeyType(helper.store(), RSAPrivateKey.class);
}
/**
* Verifies that the key store helper can read a PKCS#8 encoded EC private key
* from a PEM file.
*
* @throws Exception if the key cannot be read.
*/
@Test
public void testKeyStoreHelperSupportsPKCS8ECPrivateKey() throws Exception {
Assume.assumeTrue("ECC is not supported by VM's security providers", TestUtils.isECCSupportedByVM());
PemKeyCertOptions options = new PemKeyCertOptions()
.addKeyPath("tls/server-key-ec.pem")
.addCertPath("tls/server-cert-ec.pem");
KeyStoreHelper helper = options.getHelper(vertx);
assertKeyType(helper.store(), ECPrivateKey.class);
}
/**
* Verifies that the key store helper can read a DER encoded EC private key
* from a PEM file.
*
* @throws Exception if the key cannot be read.
*/
@Test
public void testKeyStoreHelperSupportsReadingECPrivateKeyFromPEMFile() throws Exception {
Assume.assumeTrue("ECC is not supported by VM's security providers", TestUtils.isECCSupportedByVM());
PemKeyCertOptions options = new PemKeyCertOptions()
.addKeyPath("tls/server-key-ec-pkcs1.pem")
.addCertPath("tls/server-cert-ec.pem");
KeyStoreHelper helper = options.getHelper(vertx);
assertKeyType(helper.store(), ECPrivateKey.class);
}
private void assertKeyType(KeyStore store, Class<?> expectedKeyType) throws KeyStoreException, GeneralSecurityException {
assertTrue(store.size() > 0);
for (Enumeration<String> e = store.aliases(); e.hasMoreElements(); ) {
String alias = e.nextElement();
// "dummdummydummydummydummydummydummy" is the password set by KeyStoreHelper when importing the
// keys into the internal key store
assertThat(store.getKey(alias, "dummdummydummydummydummydummydummy".toCharArray()), instanceOf(expectedKeyType));
assertThat(store.getCertificate(alias), instanceOf(X509Certificate.class));
}
}
}
| KeyStoreHelperTest |
java | assertj__assertj-core | assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/paths/Paths_assertIsWritable_Test.java | {
"start": 1138,
"end": 2272
} | class ____ extends PathsBaseTest {
@Test
void should_fail_if_actual_is_null() {
// WHEN
var error = expectAssertionError(() -> underTest.assertIsWritable(INFO, null));
// THEN
then(error).hasMessage(actualIsNull());
}
@Test
void should_fail_if_actual_does_not_exist() {
// GIVEN
Path actual = tempDir.resolve("non-existent");
// WHEN
var error = expectAssertionError(() -> underTest.assertIsWritable(INFO, actual));
// THEN
then(error).hasMessage(shouldExist(actual).create());
}
@Test
void should_fail_if_actual_is_not_writable() throws IOException {
// GIVEN
Path actual = createFile(tempDir.resolve("actual"));
actual.toFile().setWritable(false);
// WHEN
var error = expectAssertionError(() -> underTest.assertIsWritable(INFO, actual));
// THEN
then(error).hasMessage(shouldBeWritable(actual).create());
}
@Test
void should_pass_if_actual_is_writable() throws IOException {
// GIVEN
Path actual = createFile(tempDir.resolve("actual"));
// WHEN/THEN
underTest.assertIsWritable(INFO, actual);
}
}
| Paths_assertIsWritable_Test |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/bean/BeanPropertiesFunctionTest.java | {
"start": 1016,
"end": 1890
} | class ____ extends ContextTestSupport {
@Override
protected Registry createCamelRegistry() throws Exception {
Registry registry = super.createCamelRegistry();
registry.bind("fooBean", new BeanPropertiesFunctionTest.FooBean());
registry.bind("barBean", new BeanPropertiesFunctionTest.BarBean());
return registry;
}
@Test
public void testParseEndpoint() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("direct:{{bean:fooBean.foo}}").to("mock:{{bean:barBean.bar}}");
}
});
context.start();
getMockEndpoint("mock:bar").expectedMessageCount(1);
template.sendBody("direct:foo", "Hello World");
assertMockEndpointsSatisfied();
}
public static | BeanPropertiesFunctionTest |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/web/servlet/samples/spr/ControllerAdviceIntegrationTests.java | {
"start": 4641,
"end": 5160
} | class ____ {
@Bean
TestController testController() {
return new TestController();
}
@Bean
SingletonControllerAdvice singletonControllerAdvice() {
return new SingletonControllerAdvice();
}
@Bean
@Scope("prototype")
PrototypeControllerAdvice prototypeControllerAdvice() {
return new PrototypeControllerAdvice();
}
@Bean
@RequestScope
RequestScopedControllerAdvice requestScopedControllerAdvice() {
return new RequestScopedControllerAdvice();
}
}
@ControllerAdvice
static | Config |
java | elastic__elasticsearch | x-pack/plugin/eql/qa/common/src/main/java/org/elasticsearch/test/eql/EqlSpecTestCase.java | {
"start": 470,
"end": 2236
} | class ____ extends BaseEqlSpecTestCase {
@ParametersFactory(shuffle = false, argumentFormatting = PARAM_FORMATTING)
public static List<Object[]> readTestSpecs() throws Exception {
// Load EQL validation specs
return asArray(EqlSpecLoader.load("/test_queries.toml", "/additional_test_queries.toml", "/test_queries_date.toml"));
}
@Override
protected String tiebreaker() {
return "serial_event_id";
}
// constructor for "local" rest tests
public EqlSpecTestCase(
String query,
String name,
List<long[]> eventIds,
String[] joinKeys,
Integer size,
Integer maxSamplesPerKey,
Boolean allowPartialSearch,
Boolean allowPartialSequenceResults,
Boolean expectShardFailures
) {
this(
TEST_INDEX,
query,
name,
eventIds,
joinKeys,
size,
maxSamplesPerKey,
allowPartialSearch,
allowPartialSequenceResults,
expectShardFailures
);
}
// constructor for multi-cluster tests
public EqlSpecTestCase(
String index,
String query,
String name,
List<long[]> eventIds,
String[] joinKeys,
Integer size,
Integer maxSamplesPerKey,
Boolean allowPartialSearch,
Boolean allowPartialSequenceResults,
Boolean expectShardFailures
) {
super(
index,
query,
name,
eventIds,
joinKeys,
size,
maxSamplesPerKey,
allowPartialSearch,
allowPartialSequenceResults,
expectShardFailures
);
}
}
| EqlSpecTestCase |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestHDFSConcat.java | {
"start": 2402,
"end": 23001
} | class ____ {
public static final Logger LOG =
LoggerFactory.getLogger(TestHDFSConcat.class);
private static final short REPL_FACTOR = 2;
private MiniDFSCluster cluster;
private NamenodeProtocols nn;
private DistributedFileSystem dfs;
private static final long blockSize = 512;
private static final Configuration conf;
static {
conf = new Configuration();
conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, blockSize);
}
@BeforeEach
public void startUpCluster() throws IOException {
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(REPL_FACTOR).build();
assertNotNull(cluster, "Failed Cluster Creation");
cluster.waitClusterUp();
dfs = cluster.getFileSystem();
assertNotNull(dfs, "Failed to get FileSystem");
nn = cluster.getNameNodeRpc();
assertNotNull(nn, "Failed to get NameNode");
}
@AfterEach
public void shutDownCluster() throws IOException {
if(dfs != null) {
dfs.close();
dfs = null;
}
if(cluster != null) {
cluster.shutdownDataNodes();
cluster.shutdown();
cluster = null;
}
}
/**
* Concatenates 10 files into one
* Verifies the final size, deletion of the file, number of blocks
* @throws IOException
*/
@Test
public void testConcat() throws IOException, InterruptedException {
final int numFiles = 10;
long fileLen = blockSize*3;
HdfsFileStatus fStatus;
FSDataInputStream stm;
String trg = "/trg";
Path trgPath = new Path(trg);
DFSTestUtil.createFile(dfs, trgPath, fileLen, REPL_FACTOR, 1);
fStatus = nn.getFileInfo(trg);
long trgLen = fStatus.getLen();
long trgBlocks = nn.getBlockLocations(trg, 0, trgLen).locatedBlockCount();
Path [] files = new Path[numFiles];
byte[][] bytes = new byte[numFiles + 1][(int) fileLen];
LocatedBlocks [] lblocks = new LocatedBlocks[numFiles];
long [] lens = new long [numFiles];
stm = dfs.open(trgPath);
stm.readFully(0, bytes[0]);
stm.close();
int i;
for(i=0; i<files.length; i++) {
files[i] = new Path("/file"+i);
Path path = files[i];
System.out.println("Creating file " + path);
// make files with different content
DFSTestUtil.createFile(dfs, path, fileLen, REPL_FACTOR, i);
fStatus = nn.getFileInfo(path.toUri().getPath());
lens[i] = fStatus.getLen();
assertEquals(trgLen, lens[i]); // file of the same length.
lblocks[i] = nn.getBlockLocations(path.toUri().getPath(), 0, lens[i]);
//read the file
stm = dfs.open(path);
stm.readFully(0, bytes[i + 1]);
//bytes[i][10] = 10;
stm.close();
}
// check permissions -try the operation with the "wrong" user
final UserGroupInformation user1 = UserGroupInformation.createUserForTesting(
"theDoctor", new String[] { "tardis" });
DistributedFileSystem hdfs =
(DistributedFileSystem)DFSTestUtil.getFileSystemAs(user1, conf);
try {
hdfs.concat(trgPath, files);
fail("Permission exception expected");
} catch (IOException ie) {
System.out.println("Got expected exception for permissions:"
+ ie.getLocalizedMessage());
// expected
}
// check count update
ContentSummary cBefore = dfs.getContentSummary(trgPath.getParent());
// resort file array, make INode id not sorted.
for (int j = 0; j < files.length / 2; j++) {
Path tempPath = files[j];
files[j] = files[files.length - 1 - j];
files[files.length - 1 - j] = tempPath;
byte[] tempBytes = bytes[1 + j];
bytes[1 + j] = bytes[files.length - 1 - j + 1];
bytes[files.length - 1 - j + 1] = tempBytes;
}
// now concatenate
dfs.concat(trgPath, files);
// verify count
ContentSummary cAfter = dfs.getContentSummary(trgPath.getParent());
assertEquals(cBefore.getFileCount(), cAfter.getFileCount()+files.length);
// verify other stuff
long totalLen = trgLen;
long totalBlocks = trgBlocks;
for(i=0; i<files.length; i++) {
totalLen += lens[i];
totalBlocks += lblocks[i].locatedBlockCount();
}
System.out.println("total len=" + totalLen + "; totalBlocks=" + totalBlocks);
fStatus = nn.getFileInfo(trg);
trgLen = fStatus.getLen(); // new length
// read the resulting file
stm = dfs.open(trgPath);
byte[] byteFileConcat = new byte[(int)trgLen];
stm.readFully(0, byteFileConcat);
stm.close();
trgBlocks = nn.getBlockLocations(trg, 0, trgLen).locatedBlockCount();
//verifications
// 1. number of blocks
assertEquals(trgBlocks, totalBlocks);
// 2. file lengths
assertEquals(trgLen, totalLen);
// 3. removal of the src file
for(Path p: files) {
fStatus = nn.getFileInfo(p.toUri().getPath());
assertNull(fStatus, "File " + p + " still exists"); // file shouldn't exist
// try to create fie with the same name
DFSTestUtil.createFile(dfs, p, fileLen, REPL_FACTOR, 1);
}
// 4. content
checkFileContent(byteFileConcat, bytes);
// add a small file (less then a block)
Path smallFile = new Path("/sfile");
int sFileLen = 10;
DFSTestUtil.createFile(dfs, smallFile, sFileLen, REPL_FACTOR, 1);
dfs.concat(trgPath, new Path [] {smallFile});
fStatus = nn.getFileInfo(trg);
trgLen = fStatus.getLen(); // new length
// check number of blocks
trgBlocks = nn.getBlockLocations(trg, 0, trgLen).locatedBlockCount();
assertEquals(trgBlocks, totalBlocks+1);
// and length
assertEquals(trgLen, totalLen+sFileLen);
}
/**
* Test that the concat operation is properly persisted in the
* edit log, and properly replayed on restart.
*/
@Test
public void testConcatInEditLog() throws Exception {
final Path TEST_DIR = new Path("/testConcatInEditLog");
final long FILE_LEN = blockSize;
// 1. Concat some files
Path[] srcFiles = new Path[3];
for (int i = 0; i < srcFiles.length; i++) {
Path path = new Path(TEST_DIR, "src-" + i);
DFSTestUtil.createFile(dfs, path, FILE_LEN, REPL_FACTOR, 1);
srcFiles[i] = path;
}
Path targetFile = new Path(TEST_DIR, "target");
DFSTestUtil.createFile(dfs, targetFile, FILE_LEN, REPL_FACTOR, 1);
dfs.concat(targetFile, srcFiles);
// 2. Verify the concat operation basically worked, and record
// file status.
assertTrue(dfs.exists(targetFile));
FileStatus origStatus = dfs.getFileStatus(targetFile);
// 3. Restart NN to force replay from edit log
cluster.restartNameNode(true);
// 4. Verify concat operation was replayed correctly and file status
// did not change.
assertTrue(dfs.exists(targetFile));
assertFalse(dfs.exists(srcFiles[0]));
FileStatus statusAfterRestart = dfs.getFileStatus(targetFile);
assertEquals(origStatus.getModificationTime(),
statusAfterRestart.getModificationTime());
}
// compare content
private void checkFileContent(byte[] concat, byte[][] bytes ) {
int idx=0;
boolean mismatch = false;
for(byte [] bb: bytes) {
for(byte b: bb) {
if(b != concat[idx++]) {
mismatch=true;
break;
}
}
if(mismatch)
break;
}
assertFalse(mismatch, "File content of concatenated file is different");
}
// test case when final block is not of a full length
@Test
public void testConcatNotCompleteBlock() throws IOException {
long trgFileLen = blockSize*3;
long srcFileLen = blockSize*3+20; // block at the end - not full
// create first file
String name1="/trg", name2="/src";
Path filePath1 = new Path(name1);
DFSTestUtil.createFile(dfs, filePath1, trgFileLen, REPL_FACTOR, 1);
HdfsFileStatus fStatus = nn.getFileInfo(name1);
long fileLen = fStatus.getLen();
assertEquals(fileLen, trgFileLen);
//read the file
FSDataInputStream stm = dfs.open(filePath1);
byte[] byteFile1 = new byte[(int)trgFileLen];
stm.readFully(0, byteFile1);
stm.close();
LocatedBlocks lb1 = nn.getBlockLocations(name1, 0, trgFileLen);
Path filePath2 = new Path(name2);
DFSTestUtil.createFile(dfs, filePath2, srcFileLen, REPL_FACTOR, 1);
fStatus = nn.getFileInfo(name2);
fileLen = fStatus.getLen();
assertEquals(srcFileLen, fileLen);
// read the file
stm = dfs.open(filePath2);
byte[] byteFile2 = new byte[(int)srcFileLen];
stm.readFully(0, byteFile2);
stm.close();
LocatedBlocks lb2 = nn.getBlockLocations(name2, 0, srcFileLen);
System.out.println("trg len="+trgFileLen+"; src len="+srcFileLen);
// move the blocks
dfs.concat(filePath1, new Path [] {filePath2});
long totalLen = trgFileLen + srcFileLen;
fStatus = nn.getFileInfo(name1);
fileLen = fStatus.getLen();
// read the resulting file
stm = dfs.open(filePath1);
byte[] byteFileConcat = new byte[(int)fileLen];
stm.readFully(0, byteFileConcat);
stm.close();
LocatedBlocks lbConcat = nn.getBlockLocations(name1, 0, fileLen);
//verifications
// 1. number of blocks
assertEquals(lbConcat.locatedBlockCount(),
lb1.locatedBlockCount() + lb2.locatedBlockCount());
// 2. file lengths
System.out.println("file1 len="+fileLen+"; total len="+totalLen);
assertEquals(fileLen, totalLen);
// 3. removal of the src file
fStatus = nn.getFileInfo(name2);
assertNull(fStatus, "File " + name2 + "still exists"); // file shouldn't exist
// 4. content
checkFileContent(byteFileConcat, new byte [] [] {byteFile1, byteFile2});
}
/**
* test illegal args cases
*/
@Test
public void testIllegalArg() throws IOException {
long fileLen = blockSize*3;
Path parentDir = new Path ("/parentTrg");
assertTrue(dfs.mkdirs(parentDir));
Path trg = new Path(parentDir, "trg");
DFSTestUtil.createFile(dfs, trg, fileLen, REPL_FACTOR, 1);
// must be in the same dir
{
// create first file
Path dir1 = new Path ("/dir1");
assertTrue(dfs.mkdirs(dir1));
Path src = new Path(dir1, "src");
DFSTestUtil.createFile(dfs, src, fileLen, REPL_FACTOR, 1);
try {
dfs.concat(trg, new Path [] {src});
fail("didn't fail for src and trg in different directories");
} catch (Exception e) {
// expected
}
}
// non existing file
try {
dfs.concat(trg, new Path [] {new Path("test1/a")}); // non existing file
fail("didn't fail with invalid arguments");
} catch (Exception e) {
//expected
}
// empty arg list
try {
dfs.concat(trg, new Path [] {}); // empty array
fail("didn't fail with invalid arguments");
} catch (Exception e) {
// exspected
}
// the source file's preferred block size cannot be greater than the target
{
final Path src1 = new Path(parentDir, "src1");
DFSTestUtil.createFile(dfs, src1, fileLen, REPL_FACTOR, 0L);
final Path src2 = new Path(parentDir, "src2");
// create a file whose preferred block size is greater than the target
DFSTestUtil.createFile(dfs, src2, 1024, fileLen,
dfs.getDefaultBlockSize(trg) * 2, REPL_FACTOR, 0L);
try {
dfs.concat(trg, new Path[] {src1, src2});
fail("didn't fail for src with greater preferred block size");
} catch (Exception e) {
GenericTestUtils.assertExceptionContains("preferred block size", e);
}
}
}
/**
* make sure we update the quota correctly after concat
*/
@Test
public void testConcatWithQuotaDecrease() throws IOException {
final short srcRepl = 3; // note this is different with REPL_FACTOR
final int srcNum = 10;
final Path foo = new Path("/foo");
final Path[] srcs = new Path[srcNum];
final Path target = new Path(foo, "target");
DFSTestUtil.createFile(dfs, target, blockSize, REPL_FACTOR, 0L);
dfs.setQuota(foo, Long.MAX_VALUE - 1, Long.MAX_VALUE - 1);
for (int i = 0; i < srcNum; i++) {
srcs[i] = new Path(foo, "src" + i);
DFSTestUtil.createFile(dfs, srcs[i], blockSize * 2, srcRepl, 0L);
}
ContentSummary summary = dfs.getContentSummary(foo);
assertEquals(11, summary.getFileCount());
assertEquals(blockSize * REPL_FACTOR + blockSize * 2 * srcRepl * srcNum,
summary.getSpaceConsumed());
dfs.concat(target, srcs);
summary = dfs.getContentSummary(foo);
assertEquals(1, summary.getFileCount());
assertEquals(blockSize * REPL_FACTOR + blockSize * 2 * REPL_FACTOR * srcNum,
summary.getSpaceConsumed());
}
@Test
public void testConcatWithQuotaIncrease() throws IOException {
final short repl = 3;
final int srcNum = 10;
final Path foo = new Path("/foo");
final Path bar = new Path(foo, "bar");
final Path[] srcs = new Path[srcNum];
final Path target = new Path(bar, "target");
DFSTestUtil.createFile(dfs, target, blockSize, repl, 0L);
final long dsQuota = blockSize * repl + blockSize * srcNum * REPL_FACTOR;
dfs.setQuota(foo, Long.MAX_VALUE - 1, dsQuota);
for (int i = 0; i < srcNum; i++) {
srcs[i] = new Path(bar, "src" + i);
DFSTestUtil.createFile(dfs, srcs[i], blockSize, REPL_FACTOR, 0L);
}
ContentSummary summary = dfs.getContentSummary(bar);
assertEquals(11, summary.getFileCount());
assertEquals(dsQuota, summary.getSpaceConsumed());
try {
dfs.concat(target, srcs);
fail("QuotaExceededException expected");
} catch (RemoteException e) {
assertTrue(
e.unwrapRemoteException() instanceof QuotaExceededException);
}
dfs.setQuota(foo, Long.MAX_VALUE - 1, Long.MAX_VALUE - 1);
dfs.concat(target, srcs);
summary = dfs.getContentSummary(bar);
assertEquals(1, summary.getFileCount());
assertEquals(blockSize * repl * (srcNum + 1), summary.getSpaceConsumed());
}
@Test
public void testConcatRelativeTargetPath() throws IOException {
Path dir = new Path("/dir");
Path trg = new Path("trg");
Path src = new Path(dir, "src");
dfs.setWorkingDirectory(dir);
DFSTestUtil.createFile(dfs, trg, blockSize, REPL_FACTOR, 1);
DFSTestUtil.createFile(dfs, src, blockSize, REPL_FACTOR, 1);
dfs.concat(trg, new Path[]{src});
assertEquals(blockSize * 2, dfs.getFileStatus(trg).getLen());
assertFalse(dfs.exists(src));
}
@Test
@Timeout(value = 30)
public void testConcatReservedRelativePaths() throws IOException {
String testPathDir = "/.reserved/raw/ezone";
Path dir = new Path(testPathDir);
dfs.mkdirs(dir);
Path trg = new Path(testPathDir, "trg");
Path src = new Path(testPathDir, "src");
DFSTestUtil.createFile(dfs, trg, blockSize, REPL_FACTOR, 1);
DFSTestUtil.createFile(dfs, src, blockSize, REPL_FACTOR, 1);
try {
dfs.concat(trg, new Path[] { src });
fail("Must throw Exception!");
} catch (IOException e) {
String errMsg = "Concat operation doesn't support "
+ FSDirectory.DOT_RESERVED_STRING + " relative path : " + trg;
GenericTestUtils.assertExceptionContains(errMsg, e);
}
}
/**
* Test concat on same source and target file which is a inode reference.
*/
@Test
public void testConcatOnSameFile() throws Exception {
String dir = "/dir1";
Path trgDir = new Path(dir);
dfs.mkdirs(new Path(dir));
// create a source file
String dir2 = "/dir2";
Path srcDir = new Path(dir2);
dfs.mkdirs(srcDir);
dfs.allowSnapshot(srcDir);
Path src = new Path(srcDir, "file1");
DFSTestUtil.createFile(dfs, src, 512, (short) 2, 0);
// make the file as an Inode reference and delete the reference
dfs.createSnapshot(srcDir, "s1");
dfs.rename(src, trgDir);
dfs.deleteSnapshot(srcDir, "s1");
Path[] srcs = new Path[1];
srcs[0] = new Path(dir, "file1");
// perform concat
LambdaTestUtils.intercept(RemoteException.class,
"concat: the src file /dir1/file1 is the same with the target"
+ " file /dir1/file1",
() -> dfs.concat(srcs[0], srcs));
// the file should exists and read the file
byte[] buff = new byte[1080];
FSDataInputStream stream = dfs.open(srcs[0]);
stream.readFully(0, buff, 0, 512);
assertEquals(1, dfs.getContentSummary(new Path(dir)).getFileCount());
}
/**
* Verifies concat with wrong user when dfs.permissions.enabled is false.
*
* @throws IOException
*/
@Test
public void testConcatPermissionEnabled() throws Exception {
Configuration conf2 = new Configuration();
conf2.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, blockSize);
conf2.setBoolean(DFSConfigKeys.DFS_PERMISSIONS_ENABLED_KEY, true);
MiniDFSCluster cluster2 = new MiniDFSCluster.Builder(conf2).numDataNodes(REPL_FACTOR).build();
try {
cluster2.waitClusterUp();
DistributedFileSystem dfs2 = cluster2.getFileSystem();
String testPathDir = "/dir2";
Path dir = new Path(testPathDir);
dfs2.mkdirs(dir);
Path trg = new Path(testPathDir, "trg");
Path src = new Path(testPathDir, "src");
DFSTestUtil.createFile(dfs2, trg, blockSize, REPL_FACTOR, 1);
DFSTestUtil.createFile(dfs2, src, blockSize, REPL_FACTOR, 1);
// Check permissions with the wrong user when dfs.permissions.enabled is true.
final UserGroupInformation user =
UserGroupInformation.createUserForTesting("theDoctor", new String[] {"tardis"});
DistributedFileSystem hdfs1 =
(DistributedFileSystem) DFSTestUtil.getFileSystemAs(user, conf2);
LambdaTestUtils.intercept(AccessControlException.class,
"Permission denied: user=theDoctor, access=WRITE",
() -> hdfs1.concat(trg, new Path[] {src}));
conf2.setBoolean(DFSConfigKeys.DFS_PERMISSIONS_ENABLED_KEY, false);
cluster2 = new MiniDFSCluster.Builder(conf2).numDataNodes(REPL_FACTOR).build();
cluster2.waitClusterUp();
dfs2 = cluster2.getFileSystem();
dfs2.mkdirs(dir);
DFSTestUtil.createFile(dfs2, trg, blockSize, REPL_FACTOR, 1);
DFSTestUtil.createFile(dfs2, src, blockSize, REPL_FACTOR, 1);
// Check permissions with the wrong user when dfs.permissions.enabled is false.
DistributedFileSystem hdfs2 =
(DistributedFileSystem) DFSTestUtil.getFileSystemAs(user, conf2);
hdfs2.concat(trg, new Path[] {src});
} finally {
if (cluster2 != null) {
cluster2.shutdown();
}
}
}
/**
* Test permissions of Concat operation.
*/
@Test
public void testConcatPermissions() throws Exception {
String testPathDir = "/dir";
Path dir = new Path(testPathDir);
dfs.mkdirs(dir);
dfs.setPermission(dir, new FsPermission((short) 0777));
Path dst = new Path(testPathDir, "dst");
Path src = new Path(testPathDir, "src");
DFSTestUtil.createFile(dfs, dst, blockSize, REPL_FACTOR, 1);
// Create a user who is not the owner of the file and try concat operation.
final UserGroupInformation user =
UserGroupInformation.createUserForTesting("theDoctor", new String[] {"group"});
DistributedFileSystem dfs2 = (DistributedFileSystem) DFSTestUtil.getFileSystemAs(user, conf);
// Test 1: User is not the owner of the file and has src & dst permission.
DFSTestUtil.createFile(dfs, src, blockSize, REPL_FACTOR, 1);
dfs.setPermission(dst, new FsPermission((short) 0777));
dfs.setPermission(src, new FsPermission((short) 0777));
dfs2.concat(dst, new Path[] {src});
// Test 2: User is not the owner of the file and has only dst permission.
DFSTestUtil.createFile(dfs, src, blockSize, REPL_FACTOR, 1);
dfs.setPermission(dst, new FsPermission((short) 0777));
dfs.setPermission(src, new FsPermission((short) 0700));
LambdaTestUtils.intercept(AccessControlException.class,
"Permission denied: user=theDoctor, access=READ",
() -> dfs2.concat(dst, new Path[] {src}));
// Test 3: User is not the owner of the file and has only src permission.
DFSTestUtil.createFile(dfs, src, blockSize, REPL_FACTOR, 1);
dfs.setPermission(dst, new FsPermission((short) 0700));
dfs.setPermission(src, new FsPermission((short) 0777));
LambdaTestUtils.intercept(AccessControlException.class,
"Permission denied: user=theDoctor, access=WRITE",
() -> dfs2.concat(dst, new Path[] {src}));
}
}
| TestHDFSConcat |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/type/descriptor/java/IncomparableComparator.java | {
"start": 288,
"end": 499
} | class ____ implements Comparator {
public static final IncomparableComparator INSTANCE = new IncomparableComparator();
@Override
public int compare(Object o1, Object o2) {
return 0;
}
}
| IncomparableComparator |
java | quarkusio__quarkus | extensions/security/deployment/src/test/java/io/quarkus/security/test/permissionsallowed/UnusedParamPermissionsAllowedValidationFailureTest.java | {
"start": 1078,
"end": 1417
} | class ____ {
@PermissionsAllowed(value = "ignored", params = { "aOrganizationUnitId",
"nestedParam1.something" }, permission = OrganizationUnitIdPermission.class)
public void securedBean(UUID aOrganizationUnitId, NestedParam1 nestedParam1) {
// EMPTY
}
}
public static | SecuredBean |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulUnsignedLongsEvaluator.java | {
"start": 5173,
"end": 5942
} | class ____ implements EvalOperator.ExpressionEvaluator.Factory {
private final Source source;
private final EvalOperator.ExpressionEvaluator.Factory lhs;
private final EvalOperator.ExpressionEvaluator.Factory rhs;
public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs,
EvalOperator.ExpressionEvaluator.Factory rhs) {
this.source = source;
this.lhs = lhs;
this.rhs = rhs;
}
@Override
public MulUnsignedLongsEvaluator get(DriverContext context) {
return new MulUnsignedLongsEvaluator(source, lhs.get(context), rhs.get(context), context);
}
@Override
public String toString() {
return "MulUnsignedLongsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]";
}
}
}
| Factory |
java | qos-ch__slf4j | slf4j-jdk-platform-logging/src/main/java/org/slf4j/jdk/platform/logging/SLF4JSystemLoggerFinder.java | {
"start": 1402,
"end": 2167
} | class ____ extends System.LoggerFinder {
final SLF4JPlatformLoggerFactory platformLoggerFactory = new SLF4JPlatformLoggerFactory();
@Override
public System.Logger getLogger(String name, Module module) {
// JEP 264[1], which introduced the Platform Logging API,
// contains the following note:
//
// > An implementation of the LoggerFinder service should make it
// > possible to distinguish system loggers (used by system classes
// > from the Bootstrap Class Loader (BCL)) and application loggers
// > (created by an application for its own usage). This distinction
// > is important for platform security. The creator of a logger can
// > pass the | SLF4JSystemLoggerFinder |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/JUnit4TearDownNotRunTest.java | {
"start": 5660,
"end": 5810
} | class ____ {
@After
public void tearDown() {}
}
@RunWith(JUnit4.class)
| J4TearDownHasAfter |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableTimeoutTimed.java | {
"start": 2188,
"end": 4870
} | class ____<T> extends AtomicLong
implements FlowableSubscriber<T>, Subscription, TimeoutSupport {
private static final long serialVersionUID = 3764492702657003550L;
final Subscriber<? super T> downstream;
final long timeout;
final TimeUnit unit;
final Scheduler.Worker worker;
final SequentialDisposable task;
final AtomicReference<Subscription> upstream;
final AtomicLong requested;
TimeoutSubscriber(Subscriber<? super T> actual, long timeout, TimeUnit unit, Scheduler.Worker worker) {
this.downstream = actual;
this.timeout = timeout;
this.unit = unit;
this.worker = worker;
this.task = new SequentialDisposable();
this.upstream = new AtomicReference<>();
this.requested = new AtomicLong();
}
@Override
public void onSubscribe(Subscription s) {
SubscriptionHelper.deferredSetOnce(upstream, requested, s);
}
@Override
public void onNext(T t) {
long idx = get();
if (idx == Long.MAX_VALUE || !compareAndSet(idx, idx + 1)) {
return;
}
task.get().dispose();
downstream.onNext(t);
startTimeout(idx + 1);
}
void startTimeout(long nextIndex) {
task.replace(worker.schedule(new TimeoutTask(nextIndex, this), timeout, unit));
}
@Override
public void onError(Throwable t) {
if (getAndSet(Long.MAX_VALUE) != Long.MAX_VALUE) {
task.dispose();
downstream.onError(t);
worker.dispose();
} else {
RxJavaPlugins.onError(t);
}
}
@Override
public void onComplete() {
if (getAndSet(Long.MAX_VALUE) != Long.MAX_VALUE) {
task.dispose();
downstream.onComplete();
worker.dispose();
}
}
@Override
public void onTimeout(long idx) {
if (compareAndSet(idx, Long.MAX_VALUE)) {
SubscriptionHelper.cancel(upstream);
downstream.onError(new TimeoutException(timeoutMessage(timeout, unit)));
worker.dispose();
}
}
@Override
public void request(long n) {
SubscriptionHelper.deferredRequest(upstream, requested, n);
}
@Override
public void cancel() {
SubscriptionHelper.cancel(upstream);
worker.dispose();
}
}
static final | TimeoutSubscriber |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/util/ClassUtilsTests.java | {
"start": 44746,
"end": 44886
} | interface ____ {
default void defaultPrint() {
}
void print(String messages);
}
@SuppressWarnings("unused")
private | MethodsInterface |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/collections/classification/list/EntityWithIndexBasedList.java | {
"start": 619,
"end": 1335
} | class ____ {
// ...
//end::collections-list-indexbase-ex[]
@Id
private Integer id;
@Basic
private String name;
//tag::collections-list-indexbase-ex[]
@ElementCollection
@OrderColumn(name = "name_index")
@ListIndexBase(1)
private List<Name> names;
//end::collections-list-indexbase-ex[]
private EntityWithIndexBasedList() {
// for Hibernate use
}
public EntityWithIndexBasedList(Integer id, String name) {
this.id = id;
this.name = name;
}
public Integer getId() {
return id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
//tag::collections-list-indexbase-ex[]
}
//end::collections-list-indexbase-ex[]
| EntityWithIndexBasedList |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/OptaPlannerEndpointBuilderFactory.java | {
"start": 24647,
"end": 26945
} | class ____ {
/**
* The internal instance of the builder used to access to all the
* methods representing the name of headers.
*/
private static final OptaPlannerHeaderNameBuilder INSTANCE = new OptaPlannerHeaderNameBuilder();
/**
* Specifies the solverId to use.
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code OptaPlannerSolverId}.
*/
public String optaPlannerSolverId() {
return "CamelOptaPlannerSolverId";
}
/**
* Specify whether to use another thread for submitting Solution
* instances rather than blocking the current thread.
*
* The option is a: {@code Boolean} type.
*
* Group: producer
*
* @return the name of the header {@code OptaPlannerIsAsync}.
*/
public String optaPlannerIsAsync() {
return "CamelOptaPlannerIsAsync";
}
/**
* The best planning solution.
*
* The option is a: {@code Object} type.
*
* Group: consumer
*
* @return the name of the header {@code OptaPlannerBestSolution}.
*/
public String optaPlannerBestSolution() {
return "CamelOptaPlannerBestSolution";
}
/**
* Is solving.
*
* The option is a: {@code Boolean} type.
*
* Group: producer
*
* @return the name of the header {@code OptaPlannerIsSolving}.
*/
public String optaPlannerIsSolving() {
return "CamelOptaPlannerIsSolving";
}
/**
* The Solver Manager.
*
* The option is a: {@code
* org.optaplanner.core.api.solver.SolverManager} type.
*
* Group: producer
*
* @return the name of the header {@code OptaPlannerSolverManager}.
*/
public String optaPlannerSolverManager() {
return "CamelOptaPlannerSolverManager";
}
}
static OptaPlannerEndpointBuilder endpointBuilder(String componentName, String path) {
| OptaPlannerHeaderNameBuilder |
java | grpc__grpc-java | services/src/main/java/io/grpc/protobuf/services/ProtoReflectionService.java | {
"start": 1502,
"end": 3971
} | class ____ implements BindableService {
private ProtoReflectionService() {
}
@Deprecated
public static BindableService newInstance() {
return new ProtoReflectionService();
}
@Override
@SuppressWarnings("deprecation")
public ServerServiceDefinition bindService() {
ServerServiceDefinition serverServiceDefinitionV1 = ProtoReflectionServiceV1.newInstance()
.bindService();
MethodDescriptor<ServerReflectionRequest, ServerReflectionResponse> methodDescriptorV1 =
ServerReflectionGrpc.getServerReflectionInfoMethod();
// Retain the v1 proto marshallers but change the method name and schema descriptor to v1alpha.
MethodDescriptor<io.grpc.reflection.v1alpha.ServerReflectionRequest,
io.grpc.reflection.v1alpha.ServerReflectionResponse> methodDescriptorV1AlphaGenerated =
io.grpc.reflection.v1alpha.ServerReflectionGrpc.getServerReflectionInfoMethod();
MethodDescriptor<ServerReflectionRequest, ServerReflectionResponse> methodDescriptorV1Alpha =
methodDescriptorV1.toBuilder()
.setFullMethodName(methodDescriptorV1AlphaGenerated.getFullMethodName())
.setSchemaDescriptor(methodDescriptorV1AlphaGenerated.getSchemaDescriptor())
.build();
// Retain the v1 server call handler but change the service name schema descriptor in the
// service descriptor to v1alpha.
ServiceDescriptor serviceDescriptorV1AlphaGenerated =
io.grpc.reflection.v1alpha.ServerReflectionGrpc.getServiceDescriptor();
ServiceDescriptor serviceDescriptorV1Alpha =
ServiceDescriptor.newBuilder(serviceDescriptorV1AlphaGenerated.getName())
.setSchemaDescriptor(serviceDescriptorV1AlphaGenerated.getSchemaDescriptor())
.addMethod(methodDescriptorV1Alpha)
.build();
return ServerServiceDefinition.builder(serviceDescriptorV1Alpha)
.addMethod(methodDescriptorV1Alpha, createServerCallHandler(serverServiceDefinitionV1))
.build();
}
@SuppressWarnings("unchecked")
private ServerCallHandler<ServerReflectionRequest, ServerReflectionResponse>
createServerCallHandler(
ServerServiceDefinition serverServiceDefinition) {
return (ServerCallHandler<ServerReflectionRequest, ServerReflectionResponse>)
serverServiceDefinition.getMethod(
ServerReflectionGrpc.getServerReflectionInfoMethod().getFullMethodName())
.getServerCallHandler();
}
}
| ProtoReflectionService |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/overloading/InconsistentOverloads.java | {
"start": 3456,
"end": 6495
} | class ____ but we report policy violations for
* each group after it is processed.
*/
return Description.NO_MATCH;
}
private void processClassMethods(List<MethodTree> classMethodTrees, VisitorState state) {
for (List<MethodTree> groupMethods : getMethodGroups(classMethodTrees)) {
processGroupMethods(groupMethods, state);
}
}
private void processGroupMethods(List<MethodTree> groupMethodTrees, VisitorState state) {
Preconditions.checkArgument(!groupMethodTrees.isEmpty());
for (ParameterOrderingViolation violation : getViolations(groupMethodTrees)) {
MethodSymbol methodSymbol = getSymbol(violation.methodTree());
if (ASTHelpers.findSuperMethods(methodSymbol, state.getTypes()).isEmpty()) {
Description.Builder description = buildDescription(violation.methodTree());
description.setMessage(violation.getDescription());
state.reportMatch(description.build());
}
}
}
private static ImmutableList<ParameterOrderingViolation> getViolations(
List<MethodTree> groupMethodTrees) {
ImmutableList.Builder<ParameterOrderingViolation> result = ImmutableList.builder();
ParameterTrie trie = new ParameterTrie();
for (MethodTree methodTree : sortedByArity(groupMethodTrees)) {
Optional<ParameterOrderingViolation> violation = trie.extendAndComputeViolation(methodTree);
violation.ifPresent(result::add);
}
return result.build();
}
private static ImmutableList<MethodTree> sortedByArity(Iterable<MethodTree> methodTrees) {
return sortedCopyOf(comparingArity().thenComparing(comparingPositions()), methodTrees);
}
private static Comparator<MethodTree> comparingPositions() {
return comparingInt(ASTHelpers::getStartPosition);
}
private static Comparator<MethodTree> comparingArity() {
return comparingInt(ParameterTrie::getMethodTreeArity);
}
/**
* Returns a collection of method groups for given list of {@code classMethods}.
*
* <p>A <i>method group</i> is a list of methods with the same name.
*
* <p>It is assumed that given {@code classMethods} really do belong to the same class. The
* returned collection does not guarantee any particular group ordering.
*/
private static Collection<List<MethodTree>> getMethodGroups(List<MethodTree> classMethods) {
return classMethods.stream().collect(groupingBy(MethodTree::getName)).values();
}
/**
* Returns a list of {@link MethodTree} declared in the given {@code classTree}.
*
* <p>Only method trees that belong to the {@code classTree} are returned, so methods declared in
* nested classes are not going to be considered.
*/
private ImmutableList<MethodTree> getClassTreeMethods(ClassTree classTree, VisitorState state) {
List<? extends Tree> members = classTree.getMembers();
return members.stream()
.filter(MethodTree.class::isInstance)
.map(MethodTree.class::cast)
.filter(m -> !isSuppressed(m, state))
.collect(toImmutableList());
}
}
| itself |
java | elastic__elasticsearch | x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RBACEngineTests.java | {
"start": 9906,
"end": 116027
} | class ____ extends ESTestCase {
private RBACEngine engine;
private CompositeRolesStore rolesStore;
@Before
public void createEngine() {
final LoadAuthorizedIndicesTimeChecker.Factory timerFactory = mock(LoadAuthorizedIndicesTimeChecker.Factory.class);
when(timerFactory.newTimer(any())).thenReturn(LoadAuthorizedIndicesTimeChecker.NO_OP_CONSUMER);
rolesStore = mock(CompositeRolesStore.class);
engine = new RBACEngine(Settings.EMPTY, rolesStore, new FieldPermissionsCache(Settings.EMPTY), timerFactory);
}
public void testResolveAuthorizationInfoForEmptyRolesWithAuthentication() {
doAnswer(invocation -> {
@SuppressWarnings("unchecked")
final var listener = (ActionListener<Tuple<Role, Role>>) invocation.getArgument(1);
listener.onResponse(new Tuple<>(Role.EMPTY, Role.EMPTY));
return null;
}).when(rolesStore).getRoles(any(), anyActionListener());
final PlainActionFuture<AuthorizationInfo> future = new PlainActionFuture<>();
engine.resolveAuthorizationInfo(
new RequestInfo(
AuthenticationTestHelper.builder().build(),
mock(TransportRequest.class),
randomAlphaOfLengthBetween(20, 30),
null
),
future
);
final AuthorizationInfo authorizationInfo = future.actionGet();
assertThat((String[]) authorizationInfo.asMap().get("user.roles"), emptyArray());
assertThat((String[]) authorizationInfo.getAuthenticatedUserAuthorizationInfo().asMap().get("user.roles"), emptyArray());
}
public void testResolveAuthorizationInfoForEmptyRestrictedRolesWithAuthentication() {
doAnswer(invocation -> {
@SuppressWarnings("unchecked")
final var listener = (ActionListener<Tuple<Role, Role>>) invocation.getArgument(1);
final Supplier<Role> randomRoleSupplier = () -> Role.buildFromRoleDescriptor(
RoleDescriptorTestHelper.builder()
.allowReservedMetadata(randomBoolean())
.allowRemoteIndices(false)
.allowRestriction(randomBoolean())
.allowDescription(randomBoolean())
.allowRemoteClusters(false)
.build(),
new FieldPermissionsCache(Settings.EMPTY),
RESTRICTED_INDICES,
List.of()
);
switch (randomIntBetween(1, 3)) {
case 1 -> listener.onResponse(new Tuple<>(Role.EMPTY_RESTRICTED_BY_WORKFLOW, Role.EMPTY_RESTRICTED_BY_WORKFLOW));
case 2 -> listener.onResponse(new Tuple<>(randomRoleSupplier.get(), Role.EMPTY_RESTRICTED_BY_WORKFLOW));
case 3 -> listener.onResponse(new Tuple<>(Role.EMPTY_RESTRICTED_BY_WORKFLOW, randomRoleSupplier.get()));
default -> throw new IllegalStateException("unexpected test case!");
}
return null;
}).when(rolesStore).getRoles(any(), anyActionListener());
final PlainActionFuture<AuthorizationInfo> future = new PlainActionFuture<>();
engine.resolveAuthorizationInfo(
new RequestInfo(
AuthenticationTestHelper.builder().build(),
mock(TransportRequest.class),
randomAlphaOfLengthBetween(20, 30),
null
),
future
);
ElasticsearchRoleRestrictionException e = expectThrows(ElasticsearchRoleRestrictionException.class, future::actionGet);
assertThat(e.getMessage(), containsString("access restricted by workflow"));
}
public void testResolveAuthorizationInfoForEmptyRoleWithSubject() {
doAnswer(invocation -> {
@SuppressWarnings("unchecked")
final var listener = (ActionListener<Role>) invocation.getArgument(1);
listener.onResponse(Role.EMPTY);
return null;
}).when(rolesStore).getRole(any(), anyActionListener());
final PlainActionFuture<AuthorizationInfo> future = new PlainActionFuture<>();
engine.resolveAuthorizationInfo(AuthenticationTestHelper.builder().build().getEffectiveSubject(), future);
final AuthorizationInfo authorizationInfo = future.actionGet();
assertThat((String[]) authorizationInfo.asMap().get("user.roles"), emptyArray());
assertThat((String[]) authorizationInfo.getAuthenticatedUserAuthorizationInfo().asMap().get("user.roles"), emptyArray());
}
public void testSameUserPermission() {
final User user = new User("joe");
final boolean changePasswordRequest = randomBoolean();
final TransportRequest request = changePasswordRequest
? new ChangePasswordRequestBuilder(mock(Client.class)).username(user.principal()).request()
: new HasPrivilegesRequestBuilder(mock(Client.class)).username(user.principal()).request();
final String action = changePasswordRequest ? TransportChangePasswordAction.TYPE.name() : HasPrivilegesAction.NAME;
final Authentication.RealmRef authenticatedBy = new Authentication.RealmRef(
randomAlphaOfLengthBetween(3, 8),
changePasswordRequest ? randomFrom(ReservedRealm.TYPE, NativeRealmSettings.TYPE) : randomAlphaOfLengthBetween(4, 12),
randomAlphaOfLengthBetween(3, 8)
);
final Authentication authentication = AuthenticationTestHelper.builder().realm().realmRef(authenticatedBy).user(user).build(false);
assertThat(request, instanceOf(UserRequest.class));
assertTrue(RBACEngine.checkSameUserPermissions(action, request, authentication));
}
public void testSameUserPermissionDoesNotAllowNonMatchingUsername() {
final User authUser = new User("admin", "bar");
final User user = new User("joe");
final boolean changePasswordRequest = randomBoolean();
final String username = randomFrom("", "joe" + randomAlphaOfLengthBetween(1, 5), randomAlphaOfLengthBetween(3, 10));
final TransportRequest request = changePasswordRequest
? new ChangePasswordRequestBuilder(mock(Client.class)).username(username).request()
: new HasPrivilegesRequestBuilder(mock(Client.class)).username(username).request();
final String action = changePasswordRequest ? TransportChangePasswordAction.TYPE.name() : HasPrivilegesAction.NAME;
final Authentication.RealmRef authenticatedBy = new Authentication.RealmRef(
randomAlphaOfLengthBetween(3, 8),
randomAlphaOfLengthBetween(4, 12),
randomAlphaOfLengthBetween(3, 8)
);
final Authentication.RealmRef lookedUpBy = new Authentication.RealmRef(
randomAlphaOfLengthBetween(3, 8),
changePasswordRequest ? randomFrom(ReservedRealm.TYPE, NativeRealmSettings.TYPE) : randomAlphaOfLengthBetween(4, 12),
randomAlphaOfLengthBetween(3, 8)
);
final Authentication authentication = Authentication.newRealmAuthentication(authUser, authenticatedBy).runAs(user, lookedUpBy);
assertThat(request, instanceOf(UserRequest.class));
assertFalse(RBACEngine.checkSameUserPermissions(action, request, authentication));
// this should still fail since the username is still different
assertFalse(RBACEngine.checkSameUserPermissions(action, request, authentication));
if (request instanceof ChangePasswordRequest) {
((ChangePasswordRequest) request).username("joe");
} else {
((HasPrivilegesRequest) request).username("joe");
}
assertTrue(RBACEngine.checkSameUserPermissions(action, request, authentication));
}
public void testSameUserPermissionForAuthenticateRequest() {
assertTrue(
RBACEngine.checkSameUserPermissions(
AuthenticateAction.NAME,
AuthenticateRequest.INSTANCE,
AuthenticationTestHelper.builder().build()
)
);
}
public void testSameUserPermissionDoesNotAllowOtherActions() {
final TransportRequest request = mock(TransportRequest.class);
final String action = randomFrom(
PutUserAction.NAME,
DeleteUserAction.NAME,
TransportClusterHealthAction.NAME,
ClusterStateAction.NAME,
TransportClusterStatsAction.TYPE.name(),
GetLicenseAction.NAME
);
final Authentication authentication = AuthenticationTestHelper.builder().build();
assertFalse(RBACEngine.checkSameUserPermissions(action, request, authentication));
verifyNoMoreInteractions(request);
}
public void testSameUserPermissionRunAsChecksAuthenticatedBy() {
final User authUser = new User("admin", "bar");
final String username = "joe";
final User user = new User(username);
final boolean changePasswordRequest = randomBoolean();
final TransportRequest request = changePasswordRequest
? new ChangePasswordRequestBuilder(mock(Client.class)).username(username).request()
: new HasPrivilegesRequestBuilder(mock(Client.class)).username(username).request();
final String action = changePasswordRequest ? TransportChangePasswordAction.TYPE.name() : AuthenticateAction.NAME;
final Authentication.RealmRef authenticatedBy = AuthenticationTestHelper.randomRealmRef(false);
final Authentication.RealmRef lookedUpBy = new Authentication.RealmRef(
randomAlphaOfLengthBetween(3, 8),
changePasswordRequest ? randomFrom(ReservedRealm.TYPE, NativeRealmSettings.TYPE) : randomAlphaOfLengthBetween(4, 12),
randomAlphaOfLengthBetween(3, 8)
);
final Authentication authentication = Authentication.newRealmAuthentication(authUser, authenticatedBy).runAs(user, lookedUpBy);
assertTrue(RBACEngine.checkSameUserPermissions(action, request, authentication));
final Authentication authentication2 = Authentication.newRealmAuthentication(authUser, authenticatedBy);
assertFalse(RBACEngine.checkSameUserPermissions(action, request, authentication2));
}
public void testSameUserPermissionDoesNotAllowChangePasswordForOtherRealms() {
final Authentication authentication = AuthenticationTestHelper.builder()
.realm()
.realmRef(
new Authentication.RealmRef(
randomAlphaOfLengthBetween(3, 8),
randomFrom(
LdapRealmSettings.LDAP_TYPE,
FileRealmSettings.TYPE,
LdapRealmSettings.AD_TYPE,
PkiRealmSettings.TYPE,
randomAlphaOfLengthBetween(4, 12)
),
randomAlphaOfLengthBetween(3, 8)
)
)
.build(false);
final ChangePasswordRequest request = new ChangePasswordRequestBuilder(mock(Client.class)).username(
authentication.getEffectiveSubject().getUser().principal()
).request();
final String action = TransportChangePasswordAction.TYPE.name();
assertThat(request, instanceOf(UserRequest.class));
assertFalse(RBACEngine.checkSameUserPermissions(action, request, authentication));
}
public void testSameUserPermissionDoesNotAllowChangePasswordForApiKey() {
final Authentication authentication = AuthenticationTestHelper.builder().apiKey().build(false);
final ChangePasswordRequest request = new ChangePasswordRequestBuilder(mock(Client.class)).username(
authentication.getEffectiveSubject().getUser().principal()
).request();
final String action = TransportChangePasswordAction.TYPE.name();
assertThat(request, instanceOf(UserRequest.class));
assertFalse(RBACEngine.checkSameUserPermissions(action, request, authentication));
}
public void testSameUserPermissionDoesNotAllowChangePasswordForAccessToken() {
final Authentication authentication = AuthenticationTestHelper.builder().realm().build(false).token();
final ChangePasswordRequest request = new ChangePasswordRequestBuilder(mock(Client.class)).username(
authentication.getEffectiveSubject().getUser().principal()
).request();
final String action = TransportChangePasswordAction.TYPE.name();
assertThat(request, instanceOf(UserRequest.class));
assertFalse(RBACEngine.checkSameUserPermissions(action, request, authentication));
}
public void testSameUserPermissionDoesNotAllowChangePasswordForLookedUpByOtherRealms() {
final Authentication authentication = AuthenticationTestHelper.builder()
.realm()
.runAs()
.realmRef(
new Authentication.RealmRef(
randomAlphaOfLengthBetween(3, 8),
randomFrom(
LdapRealmSettings.LDAP_TYPE,
FileRealmSettings.TYPE,
LdapRealmSettings.AD_TYPE,
PkiRealmSettings.TYPE,
randomAlphaOfLengthBetween(4, 12)
),
randomAlphaOfLengthBetween(3, 8)
)
)
.build();
final ChangePasswordRequest request = new ChangePasswordRequestBuilder(mock(Client.class)).username(
authentication.getEffectiveSubject().getUser().principal()
).request();
final String action = TransportChangePasswordAction.TYPE.name();
assertThat(request, instanceOf(UserRequest.class));
assertFalse(RBACEngine.checkSameUserPermissions(action, request, authentication));
}
public void testSameUserPermissionAllowsSelfApiKeyInfoRetrievalWhenAuthenticatedByApiKey() {
final User user = new User("joe");
final String apiKeyId = randomAlphaOfLengthBetween(4, 7);
final Authentication authentication = AuthenticationTests.randomApiKeyAuthentication(user, apiKeyId);
final TransportRequest request = GetApiKeyRequest.builder().apiKeyId(apiKeyId).build();
assertTrue(RBACEngine.checkSameUserPermissions(GetApiKeyAction.NAME, request, authentication));
}
public void testSameUserPermissionDeniesSelfApiKeyInfoRetrievalWithLimitedByWhenAuthenticatedByApiKey() {
final User user = new User("joe");
final String apiKeyId = randomAlphaOfLengthBetween(4, 7);
final Authentication authentication = AuthenticationTests.randomApiKeyAuthentication(user, apiKeyId);
final TransportRequest request = GetApiKeyRequest.builder().apiKeyId(apiKeyId).withLimitedBy(true).build();
assertFalse(RBACEngine.checkSameUserPermissions(GetApiKeyAction.NAME, request, authentication));
}
public void testSameUserPermissionDeniesApiKeyInfoRetrievalWhenAuthenticatedByADifferentApiKey() {
final User user = new User("joe");
final String apiKeyId = randomAlphaOfLengthBetween(4, 7);
final TransportRequest request = GetApiKeyRequest.builder().apiKeyId(apiKeyId).ownedByAuthenticatedUser(false).build();
final Authentication authentication = AuthenticationTests.randomApiKeyAuthentication(user, randomAlphaOfLength(8));
assertFalse(RBACEngine.checkSameUserPermissions(GetApiKeyAction.NAME, request, authentication));
}
public void testSameUserPermissionDeniesApiKeyInfoRetrievalWhenLookedupByIsPresent() {
final User user = new User("joe");
final String apiKeyId = randomAlphaOfLengthBetween(4, 7);
final TransportRequest request = GetApiKeyRequest.builder().apiKeyId(apiKeyId).ownedByAuthenticatedUser(false).build();
final Authentication authentication = AuthenticationTests.randomApiKeyAuthentication(new User("not-joe"), apiKeyId)
.runAs(user, new Authentication.RealmRef("name", "type", randomAlphaOfLengthBetween(3, 8)));
assertFalse(RBACEngine.checkSameUserPermissions(GetApiKeyAction.NAME, request, authentication));
}
public void testSameUserPermissionForCrossClusterAccess() {
final CrossClusterAccessSubjectInfo ccaSubjectInfo = AuthenticationTestHelper.randomCrossClusterAccessSubjectInfo();
final Authentication authentication = AuthenticationTestHelper.builder().apiKey().build().toCrossClusterAccess(ccaSubjectInfo);
// HasPrivileges is allowed
final HasPrivilegesRequest hasPrivilegesRequest = new HasPrivilegesRequest();
hasPrivilegesRequest.username(ccaSubjectInfo.getAuthentication().getEffectiveSubject().getUser().principal());
assertTrue(RBACEngine.checkSameUserPermissions(HasPrivilegesAction.NAME, hasPrivilegesRequest, authentication));
// Other actions, e.g. GetUserPrivilegesAction, are not allowed even if they are allowed when performing within a single cluster
final GetUserPrivilegesRequest getUserPrivilegesRequest = new GetUserPrivilegesRequestBuilder(mock(ElasticsearchClient.class))
.username(ccaSubjectInfo.getAuthentication().getEffectiveSubject().getUser().principal())
.request();
assertFalse(RBACEngine.checkSameUserPermissions(GetUserPrivilegesAction.NAME, getUserPrivilegesRequest, authentication));
}
/**
* This tests that action names in the request are considered "matched" by the relevant named privilege
* (in this case that {@link TransportDeleteAction} and {@link TransportIndexAction} are satisfied by {@link IndexPrivilege#WRITE}).
*/
public void testNamedIndexPrivilegesMatchApplicableActions() throws Exception {
Role role = Role.builder(RESTRICTED_INDICES, "test1")
.cluster(Collections.singleton("all"), Collections.emptyList())
.add(IndexPrivilege.WRITE, "academy")
.build();
RBACAuthorizationInfo authzInfo = new RBACAuthorizationInfo(role, null);
final PrivilegesCheckResult result = hasPrivileges(
IndicesPrivileges.builder().indices("academy").privileges(TransportDeleteAction.NAME, TransportIndexAction.NAME).build(),
authzInfo,
List.of(),
new String[] { TransportClusterHealthAction.NAME }
);
assertThat(result, notNullValue());
assertThat(result.allChecksSuccess(), is(true));
assertThat(result.getDetails().cluster(), aMapWithSize(1));
assertThat(result.getDetails().cluster().get(TransportClusterHealthAction.NAME), equalTo(true));
assertThat(result.getDetails().index().values(), Matchers.iterableWithSize(1));
final ResourcePrivileges resourcePrivileges = result.getDetails().index().values().iterator().next();
assertThat(resourcePrivileges.getResource(), equalTo("academy"));
assertThat(resourcePrivileges.getPrivileges(), aMapWithSize(2));
assertThat(resourcePrivileges.getPrivileges().get(TransportDeleteAction.NAME), equalTo(true));
assertThat(resourcePrivileges.getPrivileges().get(TransportIndexAction.NAME), equalTo(true));
}
/**
* This tests that the action responds correctly when the user/role has some, but not all
* of the privileges being checked.
*/
public void testMatchSubsetOfPrivileges() throws Exception {
Role role = Role.builder(RESTRICTED_INDICES, "test2")
.cluster(Set.of("monitor"), Set.of())
.add(IndexPrivilege.INDEX, "academy")
.add(IndexPrivilege.WRITE, "initiative")
.build();
RBACAuthorizationInfo authzInfo = new RBACAuthorizationInfo(role, null);
PrivilegesCheckResult response = hasPrivileges(
IndicesPrivileges.builder().indices("academy", "initiative", "school").privileges("delete", "index", "manage").build(),
authzInfo,
List.of(),
new String[] { "monitor", "manage" }
);
assertThat(response.allChecksSuccess(), is(false));
assertThat(response.getDetails().cluster(), aMapWithSize(2));
assertThat(response.getDetails().cluster().get("monitor"), equalTo(true));
assertThat(response.getDetails().cluster().get("manage"), equalTo(false));
assertThat(response.getDetails().index().values(), Matchers.iterableWithSize(3));
final ResourcePrivileges academy = response.getDetails().index().get("academy");
final ResourcePrivileges initiative = response.getDetails().index().get("initiative");
final ResourcePrivileges school = response.getDetails().index().get("school");
assertThat(academy.getResource(), equalTo("academy"));
assertThat(academy.getPrivileges(), aMapWithSize(3));
assertThat(academy.getPrivileges().get("index"), equalTo(true)); // explicit
assertThat(academy.getPrivileges().get("delete"), equalTo(false));
assertThat(academy.getPrivileges().get("manage"), equalTo(false));
assertThat(initiative.getResource(), equalTo("initiative"));
assertThat(initiative.getPrivileges(), aMapWithSize(3));
assertThat(initiative.getPrivileges().get("index"), equalTo(true)); // implied by write
assertThat(initiative.getPrivileges().get("delete"), equalTo(true)); // implied by write
assertThat(initiative.getPrivileges().get("manage"), equalTo(false));
assertThat(school.getResource(), equalTo("school"));
assertThat(school.getPrivileges(), aMapWithSize(3));
assertThat(school.getPrivileges().get("index"), equalTo(false));
assertThat(school.getPrivileges().get("delete"), equalTo(false));
assertThat(school.getPrivileges().get("manage"), equalTo(false));
}
/**
* This tests that the action responds correctly when the user/role has none
* of the privileges being checked.
*/
public void testMatchNothing() throws Exception {
Role role = Role.builder(RESTRICTED_INDICES, "test3").cluster(Set.of("monitor"), Set.of()).build();
RBACAuthorizationInfo authzInfo = new RBACAuthorizationInfo(role, null);
final PrivilegesCheckResult response = hasPrivileges(
IndicesPrivileges.builder().indices("academy").privileges("read", "write").build(),
authzInfo,
Collections.emptyList(),
Strings.EMPTY_ARRAY
);
assertThat(response.allChecksSuccess(), is(false));
assertThat(response.getDetails().index().values(), Matchers.iterableWithSize(1));
final ResourcePrivileges result = response.getDetails().index().values().iterator().next();
assertThat(result.getResource(), equalTo("academy"));
assertThat(result.getPrivileges(), aMapWithSize(2));
assertThat(result.getPrivileges().get("read"), equalTo(false));
assertThat(result.getPrivileges().get("write"), equalTo(false));
}
/**
* Wildcards in the request are treated as
* <em>does the user have ___ privilege on every possible index that matches this pattern?</em>
* Or, expressed differently,
* <em>does the user have ___ privilege on a wildcard that covers (is a superset of) this pattern?</em>
*/
    public void testWildcardHandling() throws Exception {
        List<ApplicationPrivilegeDescriptor> privs = new ArrayList<>();
        // Register four kibana application privileges. Only "read" and "view-space" are granted to the role below;
        // the others are defined so their names resolve when referenced in the privilege check.
        final ApplicationPrivilege kibanaRead = defineApplicationPrivilege(
            privs,
            "kibana",
            "read",
            "data:read/*",
            "action:login",
            "action:view/dashboard"
        );
        final ApplicationPrivilege kibanaWrite = defineApplicationPrivilege(
            privs,
            "kibana",
            "write",
            "data:write/*",
            "action:login",
            "action:view/dashboard"
        );
        final ApplicationPrivilege kibanaAdmin = defineApplicationPrivilege(privs, "kibana", "admin", "action:login", "action:manage/*");
        final ApplicationPrivilege kibanaViewSpace = defineApplicationPrivilege(
            privs,
            "kibana",
            "view-space",
            "action:login",
            "space:view/*"
        );
        // Index privileges are granted on wildcard patterns; per the class doc above, a check against a pattern
        // succeeds only if a granted pattern is a superset of the requested one.
        Role role = Role.builder(RESTRICTED_INDICES, "test3")
            .add(IndexPrivilege.ALL, "logstash-*", "foo?")
            .add(IndexPrivilege.READ, "abc*")
            .add(IndexPrivilege.WRITE, "*xyz")
            .addApplicationPrivilege(kibanaRead, Collections.singleton("*"))
            .addApplicationPrivilege(kibanaViewSpace, newHashSet("space/engineering/*", "space/builds"))
            .build();
        RBACAuthorizationInfo authzInfo = new RBACAuthorizationInfo(role, null);
        final PrivilegesCheckResult response = hasPrivileges(
            new IndicesPrivileges[] {
                IndicesPrivileges.builder()
                    .indices("logstash-2016-*")
                    .privileges("write") // Yes, because (ALL,"logstash-*")
                    .build(),
                IndicesPrivileges.builder()
                    .indices("logstash-*")
                    .privileges("read") // Yes, because (ALL,"logstash-*")
                    .build(),
                IndicesPrivileges.builder()
                    .indices("log*")
                    .privileges("manage") // No, because "log*" includes indices that "logstash-*" does not
                    .build(),
                IndicesPrivileges.builder()
                    .indices("foo*", "foo?")
                    .privileges("read") // Yes, "foo?", but not "foo*", because "foo*" > "foo?"
                    .build(),
                IndicesPrivileges.builder()
                    .indices("abcd*")
                    .privileges("read", "write") // read = Yes, because (READ, "abc*"), write = No
                    .build(),
                IndicesPrivileges.builder()
                    .indices("abc*xyz")
                    .privileges("read", "write", "manage") // read = Yes ( READ "abc*"), write = Yes (WRITE, "*xyz"), manage = No
                    .build(),
                IndicesPrivileges.builder()
                    .indices("a*xyz")
                    .privileges("read", "write", "manage") // read = No, write = Yes (WRITE, "*xyz"), manage = No
                    .build() },
            new ApplicationResourcePrivileges[] {
                ApplicationResourcePrivileges.builder()
                    .resources("*")
                    .application("kibana")
                    .privileges(Sets.union(kibanaRead.name(), kibanaWrite.name())) // read = Yes, write = No
                    .build(),
                ApplicationResourcePrivileges.builder()
                    .resources("space/engineering/project-*", "space/*") // project-* = Yes, space/* = No
                    .application("kibana")
                    .privileges("space:view/dashboard")
                    .build() },
            authzInfo,
            privs,
            new String[0]
        );
        // At least one check above fails, so the overall result is false; per-index details cover all 8 patterns.
        assertThat(response, notNullValue());
        assertThat(response.allChecksSuccess(), is(false));
        assertThat(response.getDetails().index().values(), Matchers.iterableWithSize(8));
        assertThat(
            response.getDetails().index().values(),
            containsInAnyOrder(
                ResourcePrivileges.builder("logstash-2016-*").addPrivileges(Collections.singletonMap("write", true)).build(),
                ResourcePrivileges.builder("logstash-*").addPrivileges(Collections.singletonMap("read", true)).build(),
                ResourcePrivileges.builder("log*").addPrivileges(Collections.singletonMap("manage", false)).build(),
                ResourcePrivileges.builder("foo?").addPrivileges(Collections.singletonMap("read", true)).build(),
                ResourcePrivileges.builder("foo*").addPrivileges(Collections.singletonMap("read", false)).build(),
                ResourcePrivileges.builder("abcd*").addPrivileges(Map.of("read", true, "write", false)).build(),
                ResourcePrivileges.builder("abc*xyz").addPrivileges(Map.of("read", true, "write", true, "manage", false)).build(),
                ResourcePrivileges.builder("a*xyz").addPrivileges(Map.of("read", false, "write", true, "manage", false)).build()
            )
        );
        // Application details: one entry per requested resource pattern under the single "kibana" application.
        assertThat(response.getDetails().application().entrySet(), Matchers.iterableWithSize(1));
        final Collection<ResourcePrivileges> kibanaPrivileges = response.getDetails().application().get("kibana");
        assertThat(kibanaPrivileges, Matchers.iterableWithSize(3));
        assertThat(
            Strings.collectionToCommaDelimitedString(kibanaPrivileges),
            kibanaPrivileges,
            containsInAnyOrder(
                ResourcePrivileges.builder("*").addPrivileges(Map.of("read", true, "write", false)).build(),
                ResourcePrivileges.builder("space/engineering/project-*")
                    .addPrivileges(Collections.singletonMap("space:view/dashboard", true))
                    .build(),
                ResourcePrivileges.builder("space/*").addPrivileges(Collections.singletonMap("space:view/dashboard", false)).build()
            )
        );
    }
    /**
     * Checks privileges against a role whose index privileges are granted by several groups with different index
     * patterns: each requested privilege is reported per index, combining whatever the overlapping groups grant.
     */
    public void testCheckingIndexPermissionsDefinedOnDifferentPatterns() throws Exception {
        final RBACAuthorizationInfo authzInfo = new RBACAuthorizationInfo(
            Role.builder(RESTRICTED_INDICES, "test-multiple")
                .add(IndexPrivilege.CREATE_DOC, "*")
                .add(IndexPrivilege.INDEX, "apache-*", "unrelated", "something_else*")
                .add(IndexPrivilege.DELETE, "apache-2016-*", ".security*")
                .build(),
            null
        );
        // Shuffle request order to show the outcome does not depend on iteration order.
        List<String> indices = new ArrayList<>(3);
        indices.add("apache-2016-12");
        indices.add("apache-2017-01");
        indices.add("other");
        Collections.shuffle(indices, random());
        List<String> privileges = new ArrayList<>(3);
        privileges.add("create_doc");
        privileges.add("index");
        privileges.add("delete");
        Collections.shuffle(privileges, random());
        PrivilegesCheckResult response = hasPrivileges(
            IndicesPrivileges.builder().indices(indices).privileges(privileges).allowRestrictedIndices(randomBoolean()).build(),
            authzInfo,
            Collections.emptyList(),
            Strings.EMPTY_ARRAY
        );
        // "delete" covers only apache-2016-*, and "index" does not cover "other", so the overall check fails.
        assertThat(response.allChecksSuccess(), is(false));
        assertThat(response.getDetails().index().values(), Matchers.iterableWithSize(3));
        assertThat(
            response.getDetails().index().values(),
            containsInAnyOrder(
                ResourcePrivileges.builder("apache-2016-12")
                    .addPrivileges(Map.of("create_doc", true, "index", true, "delete", true))
                    .build(),
                ResourcePrivileges.builder("apache-2017-01")
                    .addPrivileges(Map.of("create_doc", true, "index", true, "delete", false))
                    .build(),
                ResourcePrivileges.builder("other").addPrivileges(Map.of("create_doc", true, "index", false, "delete", false)).build()
            )
        );
        // Second request: only the apache-* indices, checking privileges the role grants on all of them —
        // every check is expected to succeed.
        indices = new ArrayList<>(2);
        indices.add("apache-2016-12");
        indices.add("apache-2017-01");
        Collections.shuffle(indices, random());
        privileges = new ArrayList<>(3);
        privileges.add("create");
        privileges.add("create_doc");
        privileges.add("index");
        Collections.shuffle(privileges, random());
        response = hasPrivileges(
            IndicesPrivileges.builder().indices(indices).privileges(privileges).allowRestrictedIndices(randomBoolean()).build(),
            authzInfo,
            Collections.emptyList(),
            Strings.EMPTY_ARRAY
        );
        assertThat(response.allChecksSuccess(), is(true));
        assertThat(response.getDetails().index().values(), Matchers.iterableWithSize(2));
        assertThat(
            response.getDetails().index().values(),
            containsInAnyOrder(
                ResourcePrivileges.builder("apache-2016-12")
                    .addPrivileges(Map.of("create_doc", true, "create", true, "index", true))
                    .build(),
                ResourcePrivileges.builder("apache-2017-01")
                    .addPrivileges(Map.of("create_doc", true, "create", true, "index", true))
                    .build()
            )
        );
    }
    /**
     * Checks privilege requests whose index patterns overlap the restricted (system) index namespace, covering
     * all combinations of the role group's and the request's {@code allowRestrictedIndices} flags.
     * NOTE(review): relies on {@code XPackPlugin.ASYNC_RESULTS_INDEX} naming a restricted index — confirm.
     */
    public void testCheckRestrictedIndexPatternPermission() throws Exception {
        // A proper prefix of the restricted async-results index name, used as the role's granted pattern.
        final String patternPrefix = XPackPlugin.ASYNC_RESULTS_INDEX.substring(
            0,
            randomIntBetween(2, XPackPlugin.ASYNC_RESULTS_INDEX.length() - 2)
        );
        // Role grants "index" on patternPrefix* WITHOUT access to restricted indices.
        Role role = Role.builder(RESTRICTED_INDICES, "role")
            .add(FieldPermissions.DEFAULT, null, IndexPrivilege.INDEX, false, patternPrefix + "*")
            .build();
        RBACAuthorizationInfo authzInfo = new RBACAuthorizationInfo(role, null);
        // A shorter-prefix pattern is broader than what was granted -> denied, regardless of the request flag.
        String prePatternPrefix = patternPrefix.substring(0, randomIntBetween(1, patternPrefix.length() - 1)) + "*";
        PrivilegesCheckResult response = hasPrivileges(
            IndicesPrivileges.builder().indices(prePatternPrefix).allowRestrictedIndices(randomBoolean()).privileges("index").build(),
            authzInfo,
            Collections.emptyList(),
            Strings.EMPTY_ARRAY
        );
        assertThat(response.allChecksSuccess(), is(false));
        assertThat(response.getDetails().index().values(), Matchers.iterableWithSize(1));
        assertThat(
            response.getDetails().index().values(),
            containsInAnyOrder(ResourcePrivileges.builder(prePatternPrefix).addPrivileges(Map.of("index", false)).build())
        );
        // A longer-prefix pattern is narrower than the granted one: allowed while restricted indices are excluded...
        String matchesPatternPrefix = XPackPlugin.ASYNC_RESULTS_INDEX.substring(0, patternPrefix.length() + 1);
        response = hasPrivileges(
            IndicesPrivileges.builder().indices(matchesPatternPrefix + "*").allowRestrictedIndices(false).privileges("index").build(),
            authzInfo,
            Collections.emptyList(),
            Strings.EMPTY_ARRAY
        );
        assertThat(response.allChecksSuccess(), is(true));
        assertThat(response.getDetails().index().values(), Matchers.iterableWithSize(1));
        assertThat(
            response.getDetails().index().values(),
            containsInAnyOrder(ResourcePrivileges.builder(matchesPatternPrefix + "*").addPrivileges(Map.of("index", true)).build())
        );
        // ... but denied when the request asks to also cover restricted indices, which the role does not grant.
        response = hasPrivileges(
            IndicesPrivileges.builder().indices(matchesPatternPrefix + "*").allowRestrictedIndices(true).privileges("index").build(),
            authzInfo,
            Collections.emptyList(),
            Strings.EMPTY_ARRAY
        );
        assertThat(response.allChecksSuccess(), is(false));
        assertThat(response.getDetails().index().values(), Matchers.iterableWithSize(1));
        assertThat(
            response.getDetails().index().values(),
            containsInAnyOrder(ResourcePrivileges.builder(matchesPatternPrefix + "*").addPrivileges(Map.of("index", false)).build())
        );
        // A concrete (non-wildcard) name that is merely a prefix of the restricted index is an ordinary index -> allowed.
        response = hasPrivileges(
            IndicesPrivileges.builder().indices(matchesPatternPrefix).allowRestrictedIndices(randomBoolean()).privileges("index").build(),
            authzInfo,
            Collections.emptyList(),
            Strings.EMPTY_ARRAY
        );
        assertThat(response.allChecksSuccess(), is(true));
        assertThat(response.getDetails().index().values(), Matchers.iterableWithSize(1));
        assertThat(
            response.getDetails().index().values(),
            containsInAnyOrder(ResourcePrivileges.builder(matchesPatternPrefix).addPrivileges(Map.of("index", true)).build())
        );
        // Patterns and names at or beyond the full restricted index name are denied in every flag combination,
        // because the role group excludes restricted indices.
        final String restrictedIndexMatchingWildcard = XPackPlugin.ASYNC_RESULTS_INDEX + randomAlphaOfLengthBetween(0, 2);
        response = hasPrivileges(
            IndicesPrivileges.builder()
                .indices(restrictedIndexMatchingWildcard + "*")
                .allowRestrictedIndices(true)
                .privileges("index")
                .build(),
            authzInfo,
            Collections.emptyList(),
            Strings.EMPTY_ARRAY
        );
        assertThat(response.allChecksSuccess(), is(false));
        assertThat(response.getDetails().index().values(), Matchers.iterableWithSize(1));
        assertThat(
            response.getDetails().index().values(),
            containsInAnyOrder(
                ResourcePrivileges.builder(restrictedIndexMatchingWildcard + "*").addPrivileges(Map.of("index", false)).build()
            )
        );
        response = hasPrivileges(
            IndicesPrivileges.builder()
                .indices(restrictedIndexMatchingWildcard + "*")
                .allowRestrictedIndices(false)
                .privileges("index")
                .build(),
            authzInfo,
            Collections.emptyList(),
            Strings.EMPTY_ARRAY
        );
        assertThat(response.allChecksSuccess(), is(false));
        assertThat(response.getDetails().index().values(), Matchers.iterableWithSize(1));
        assertThat(
            response.getDetails().index().values(),
            containsInAnyOrder(
                ResourcePrivileges.builder(restrictedIndexMatchingWildcard + "*").addPrivileges(Map.of("index", false)).build()
            )
        );
        response = hasPrivileges(
            IndicesPrivileges.builder()
                .indices(restrictedIndexMatchingWildcard)
                .allowRestrictedIndices(randomBoolean())
                .privileges("index")
                .build(),
            authzInfo,
            Collections.emptyList(),
            Strings.EMPTY_ARRAY
        );
        assertThat(response.allChecksSuccess(), is(false));
        assertThat(response.getDetails().index().values(), Matchers.iterableWithSize(1));
        assertThat(
            response.getDetails().index().values(),
            containsInAnyOrder(ResourcePrivileges.builder(restrictedIndexMatchingWildcard).addPrivileges(Map.of("index", false)).build())
        );
        // Same granted pattern, but now the role group DOES cover restricted indices -> allowed either way.
        role = Role.builder(RESTRICTED_INDICES, "role")
            .add(FieldPermissions.DEFAULT, null, IndexPrivilege.INDEX, true, patternPrefix + "*")
            .build();
        authzInfo = new RBACAuthorizationInfo(role, null);
        response = hasPrivileges(
            IndicesPrivileges.builder()
                .indices(matchesPatternPrefix + "*")
                .allowRestrictedIndices(randomBoolean())
                .privileges("index")
                .build(),
            authzInfo,
            Collections.emptyList(),
            Strings.EMPTY_ARRAY
        );
        assertThat(response.allChecksSuccess(), is(true));
        assertThat(response.getDetails().index().values(), Matchers.iterableWithSize(1));
        assertThat(
            response.getDetails().index().values(),
            containsInAnyOrder(ResourcePrivileges.builder(matchesPatternPrefix + "*").addPrivileges(Map.of("index", true)).build())
        );
    }
    /**
     * Checks privileges on an index name that is itself a restricted index: for the explicitly named restricted
     * index the outcome is decided by each role group's own restricted-indices flag — the request's
     * allowRestrictedIndices flag yields the same result whether false or true.
     */
    public void testCheckExplicitRestrictedIndexPermissions() throws Exception {
        // Randomize whether each role group covers restricted indices; expectations below track these flags.
        final boolean restrictedIndexPermission = randomBoolean();
        final boolean restrictedMonitorPermission = randomBoolean();
        Role role = Role.builder(RESTRICTED_INDICES, "role")
            .add(FieldPermissions.DEFAULT, null, IndexPrivilege.INDEX, restrictedIndexPermission, ".sec*")
            .add(FieldPermissions.DEFAULT, null, IndexPrivilege.MONITOR, restrictedMonitorPermission, ".security*")
            .build();
        RBACAuthorizationInfo authzInfo = new RBACAuthorizationInfo(role, null);
        String explicitRestrictedIndex = randomFrom(TestRestrictedIndices.SAMPLE_RESTRICTED_NAMES);
        PrivilegesCheckResult response = hasPrivileges(
            IndicesPrivileges.builder()
                .indices(new String[] { ".secret-non-restricted", explicitRestrictedIndex })
                .privileges("index", "monitor")
                .allowRestrictedIndices(false) // explicit false for test
                .build(),
            authzInfo,
            Collections.emptyList(),
            Strings.EMPTY_ARRAY
        );
        assertThat(response.allChecksSuccess(), is(false));
        assertThat(response.getDetails().index().values(), Matchers.iterableWithSize(2));
        assertThat(
            response.getDetails().index().values(),
            containsInAnyOrder(
                ResourcePrivileges.builder(".secret-non-restricted") // matches ".sec*" but not ".security*"
                    .addPrivileges(Map.of("index", true, "monitor", false))
                    .build(),
                ResourcePrivileges.builder(explicitRestrictedIndex) // matches both ".sec*" and ".security*"
                    .addPrivileges(Map.of("index", restrictedIndexPermission, "monitor", restrictedMonitorPermission))
                    .build()
            )
        );
        // Repeat with allowRestrictedIndices=true: expectations are identical for the explicit restricted name.
        explicitRestrictedIndex = randomFrom(TestRestrictedIndices.SAMPLE_RESTRICTED_NAMES);
        response = hasPrivileges(
            IndicesPrivileges.builder()
                .indices(new String[] { ".secret-non-restricted", explicitRestrictedIndex })
                .privileges("index", "monitor")
                .allowRestrictedIndices(true) // explicit true for test
                .build(),
            authzInfo,
            Collections.emptyList(),
            Strings.EMPTY_ARRAY
        );
        assertThat(response.allChecksSuccess(), is(false));
        assertThat(response.getDetails().index().values(), Matchers.iterableWithSize(2));
        assertThat(
            response.getDetails().index().values(),
            containsInAnyOrder(
                ResourcePrivileges.builder(".secret-non-restricted") // matches ".sec*" but not ".security*"
                    .addPrivileges(Map.of("index", true, "monitor", false))
                    .build(),
                ResourcePrivileges.builder(explicitRestrictedIndex) // matches both ".sec*" and ".security*"
                    .addPrivileges(Map.of("index", restrictedIndexPermission, "monitor", restrictedMonitorPermission))
                    .build()
            )
        );
    }
    /**
     * Checks wildcard patterns that span restricted indices: the outcome combines the request's
     * allowRestrictedIndices flag with each role group's own restricted-indices setting.
     */
    public void testCheckRestrictedIndexWildcardPermissions() throws Exception {
        // Group 1: "index" on ".sec*" excluding restricted indices; group 2: "monitor" on ".security*" including them.
        Role role = Role.builder(RESTRICTED_INDICES, "role")
            .add(FieldPermissions.DEFAULT, null, IndexPrivilege.INDEX, false, ".sec*")
            .add(FieldPermissions.DEFAULT, null, IndexPrivilege.MONITOR, true, ".security*")
            .build();
        RBACAuthorizationInfo authzInfo = new RBACAuthorizationInfo(role, null);
        // Request without restricted indices (default): only the non-restricted slice of each pattern is compared.
        PrivilegesCheckResult response = hasPrivileges(
            IndicesPrivileges.builder().indices(".sec*", ".security*").privileges("index", "monitor").build(),
            authzInfo,
            Collections.emptyList(),
            Strings.EMPTY_ARRAY
        );
        assertThat(response.allChecksSuccess(), is(false));
        assertThat(response.getDetails().index().values(), Matchers.iterableWithSize(2));
        assertThat(
            response.getDetails().index().values(),
            containsInAnyOrder(
                ResourcePrivileges.builder(".sec*").addPrivileges(Map.of("index", true, "monitor", false)).build(),
                ResourcePrivileges.builder(".security*").addPrivileges(Map.of("index", true, "monitor", true)).build()
            )
        );
        // Request including restricted indices: "index" is now denied because its group excludes them.
        response = hasPrivileges(
            IndicesPrivileges.builder().indices(".sec*", ".security*").privileges("index", "monitor").allowRestrictedIndices(true).build(),
            authzInfo,
            Collections.emptyList(),
            Strings.EMPTY_ARRAY
        );
        assertThat(response.allChecksSuccess(), is(false));
        assertThat(response.getDetails().index().values(), Matchers.iterableWithSize(2));
        assertThat(
            response.getDetails().index().values(),
            containsInAnyOrder(
                ResourcePrivileges.builder(".sec*").addPrivileges(Map.of("index", false, "monitor", false)).build(),
                ResourcePrivileges.builder(".security*").addPrivileges(Map.of("index", false, "monitor", true)).build()
            )
        );
        // Flip the groups' restricted-indices flags and repeat both requests.
        role = Role.builder(RESTRICTED_INDICES, "role")
            .add(FieldPermissions.DEFAULT, null, IndexPrivilege.INDEX, true, ".sec*")
            .add(FieldPermissions.DEFAULT, null, IndexPrivilege.MONITOR, false, ".security*")
            .build();
        authzInfo = new RBACAuthorizationInfo(role, null);
        response = hasPrivileges(
            IndicesPrivileges.builder().indices(".sec*", ".security*").privileges("index", "monitor").build(),
            authzInfo,
            Collections.emptyList(),
            Strings.EMPTY_ARRAY
        );
        assertThat(response.allChecksSuccess(), is(false));
        assertThat(response.getDetails().index().values(), Matchers.iterableWithSize(2));
        assertThat(
            response.getDetails().index().values(),
            containsInAnyOrder(
                ResourcePrivileges.builder(".sec*").addPrivileges(Map.of("index", true, "monitor", false)).build(),
                ResourcePrivileges.builder(".security*").addPrivileges(Map.of("index", true, "monitor", true)).build()
            )
        );
        // With restricted indices requested, "monitor" is denied because its group now excludes them.
        response = hasPrivileges(
            IndicesPrivileges.builder().indices(".sec*", ".security*").privileges("index", "monitor").allowRestrictedIndices(true).build(),
            authzInfo,
            Collections.emptyList(),
            Strings.EMPTY_ARRAY
        );
        assertThat(response.allChecksSuccess(), is(false));
        assertThat(response.getDetails().index().values(), Matchers.iterableWithSize(2));
        assertThat(
            response.getDetails().index().values(),
            containsInAnyOrder(
                ResourcePrivileges.builder(".sec*").addPrivileges(Map.of("index", true, "monitor", false)).build(),
                ResourcePrivileges.builder(".security*").addPrivileges(Map.of("index", true, "monitor", false)).build()
            )
        );
    }
public void testCheckingApplicationPrivilegesOnDifferentApplicationsAndResources() throws Exception {
List<ApplicationPrivilegeDescriptor> privs = new ArrayList<>();
final ApplicationPrivilege app1Read = defineApplicationPrivilege(privs, "app1", "read", "data:read/*");
final ApplicationPrivilege app1Write = defineApplicationPrivilege(privs, "app1", "write", "data:write/*");
final ApplicationPrivilege app1All = defineApplicationPrivilege(privs, "app1", "all", "*");
final ApplicationPrivilege app2Read = defineApplicationPrivilege(privs, "app2", "read", "data:read/*");
final ApplicationPrivilege app2Write = defineApplicationPrivilege(privs, "app2", "write", "data:write/*");
final ApplicationPrivilege app2All = defineApplicationPrivilege(privs, "app2", "all", "*");
Role role = Role.builder(RESTRICTED_INDICES, "test-role")
.addApplicationPrivilege(app1Read, Collections.singleton("foo/*"))
.addApplicationPrivilege(app1All, Collections.singleton("foo/bar/baz"))
.addApplicationPrivilege(app2Read, Collections.singleton("foo/bar/*"))
.addApplicationPrivilege(app2Write, Collections.singleton("*/bar/*"))
.build();
RBACAuthorizationInfo authzInfo = new RBACAuthorizationInfo(role, null);
List<String> resources = new ArrayList<>();
resources.add("foo/1");
resources.add("foo/bar/2");
resources.add("foo/bar/baz");
resources.add("baz/bar/foo");
Collections.shuffle(resources, random());
List<String> privileges = new ArrayList<>();
privileges.add("read");
privileges.add("write");
privileges.add("all");
Collections.shuffle(privileges, random());
final PrivilegesCheckResult response = hasPrivileges(
new IndicesPrivileges[0],
new ApplicationResourcePrivileges[] {
ApplicationResourcePrivileges.builder().application("app1").resources(resources).privileges(privileges).build(),
ApplicationResourcePrivileges.builder().application("app2").resources(resources).privileges(privileges).build() },
authzInfo,
privs,
Strings.EMPTY_ARRAY
);
assertThat(response.allChecksSuccess(), is(false));
assertThat(response.getDetails().index().values(), Matchers.emptyIterable());
assertThat(response.getDetails().application().entrySet(), Matchers.iterableWithSize(2));
final Collection<ResourcePrivileges> app1 = response.getDetails().application().get("app1");
assertThat(app1, Matchers.iterableWithSize(4));
assertThat(
Strings.collectionToCommaDelimitedString(app1),
app1,
containsInAnyOrder(
ResourcePrivileges.builder("foo/1").addPrivileges(Map.of("read", true, "write", false, "all", false)).build(),
ResourcePrivileges.builder("foo/bar/2").addPrivileges(Map.of("read", true, "write", false, "all", false)).build(),
ResourcePrivileges.builder("foo/bar/baz").addPrivileges(Map.of("read", true, "write", true, "all", true)).build(),
ResourcePrivileges.builder("baz/bar/foo").addPrivileges(Map.of("read", false, "write", false, "all", false)).build()
)
);
final Collection<ResourcePrivileges> app2 = response.getDetails().application().get("app2");
assertThat(app2, Matchers.iterableWithSize(4));
assertThat(
Strings.collectionToCommaDelimitedString(app2),
app2,
containsInAnyOrder(
ResourcePrivileges.builder("foo/1").addPrivileges(Map.of("read", false, "write", false, "all", false)).build(),
ResourcePrivileges.builder("foo/bar/2").addPrivileges(Map.of("read", true, "write", true, "all", false)).build(),
ResourcePrivileges.builder("foo/bar/baz").addPrivileges(Map.of("read", true, "write", true, "all", false)).build(),
ResourcePrivileges.builder("baz/bar/foo").addPrivileges(Map.of("read", false, "write", true, "all", false)).build()
)
);
}
public void testCheckingApplicationPrivilegesWithComplexNames() throws Exception {
final String appName = randomAlphaOfLength(1).toLowerCase(Locale.ROOT) + randomAlphaOfLengthBetween(3, 10);
final String action1 = randomAlphaOfLength(1).toLowerCase(Locale.ROOT) + randomAlphaOfLengthBetween(2, 5);
final String action2 = randomAlphaOfLength(1).toLowerCase(Locale.ROOT) + randomAlphaOfLengthBetween(6, 9);
final List<ApplicationPrivilegeDescriptor> privs = new ArrayList<>();
final ApplicationPrivilege priv1 = defineApplicationPrivilege(privs, appName, action1, "DATA:read/*", "ACTION:" + action1);
final ApplicationPrivilege priv2 = defineApplicationPrivilege(privs, appName, action2, "DATA:read/*", "ACTION:" + action2);
Role role = Role.builder(RESTRICTED_INDICES, "test-write")
.addApplicationPrivilege(priv1, Collections.singleton("user/*/name"))
.build();
RBACAuthorizationInfo authzInfo = new RBACAuthorizationInfo(role, null);
final PrivilegesCheckResult response = hasPrivileges(
new IndicesPrivileges[0],
new ApplicationResourcePrivileges[] {
ApplicationResourcePrivileges.builder()
.application(appName)
.resources("user/hawkeye/name")
.privileges("DATA:read/user/*", "ACTION:" + action1, "ACTION:" + action2, action1, action2)
.build() },
authzInfo,
privs,
"monitor"
);
assertThat(response.allChecksSuccess(), is(false));
assertThat(response.getDetails().application().keySet(), containsInAnyOrder(appName));
assertThat(response.getDetails().application().get(appName), iterableWithSize(1));
assertThat(
response.getDetails().application().get(appName),
containsInAnyOrder(
ResourcePrivileges.builder("user/hawkeye/name")
.addPrivileges(
Map.ofEntries(
entry("DATA:read/user/*", true),
entry("ACTION:" + action1, true),
entry("ACTION:" + action2, false),
entry(action1, true),
entry(action2, false)
)
)
.build()
)
);
}
    /**
     * Verifies that a privilege check result is cached on the role: a repeat of the identical check is served
     * from the cache (even after the underlying check is stubbed to throw), is not re-cached, and a different
     * check bypasses the cache and hits the (now throwing) role method.
     */
    public void testCheckPrivilegesWithCache() throws Exception {
        final List<ApplicationPrivilegeDescriptor> privs = new ArrayList<>();
        final String appName = randomAlphaOfLength(1).toLowerCase(Locale.ROOT) + randomAlphaOfLengthBetween(3, 10);
        final String privilegeName = randomAlphaOfLength(1).toLowerCase(Locale.ROOT) + randomAlphaOfLengthBetween(3, 10);
        final String action1 = randomAlphaOfLength(1).toLowerCase(Locale.ROOT) + randomAlphaOfLengthBetween(2, 5);
        final ApplicationPrivilege priv1 = defineApplicationPrivilege(privs, appName, privilegeName, "DATA:read/*", "ACTION:" + action1);
        // Spy the role so caching interactions can be verified and the check method can be stubbed.
        SimpleRole role = spy(
            Role.builder(RESTRICTED_INDICES, "test-write").addApplicationPrivilege(priv1, Collections.singleton("user/*/name")).build()
        );
        RBACAuthorizationInfo authzInfo = new RBACAuthorizationInfo(role, null);
        // 1st check privileges
        final PrivilegesToCheck privilegesToCheck1 = new PrivilegesToCheck(
            new String[0],
            new IndicesPrivileges[0],
            new ApplicationResourcePrivileges[] {
                ApplicationResourcePrivileges.builder()
                    .application(appName)
                    .resources("user/hawkeye/name")
                    .privileges("DATA:read/user/*", "ACTION:" + action1)
                    .build() },
            randomBoolean()
        );
        final PlainActionFuture<PrivilegesCheckResult> future1 = new PlainActionFuture<>();
        engine.checkPrivileges(authzInfo, privilegesToCheck1, privs, future1);
        final PrivilegesCheckResult privilegesCheckResult1 = future1.actionGet();
        // Result should be cached
        verify(role).cacheHasPrivileges(any(), eq(privilegesToCheck1), eq(privilegesCheckResult1));
        // Stall the check so that we are sure cache is used
        final RuntimeException stallCheckException = new RuntimeException("you shall not pass");
        doThrow(stallCheckException).when(role).checkApplicationResourcePrivileges(anyString(), any(), any(), any(), any());
        Mockito.clearInvocations(role);
        // 2nd identical check: must come from the cache (the stub would throw otherwise) and equal the first result.
        final PlainActionFuture<PrivilegesCheckResult> future2 = new PlainActionFuture<>();
        engine.checkPrivileges(authzInfo, privilegesToCheck1, privs, future2);
        final PrivilegesCheckResult privilegesCheckResult2 = future2.actionGet();
        assertThat(privilegesCheckResult2, is(privilegesCheckResult1));
        // Cached result won't be cached again
        verify(role, never()).cacheHasPrivileges(any(), any(), any());
        // Test a new check does not go through cache (and hence will be stalled by the exception)
        final PrivilegesToCheck privilegesToCheck2 = new PrivilegesToCheck(
            new String[0],
            new IndicesPrivileges[0],
            new ApplicationResourcePrivileges[] {
                ApplicationResourcePrivileges.builder()
                    .application(appName)
                    .resources("user/hawkeye/name")
                    .privileges("DATA:read/user/*")
                    .build() },
            randomBoolean()
        );
        final RuntimeException e1 = expectThrows(
            RuntimeException.class,
            () -> engine.checkPrivileges(authzInfo, privilegesToCheck2, privs, new PlainActionFuture<>())
        );
        assertThat(e1, is(stallCheckException));
    }
    /**
     * Verifies that allChecksSuccess() is true only when EVERY requested cluster, index, and application
     * privilege is granted — a single failing element makes the whole check fail.
     */
    public void testIsCompleteMatch() throws Exception {
        final List<ApplicationPrivilegeDescriptor> privs = new ArrayList<>();
        final ApplicationPrivilege kibanaRead = defineApplicationPrivilege(privs, "kibana", "read", "data:read/*");
        final ApplicationPrivilege kibanaWrite = defineApplicationPrivilege(privs, "kibana", "write", "data:write/*");
        // Role: cluster "monitor", READ on read-*, ALL on all-*, kibana "read" on every resource.
        Role role = Role.builder(RESTRICTED_INDICES, "test-write")
            .cluster(Set.of("monitor"), Set.of())
            .add(IndexPrivilege.READ, "read-*")
            .add(IndexPrivilege.ALL, "all-*")
            .addApplicationPrivilege(kibanaRead, Collections.singleton("*"))
            .build();
        RBACAuthorizationInfo authzInfo = new RBACAuthorizationInfo(role, null);
        // All requested privileges granted -> true.
        assertThat(
            hasPrivileges(indexPrivileges("read", "read-123", "read-456", "all-999"), authzInfo, privs, "monitor").allChecksSuccess(),
            is(true)
        );
        // Cluster privilege "manage" not granted -> false.
        assertThat(
            hasPrivileges(indexPrivileges("read", "read-123", "read-456", "all-999"), authzInfo, privs, "manage").allChecksSuccess(),
            is(false)
        );
        // Index privilege "write" not granted on read-* -> false.
        assertThat(
            hasPrivileges(indexPrivileges("write", "read-123", "read-456", "all-999"), authzInfo, privs, "monitor").allChecksSuccess(),
            is(false)
        );
        assertThat(
            hasPrivileges(indexPrivileges("write", "read-123", "read-456", "all-999"), authzInfo, privs, "manage").allChecksSuccess(),
            is(false)
        );
        // Everything granted, including the application privilege -> true.
        assertThat(
            hasPrivileges(
                new IndicesPrivileges[] {
                    IndicesPrivileges.builder().indices("read-a").privileges("read").build(),
                    IndicesPrivileges.builder().indices("all-b").privileges("read", "write").build() },
                new ApplicationResourcePrivileges[] {
                    ApplicationResourcePrivileges.builder().application("kibana").resources("*").privileges("read").build() },
                authzInfo,
                privs,
                "monitor"
            ).allChecksSuccess(),
            is(true)
        );
        // Application "write" privilege not granted -> false despite everything else passing.
        assertThat(
            hasPrivileges(
                new IndicesPrivileges[] { indexPrivileges("read", "read-123", "read-456", "all-999") },
                new ApplicationResourcePrivileges[] {
                    ApplicationResourcePrivileges.builder().application("kibana").resources("*").privileges("read").build(),
                    ApplicationResourcePrivileges.builder().application("kibana").resources("*").privileges("write").build() },
                authzInfo,
                privs,
                "monitor"
            ).allChecksSuccess(),
            is(false)
        );
    }
    /**
     * Builds a role covering every privilege category (cluster, configurable cluster, index with DLS/FLS,
     * application, run-as, remote index, remote cluster) and verifies that
     * {@link RBACEngine#buildUserPrivilegesResponseObject} reflects each of them in the GetUserPrivileges response.
     */
    public void testBuildUserPrivilegeResponse() {
        final ManageApplicationPrivileges manageApplicationPrivileges = new ManageApplicationPrivileges(Sets.newHashSet("app01", "app02"));
        // DLS query shared by the local index-4/5 group and the remote-2/3 group.
        final BytesArray query = new BytesArray("""
            {"term":{"public":true}}""");
        final Role role = Role.builder(RESTRICTED_INDICES, "test", "role")
            .cluster(Sets.newHashSet("monitor", "manage_watcher"), Collections.singleton(manageApplicationPrivileges))
            .add(IndexPrivilegeTests.resolvePrivilegeAndAssertSingleton(Sets.newHashSet("read", "write")), "index-1")
            .add(IndexPrivilege.ALL, "index-2", "index-3")
            .add(
                new FieldPermissions(new FieldPermissionsDefinition(new String[] { "public.*" }, new String[0])),
                Collections.singleton(query),
                IndexPrivilege.READ,
                randomBoolean(),
                "index-4",
                "index-5"
            )
            .addApplicationPrivilege(ApplicationPrivilegeTests.createPrivilege("app01", "read", "data:read"), Collections.singleton("*"))
            .runAs(new Privilege(Sets.newHashSet("user01", "user02"), "user01", "user02"))
            .addRemoteIndicesGroup(Set.of("remote-1"), FieldPermissions.DEFAULT, null, IndexPrivilege.READ, false, "remote-index-1")
            .addRemoteIndicesGroup(
                Set.of("remote-2", "remote-3"),
                new FieldPermissions(new FieldPermissionsDefinition(new String[] { "public.*" }, new String[0])),
                Collections.singleton(query),
                IndexPrivilege.READ,
                randomBoolean(),
                "remote-index-2",
                "remote-index-3"
            )
            .addRemoteClusterPermissions(
                new RemoteClusterPermissions().addGroup(
                    new RemoteClusterPermissionGroup(new String[] { "monitor_enrich" }, new String[] { "remote-1" })
                )
                    .addGroup(
                        new RemoteClusterPermissionGroup(
                            RemoteClusterPermissions.getSupportedRemoteClusterPermissions().toArray(new String[0]),
                            new String[] { "remote-2", "remote-3" }
                        )
                    )
            )
            .build();
        final GetUserPrivilegesResponse response = RBACEngine.buildUserPrivilegesResponseObject(role);
        // Cluster privileges (named and configurable).
        assertThat(response.getClusterPrivileges(), containsInAnyOrder("monitor", "manage_watcher"));
        assertThat(response.getConditionalClusterPrivileges(), containsInAnyOrder(manageApplicationPrivileges));
        // Local index privileges: one entry per role group added above.
        assertThat(response.getIndexPrivileges(), iterableWithSize(3));
        final GetUserPrivilegesResponse.Indices index1 = findIndexPrivilege(response.getIndexPrivileges(), "index-1");
        assertThat(index1.getIndices(), containsInAnyOrder("index-1"));
        assertThat(index1.getPrivileges(), containsInAnyOrder("read", "write"));
        assertThat(index1.getFieldSecurity(), emptyIterable());
        assertThat(index1.getQueries(), emptyIterable());
        final GetUserPrivilegesResponse.Indices index2 = findIndexPrivilege(response.getIndexPrivileges(), "index-2");
        assertThat(index2.getIndices(), containsInAnyOrder("index-2", "index-3"));
        assertThat(index2.getPrivileges(), containsInAnyOrder("all"));
        assertThat(index2.getFieldSecurity(), emptyIterable());
        assertThat(index2.getQueries(), emptyIterable());
        // The index-4/5 group carries both field security and the DLS query.
        final GetUserPrivilegesResponse.Indices index4 = findIndexPrivilege(response.getIndexPrivileges(), "index-4");
        assertThat(index4.getIndices(), containsInAnyOrder("index-4", "index-5"));
        assertThat(index4.getPrivileges(), containsInAnyOrder("read"));
        assertThat(
            index4.getFieldSecurity(),
            containsInAnyOrder(new FieldPermissionsDefinition.FieldGrantExcludeGroup(new String[] { "public.*" }, new String[0]))
        );
        assertThat(index4.getQueries(), containsInAnyOrder(query));
        assertThat(
            response.getApplicationPrivileges(),
            containsInAnyOrder(ApplicationResourcePrivileges.builder().application("app01").privileges("read").resources("*").build())
        );
        assertThat(response.getRunAs(), containsInAnyOrder("user01", "user02"));
        // Remote index privileges: one entry per remote-indices group.
        assertThat(response.getRemoteIndexPrivileges(), iterableWithSize(2));
        final GetUserPrivilegesResponse.RemoteIndices remoteIndex1 = findRemoteIndexPrivilege(
            response.getRemoteIndexPrivileges(),
            "remote-1"
        );
        assertThat(remoteIndex1.remoteClusters(), containsInAnyOrder("remote-1"));
        assertThat(remoteIndex1.indices().getIndices(), containsInAnyOrder("remote-index-1"));
        assertThat(remoteIndex1.indices().getPrivileges(), containsInAnyOrder("read"));
        assertThat(remoteIndex1.indices().getFieldSecurity(), emptyIterable());
        assertThat(remoteIndex1.indices().getQueries(), emptyIterable());
        final GetUserPrivilegesResponse.RemoteIndices remoteIndex2 = findRemoteIndexPrivilege(
            response.getRemoteIndexPrivileges(),
            "remote-2"
        );
        assertThat(remoteIndex2.remoteClusters(), containsInAnyOrder("remote-2", "remote-3"));
        assertThat(remoteIndex2.indices().getIndices(), containsInAnyOrder("remote-index-2", "remote-index-3"));
        assertThat(remoteIndex2.indices().getPrivileges(), containsInAnyOrder("read"));
        assertThat(
            remoteIndex2.indices().getFieldSecurity(),
            containsInAnyOrder(new FieldPermissionsDefinition.FieldGrantExcludeGroup(new String[] { "public.*" }, new String[0]))
        );
        assertThat(remoteIndex2.indices().getQueries(), containsInAnyOrder(query));
        RemoteClusterPermissions remoteClusterPermissions = response.getRemoteClusterPermissions();
        String[] allRemoteClusterPermissions = RemoteClusterPermissions.getSupportedRemoteClusterPermissions().toArray(new String[0]);
        assertThat(response.getRemoteClusterPermissions().groups(), iterableWithSize(2));
        // remote-1 has monitor_enrich permission
        // remote-2 and remote-3 have all permissions
        assertThat(
            response.getRemoteClusterPermissions().groups(),
            containsInAnyOrder(
                new RemoteClusterPermissionGroup(new String[] { "monitor_enrich" }, new String[] { "remote-1" }),
                new RemoteClusterPermissionGroup(allRemoteClusterPermissions, new String[] { "remote-2", "remote-3" })
            )
        );
        // ensure that all permissions are valid for the current transport version
        assertThat(
            Arrays.asList(remoteClusterPermissions.collapseAndRemoveUnsupportedPrivileges("remote-1", TransportVersion.current())),
            hasItem("monitor_enrich")
        );
        for (String permission : RemoteClusterPermissions.getSupportedRemoteClusterPermissions()) {
            assertThat(
                Arrays.asList(remoteClusterPermissions.collapseAndRemoveUnsupportedPrivileges("remote-2", TransportVersion.current())),
                hasItem(permission)
            );
            assertThat(
                Arrays.asList(remoteClusterPermissions.collapseAndRemoveUnsupportedPrivileges("remote-3", TransportVersion.current())),
                hasItem(permission)
            );
        }
    }
/**
 * Verifies that {@code RBACEngine.buildUserPrivilegesResponseObject} merges index permission
 * groups that share the same index-name set and the same allowRestrictedIndices flag into a
 * single response entry, unioning their privileges, field security and DLS queries, while
 * keeping groups with different flags as separate entries.
 */
public void testBuildUserPrivilegeResponseCombinesIndexPrivileges() {
    final BytesArray query = new BytesArray("""
        {"term":{"public":true}}""");
    // Role with per-index groups plus several overlapping groups spanning both indices:
    // three restricted (allowRestrictedIndices=true) groups with FLS/DLS and one
    // unrestricted group, so the response should collapse to exactly four entries.
    final Role role = Role.builder(RESTRICTED_INDICES, "test", "role")
        .add(IndexPrivilegeTests.resolvePrivilegeAndAssertSingleton(Sets.newHashSet("read", "write")), "index-1")
        .add(IndexPrivilege.ALL, "index-2")
        .add(
            new FieldPermissions(new FieldPermissionsDefinition(new String[] { "public.*" }, new String[0])),
            Collections.singleton(query),
            IndexPrivilege.MANAGE,
            true,
            "index-1",
            "index-2"
        )
        .add(
            new FieldPermissions(new FieldPermissionsDefinition(new String[] { "public.*" }, new String[0])),
            Collections.singleton(query),
            IndexPrivilegeTests.resolvePrivilegeAndAssertSingleton(Sets.newHashSet("read", "write")),
            true,
            "index-2",
            "index-1"
        )
        .add(
            new FieldPermissions(new FieldPermissionsDefinition(new String[] { "public.*" }, new String[0])),
            Collections.singleton(query),
            IndexPrivilegeTests.resolvePrivilegeAndAssertSingleton(Sets.newHashSet("read_failure_store", "manage_failure_store")),
            true,
            "index-2",
            "index-1"
        )
        .add(
            FieldPermissions.DEFAULT,
            null,
            IndexPrivilegeTests.resolvePrivilegeAndAssertSingleton(Sets.newHashSet("read_failure_store")),
            false,
            "index-2",
            "index-1"
        )
        .build();
    final GetUserPrivilegesResponse response = RBACEngine.buildUserPrivilegesResponseObject(role);
    // Single-index groups stay separate and carry no FLS/DLS.
    final GetUserPrivilegesResponse.Indices index1 = findIndexPrivilege(response.getIndexPrivileges(), Set.of("index-1"), false);
    assertThat(index1.getIndices(), containsInAnyOrder("index-1"));
    assertThat(index1.getPrivileges(), containsInAnyOrder("read", "write"));
    assertThat(index1.getFieldSecurity(), emptyIterable());
    assertThat(index1.getQueries(), emptyIterable());
    final GetUserPrivilegesResponse.Indices index2 = findIndexPrivilege(response.getIndexPrivileges(), Set.of("index-2"), false);
    assertThat(index2.getIndices(), containsInAnyOrder("index-2"));
    assertThat(index2.getPrivileges(), containsInAnyOrder("all"));
    assertThat(index2.getFieldSecurity(), emptyIterable());
    assertThat(index2.getQueries(), emptyIterable());
    Set<GetUserPrivilegesResponse.Indices> actualIndexPrivileges = response.getIndexPrivileges();
    assertThat(actualIndexPrivileges, iterableWithSize(4));
    // The three restricted groups over {index-1, index-2} are combined into one entry.
    final GetUserPrivilegesResponse.Indices index1And2 = findIndexPrivilege(actualIndexPrivileges, Set.of("index-1", "index-2"), true);
    assertThat(index1And2.getIndices(), containsInAnyOrder("index-1", "index-2"));
    assertThat(index1And2.getPrivileges(), containsInAnyOrder("read", "write", "read_failure_store", "manage_failure_store", "manage"));
    assertThat(
        index1And2.getFieldSecurity(),
        containsInAnyOrder(new FieldPermissionsDefinition.FieldGrantExcludeGroup(new String[] { "public.*" }, new String[0]))
    );
    assertThat(index1And2.getQueries(), containsInAnyOrder(query));
    // The unrestricted group over the same indices remains a distinct entry.
    final GetUserPrivilegesResponse.Indices index1And2NotRestricted = findIndexPrivilege(
        actualIndexPrivileges,
        Set.of("index-1", "index-2"),
        false
    );
    assertThat(index1And2NotRestricted.getIndices(), containsInAnyOrder("index-1", "index-2"));
    assertThat(index1And2NotRestricted.getPrivileges(), containsInAnyOrder("read_failure_store"));
    assertThat(index1And2NotRestricted.getFieldSecurity(), emptyIterable());
    assertThat(index1And2NotRestricted.getQueries(), emptyIterable());
}
/**
 * Resolving authorized indices for a role that grants read on a data stream must include
 * both the data stream name and every one of its backing indices.
 */
public void testBackingIndicesAreIncludedForAuthorizedDataStreams() {
    final String dataStreamName = "my_data_stream";
    // Note: the original declared an unused local `User user` here; removed.
    Role role = Role.builder(RESTRICTED_INDICES, "test1")
        .cluster(Collections.singleton("all"), Collections.emptyList())
        .add(IndexPrivilege.READ, dataStreamName)
        .build();
    // Build a name lookup containing the data stream plus 1-3 backing indices.
    TreeMap<String, IndexAbstraction> lookup = new TreeMap<>();
    List<IndexMetadata> backingIndices = new ArrayList<>();
    int numBackingIndices = randomIntBetween(1, 3);
    for (int k = 0; k < numBackingIndices; k++) {
        backingIndices.add(DataStreamTestHelper.createBackingIndex(dataStreamName, k + 1).build());
    }
    DataStream ds = DataStreamTestHelper.newInstance(
        dataStreamName,
        backingIndices.stream().map(IndexMetadata::getIndex).collect(Collectors.toList())
    );
    lookup.put(ds.getName(), ds);
    for (IndexMetadata im : backingIndices) {
        lookup.put(im.getIndex().getName(), new IndexAbstraction.ConcreteIndex(im, ds));
    }
    SearchRequest request = new SearchRequest("*");
    AuthorizedIndices authorizedIndices = RBACEngine.resolveAuthorizedIndicesFromRole(
        role,
        getRequestInfo(request, TransportSearchAction.TYPE.name()),
        lookup,
        () -> ignore -> {}
    );
    // The data stream itself is authorized...
    assertThat(authorizedIndices.all(IndexComponentSelector.DATA), hasItem(dataStreamName));
    assertThat(authorizedIndices.check(dataStreamName, IndexComponentSelector.DATA), is(true));
    // ...and so is every backing index, both via the full listing and the point check.
    assertThat(
        authorizedIndices.all(IndexComponentSelector.DATA),
        hasItems(backingIndices.stream().map(im -> im.getIndex().getName()).toList().toArray(Strings.EMPTY_ARRAY))
    );
    for (String index : backingIndices.stream().map(im -> im.getIndex().getName()).toList()) {
        assertThat(authorizedIndices.check(index, IndexComponentSelector.DATA), is(true));
    }
}
/**
 * Ingest-style privileges (create/write) must not authorize explicit mapping updates:
 * resolving authorized indices for a put-mapping request yields no indices at all.
 */
public void testExplicitMappingUpdatesAreNotGrantedWithIngestPrivileges() {
    final String dataStreamName = "my_data_stream";
    // Note: the original declared an unused local `User user` here; removed.
    Role role = Role.builder(RESTRICTED_INDICES, "test1")
        .cluster(Collections.emptySet(), Collections.emptyList())
        .add(IndexPrivilege.CREATE, "my_*")
        .add(IndexPrivilege.WRITE, "my_data*")
        .build();
    // Build a name lookup containing the data stream plus 1-3 backing indices.
    TreeMap<String, IndexAbstraction> lookup = new TreeMap<>();
    List<IndexMetadata> backingIndices = new ArrayList<>();
    int numBackingIndices = randomIntBetween(1, 3);
    for (int k = 0; k < numBackingIndices; k++) {
        backingIndices.add(DataStreamTestHelper.createBackingIndex(dataStreamName, k + 1).build());
    }
    DataStream ds = DataStreamTestHelper.newInstance(
        dataStreamName,
        backingIndices.stream().map(IndexMetadata::getIndex).collect(Collectors.toList())
    );
    lookup.put(ds.getName(), ds);
    for (IndexMetadata im : backingIndices) {
        lookup.put(im.getIndex().getName(), new IndexAbstraction.ConcreteIndex(im, ds));
    }
    PutMappingRequest request = new PutMappingRequest("*");
    request.source("{ \"properties\": { \"message\": { \"type\": \"text\" } } }", XContentType.JSON);
    AuthorizedIndices authorizedIndices = RBACEngine.resolveAuthorizedIndicesFromRole(
        role,
        getRequestInfo(request, TransportPutMappingAction.TYPE.name()),
        lookup,
        () -> ignore -> {}
    );
    // Neither data nor failure-store components are authorized for the mapping update.
    assertThat(authorizedIndices.all(IndexComponentSelector.DATA).isEmpty(), is(true));
    assertThat(authorizedIndices.all(IndexComponentSelector.FAILURES).isEmpty(), is(true));
}
/**
 * Regression guard: computing {@code RBACAuthorizationInfo.hashCode()} must not recurse
 * infinitely or throw, with or without an authenticated-user role present.
 */
public void testNoInfiniteRecursionForRBACAuthorizationInfoHashCode() {
    // No assertions needed; the test passes as long as the hashCode calls return.
    final Role limitedByRole = Role.builder(RESTRICTED_INDICES, "role").build();
    final Role authenticatedRole = Role.builder(RESTRICTED_INDICES, "authenticated_role").build();
    new RBACAuthorizationInfo(limitedByRole, authenticatedRole).hashCode();
    new RBACAuthorizationInfo(limitedByRole, null).hashCode();
}
/**
 * When any of the role's permission accessors throws {@link UnsupportedOperationException}
 * (as happens for roles that cannot be described, e.g. API keys with assigned role
 * descriptors), {@code getUserPrivileges} must surface an {@link IllegalArgumentException}
 * that directs the caller to the Get API key information API and preserves the original
 * exception as its cause.
 */
public void testGetUserPrivilegesThrowsIaeForUnsupportedOperation() {
    final RBACAuthorizationInfo authorizationInfo = mock(RBACAuthorizationInfo.class);
    final Role role = mock(Role.class);
    when(authorizationInfo.getRole()).thenReturn(role);
    // Stub every permission accessor to a benign value first...
    when(role.cluster()).thenReturn(ClusterPermission.NONE);
    when(role.indices()).thenReturn(IndicesPermission.NONE);
    when(role.application()).thenReturn(ApplicationPermission.NONE);
    when(role.runAs()).thenReturn(RunAsPermission.NONE);
    when(role.remoteIndices()).thenReturn(RemoteIndicesPermission.NONE);
    final UnsupportedOperationException unsupportedOperationException = new UnsupportedOperationException();
    // ...then re-stub exactly one randomly chosen accessor to throw.
    switch (randomIntBetween(0, 4)) {
        case 0 -> when(role.cluster()).thenThrow(unsupportedOperationException);
        case 1 -> when(role.indices()).thenThrow(unsupportedOperationException);
        case 2 -> when(role.application()).thenThrow(unsupportedOperationException);
        case 3 -> when(role.runAs()).thenThrow(unsupportedOperationException);
        case 4 -> when(role.remoteIndices()).thenThrow(unsupportedOperationException);
        default -> throw new IllegalStateException("unknown case number");
    }
    final PlainActionFuture<GetUserPrivilegesResponse> future = new PlainActionFuture<>();
    engine.getUserPrivileges(authorizationInfo, future);
    final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, future::actionGet);
    assertThat(
        e.getMessage(),
        equalTo(
            "Cannot retrieve privileges for API keys with assigned role descriptors. "
                + "Please use the Get API key information API https://ela.st/es-api-get-api-key"
        )
    );
    // The original UnsupportedOperationException must be preserved as the cause.
    assertThat(e.getCause(), sameInstance(unsupportedOperationException));
}
/**
 * Builds a role with remote-indices groups — some matching the queried cluster alias
 * (directly or via the {@code *} wildcard) and some targeting a different alias — and
 * verifies that the computed role descriptors intersection contains exactly one descriptor
 * covering only the matching groups, with indices and privileges in sorted order.
 */
public void testGetRoleDescriptorsIntersectionForRemoteCluster() throws ExecutionException, InterruptedException {
    final RemoteIndicesPermission.Builder remoteIndicesBuilder = RemoteIndicesPermission.builder();
    final String concreteClusterAlias = randomAlphaOfLength(10);
    final int numGroups = randomIntBetween(1, 3);
    final List<IndicesPrivileges> expectedIndicesPrivileges = new ArrayList<>();
    for (int i = 0; i < numGroups; i++) {
        final String[] indexNames = Objects.requireNonNull(generateRandomStringArray(3, 10, false, false));
        final boolean allowRestrictedIndices = randomBoolean();
        final boolean hasFls = randomBoolean();
        final FieldPermissionsDefinition.FieldGrantExcludeGroup group = hasFls
            ? randomFieldGrantExcludeGroup()
            : new FieldPermissionsDefinition.FieldGrantExcludeGroup(null, null);
        final BytesReference query = randomBoolean() ? randomDlsQuery() : null;
        // Build the expected IndicesPrivileges in parallel with the group we add below,
        // from the same randomly drawn values.
        final IndicesPrivileges.Builder builder = IndicesPrivileges.builder()
            .indices(Arrays.stream(indexNames).sorted().collect(Collectors.toList()))
            .privileges("read")
            .allowRestrictedIndices(allowRestrictedIndices)
            .query(query);
        if (hasFls) {
            builder.grantedFields(group.getGrantedFields());
            builder.deniedFields(group.getExcludedFields());
        }
        expectedIndicesPrivileges.add(builder.build());
        // Either the concrete alias or the wildcard matches the queried cluster.
        remoteIndicesBuilder.addGroup(
            Set.of(randomFrom(concreteClusterAlias, "*")),
            IndexPrivilege.READ,
            new FieldPermissions(new FieldPermissionsDefinition(Set.of(group))),
            query == null ? null : Set.of(query),
            allowRestrictedIndices,
            indexNames
        );
    }
    final String mismatchedConcreteClusterAlias = randomValueOtherThan(concreteClusterAlias, () -> randomAlphaOfLength(10));
    // Add some groups that don't match the alias
    final int numMismatchedGroups = randomIntBetween(0, 3);
    for (int i = 0; i < numMismatchedGroups; i++) {
        remoteIndicesBuilder.addGroup(
            Set.of(mismatchedConcreteClusterAlias),
            IndexPrivilege.READ,
            new FieldPermissions(
                new FieldPermissionsDefinition(Set.of(new FieldPermissionsDefinition.FieldGrantExcludeGroup(null, null)))
            ),
            null,
            randomBoolean(),
            generateRandomStringArray(3, 10, false, false)
        );
    }
    final Role role = createSimpleRoleWithRemoteIndices(remoteIndicesBuilder.build());
    final RBACAuthorizationInfo authorizationInfo = mock(RBACAuthorizationInfo.class);
    when(authorizationInfo.getRole()).thenReturn(role);
    final PlainActionFuture<RoleDescriptorsIntersection> future = new PlainActionFuture<>();
    engine.getRoleDescriptorsIntersectionForRemoteCluster(concreteClusterAlias, TransportVersion.current(), authorizationInfo, future);
    final RoleDescriptorsIntersection actual = future.get();
    // Only the matching groups appear, as a single synthetic remote-user descriptor.
    assertThat(
        actual,
        equalTo(
            new RoleDescriptorsIntersection(
                List.of(
                    Set.of(
                        new RoleDescriptor(
                            Role.REMOTE_USER_ROLE_NAME,
                            null,
                            expectedIndicesPrivileges.stream().sorted().toArray(RoleDescriptor.IndicesPrivileges[]::new),
                            null,
                            null,
                            null,
                            null,
                            null
                        )
                    )
                )
            )
        )
    );
}
/**
 * The intersection computed for a remote cluster must be deterministic: shuffling both the
 * remote-indices groups and the permission groups inside each of them must produce an
 * identical {@code RoleDescriptorsIntersection}, and the number of emitted indices
 * privileges must equal the number of groups added (plus any groups split by selector).
 */
public void testGetRoleDescriptorsIntersectionForRemoteClusterHasDeterministicOrderForIndicesPrivileges() throws ExecutionException,
    InterruptedException {
    final RemoteIndicesPermission.Builder remoteIndicesBuilder = RemoteIndicesPermission.builder();
    final String concreteClusterAlias = randomAlphaOfLength(10);
    final int numGroups = randomIntBetween(2, 5);
    int extraGroups = 0;
    for (int i = 0; i < numGroups; i++) {
        Set<IndexPrivilege> splitBySelector = IndexPrivilege.resolveBySelectorAccess(
            Set.copyOf(randomSubsetOf(randomIntBetween(1, 4), IndexPrivilege.names()))
        );
        // If we end up with failure and data access (or failure, data and failure and data access),
        // we will split and end up with extra groups. Need to account for this for the final assertion
        if (splitBySelector.size() >= 2) {
            extraGroups += splitBySelector.size() - 1;
        }
        for (var privilege : splitBySelector) {
            remoteIndicesBuilder.addGroup(
                Set.copyOf(randomNonEmptySubsetOf(List.of(concreteClusterAlias, "*"))),
                privilege,
                new FieldPermissions(
                    new FieldPermissionsDefinition(
                        Set.of(
                            randomBoolean()
                                ? randomFieldGrantExcludeGroup()
                                : new FieldPermissionsDefinition.FieldGrantExcludeGroup(null, null)
                        )
                    )
                ),
                randomBoolean() ? Set.of(randomDlsQuery()) : null,
                randomBoolean(),
                generateRandomStringArray(3, 10, false, false)
            );
        }
    }
    final RemoteIndicesPermission permissions = remoteIndicesBuilder.build();
    List<RemoteIndicesPermission.RemoteIndicesGroup> remoteIndicesGroups = permissions.remoteIndicesGroups();
    // First pass: compute the intersection from the permission in its original order.
    final Role role1 = createSimpleRoleWithRemoteIndices(permissions);
    final RBACAuthorizationInfo authorizationInfo1 = mock(RBACAuthorizationInfo.class);
    when(authorizationInfo1.getRole()).thenReturn(role1);
    final PlainActionFuture<RoleDescriptorsIntersection> future1 = new PlainActionFuture<>();
    engine.getRoleDescriptorsIntersectionForRemoteCluster(
        concreteClusterAlias,
        TransportVersion.current(),
        authorizationInfo1,
        future1
    );
    final RoleDescriptorsIntersection actual1 = future1.get();
    // Randomize the order of both remote indices groups and each of the indices permissions groups each group holds
    final RemoteIndicesPermission shuffledPermissions = new RemoteIndicesPermission(
        shuffledList(
            remoteIndicesGroups.stream()
                .map(
                    group -> new RemoteIndicesPermission.RemoteIndicesGroup(
                        group.remoteClusterAliases(),
                        shuffledList(group.indicesPermissionGroups())
                    )
                )
                .toList()
        )
    );
    // Second pass: same permission content, shuffled order — result must be identical.
    final Role role2 = createSimpleRoleWithRemoteIndices(shuffledPermissions);
    final RBACAuthorizationInfo authorizationInfo2 = mock(RBACAuthorizationInfo.class);
    when(authorizationInfo2.getRole()).thenReturn(role2);
    final PlainActionFuture<RoleDescriptorsIntersection> future2 = new PlainActionFuture<>();
    engine.getRoleDescriptorsIntersectionForRemoteCluster(
        concreteClusterAlias,
        TransportVersion.current(),
        authorizationInfo2,
        future2
    );
    final RoleDescriptorsIntersection actual2 = future2.get();
    assertThat(actual1, equalTo(actual2));
    assertThat(
        actual1.roleDescriptorsList().iterator().next().iterator().next().getIndicesPrivileges().length,
        equalTo(numGroups + extraGroups)
    );
}
/**
 * A role whose only remote-indices group targets a different cluster alias must yield an
 * empty role descriptors intersection for the queried alias.
 */
public void testGetRoleDescriptorsIntersectionForRemoteClusterWithoutMatchingGroups() throws ExecutionException, InterruptedException {
    final String concreteClusterAlias = randomAlphaOfLength(10);
    // One group, bound only to concreteClusterAlias (no wildcard).
    final RemoteIndicesPermission.Builder permissionBuilder = RemoteIndicesPermission.builder();
    permissionBuilder.addGroup(
        Set.of(concreteClusterAlias),
        IndexPrivilege.READ,
        new FieldPermissions(new FieldPermissionsDefinition(null, null)),
        null,
        randomBoolean(),
        generateRandomStringArray(3, 10, false, false)
    );
    final Role role = createSimpleRoleWithRemoteIndices(permissionBuilder.build());
    final RBACAuthorizationInfo authorizationInfo = mock(RBACAuthorizationInfo.class);
    when(authorizationInfo.getRole()).thenReturn(role);
    final PlainActionFuture<RoleDescriptorsIntersection> future = new PlainActionFuture<>();
    // Query for an alias that is guaranteed not to match the group above.
    final String mismatchedAlias = randomValueOtherThan(concreteClusterAlias, () -> randomAlphaOfLength(10));
    engine.getRoleDescriptorsIntersectionForRemoteCluster(mismatchedAlias, TransportVersion.current(), authorizationInfo, future);
    assertThat(future.get(), equalTo(RoleDescriptorsIntersection.EMPTY));
}
/**
 * A role without any remote indices permission always produces an empty role descriptors
 * intersection, regardless of the queried cluster alias.
 */
public void testGetRoleDescriptorsIntersectionForRemoteClusterWithoutRemoteIndicesPermissions() throws ExecutionException,
    InterruptedException {
    final String concreteClusterAlias = randomAlphaOfLength(10);
    final Role role = createSimpleRoleWithRemoteIndices(RemoteIndicesPermission.NONE);
    final RBACAuthorizationInfo authorizationInfo = mock(RBACAuthorizationInfo.class);
    when(authorizationInfo.getRole()).thenReturn(role);
    final PlainActionFuture<RoleDescriptorsIntersection> future = new PlainActionFuture<>();
    final String queriedAlias = randomValueOtherThan(concreteClusterAlias, () -> randomAlphaOfLength(10));
    engine.getRoleDescriptorsIntersectionForRemoteCluster(queriedAlias, TransportVersion.current(), authorizationInfo, future);
    assertThat(future.get(), equalTo(RoleDescriptorsIntersection.EMPTY));
}
/**
 * Pins the exact remote-cluster role descriptors intersection produced for a selection of
 * reserved roles (superuser, kibana_system, monitoring_user). Acts as a change detector:
 * editing a reserved role's remote privileges should force an update here.
 */
public void testGetRoleDescriptorsForRemoteClusterForReservedRoles() {
    final ReservedRolesStore reservedRolesStore = new ReservedRolesStore();
    final FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(Settings.EMPTY);
    // superuser
    {
        final SimpleRole role = Role.buildFromRoleDescriptor(
            ReservedRolesStore.roleDescriptor("superuser"),
            fieldPermissionsCache,
            RESTRICTED_INDICES
        );
        final RBACAuthorizationInfo authorizationInfo = mock(RBACAuthorizationInfo.class);
        when(authorizationInfo.getRole()).thenReturn(role);
        final PlainActionFuture<RoleDescriptorsIntersection> future = new PlainActionFuture<>();
        engine.getRoleDescriptorsIntersectionForRemoteCluster(
            randomAlphaOfLengthBetween(5, 20),
            TransportVersion.current(),
            authorizationInfo,
            future
        );
        // superuser grants every supported remote cluster privilege plus full index access
        // (restricted indices only get read-oriented privileges).
        assertThat(
            future.actionGet(),
            equalTo(
                new RoleDescriptorsIntersection(
                    new RoleDescriptor(
                        Role.REMOTE_USER_ROLE_NAME,
                        RemoteClusterPermissions.getSupportedRemoteClusterPermissions().toArray(new String[0]),
                        new IndicesPrivileges[] {
                            IndicesPrivileges.builder().indices("*").privileges("all").allowRestrictedIndices(false).build(),
                            IndicesPrivileges.builder()
                                .indices("*")
                                .privileges("monitor", "read", "read_cross_cluster", "view_index_metadata")
                                .allowRestrictedIndices(true)
                                .build() },
                        null,
                        null,
                        null,
                        null,
                        null
                    )
                )
            )
        );
    }
    // kibana_system
    {
        final SimpleRole role = Role.buildFromRoleDescriptor(
            ReservedRolesStore.roleDescriptor("kibana_system"),
            fieldPermissionsCache,
            RESTRICTED_INDICES
        );
        final RBACAuthorizationInfo authorizationInfo = mock(RBACAuthorizationInfo.class);
        when(authorizationInfo.getRole()).thenReturn(role);
        final PlainActionFuture<RoleDescriptorsIntersection> future = new PlainActionFuture<>();
        engine.getRoleDescriptorsIntersectionForRemoteCluster(
            randomAlphaOfLengthBetween(5, 20),
            TransportVersion.current(),
            authorizationInfo,
            future
        );
        // kibana_system only carries monitor_stats remotely, plus read access to
        // monitoring/APM indices.
        assertThat(
            future.actionGet(),
            equalTo(
                new RoleDescriptorsIntersection(
                    new RoleDescriptor(
                        Role.REMOTE_USER_ROLE_NAME,
                        RemoteClusterPermissions.getSupportedRemoteClusterPermissions()
                            .stream()
                            .filter(s -> s.equals(ClusterPrivilegeResolver.MONITOR_STATS.name()))
                            .toArray(String[]::new),
                        new IndicesPrivileges[] {
                            IndicesPrivileges.builder().indices(".monitoring-*").privileges("read", "read_cross_cluster").build(),
                            IndicesPrivileges.builder().indices("apm-*").privileges("read", "read_cross_cluster").build(),
                            IndicesPrivileges.builder().indices("logs-apm.*").privileges("read", "read_cross_cluster").build(),
                            IndicesPrivileges.builder().indices("metrics-apm.*").privileges("read", "read_cross_cluster").build(),
                            IndicesPrivileges.builder().indices("traces-apm-*").privileges("read", "read_cross_cluster").build(),
                            IndicesPrivileges.builder().indices("traces-apm.*").privileges("read", "read_cross_cluster").build() },
                        null,
                        null,
                        null,
                        null,
                        null
                    )
                )
            )
        );
    }
    // monitoring_user
    {
        final SimpleRole role = Role.buildFromRoleDescriptor(
            ReservedRolesStore.roleDescriptor("monitoring_user"),
            fieldPermissionsCache,
            RESTRICTED_INDICES
        );
        final RBACAuthorizationInfo authorizationInfo = mock(RBACAuthorizationInfo.class);
        when(authorizationInfo.getRole()).thenReturn(role);
        final PlainActionFuture<RoleDescriptorsIntersection> future = new PlainActionFuture<>();
        engine.getRoleDescriptorsIntersectionForRemoteCluster(
            randomAlphaOfLengthBetween(5, 20),
            TransportVersion.current(),
            authorizationInfo,
            future
        );
        // monitoring_user has no remote cluster privileges, only remote index reads.
        assertThat(
            future.actionGet(),
            equalTo(
                new RoleDescriptorsIntersection(
                    new RoleDescriptor(
                        Role.REMOTE_USER_ROLE_NAME,
                        null,
                        new IndicesPrivileges[] {
                            IndicesPrivileges.builder().indices(".monitoring-*").privileges("read", "read_cross_cluster").build(),
                            IndicesPrivileges.builder()
                                .indices("/metrics-(beats|elasticsearch|enterprisesearch|kibana|logstash).*/")
                                .privileges("read", "read_cross_cluster")
                                .build(),
                            IndicesPrivileges.builder().indices("metricbeat-*").privileges("read", "read_cross_cluster").build() },
                        null,
                        null,
                        null,
                        null,
                        null
                    )
                )
            )
        );
    }
}
/**
 * When parent authorization for the search action is present, a pre-authorized child action
 * must be granted without consulting the role again.
 */
public void testChildSearchActionAuthorizationIsSkipped() {
    final String[] indices = { "test-index" };
    final Role spiedRole = Mockito.spy(Role.builder(RESTRICTED_INDICES, "test-role").add(IndexPrivilege.READ, indices).build());
    // Pick any child action that the parent search action pre-authorizes.
    final String childAction = randomFrom(
        PreAuthorizationUtils.CHILD_ACTIONS_PRE_AUTHORIZED_BY_PARENT.get(TransportSearchAction.TYPE.name())
    );
    final ParentActionAuthorization parentAuthorization = new ParentActionAuthorization(TransportSearchAction.TYPE.name());
    authorizeIndicesAction(indices, spiedRole, childAction, parentAuthorization, new ActionListener<IndexAuthorizationResult>() {
        @Override
        public void onResponse(IndexAuthorizationResult result) {
            assertTrue(result.isGranted());
            // Pre-authorization means the role is never consulted for the child action.
            Mockito.verify(spiedRole, never()).checkIndicesAction(childAction);
            Mockito.verify(spiedRole, never()).authorize(eq(childAction), any(), any(), any());
        }

        @Override
        public void onFailure(Exception e) {
            Assert.fail(e.getMessage());
        }
    });
}
/**
 * Without a parent authorization, even a pre-authorizable child action must go through
 * normal role-based authorization.
 */
public void testChildSearchActionIsAuthorizedWithoutSkipping() {
    final String[] indices = { "test-index" };
    final Role spiedRole = Mockito.spy(Role.builder(RESTRICTED_INDICES, "test-role").add(IndexPrivilege.READ, indices).build());
    final String childAction = randomFrom(
        PreAuthorizationUtils.CHILD_ACTIONS_PRE_AUTHORIZED_BY_PARENT.get(TransportSearchAction.TYPE.name())
    );
    // No parent authorization is supplied here, so skipping cannot apply.
    authorizeIndicesAction(indices, spiedRole, childAction, null, new ActionListener<IndexAuthorizationResult>() {
        @Override
        public void onResponse(IndexAuthorizationResult result) {
            assertTrue(result.isGranted());
            // The role must have been asked to authorize the child action.
            Mockito.verify(spiedRole, atLeastOnce()).authorize(eq(childAction), any(), any(), any());
        }

        @Override
        public void onFailure(Exception e) {
            Assert.fail(e.getMessage());
        }
    });
}
/**
 * Parent pre-authorization must NOT skip child-action authorization when the role carries
 * document-level security (DLS), because the child result needs the role's query applied.
 */
public void testChildSearchActionAuthorizationIsNotSkippedWhenRoleHasDLS() {
    final String[] indices = { "test-index" };
    final BytesArray query = new BytesArray("""
        {"term":{"foo":bar}}""");
    // Role with FLS and a DLS query attached to the read privilege.
    final Role role = Mockito.spy(
        Role.builder(RESTRICTED_INDICES, "test-role")
            .add(
                new FieldPermissions(new FieldPermissionsDefinition(new String[] { "foo" }, new String[0])),
                Set.of(query),
                IndexPrivilege.READ,
                randomBoolean(),
                indices
            )
            .build()
    );
    final String action = randomFrom(
        PreAuthorizationUtils.CHILD_ACTIONS_PRE_AUTHORIZED_BY_PARENT.get(TransportSearchAction.TYPE.name())
    );
    final ParentActionAuthorization parentAuthorization = new ParentActionAuthorization(TransportSearchAction.TYPE.name());
    authorizeIndicesAction(indices, role, action, parentAuthorization, new ActionListener<IndexAuthorizationResult>() {
        @Override
        public void onResponse(IndexAuthorizationResult indexAuthorizationResult) {
            assertTrue(indexAuthorizationResult.isGranted());
            // Child action authorization should not be skipped, even though the parent authorization was present
            Mockito.verify(role, atLeastOnce()).authorize(eq(action), any(), any(), any());
        }
        @Override
        public void onFailure(Exception e) {
            Assert.fail(e.getMessage());
        }
    });
}
/**
 * An arbitrary (not pre-authorized) child of the search action must still be authorized
 * against the role even when a parent authorization is present.
 * <p>
 * NOTE(review): the method name misspells "Authorization" as "Authorizion"; it is left
 * unchanged here because renaming would alter the test's public identifier.
 */
public void testRandomChildSearchActionAuthorizionIsNotSkipped() {
    final String[] indices = { "test-index" };
    final Role role = Mockito.spy(Role.builder(RESTRICTED_INDICES, "test-role").add(IndexPrivilege.READ, indices).build());
    // A made-up child action name that is not on the pre-authorized allow-list.
    final String action = TransportSearchAction.TYPE.name() + "[" + randomAlphaOfLength(3) + "]";
    final ParentActionAuthorization parentAuthorization = new ParentActionAuthorization(TransportSearchAction.TYPE.name());
    authorizeIndicesAction(indices, role, action, parentAuthorization, new ActionListener<IndexAuthorizationResult>() {
        @Override
        public void onResponse(IndexAuthorizationResult indexAuthorizationResult) {
            assertTrue(indexAuthorizationResult.isGranted());
            // Authorization must have run through the role despite parent authorization.
            Mockito.verify(role, atLeastOnce()).authorize(eq(action), any(), any(), any());
        }
        @Override
        public void onFailure(Exception e) {
            Assert.fail(e.getMessage());
        }
    });
}
/**
 * Drives {@code RBACEngine.authorizeIndexAction} with a synthetic search request over the
 * given indices and forwards the authorization result to the supplied listener.
 */
private void authorizeIndicesAction(
    final String[] indices,
    final Role role,
    final String action,
    final ParentActionAuthorization parentAuthorization,
    final ActionListener<IndexAuthorizationResult> listener
) {
    final RBACAuthorizationInfo authzInfo = new RBACAuthorizationInfo(role, null);
    final ResolvedIndices resolvedIndices = new ResolvedIndices(List.of(indices), List.of());
    final RequestInfo requestInfo = createRequestInfo(new SearchRequest(indices), action, parentAuthorization);
    final AsyncSupplier<ResolvedIndices> indicesAsyncSupplier = () -> SubscribableListener.newSucceeded(resolvedIndices);
    // Register each index in cluster metadata so authorization can resolve it.
    final Metadata.Builder metadataBuilder = Metadata.builder();
    for (String indexName : indices) {
        final IndexMetadata indexMetadata = IndexMetadata.builder(indexName)
            .settings(indexSettings(IndexVersion.current(), 1, 0))
            .build();
        metadataBuilder.put(indexMetadata, false);
    }
    engine.authorizeIndexAction(requestInfo, authzInfo, indicesAsyncSupplier, metadataBuilder.build().getProject()).addListener(listener);
}
/**
 * Builds a {@code RequestInfo} for the given request/action with a randomly generated
 * authenticated user and realm; the originating-action field is left null.
 */
private static RequestInfo createRequestInfo(TransportRequest request, String action, ParentActionAuthorization parentAuthorization) {
    final String realmName = randomAlphaOfLength(6);
    final String realmType = randomAlphaOfLength(4);
    final String nodeName = "node0" + randomIntBetween(1, 9);
    final Authentication.RealmRef realmRef = new Authentication.RealmRef(realmName, realmType, nodeName);
    final var authentication = AuthenticationTestHelper.builder().user(new User(randomAlphaOfLength(8))).realmRef(realmRef).build(false);
    return new RequestInfo(authentication, request, action, null, parentAuthorization);
}
/**
 * Returns the response entry whose index-name set equals {@code indexNames} (checked by
 * mutual containment) and whose allowRestrictedIndices flag matches.
 */
private GetUserPrivilegesResponse.Indices findIndexPrivilege(
    Set<GetUserPrivilegesResponse.Indices> indices,
    Set<String> indexNames,
    boolean allowRestrictedIndices
) {
    for (GetUserPrivilegesResponse.Indices candidate : indices) {
        final boolean sameNames = candidate.getIndices().containsAll(indexNames) && indexNames.containsAll(candidate.getIndices());
        if (sameNames && candidate.allowRestrictedIndices() == allowRestrictedIndices) {
            return candidate;
        }
    }
    // Mirror Optional.get() behaviour of the previous implementation on no match.
    throw new java.util.NoSuchElementException("No value present");
}
/** Returns the first response entry whose index-name set contains {@code name}. */
private GetUserPrivilegesResponse.Indices findIndexPrivilege(Set<GetUserPrivilegesResponse.Indices> indices, String name) {
    return indices.stream().filter(entry -> entry.getIndices().contains(name)).findFirst().orElseThrow();
}
/** Returns the first remote-indices entry covering the given remote cluster alias. */
private GetUserPrivilegesResponse.RemoteIndices findRemoteIndexPrivilege(
    Set<GetUserPrivilegesResponse.RemoteIndices> remoteIndices,
    String remoteClusterAlias
) {
    return remoteIndices.stream().filter(entry -> entry.remoteClusters().contains(remoteClusterAlias)).findFirst().orElseThrow();
}
/** Shorthand for an {@code IndicesPrivileges} granting one privilege over the given indices. */
private IndicesPrivileges indexPrivileges(String priv, String... indices) {
    final IndicesPrivileges.Builder builder = IndicesPrivileges.builder();
    builder.indices(indices);
    builder.privileges(priv);
    return builder.build();
}
/**
 * Registers an application privilege descriptor in {@code privs} and returns the matching
 * {@code ApplicationPrivilege} for use in role construction.
 */
private ApplicationPrivilege defineApplicationPrivilege(
    List<ApplicationPrivilegeDescriptor> privs,
    String app,
    String name,
    String... actions
) {
    final ApplicationPrivilegeDescriptor descriptor = new ApplicationPrivilegeDescriptor(app, name, newHashSet(actions), emptyMap());
    privs.add(descriptor);
    return ApplicationPrivilegeTests.createPrivilege(app, name, actions);
}
/**
 * Convenience overload: wraps the single index-privileges entry and delegates to the array
 * variant with no application privileges.
 */
private PrivilegesCheckResult hasPrivileges(
    IndicesPrivileges indicesPrivileges,
    AuthorizationInfo authorizationInfo,
    List<ApplicationPrivilegeDescriptor> applicationPrivilegeDescriptors,
    String... clusterPrivileges
) throws Exception {
    final IndicesPrivileges[] indexPrivilegesArray = { indicesPrivileges };
    final ApplicationResourcePrivileges[] noAppPrivileges = new ApplicationResourcePrivileges[0];
    return hasPrivileges(indexPrivilegesArray, noAppPrivileges, authorizationInfo, applicationPrivilegeDescriptors, clusterPrivileges);
}
/**
 * Runs {@code engine.checkPrivileges} twice — once with a random {@code runDetailedCheck}
 * flag and once with that flag flipped — asserts both runs agree on overall success, and
 * returns whichever result carries details (the detailed-check run).
 */
private PrivilegesCheckResult hasPrivileges(
    IndicesPrivileges[] indicesPrivileges,
    ApplicationResourcePrivileges[] appPrivileges,
    AuthorizationInfo authorizationInfo,
    List<ApplicationPrivilegeDescriptor> applicationPrivilegeDescriptors,
    String... clusterPrivileges
) throws Exception {
    final PlainActionFuture<PrivilegesCheckResult> future = new PlainActionFuture<>();
    final PlainActionFuture<PrivilegesCheckResult> future2 = new PlainActionFuture<>();
    final PrivilegesToCheck privilegesToCheck = new PrivilegesToCheck(
        clusterPrivileges,
        indicesPrivileges,
        appPrivileges,
        randomBoolean()
    );
    engine.checkPrivileges(authorizationInfo, privilegesToCheck, applicationPrivilegeDescriptors, future);
    // flip the "runDetailedCheck" flag
    engine.checkPrivileges(
        authorizationInfo,
        new PrivilegesToCheck(
            privilegesToCheck.cluster(),
            privilegesToCheck.index(),
            privilegesToCheck.application(),
            false == privilegesToCheck.runDetailedCheck()
        ),
        applicationPrivilegeDescriptors,
        future2
    );
    final PrivilegesCheckResult privilegesCheckResult = future.get();
    assertThat(privilegesCheckResult, notNullValue());
    final PrivilegesCheckResult privilegesCheckResult2 = future2.get();
    assertThat(privilegesCheckResult2, notNullValue());
    // same result independent of the "runDetailedCheck" flag
    assertThat(privilegesCheckResult.allChecksSuccess(), is(privilegesCheckResult2.allChecksSuccess()));
    // Exactly one of the two runs requested details; return that one.
    if (privilegesToCheck.runDetailedCheck()) {
        assertThat(privilegesCheckResult.getDetails(), notNullValue());
        assertThat(privilegesCheckResult2.getDetails(), nullValue());
        return privilegesCheckResult;
    } else {
        assertThat(privilegesCheckResult.getDetails(), nullValue());
        assertThat(privilegesCheckResult2.getDetails(), notNullValue());
        return privilegesCheckResult2;
    }
}
/**
 * Generates a random, syntactically valid DLS term query as raw bytes.
 * <p>
 * Fix: the original string was missing the closing brace of the outer object
 * (it produced {@code { "term": { "f" : "v" }}-minus-one-brace). The bytes are only
 * compared against themselves in these tests, so correcting the JSON is safe.
 */
private BytesArray randomDlsQuery() {
    return new BytesArray(
        "{ \"term\": { \"" + randomAlphaOfLengthBetween(3, 24) + "\" : \"" + randomAlphaOfLengthBetween(3, 24) + "\" } }"
    );
}
/** Random granted-field patterns paired with an empty exclusion list. */
private FieldPermissionsDefinition.FieldGrantExcludeGroup randomFieldGrantExcludeGroup() {
    final String[] grantedFields = generateRandomStringArray(3, 10, false, false);
    return new FieldPermissionsDefinition.FieldGrantExcludeGroup(grantedFields, new String[0]);
}
/**
 * Builds a role with random names whose only interesting content is the given remote
 * indices permission, copied group-by-group into the role builder.
 */
private Role createSimpleRoleWithRemoteIndices(final RemoteIndicesPermission remoteIndicesPermission) {
    final String[] roleNames = generateRandomStringArray(3, 10, false, false);
    Role.Builder roleBuilder = Role.builder(new RestrictedIndices(Automatons.EMPTY), roleNames);
    for (var group : remoteIndicesPermission.remoteIndicesGroups()) {
        for (var permissionGroup : group.indicesPermissionGroups()) {
            roleBuilder.addRemoteIndicesGroup(
                group.remoteClusterAliases(),
                permissionGroup.getFieldPermissions(),
                permissionGroup.getQuery(),
                permissionGroup.privilege(),
                permissionGroup.allowRestrictedIndices(),
                permissionGroup.indices()
            );
        }
    }
    return roleBuilder.build();
}
}
| RBACEngineTests |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/visitor/functions/OneParamFunctions.java | {
"start": 1130,
"end": 5372
} | class ____ implements Function {
public static final OneParamFunctions instance = new OneParamFunctions();
public Object eval(SQLEvalVisitor visitor, SQLMethodInvokeExpr x) {
if (x.getArguments().isEmpty()) {
return SQLEvalVisitor.EVAL_ERROR;
}
SQLExpr param = x.getArguments().get(0);
param.accept(visitor);
Object paramValue = param.getAttributes().get(EVAL_VALUE);
if (paramValue == null) {
return SQLEvalVisitor.EVAL_ERROR;
}
if (paramValue == EVAL_VALUE_NULL) {
return EVAL_VALUE_NULL;
}
String method = x.getMethodName();
if ("md5".equalsIgnoreCase(method)) {
String text = paramValue.toString();
return Utils.md5(text);
}
if ("bit_count".equalsIgnoreCase(method)) {
if (paramValue instanceof BigInteger) {
return ((BigInteger) paramValue).bitCount();
}
if (paramValue instanceof BigDecimal) {
BigDecimal decimal = (BigDecimal) paramValue;
BigInteger bigInt = decimal.setScale(0, BigDecimal.ROUND_HALF_UP).toBigInteger();
return bigInt.bitCount();
}
Long val = SQLEvalVisitorUtils.castToLong(paramValue);
return Long.bitCount(val);
}
if ("soundex".equalsIgnoreCase(method)) {
String text = paramValue.toString();
return soundex(text);
}
if ("space".equalsIgnoreCase(method)) {
int intVal = SQLEvalVisitorUtils.castToInteger(paramValue);
char[] chars = new char[intVal];
for (int i = 0; i < chars.length; ++i) {
chars[i] = ' ';
}
return new String(chars);
}
throw new UnsupportedOperationException(method);
}
public static String soundex(String str) {
if (str == null) {
return null;
}
str = clean(str);
if (str.length() == 0) {
return str;
}
char[] out = {'0', '0', '0', '0'};
char last, mapped;
int incount = 1, count = 1;
out[0] = str.charAt(0);
// getMappingCode() throws IllegalArgumentException
last = getMappingCode(str, 0);
while ((incount < str.length()) && (count < out.length)) {
mapped = getMappingCode(str, incount++);
if (mapped != 0) {
if ((mapped != '0') && (mapped != last)) {
out[count++] = mapped;
}
last = mapped;
}
}
return new String(out);
}
static String clean(String str) {
if (str == null || str.length() == 0) {
return str;
}
int len = str.length();
char[] chars = new char[len];
int count = 0;
for (int i = 0; i < len; i++) {
if (Character.isLetter(str.charAt(i))) {
chars[count++] = str.charAt(i);
}
}
if (count == len) {
return str.toUpperCase(java.util.Locale.ENGLISH);
}
return new String(chars, 0, count).toUpperCase(java.util.Locale.ENGLISH);
}
private static char getMappingCode(String str, int index) {
// map() throws IllegalArgumentException
char mappedChar = map(str.charAt(index));
// HW rule check
if (index > 1 && mappedChar != '0') {
char hwChar = str.charAt(index - 1);
if ('H' == hwChar || 'W' == hwChar) {
char preHWChar = str.charAt(index - 2);
char firstCode = map(preHWChar);
if (firstCode == mappedChar || 'H' == preHWChar || 'W' == preHWChar) {
return 0;
}
}
}
return mappedChar;
}
private static char map(char ch) {
String soundexMapping = "01230120022455012623010202";
int index = ch - 'A';
if (index < 0 || index >= soundexMapping.length()) {
throw new IllegalArgumentException("The character is not mapped: " + ch);
}
return soundexMapping.charAt(index);
}
}
| OneParamFunctions |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/DebeziumSqlserverEndpointBuilderFactory.java | {
"start": 5028,
"end": 5706
} | class ____ should be used to serialize and deserialize
* value data for offsets. The default is JSON converter.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: org.apache.kafka.connect.json.JsonConverter
* Group: consumer
*
* @param internalValueConverter the value to set
* @return the dsl builder
*/
default DebeziumSqlserverEndpointBuilder internalValueConverter(String internalValueConverter) {
doSetProperty("internalValueConverter", internalValueConverter);
return this;
}
/**
* The name of the Java | that |
java | google__guava | guava-tests/test/com/google/common/collect/ImmutableListMultimapTest.java | {
"start": 6788,
"end": 26485
} | class ____ {
@Nullable String string;
}
public void testBuilder_withMutableEntry() {
ImmutableListMultimap.Builder<String, Integer> builder = new Builder<>();
StringHolder holder = new StringHolder();
holder.string = "one";
Entry<String, Integer> entry =
new AbstractMapEntry<String, Integer>() {
@Override
public String getKey() {
return holder.string;
}
@Override
public Integer getValue() {
return 1;
}
};
builder.put(entry);
holder.string = "two";
assertEquals(Arrays.asList(1), builder.build().get("one"));
}
public void testBuilderPutAllIterable() {
ImmutableListMultimap.Builder<String, Integer> builder = ImmutableListMultimap.builder();
builder.putAll("foo", Arrays.asList(1, 2, 3));
builder.putAll("bar", Arrays.asList(4, 5));
builder.putAll("foo", Arrays.asList(6, 7));
Multimap<String, Integer> multimap = builder.build();
assertThat(multimap.get("foo")).containsExactly(1, 2, 3, 6, 7).inOrder();
assertThat(multimap.get("bar")).containsExactly(4, 5).inOrder();
assertEquals(7, multimap.size());
}
public void testBuilderPutAllVarargs() {
ImmutableListMultimap.Builder<String, Integer> builder = ImmutableListMultimap.builder();
builder.putAll("foo", 1, 2, 3);
builder.putAll("bar", 4, 5);
builder.putAll("foo", 6, 7);
ImmutableListMultimap<String, Integer> multimap = builder.build();
assertThat(multimap.get("foo")).containsExactly(1, 2, 3, 6, 7).inOrder();
assertThat(multimap.get("bar")).containsExactly(4, 5).inOrder();
assertEquals(7, multimap.size());
}
public void testBuilderPutAllMultimap() {
Multimap<String, Integer> toPut = LinkedListMultimap.create();
toPut.put("foo", 1);
toPut.put("bar", 4);
toPut.put("foo", 2);
toPut.put("foo", 3);
Multimap<String, Integer> moreToPut = LinkedListMultimap.create();
moreToPut.put("foo", 6);
moreToPut.put("bar", 5);
moreToPut.put("foo", 7);
ImmutableListMultimap.Builder<String, Integer> builder = ImmutableListMultimap.builder();
builder.putAll(toPut);
builder.putAll(moreToPut);
ImmutableListMultimap<String, Integer> multimap = builder.build();
assertThat(multimap.get("foo")).containsExactly(1, 2, 3, 6, 7).inOrder();
assertThat(multimap.get("bar")).containsExactly(4, 5).inOrder();
assertEquals(7, multimap.size());
}
public void testBuilderPutAllWithDuplicates() {
ImmutableListMultimap.Builder<String, Integer> builder = ImmutableListMultimap.builder();
builder.putAll("foo", 1, 2, 3);
builder.putAll("bar", 4, 5);
builder.putAll("foo", 1, 6, 7);
ImmutableListMultimap<String, Integer> multimap = builder.build();
assertEquals(Arrays.asList(1, 2, 3, 1, 6, 7), multimap.get("foo"));
assertEquals(Arrays.asList(4, 5), multimap.get("bar"));
assertEquals(8, multimap.size());
}
public void testBuilderPutWithDuplicates() {
ImmutableListMultimap.Builder<String, Integer> builder = ImmutableListMultimap.builder();
builder.putAll("foo", 1, 2, 3);
builder.putAll("bar", 4, 5);
builder.put("foo", 1);
ImmutableListMultimap<String, Integer> multimap = builder.build();
assertEquals(Arrays.asList(1, 2, 3, 1), multimap.get("foo"));
assertEquals(Arrays.asList(4, 5), multimap.get("bar"));
assertEquals(6, multimap.size());
}
public void testBuilderPutAllMultimapWithDuplicates() {
Multimap<String, Integer> toPut = LinkedListMultimap.create();
toPut.put("foo", 1);
toPut.put("bar", 4);
toPut.put("foo", 2);
toPut.put("foo", 1);
toPut.put("bar", 5);
Multimap<String, Integer> moreToPut = LinkedListMultimap.create();
moreToPut.put("foo", 6);
moreToPut.put("bar", 4);
moreToPut.put("foo", 7);
moreToPut.put("foo", 2);
ImmutableListMultimap.Builder<String, Integer> builder = ImmutableListMultimap.builder();
builder.putAll(toPut);
builder.putAll(moreToPut);
ImmutableListMultimap<String, Integer> multimap = builder.build();
assertThat(multimap.get("foo")).containsExactly(1, 2, 1, 6, 7, 2).inOrder();
assertThat(multimap.get("bar")).containsExactly(4, 5, 4).inOrder();
assertEquals(9, multimap.size());
}
public void testBuilderPutNullKey() {
Multimap<@Nullable String, Integer> toPut = LinkedListMultimap.create();
toPut.put(null, 1);
ImmutableListMultimap.Builder<String, Integer> builder = ImmutableListMultimap.builder();
assertThrows(NullPointerException.class, () -> builder.put(null, 1));
assertThrows(NullPointerException.class, () -> builder.putAll(null, Arrays.asList(1, 2, 3)));
assertThrows(NullPointerException.class, () -> builder.putAll(null, 1, 2, 3));
assertThrows(
NullPointerException.class, () -> builder.putAll((Multimap<String, Integer>) toPut));
}
public void testBuilderPutNullValue() {
Multimap<String, @Nullable Integer> toPut = LinkedListMultimap.create();
toPut.put("foo", null);
ImmutableListMultimap.Builder<String, Integer> builder = ImmutableListMultimap.builder();
assertThrows(NullPointerException.class, () -> builder.put("foo", null));
assertThrows(
NullPointerException.class, () -> builder.putAll("foo", Arrays.asList(1, null, 3)));
assertThrows(NullPointerException.class, () -> builder.putAll("foo", 1, null, 3));
assertThrows(
NullPointerException.class, () -> builder.putAll((Multimap<String, Integer>) toPut));
}
public void testBuilderOrderKeysBy() {
ImmutableListMultimap.Builder<String, Integer> builder = ImmutableListMultimap.builder();
builder.put("b", 3);
builder.put("d", 2);
builder.put("a", 5);
builder.orderKeysBy(Collections.reverseOrder());
builder.put("c", 4);
builder.put("a", 2);
builder.put("b", 6);
ImmutableListMultimap<String, Integer> multimap = builder.build();
assertThat(multimap.keySet()).containsExactly("d", "c", "b", "a").inOrder();
assertThat(multimap.values()).containsExactly(2, 4, 3, 6, 5, 2).inOrder();
assertThat(multimap.get("a")).containsExactly(5, 2).inOrder();
assertThat(multimap.get("b")).containsExactly(3, 6).inOrder();
}
public void testBuilderOrderKeysByDuplicates() {
ImmutableListMultimap.Builder<String, Integer> builder = ImmutableListMultimap.builder();
builder.put("bb", 3);
builder.put("d", 2);
builder.put("a", 5);
builder.orderKeysBy(
new Ordering<String>() {
@Override
public int compare(String left, String right) {
return left.length() - right.length();
}
});
builder.put("cc", 4);
builder.put("a", 2);
builder.put("bb", 6);
ImmutableListMultimap<String, Integer> multimap = builder.build();
assertThat(multimap.keySet()).containsExactly("d", "a", "bb", "cc").inOrder();
assertThat(multimap.values()).containsExactly(2, 5, 2, 3, 6, 4).inOrder();
assertThat(multimap.get("a")).containsExactly(5, 2).inOrder();
assertThat(multimap.get("bb")).containsExactly(3, 6).inOrder();
}
public void testBuilderOrderValuesBy() {
ImmutableListMultimap.Builder<String, Integer> builder = ImmutableListMultimap.builder();
builder.put("b", 3);
builder.put("d", 2);
builder.put("a", 5);
builder.orderValuesBy(Collections.reverseOrder());
builder.put("c", 4);
builder.put("a", 2);
builder.put("b", 6);
ImmutableListMultimap<String, Integer> multimap = builder.build();
assertThat(multimap.keySet()).containsExactly("b", "d", "a", "c").inOrder();
assertThat(multimap.values()).containsExactly(6, 3, 2, 5, 2, 4).inOrder();
assertThat(multimap.get("a")).containsExactly(5, 2).inOrder();
assertThat(multimap.get("b")).containsExactly(6, 3).inOrder();
}
public void testBuilderOrderKeysAndValuesBy() {
ImmutableListMultimap.Builder<String, Integer> builder = ImmutableListMultimap.builder();
builder.put("b", 3);
builder.put("d", 2);
builder.put("a", 5);
builder.orderKeysBy(Collections.reverseOrder());
builder.orderValuesBy(Collections.reverseOrder());
builder.put("c", 4);
builder.put("a", 2);
builder.put("b", 6);
ImmutableListMultimap<String, Integer> multimap = builder.build();
assertThat(multimap.keySet()).containsExactly("d", "c", "b", "a").inOrder();
assertThat(multimap.values()).containsExactly(2, 4, 6, 3, 5, 2).inOrder();
assertThat(multimap.get("a")).containsExactly(5, 2).inOrder();
assertThat(multimap.get("b")).containsExactly(6, 3).inOrder();
}
public void testCopyOf() {
ArrayListMultimap<String, Integer> input = ArrayListMultimap.create();
input.put("foo", 1);
input.put("bar", 2);
input.put("foo", 3);
ImmutableListMultimap<String, Integer> multimap = ImmutableListMultimap.copyOf(input);
new EqualsTester().addEqualityGroup(input, multimap).testEquals();
}
public void testCopyOfWithDuplicates() {
ArrayListMultimap<String, Integer> input = ArrayListMultimap.create();
input.put("foo", 1);
input.put("bar", 2);
input.put("foo", 3);
input.put("foo", 1);
ImmutableListMultimap<String, Integer> multimap = ImmutableListMultimap.copyOf(input);
new EqualsTester().addEqualityGroup(input, multimap).testEquals();
}
public void testCopyOfEmpty() {
ArrayListMultimap<String, Integer> input = ArrayListMultimap.create();
ImmutableListMultimap<String, Integer> multimap = ImmutableListMultimap.copyOf(input);
new EqualsTester().addEqualityGroup(input, multimap).testEquals();
}
public void testCopyOfImmutableListMultimap() {
Multimap<String, Integer> multimap = createMultimap();
assertSame(multimap, ImmutableListMultimap.copyOf(multimap));
}
public void testCopyOfNullKey() {
ArrayListMultimap<@Nullable String, Integer> input = ArrayListMultimap.create();
input.put(null, 1);
assertThrows(
NullPointerException.class,
() -> ImmutableListMultimap.copyOf((ArrayListMultimap<String, Integer>) input));
}
public void testCopyOfNullValue() {
ArrayListMultimap<String, @Nullable Integer> input = ArrayListMultimap.create();
input.putAll("foo", Arrays.<@Nullable Integer>asList(1, null, 3));
assertThrows(
NullPointerException.class,
() -> ImmutableListMultimap.copyOf((ArrayListMultimap<String, Integer>) input));
}
public void testToImmutableListMultimap() {
Collector<Entry<String, Integer>, ?, ImmutableListMultimap<String, Integer>> collector =
toImmutableListMultimap(Entry::getKey, Entry::getValue);
BiPredicate<ImmutableListMultimap<?, ?>, ImmutableListMultimap<?, ?>> equivalence =
Equivalence.equals()
.onResultOf((ImmutableListMultimap<?, ?> mm) -> mm.asMap().entrySet().asList())
.and(Equivalence.equals());
CollectorTester.of(collector, equivalence)
.expectCollects(ImmutableListMultimap.of())
.expectCollects(
ImmutableListMultimap.of("a", 1, "b", 2, "a", 3, "c", 4),
mapEntry("a", 1),
mapEntry("b", 2),
mapEntry("a", 3),
mapEntry("c", 4));
}
public void testFlatteningToImmutableListMultimap() {
Collector<String, ?, ImmutableListMultimap<Character, Character>> collector =
flatteningToImmutableListMultimap(
str -> str.charAt(0), str -> Chars.asList(str.substring(1).toCharArray()).stream());
BiPredicate<Multimap<?, ?>, Multimap<?, ?>> equivalence =
Equivalence.equals()
.onResultOf((Multimap<?, ?> mm) -> ImmutableList.copyOf(mm.asMap().entrySet()))
.and(Equivalence.equals());
ImmutableListMultimap<Character, Character> empty = ImmutableListMultimap.of();
ImmutableListMultimap<Character, Character> filled =
ImmutableListMultimap.<Character, Character>builder()
.putAll('b', Arrays.asList('a', 'n', 'a', 'n', 'a'))
.putAll('a', Arrays.asList('p', 'p', 'l', 'e'))
.putAll('c', Arrays.asList('a', 'r', 'r', 'o', 't'))
.putAll('a', Arrays.asList('s', 'p', 'a', 'r', 'a', 'g', 'u', 's'))
.putAll('c', Arrays.asList('h', 'e', 'r', 'r', 'y'))
.build();
CollectorTester.of(collector, equivalence)
.expectCollects(empty)
.expectCollects(filled, "banana", "apple", "carrot", "asparagus", "cherry");
}
public void testEmptyMultimapReads() {
ImmutableListMultimap<String, Integer> multimap = ImmutableListMultimap.of();
assertFalse(multimap.containsKey("foo"));
assertFalse(multimap.containsValue(1));
assertFalse(multimap.containsEntry("foo", 1));
assertTrue(multimap.entries().isEmpty());
assertTrue(multimap.equals(ArrayListMultimap.create()));
assertEquals(emptyList(), multimap.get("foo"));
assertEquals(0, multimap.hashCode());
assertTrue(multimap.isEmpty());
assertEquals(HashMultiset.create(), multimap.keys());
assertEquals(emptySet(), multimap.keySet());
assertEquals(0, multimap.size());
assertTrue(multimap.values().isEmpty());
assertEquals("{}", multimap.toString());
}
public void testEmptyMultimapWrites() {
Multimap<String, Integer> multimap = ImmutableListMultimap.of();
UnmodifiableCollectionTests.assertMultimapIsUnmodifiable(multimap, "foo", 1);
}
private Multimap<String, Integer> createMultimap() {
return ImmutableListMultimap.<String, Integer>builder()
.put("foo", 1)
.put("bar", 2)
.put("foo", 3)
.build();
}
public void testMultimapReads() {
Multimap<String, Integer> multimap = createMultimap();
assertTrue(multimap.containsKey("foo"));
assertFalse(multimap.containsKey("cat"));
assertTrue(multimap.containsValue(1));
assertFalse(multimap.containsValue(5));
assertTrue(multimap.containsEntry("foo", 1));
assertFalse(multimap.containsEntry("cat", 1));
assertFalse(multimap.containsEntry("foo", 5));
assertFalse(multimap.entries().isEmpty());
assertEquals(3, multimap.size());
assertFalse(multimap.isEmpty());
assertEquals("{foo=[1, 3], bar=[2]}", multimap.toString());
}
public void testMultimapWrites() {
Multimap<String, Integer> multimap = createMultimap();
UnmodifiableCollectionTests.assertMultimapIsUnmodifiable(multimap, "bar", 2);
}
public void testMultimapEquals() {
Multimap<String, Integer> multimap = createMultimap();
Multimap<String, Integer> arrayListMultimap = ArrayListMultimap.create();
arrayListMultimap.putAll("foo", Arrays.asList(1, 3));
arrayListMultimap.put("bar", 2);
new EqualsTester()
.addEqualityGroup(
multimap,
createMultimap(),
arrayListMultimap,
ImmutableListMultimap.<String, Integer>builder()
.put("bar", 2)
.put("foo", 1)
.put("foo", 3)
.build())
.addEqualityGroup(
ImmutableListMultimap.<String, Integer>builder()
.put("bar", 2)
.put("foo", 3)
.put("foo", 1)
.build())
.addEqualityGroup(
ImmutableListMultimap.<String, Integer>builder()
.put("foo", 2)
.put("foo", 3)
.put("foo", 1)
.build())
.addEqualityGroup(
ImmutableListMultimap.<String, Integer>builder().put("bar", 2).put("foo", 3).build())
.testEquals();
}
public void testOf() {
assertMultimapEquals(ImmutableListMultimap.of("one", 1), "one", 1);
assertMultimapEquals(ImmutableListMultimap.of("one", 1, "two", 2), "one", 1, "two", 2);
assertMultimapEquals(
ImmutableListMultimap.of("one", 1, "two", 2, "three", 3), "one", 1, "two", 2, "three", 3);
assertMultimapEquals(
ImmutableListMultimap.of("one", 1, "two", 2, "three", 3, "four", 4),
"one",
1,
"two",
2,
"three",
3,
"four",
4);
assertMultimapEquals(
ImmutableListMultimap.of("one", 1, "two", 2, "three", 3, "four", 4, "five", 5),
"one",
1,
"two",
2,
"three",
3,
"four",
4,
"five",
5);
}
public void testInverse() {
assertEquals(
ImmutableListMultimap.<Integer, String>of(),
ImmutableListMultimap.<String, Integer>of().inverse());
assertEquals(ImmutableListMultimap.of(1, "one"), ImmutableListMultimap.of("one", 1).inverse());
assertEquals(
ImmutableListMultimap.of(1, "one", 2, "two"),
ImmutableListMultimap.of("one", 1, "two", 2).inverse());
assertEquals(
ImmutableListMultimap.of("of", 'o', "of", 'f', "to", 't', "to", 'o').inverse(),
ImmutableListMultimap.of('o', "of", 'f', "of", 't', "to", 'o', "to"));
assertEquals(
ImmutableListMultimap.of('f', "foo", 'o', "foo", 'o', "foo"),
ImmutableListMultimap.of("foo", 'f', "foo", 'o', "foo", 'o').inverse());
}
public void testNotDistinctEntrySpliterator() {
ImmutableListMultimap<String, String> multimap =
ImmutableListMultimap.of("foo", "bar", "foo", "bar");
assertThat(multimap.entries().spliterator().characteristics() & Spliterator.DISTINCT)
.isEqualTo(0);
}
public void testInverseMinimizesWork() {
ImmutableListMultimap<String, Character> multimap =
ImmutableListMultimap.<String, Character>builder()
.put("foo", 'f')
.put("foo", 'o')
.put("foo", 'o')
.put("poo", 'p')
.put("poo", 'o')
.put("poo", 'o')
.build();
assertSame(multimap.inverse(), multimap.inverse());
assertSame(multimap, multimap.inverse().inverse());
}
private static <K, V> void assertMultimapEquals(
Multimap<K, V> multimap, Object... alternatingKeysAndValues) {
assertEquals(multimap.size(), alternatingKeysAndValues.length / 2);
int i = 0;
for (Entry<K, V> entry : multimap.entries()) {
assertEquals(alternatingKeysAndValues[i++], entry.getKey());
assertEquals(alternatingKeysAndValues[i++], entry.getValue());
}
}
@J2ktIncompatible
@GwtIncompatible // SerializableTester
public void testSerialization() {
Multimap<String, Integer> multimap = createMultimap();
SerializableTester.reserializeAndAssert(multimap);
assertEquals(multimap.size(), SerializableTester.reserialize(multimap).size());
SerializableTester.reserializeAndAssert(multimap.get("foo"));
LenientSerializableTester.reserializeAndAssertLenient(multimap.keySet());
LenientSerializableTester.reserializeAndAssertLenient(multimap.keys());
SerializableTester.reserializeAndAssert(multimap.asMap());
Collection<Integer> valuesCopy = SerializableTester.reserialize(multimap.values());
assertEquals(HashMultiset.create(multimap.values()), HashMultiset.create(valuesCopy));
}
@J2ktIncompatible
@GwtIncompatible // SerializableTester
public void testEmptySerialization() {
Multimap<String, Integer> multimap = ImmutableListMultimap.of();
assertSame(multimap, SerializableTester.reserialize(multimap));
}
@J2ktIncompatible
@GwtIncompatible // reflection
public void testNulls() throws Exception {
NullPointerTester tester = new NullPointerTester();
tester.testAllPublicStaticMethods(ImmutableListMultimap.class);
tester.ignore(ImmutableListMultimap.class.getMethod("get", Object.class));
tester.testAllPublicInstanceMethods(ImmutableListMultimap.of());
tester.testAllPublicInstanceMethods(ImmutableListMultimap.of("a", 1));
}
}
| StringHolder |
java | quarkusio__quarkus | integration-tests/kubernetes-client/src/test/java/io/quarkus/it/kubernetes/client/KubernetesTestServerTest.java | {
"start": 538,
"end": 854
} | class ____ no native-image test because it relies on setting config overrides that clash
* with native image build config.
*/
@TestProfile(KubernetesTestServerTest.MyProfile.class)
@WithKubernetesTestServer(https = false, crud = true, port = 10001, setup = KubernetesTestServerTest.Setup.class)
@QuarkusTest
public | has |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/engine/TestClassInheritanceTests.java | {
"start": 7664,
"end": 8080
} | class ____ extends TestCase2 {
@BeforeAll
static void beforeAll3() {
callSequence.add("beforeAll3");
}
@BeforeEach
void beforeEach3() {
callSequence.add("beforeEach3");
}
@Test
void test3() {
callSequence.add("test3");
}
@AfterEach
void afterEach3() {
callSequence.add("afterEach3");
}
@AfterAll
static void afterAll3() {
callSequence.add("afterAll3");
}
}
}
| TestCase3 |
java | quarkusio__quarkus | core/runtime/src/main/java/io/quarkus/runtime/annotations/ConfigPhase.java | {
"start": 48,
"end": 1434
} | enum ____ {
/**
* Values are read and available for usage at build time.
*/
BUILD_TIME(true, false, false, "Build time"),
/**
* Values are read and available for usage at build time, and available on a read-only basis at run time.
*/
BUILD_AND_RUN_TIME_FIXED(true, true, false, "Build time and run time fixed"),
/**
* Values are read and available for usage at run time and are re-read on each program execution.
*/
RUN_TIME(false, true, true, "Run time"),
;
private final boolean availableAtBuild;
private final boolean availableAtRun;
private final boolean readAtMain;
private final String name;
ConfigPhase(final boolean availableAtBuild, final boolean availableAtRun, final boolean readAtMain, final String name) {
this.availableAtBuild = availableAtBuild;
this.availableAtRun = availableAtRun;
this.readAtMain = readAtMain;
this.name = name;
}
public boolean isAvailableAtBuild() {
return availableAtBuild;
}
public boolean isAvailableAtRun() {
return availableAtRun;
}
public boolean isReadAtStaticInit() {
return isAvailableAtBuild() && isAvailableAtRun();
}
public boolean isReadAtMain() {
return readAtMain;
}
@Override
public String toString() {
return name;
}
}
| ConfigPhase |
java | google__auto | common/src/test/java/com/google/auto/common/BasicAnnotationProcessorTest.java | {
"start": 20598,
"end": 21144
} | class ____<@"
+ TypeParameterRequiresGeneratedCode.class.getCanonicalName()
+ " T> {",
" @" + AnAnnotation.class.getCanonicalName(),
" public void method() {}",
"}");
requiresGeneratedCodeRejectionTest(classAFileObject);
}
@Test
public void properlyDefersProcessing_rejectsArgumentElement() {
JavaFileObject classAFileObject =
JavaFileObjects.forSourceLines(
"test.ClassA",
"package test;",
"",
"public | ClassA |
java | apache__camel | components/camel-opentelemetry/src/main/java/org/apache/camel/opentelemetry/OpenTelemetryInstrumentedThreadFactoryListener.java | {
"start": 1107,
"end": 1405
} | class ____ implements ExecutorServiceManager.ThreadFactoryListener {
@Override
public ThreadFactory onNewThreadFactory(Object source, ThreadFactory factory) {
return runnable -> factory.newThread(Context.current().wrap(runnable));
}
}
| OpenTelemetryInstrumentedThreadFactoryListener |
java | apache__flink | flink-formats/flink-json/src/main/java/org/apache/flink/formats/json/JsonFormatOptionsUtil.java | {
"start": 1639,
"end": 6582
} | class ____ {
// --------------------------------------------------------------------------------------------
// Option enumerations
// --------------------------------------------------------------------------------------------
public static final String SQL = "SQL";
public static final String ISO_8601 = "ISO-8601";
public static final Set<String> TIMESTAMP_FORMAT_ENUM =
new HashSet<>(Arrays.asList(SQL, ISO_8601));
// The handling mode of null key for map data
public static final String JSON_MAP_NULL_KEY_MODE_FAIL = "FAIL";
public static final String JSON_MAP_NULL_KEY_MODE_DROP = "DROP";
public static final String JSON_MAP_NULL_KEY_MODE_LITERAL = "LITERAL";
// --------------------------------------------------------------------------------------------
// Utilities
// --------------------------------------------------------------------------------------------
public static TimestampFormat getTimestampFormat(ReadableConfig config) {
String timestampFormat = config.get(TIMESTAMP_FORMAT);
switch (timestampFormat) {
case SQL:
return TimestampFormat.SQL;
case ISO_8601:
return TimestampFormat.ISO_8601;
default:
throw new TableException(
String.format(
"Unsupported timestamp format '%s'. Validator should have checked that.",
timestampFormat));
}
}
/**
* Creates handling mode for null key map data.
*
* <p>See {@link #JSON_MAP_NULL_KEY_MODE_FAIL}, {@link #JSON_MAP_NULL_KEY_MODE_DROP}, and {@link
* #JSON_MAP_NULL_KEY_MODE_LITERAL} for more information.
*/
public static JsonFormatOptions.MapNullKeyMode getMapNullKeyMode(ReadableConfig config) {
String mapNullKeyMode = config.get(MAP_NULL_KEY_MODE);
switch (mapNullKeyMode.toUpperCase()) {
case JSON_MAP_NULL_KEY_MODE_FAIL:
return JsonFormatOptions.MapNullKeyMode.FAIL;
case JSON_MAP_NULL_KEY_MODE_DROP:
return JsonFormatOptions.MapNullKeyMode.DROP;
case JSON_MAP_NULL_KEY_MODE_LITERAL:
return JsonFormatOptions.MapNullKeyMode.LITERAL;
default:
throw new TableException(
String.format(
"Unsupported map null key handling mode '%s'. Validator should have checked that.",
mapNullKeyMode));
}
}
// --------------------------------------------------------------------------------------------
// Validation
// --------------------------------------------------------------------------------------------
/** Validator for json decoding format. */
public static void validateDecodingFormatOptions(ReadableConfig tableOptions) {
boolean failOnMissingField = tableOptions.get(FAIL_ON_MISSING_FIELD);
boolean ignoreParseErrors = tableOptions.get(IGNORE_PARSE_ERRORS);
if (ignoreParseErrors && failOnMissingField) {
throw new ValidationException(
FAIL_ON_MISSING_FIELD.key()
+ " and "
+ IGNORE_PARSE_ERRORS.key()
+ " shouldn't both be true.");
}
validateTimestampFormat(tableOptions);
}
/** Validator for json encoding format. */
public static void validateEncodingFormatOptions(ReadableConfig tableOptions) {
// validator for {@link MAP_NULL_KEY_MODE}
Set<String> nullKeyModes =
Arrays.stream(JsonFormatOptions.MapNullKeyMode.values())
.map(Objects::toString)
.collect(Collectors.toSet());
if (!nullKeyModes.contains(tableOptions.get(MAP_NULL_KEY_MODE).toUpperCase())) {
throw new ValidationException(
String.format(
"Unsupported value '%s' for option %s. Supported values are %s.",
tableOptions.get(MAP_NULL_KEY_MODE),
MAP_NULL_KEY_MODE.key(),
nullKeyModes));
}
validateTimestampFormat(tableOptions);
}
/** Validates timestamp format which value should be SQL or ISO-8601. */
static void validateTimestampFormat(ReadableConfig tableOptions) {
String timestampFormat = tableOptions.get(TIMESTAMP_FORMAT);
if (!TIMESTAMP_FORMAT_ENUM.contains(timestampFormat)) {
throw new ValidationException(
String.format(
"Unsupported value '%s' for %s. Supported values are [SQL, ISO-8601].",
timestampFormat, TIMESTAMP_FORMAT.key()));
}
}
private JsonFormatOptionsUtil() {}
}
| JsonFormatOptionsUtil |
java | apache__avro | lang/java/tools/src/test/compiler/output/OptionalGettersNullableFieldsTest.java | {
"start": 467,
"end": 2266
} | class ____ extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
private static final long serialVersionUID = -6919829133416680993L;
public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"OptionalGettersNullableFieldsTest\",\"namespace\":\"avro.examples.baseball\",\"doc\":\"Test that optional getters are created only for nullable fields\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"nullable_name\",\"type\":[\"string\",\"null\"]},{\"name\":\"favorite_number\",\"type\":[\"int\"]},{\"name\":\"nullable_favorite_number\",\"type\":[\"int\",\"null\"]},{\"name\":\"nullable_array\",\"type\":[{\"type\":\"array\",\"items\":\"string\"},\"null\"]}]}");
public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
private static final SpecificData MODEL$ = new SpecificData();
private static final BinaryMessageEncoder<OptionalGettersNullableFieldsTest> ENCODER =
new BinaryMessageEncoder<>(MODEL$, SCHEMA$);
private static final BinaryMessageDecoder<OptionalGettersNullableFieldsTest> DECODER =
new BinaryMessageDecoder<>(MODEL$, SCHEMA$);
/**
* Return the BinaryMessageEncoder instance used by this class.
* @return the message encoder used by this class
*/
public static BinaryMessageEncoder<OptionalGettersNullableFieldsTest> getEncoder() {
return ENCODER;
}
/**
* Return the BinaryMessageDecoder instance used by this class.
* @return the message decoder used by this class
*/
public static BinaryMessageDecoder<OptionalGettersNullableFieldsTest> getDecoder() {
return DECODER;
}
/**
* Create a new BinaryMessageDecoder instance for this | OptionalGettersNullableFieldsTest |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/util/Sets_newLinkedHashSet_GenericArray_Test.java | {
"start": 888,
"end": 1485
} | class ____ {
@Test
void should_return_Set_containing_all_elements_in_array() {
String[] expected = { "One", "Two" };
LinkedHashSet<String> set = Sets.newLinkedHashSet(expected);
assertThat(set.toArray()).isEqualTo(expected);
}
@Test
void should_return_null_if_array_is_null() {
Object[] elements = null;
assertThat(Sets.newLinkedHashSet(elements)).isNull();
}
@Test
void should_return_empty_Set_if_array_is_empty() {
LinkedHashSet<Object> set = Sets.newLinkedHashSet(new Object[0]);
assertThat(set).isEmpty();
}
}
| Sets_newLinkedHashSet_GenericArray_Test |
java | micronaut-projects__micronaut-core | http-server/src/main/java/io/micronaut/http/server/exceptions/ErrorResponseProcessorExceptionHandler.java | {
"start": 1159,
"end": 2003
} | class ____<T extends Throwable> implements ExceptionHandler<T, HttpResponse<?>> {
protected final ErrorResponseProcessor<?> responseProcessor;
/**
* Constructor.
*
* @param responseProcessor Error Response Processor
*/
protected ErrorResponseProcessorExceptionHandler(ErrorResponseProcessor<?> responseProcessor) {
this.responseProcessor = responseProcessor;
}
@Override
public HttpResponse<?> handle(HttpRequest request, T exception) {
return responseProcessor.processResponse(ErrorContext.builder(request)
.cause(exception)
.errorMessage(exception.getMessage())
.build(), createResponse(exception));
}
@NonNull
protected abstract MutableHttpResponse<?> createResponse(T exception);
}
| ErrorResponseProcessorExceptionHandler |
java | spring-projects__spring-framework | spring-websocket/src/test/java/org/springframework/web/socket/server/standard/SpringConfiguratorTests.java | {
"start": 2956,
"end": 3183
} | class ____ {
@Bean
EchoEndpoint javaConfigEndpoint() {
return new EchoEndpoint(echoService());
}
@Bean
EchoService echoService() {
return new EchoService();
}
}
@ServerEndpoint("/echo")
private static | Config |
java | spring-projects__spring-security | saml2/saml2-service-provider/src/test/java/org/springframework/security/saml2/provider/service/web/Saml2WebSsoAuthenticationRequestFilterTests.java | {
"start": 2568,
"end": 12065
} | class ____ {
private static final String IDP_SSO_URL = "https://sso-url.example.com/IDP/SSO";
private Saml2WebSsoAuthenticationRequestFilter filter;
private RelyingPartyRegistrationRepository repository = mock(RelyingPartyRegistrationRepository.class);
private Saml2AuthenticationRequestResolver authenticationRequestResolver = mock(
Saml2AuthenticationRequestResolver.class);
private Saml2AuthenticationRequestRepository<AbstractSaml2AuthenticationRequest> authenticationRequestRepository = mock(
Saml2AuthenticationRequestRepository.class);
private MockHttpServletRequest request;
private MockHttpServletResponse response;
private MockFilterChain filterChain;
private RelyingPartyRegistration.Builder rpBuilder;
@BeforeEach
public void setup() {
this.filter = new Saml2WebSsoAuthenticationRequestFilter(this.authenticationRequestResolver);
this.request = new MockHttpServletRequest();
this.response = new MockHttpServletResponse();
this.request.setPathInfo("/saml2/authenticate/registration-id");
this.filterChain = new MockFilterChain() {
@Override
public void doFilter(ServletRequest request, ServletResponse response) {
((HttpServletResponse) response).setStatus(HttpServletResponse.SC_UNAUTHORIZED);
}
};
this.rpBuilder = RelyingPartyRegistration.withRegistrationId("registration-id")
.assertingPartyMetadata((c) -> c.entityId("idp-entity-id"))
.assertingPartyMetadata((c) -> c.singleSignOnServiceLocation(IDP_SSO_URL))
.assertionConsumerServiceLocation("template")
.signingX509Credentials((c) -> c.add(TestSaml2X509Credentials.assertingPartyPrivateCredential()))
.decryptionX509Credentials((c) -> c.add(TestSaml2X509Credentials.assertingPartyPrivateCredential()));
this.filter.setAuthenticationRequestRepository(this.authenticationRequestRepository);
}
@Test
public void doFilterWhenNoRelayStateThenRedirectDoesNotContainParameter() throws ServletException, IOException {
Saml2RedirectAuthenticationRequest request = redirectAuthenticationRequest().build();
given(this.authenticationRequestResolver.resolve(any())).willReturn(request);
this.filter.doFilterInternal(this.request, this.response, this.filterChain);
assertThat(this.response.getHeader("Location")).doesNotContain("RelayState=").startsWith(IDP_SSO_URL);
}
private static Saml2RedirectAuthenticationRequest.Builder redirectAuthenticationRequest() {
return Saml2RedirectAuthenticationRequest
.withRelyingPartyRegistration(TestRelyingPartyRegistrations.relyingPartyRegistration().build())
.samlRequest("request")
.authenticationRequestUri(IDP_SSO_URL);
}
private static Saml2RedirectAuthenticationRequest.Builder redirectAuthenticationRequest(
RelyingPartyRegistration registration) {
return Saml2RedirectAuthenticationRequest.withRelyingPartyRegistration(registration)
.samlRequest("request")
.authenticationRequestUri(IDP_SSO_URL);
}
private static Saml2PostAuthenticationRequest.Builder postAuthenticationRequest() {
return Saml2PostAuthenticationRequest
.withRelyingPartyRegistration(TestRelyingPartyRegistrations.relyingPartyRegistration().build())
.samlRequest("request")
.authenticationRequestUri(IDP_SSO_URL);
}
@Test
public void doFilterWhenRelayStateThenRedirectDoesContainParameter() throws ServletException, IOException {
Saml2RedirectAuthenticationRequest request = redirectAuthenticationRequest().relayState("relayState").build();
given(this.authenticationRequestResolver.resolve(any())).willReturn(request);
this.filter.doFilterInternal(this.request, this.response, this.filterChain);
assertThat(this.response.getHeader("Location")).contains("RelayState=relayState").startsWith(IDP_SSO_URL);
}
@Test
public void doFilterWhenRelayStateThatRequiresEncodingThenRedirectDoesContainsEncodedParameter() throws Exception {
String relayStateValue = "https://my-relay-state.example.com?with=param&other=param";
String relayStateEncoded = UriUtils.encode(relayStateValue, StandardCharsets.ISO_8859_1);
Saml2RedirectAuthenticationRequest request = redirectAuthenticationRequest().relayState(relayStateValue)
.build();
given(this.authenticationRequestResolver.resolve(any())).willReturn(request);
this.filter.doFilterInternal(this.request, this.response, this.filterChain);
assertThat(this.response.getHeader("Location")).contains("RelayState=" + relayStateEncoded)
.startsWith(IDP_SSO_URL);
}
@Test
public void doFilterWhenSimpleSignatureSpecifiedThenSignatureParametersAreInTheRedirectURL() throws Exception {
Saml2RedirectAuthenticationRequest request = redirectAuthenticationRequest().sigAlg("sigalg")
.signature("signature")
.build();
given(this.authenticationRequestResolver.resolve(any())).willReturn(request);
this.filter.doFilterInternal(this.request, this.response, this.filterChain);
assertThat(this.response.getHeader("Location")).contains("SigAlg=")
.contains("Signature=")
.startsWith(IDP_SSO_URL);
}
@Test
public void doFilterWhenSignatureIsDisabledThenSignatureParametersAreNotInTheRedirectURL() throws Exception {
Saml2RedirectAuthenticationRequest request = redirectAuthenticationRequest().build();
given(this.authenticationRequestResolver.resolve(any())).willReturn(request);
this.filter.doFilterInternal(this.request, this.response, this.filterChain);
assertThat(this.response.getHeader("Location")).doesNotContain("SigAlg=")
.doesNotContain("Signature=")
.startsWith(IDP_SSO_URL);
}
@Test
public void doFilterWhenPostFormDataIsPresent() throws Exception {
String relayStateValue = "https://my-relay-state.example.com?with=param&other=param&javascript{alert('1');}";
String relayStateEncoded = HtmlUtils.htmlEscape(relayStateValue);
RelyingPartyRegistration registration = this.rpBuilder
.assertingPartyMetadata((asserting) -> asserting.singleSignOnServiceBinding(Saml2MessageBinding.POST))
.build();
Saml2PostAuthenticationRequest request = Saml2PostAuthenticationRequest
.withRelyingPartyRegistration(registration)
.samlRequest("request")
.relayState(relayStateValue)
.build();
given(this.authenticationRequestResolver.resolve(any())).willReturn(request);
this.filter.doFilterInternal(this.request, this.response, this.filterChain);
assertThat(this.response.getHeader("Location")).isNull();
assertThat(this.response.getContentAsString()).contains(
"<meta http-equiv=\"Content-Security-Policy\" content=\"script-src 'sha256-oZhLbc2kO8b8oaYLrUc7uye1MgVKMyLtPqWR4WtKF+c='\">")
.contains("<script>window.onload = function() { document.forms[0].submit(); }</script>")
.contains("<form action=\"https://sso-url.example.com/IDP/SSO\" method=\"post\">")
.contains("<input type=\"hidden\" name=\"SAMLRequest\"")
.contains("value=\"" + relayStateEncoded + "\"");
}
@Test
public void doFilterWhenRelyingPartyRegistrationNotFoundThenUnauthorized() throws Exception {
Saml2WebSsoAuthenticationRequestFilter filter = new Saml2WebSsoAuthenticationRequestFilter(
this.authenticationRequestResolver);
filter.doFilter(this.request, this.response, this.filterChain);
assertThat(this.response.getStatus()).isEqualTo(401);
}
@Test
public void setAuthenticationRequestRepositoryWhenNullThenException() {
Saml2WebSsoAuthenticationRequestFilter filter = new Saml2WebSsoAuthenticationRequestFilter(
this.authenticationRequestResolver);
assertThatIllegalArgumentException().isThrownBy(() -> filter.setAuthenticationRequestRepository(null));
}
@Test
public void doFilterWhenRedirectThenSaveRedirectRequest() throws ServletException, IOException {
Saml2RedirectAuthenticationRequest request = redirectAuthenticationRequest().build();
given(this.authenticationRequestResolver.resolve(any())).willReturn(request);
this.filter.doFilterInternal(this.request, this.response, this.filterChain);
verify(this.authenticationRequestRepository).saveAuthenticationRequest(
any(Saml2RedirectAuthenticationRequest.class), eq(this.request), eq(this.response));
}
@Test
public void doFilterWhenPostThenSaveRedirectRequest() throws ServletException, IOException {
RelyingPartyRegistration registration = this.rpBuilder
.assertingPartyMetadata((asserting) -> asserting.singleSignOnServiceBinding(Saml2MessageBinding.POST))
.build();
Saml2PostAuthenticationRequest request = Saml2PostAuthenticationRequest
.withRelyingPartyRegistration(registration)
.samlRequest("request")
.build();
given(this.authenticationRequestResolver.resolve(any())).willReturn(request);
this.filter.doFilterInternal(this.request, this.response, this.filterChain);
verify(this.authenticationRequestRepository)
.saveAuthenticationRequest(any(Saml2PostAuthenticationRequest.class), eq(this.request), eq(this.response));
}
@Test
public void doFilterWhenCustomAuthenticationRequestResolverThenUses() throws Exception {
RelyingPartyRegistration registration = TestRelyingPartyRegistrations.relyingPartyRegistration().build();
Saml2RedirectAuthenticationRequest authenticationRequest = redirectAuthenticationRequest(registration).build();
Saml2WebSsoAuthenticationRequestFilter filter = new Saml2WebSsoAuthenticationRequestFilter(
this.authenticationRequestResolver);
given(this.authenticationRequestResolver.resolve(any())).willReturn(authenticationRequest);
filter.doFilterInternal(this.request, this.response, this.filterChain);
verify(this.authenticationRequestResolver).resolve(any());
}
}
| Saml2WebSsoAuthenticationRequestFilterTests |
java | elastic__elasticsearch | test/external-modules/multi-project/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityRolesMultiProjectIT.java | {
"start": 1495,
"end": 8165
} | class ____ extends MultiProjectRestTestCase {
private static final String PASSWORD = "wh@tever";
private static final MutableResource rolesFile = MutableResource.from(Resource.fromString(""));
@ClassRule
public static ElasticsearchCluster cluster = ElasticsearchCluster.local()
.nodes(1)
.distribution(DistributionType.INTEG_TEST)
.module("analysis-common")
.setting("test.multi_project.enabled", "true")
.setting("xpack.security.enabled", "true")
.user("admin", PASSWORD)
.rolesFile(rolesFile)
.build();
@Override
protected String getTestRestCluster() {
return cluster.getHttpAddresses();
}
@Override
protected Settings restClientSettings() {
final String token = basicAuthHeaderValue("admin", new SecureString(PASSWORD.toCharArray()));
return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build();
}
public void testUsersWithSameRoleNamesInDifferentProjects() throws Exception {
var project1 = randomUniqueProjectId();
var project2 = randomUniqueProjectId();
createProject(project1.id());
createProject(project2.id());
String roleName = randomAlphaOfLength(8);
createRole(project1, roleName, "monitor");
createRole(project2, roleName, "manage");
String username = randomAlphaOfLength(6);
createUser(project1, username, roleName);
createUser(project2, username, roleName);
assertThat(getClusterPrivileges(project1, username), containsInAnyOrder("monitor"));
assertThat(getClusterPrivileges(project2, username), containsInAnyOrder("manage"));
}
public void testInvalidateRoleInSingleProjectOnly() throws Exception {
var project1 = randomUniqueProjectId();
var project2 = randomUniqueProjectId();
createProject(project1.id());
createProject(project2.id());
String roleName = randomAlphaOfLength(8);
createRole(project1, roleName, "monitor");
createRole(project2, roleName, "manage");
String username = randomAlphaOfLength(6);
createUser(project1, username, roleName);
createUser(project2, username, roleName);
assertThat(getClusterPrivileges(project1, username), containsInAnyOrder("monitor"));
assertThat(getClusterPrivileges(project2, username), containsInAnyOrder("manage"));
deleteDocument(project1, ".security-7", "role-" + roleName);
deleteDocument(project2, ".security-7", "role-" + roleName);
invalidateRoleCache(project1, roleName);
// In project 1, the role is no longer cached so the user has no privileges
assertThat(getClusterPrivileges(project1, username), empty());
// In project 2 the role is still cached, so we can get privileges even though the role doc was deleted
assertThat(getClusterPrivileges(project2, username), containsInAnyOrder("manage"));
}
public void testUpdatingFileBasedRoleAffectsAllProjects() throws Exception {
final String originalRoles;
final var roleName = "test_role";
rolesFile.update(Resource.fromString(Strings.format("""
%s:
cluster:
- monitor
""", roleName)));
var project1 = randomUniqueProjectId();
var project2 = randomUniqueProjectId();
createProject(project1.id());
createProject(project2.id());
String username1 = randomAlphaOfLength(5);
createUser(project1, username1, roleName);
String username2 = randomAlphaOfLength(7);
createUser(project2, username2, roleName);
assertBusy(() -> {
assertThat(getClusterPrivileges(project1, username1), contains("monitor"));
assertThat(getClusterPrivileges(project2, username2), contains("monitor"));
}, 20, TimeUnit.SECONDS); // increasing this to try and solve for a rare failure
rolesFile.update(Resource.fromString(""));
assertBusy(() -> {
// Both projects should automatically reflect that the role has been removed
assertThat(getClusterPrivileges(project1, username1), empty());
assertThat(getClusterPrivileges(project2, username2), empty());
}, 20, TimeUnit.SECONDS);
}
private void createUser(ProjectId projectId, String username, String roleName) throws IOException {
Request request = new Request("PUT", "/_security/user/" + username);
request.setJsonEntity(Strings.format("""
{
"roles": [ "%s" ],
"password": "%s"
}
""", roleName, PASSWORD));
setRequestProjectId(request, projectId.id());
client().performRequest(request);
}
private void createRole(ProjectId projectId, String roleName, String clusterPrivilege) throws IOException {
Request request = new Request("PUT", "/_security/role/" + roleName);
request.setJsonEntity(Strings.format("""
{
"cluster": [ "%s" ]
}
""", clusterPrivilege));
setRequestProjectId(request, projectId.id());
client().performRequest(request);
}
private void invalidateRoleCache(ProjectId projectId, String roleName) throws IOException {
Request request = new Request("POST", "/_security/role/" + roleName + "/_clear_cache");
setRequestProjectId(request, projectId.id());
client().performRequest(request);
}
@SuppressWarnings("unchecked")
private Collection<String> getClusterPrivileges(ProjectId projectId, String username) throws IOException {
Request request = new Request("GET", "/_security/user/_privileges");
setRequestProjectId(request, projectId.id());
request.setOptions(
request.getOptions()
.toBuilder()
.addHeader("Authorization", basicAuthHeaderValue(username, new SecureString(PASSWORD.toCharArray())))
.build()
);
final Map<String, Object> response = entityAsMap(client().performRequest(request));
return (Collection<String>) response.get("cluster");
}
private void deleteDocument(ProjectId projectId, String index, String docId) throws IOException {
Request request = new Request("DELETE", "/" + index + "/_doc/" + docId);
setRequestProjectId(request, projectId.id());
request.setOptions(request.getOptions().toBuilder().setWarningsHandler(WarningsHandler.PERMISSIVE).build());
client().performRequest(request);
}
}
| SecurityRolesMultiProjectIT |
java | elastic__elasticsearch | x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalyticsCRUDIT.java | {
"start": 1721,
"end": 5809
} | class ____ extends MlSingleNodeTestCase {
private DataFrameAnalyticsConfigProvider configProvider;
@Before
public void createComponents() throws Exception {
configProvider = new DataFrameAnalyticsConfigProvider(
client(),
xContentRegistry(),
new DataFrameAnalyticsAuditor(
client(),
getInstanceFromNode(ClusterService.class),
TestIndexNameExpressionResolver.newInstance(),
randomBoolean()
),
getInstanceFromNode(ClusterService.class)
);
waitForMlTemplates();
}
public void testGet_ConfigDoesNotExist() throws InterruptedException {
AtomicReference<DataFrameAnalyticsConfig> configHolder = new AtomicReference<>();
AtomicReference<Exception> exceptionHolder = new AtomicReference<>();
blockingCall(actionListener -> configProvider.get("missing", actionListener), configHolder, exceptionHolder);
assertThat(configHolder.get(), is(nullValue()));
assertThat(exceptionHolder.get(), is(notNullValue()));
assertThat(exceptionHolder.get(), is(instanceOf(ResourceNotFoundException.class)));
}
public void testDeleteConfigWithStateAndStats() throws InterruptedException {
String configId = "delete-config-with-state-and-stats";
// Create valid config
DataFrameAnalyticsConfig config = DataFrameAnalyticsConfigTests.createRandomBuilder(configId)
.setAnalysis(RegressionTests.createRandom())
.build();
AtomicReference<DataFrameAnalyticsConfig> configHolder = new AtomicReference<>();
AtomicReference<Exception> exceptionHolder = new AtomicReference<>();
blockingCall(
actionListener -> configProvider.put(config, emptyMap(), TimeValue.timeValueSeconds(5), actionListener),
configHolder,
exceptionHolder
);
assertThat(configHolder.get(), is(notNullValue()));
assertThat(configHolder.get(), is(equalTo(config)));
OriginSettingClient originSettingClient = new OriginSettingClient(client(), ClientHelper.ML_ORIGIN);
originSettingClient.prepareIndex(".ml-state-000001")
.setId("delete-config-with-state-and-stats_regression_state#1")
.setSource("{}", XContentType.JSON)
.get();
originSettingClient.prepareIndex(".ml-state-000001")
.setId("data_frame_analytics-delete-config-with-state-and-stats-progress")
.setSource("{}", XContentType.JSON)
.get();
originSettingClient.prepareIndex(".ml-stats-000001")
.setId("delete-config-with-state-and-stats_1")
.setSource("{\"job_id\": \"delete-config-with-state-and-stats\"}", XContentType.JSON)
.get();
originSettingClient.prepareIndex(".ml-stats-000001")
.setId("delete-config-with-state-and-stats_2")
.setSource("{\"job_id\": \"delete-config-with-state-and-stats\"}", XContentType.JSON)
.get();
originSettingClient.admin().indices().prepareRefresh(".ml-stat*").get();
client().execute(DeleteDataFrameAnalyticsAction.INSTANCE, new DeleteDataFrameAnalyticsAction.Request(configId)).actionGet();
assertHitCount(
originSettingClient.prepareSearch(".ml-state-*")
.setQuery(
QueryBuilders.idsQuery()
.addIds(
"delete-config-with-state-and-stats_regression_state#1",
"data_frame_analytics-delete-config-with-state-and-stats-progress"
)
)
.setTrackTotalHits(true),
0
);
assertHitCount(
originSettingClient.prepareSearch(".ml-stats-*")
.setQuery(QueryBuilders.idsQuery().addIds("delete-config-with-state-and-stats_1", "delete-config-with-state-and-stats_2"))
.setTrackTotalHits(true),
0
);
}
}
| DataFrameAnalyticsCRUDIT |
java | google__jimfs | jimfs/src/test/java/com/google/common/jimfs/AbstractWatchServiceTest.java | {
"start": 7500,
"end": 7978
} | class ____ implements Watchable {
@Override
public WatchKey register(
WatchService watcher, WatchEvent.Kind<?>[] events, WatchEvent.Modifier... modifiers)
throws IOException {
return register(watcher, events);
}
@Override
public WatchKey register(WatchService watcher, WatchEvent.Kind<?>... events)
throws IOException {
return ((AbstractWatchService) watcher).register(this, Arrays.asList(events));
}
}
}
| StubWatchable |
java | spring-projects__spring-framework | spring-beans/src/main/java/org/springframework/beans/factory/aot/BeanDefinitionPropertyValueCodeGeneratorDelegates.java | {
"start": 4173,
"end": 4433
} | class ____ extends CollectionDelegate<ManagedSet<?>> {
public ManagedSetDelegate() {
super(ManagedSet.class, CodeBlock.of("new $T()", ManagedSet.class));
}
}
/**
* {@link Delegate} for {@link ManagedMap} types.
*/
private static | ManagedSetDelegate |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/visitor/MySqlSchemaStatVisitorTest5.java | {
"start": 925,
"end": 2665
} | class ____ extends TestCase {
public void test_0() throws Exception {
String sql = "SELECT distinct a.id \"id\", a.col \"col\", a.position \"position\", a.panel_id \"panelId\" "
+ "FROM (select * from view_position_info) a LEFT JOIN db1.view_portal b ON a.panel_id = b.panel_id "
+ " LEFT JOIN (select * from view_portal_panel) c ON a.panel_id = c.panel_id "
+ " WHERE b.user_id = ? and ((b.is_grid='y' and c.param_name='is_hidden' and c.param_value='false') or b.is_grid != 'y') and b.user_id in (select user_id from table1 where id = 1) ORDER BY a.col ASC, a.position ASC";
// sql = "select columnName from table1 where id in (select id from table3 where name = ?)";
MySqlStatementParser parser = new MySqlStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement stmt = statementList.get(0);
assertEquals(1, statementList.size());
System.out.println(stmt);
MySqlSchemaStatVisitor visitor = new MySqlSchemaStatVisitor();
stmt.accept(visitor);
System.out.println(sql);
System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
assertEquals(4, visitor.getTables().size());
assertEquals(true, visitor.containsTable("view_position_info"));
assertEquals(11, visitor.getColumns().size());
// assertEquals(true, visitor.getFields().contains(new
// Column("users", "id")));
// assertEquals(true, visitor.getFields().contains(new
// Column("users", "name")));
}
}
| MySqlSchemaStatVisitorTest5 |
java | spring-projects__spring-boot | module/spring-boot-session/src/test/java/org/springframework/boot/session/autoconfigure/SessionAutoConfigurationEarlyInitializationIntegrationTests.java | {
"start": 1729,
"end": 2374
} | class ____ {
@Test
void configurationIsFrozenWhenSessionRepositoryAccessed() {
new WebApplicationContextRunner(AnnotationConfigServletWebServerApplicationContext::new)
.withSystemProperties("spring.jndi.ignore=true")
.withPropertyValues("server.port=0")
.withUserConfiguration(TestConfiguration.class)
.run((context) -> assertThat(context).hasSingleBean(MapSessionRepository.class));
}
@Configuration(proxyBeanMethods = false)
@EnableSpringHttpSession
@ImportAutoConfiguration({ TomcatServletWebServerAutoConfiguration.class, SessionAutoConfiguration.class })
static | SessionAutoConfigurationEarlyInitializationIntegrationTests |
java | apache__spark | common/unsafe/src/main/java/org/apache/spark/unsafe/types/UTF8String.java | {
"start": 54321,
"end": 54770
} | class ____ implements Serializable {
public transient long value = 0;
}
/**
* Wrapper over `int` to allow result of parsing integer from string to be accessed via reference.
* This is done solely for better performance and is not expected to be used by end users.
*
* {@link LongWrapper} could have been used here but using `int` directly save the extra cost of
* conversion from `long` to `int`
*/
public static | LongWrapper |
java | google__dagger | dagger-producers/main/java/dagger/producers/monitoring/ProducerMonitor.java | {
"start": 2793,
"end": 7194
} | class ____ {
/**
* Called when the producer's output is requested; that is, when the first method is called that
* requires the production of this producer's output.
*
* <p>Note that if a method depends on {@code Producer<T>}, then this does not count as requesting
* {@code T}; that is only triggered by calling {@link Producer#get()}.
*
* <p>Depending on how this producer is requested, the following threading constraints are
* guaranteed:
*
* <ol>
* <li>If the producer is requested directly by a method on a component, then {@code requested}
* will be called on the same thread as the component method call.
* <li>If the producer is requested by value from another producer (i.e., injected as {@code T}
* or {@code Produced<T>}), then {@code requested} will be called from the same thread as
* the other producer's {@code requested}.
* <li>If the producer is requested by calling {@link Producer#get()}, then {@code requested}
* will be called from the same thread as that {@code get()} call.
* </ol>
*
* <p>When multiple monitors are installed, the order that each monitor will call this method is
* unspecified, but will remain consistent throughout the course of the execution of a component.
*
* <p>This implementation is a no-op.
*/
public void requested() {}
/**
* Called when all of the producer's inputs are available. This is called regardless of whether
* the inputs have succeeded or not; when the inputs have succeeded, this is called prior to
* scheduling the method on the executor, and if an input has failed and the producer will be
* skipped, this method will be called before {@link #failed(Throwable)} is called.
*
* <p>When multiple monitors are installed, the order that each monitor will call this method is
* unspecified, but will remain consistent throughout the course of the execution of a component.
*
* <p>This implementation is a no-op.
*/
public void ready() {}
/**
* Called when the producer method is about to start executing. This will be called from the same
* thread as the producer method itself.
*
* <p>When multiple monitors are installed, calls to this method will be in the reverse order from
* calls to {@link #requested()}.
*
* <p>This implementation is a no-op.
*/
public void methodStarting() {}
/**
* Called when the producer method has finished executing. This will be called from the same
* thread as {@link #methodStarting()} and the producer method itself.
*
* <p>When multiple monitors are installed, calls to this method will be in the reverse order from
* calls to {@link #requested()}.
*
* <p>This implementation is a no-op.
*/
public void methodFinished() {}
/**
* Called when the producer’s future has completed successfully with a value.
*
* <p>When multiple monitors are installed, calls to this method will be in the reverse order from
* calls to {@link #requested()}.
*
* <p>This implementation is a no-op.
*/
public void succeeded(@SuppressWarnings("unused") Object value) {}
/**
* Called when the producer's future has failed with an exception.
*
* <p>When multiple monitors are installed, calls to this method will be in the reverse order from
* calls to {@link #requested()}.
*
* <p>This implementation is a no-op.
*/
public void failed(@SuppressWarnings("unused") Throwable t) {}
/**
* Adds this monitor's completion methods as a callback to the future. This is only intended to be
* overridden in the framework!
*/
public <T> void addCallbackTo(ListenableFuture<T> future) {
addCallback(
future,
new FutureCallback<T>() {
@Override
public void onSuccess(T value) {
succeeded(value);
}
@Override
public void onFailure(Throwable t) {
failed(t);
}
},
directExecutor());
}
private static final ProducerMonitor NO_OP =
new ProducerMonitor() {
@Override
public <T> void addCallbackTo(ListenableFuture<T> future) {
// overridden to avoid adding a do-nothing callback
}
};
/** Returns a monitor that does no monitoring. */
public static ProducerMonitor noOp() {
return NO_OP;
}
}
| ProducerMonitor |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.