language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
apache__avro
|
lang/java/avro/src/main/java/org/apache/avro/specific/AvroGenerated.java
|
{
"start": 1039,
"end": 1217
}
|
class ____ an Avro generated class. All Avro
* generated classes will be annotated with this annotation.
*/
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
public @
|
is
|
java
|
apache__flink
|
flink-connectors/flink-connector-files/src/test/java/org/apache/flink/connector/file/src/impl/StaticFileSplitEnumeratorTest.java
|
{
"start": 1464,
"end": 5067
}
|
class ____ {
// this is no JUnit temporary folder, because we don't create actual files, we just
// need some random file path.
private static final File TMP_DIR = new File(System.getProperty("java.io.tmpdir"));
private static long splitId = 1L;
@Test
void testCheckpointNoSplitRequested() throws Exception {
final TestingSplitEnumeratorContext<FileSourceSplit> context =
new TestingSplitEnumeratorContext<>(4);
final FileSourceSplit split = createRandomSplit();
final StaticFileSplitEnumerator enumerator = createEnumerator(context, split);
final PendingSplitsCheckpoint<FileSourceSplit> checkpoint = enumerator.snapshotState(1L);
assertThat(checkpoint.getSplits()).contains(split);
}
@Test
void testSplitRequestForRegisteredReader() throws Exception {
final TestingSplitEnumeratorContext<FileSourceSplit> context =
new TestingSplitEnumeratorContext<>(4);
final FileSourceSplit split = createRandomSplit();
final StaticFileSplitEnumerator enumerator = createEnumerator(context, split);
context.registerReader(3, "somehost");
enumerator.addReader(3);
enumerator.handleSplitRequest(3, "somehost");
assertThat(enumerator.snapshotState(1L).getSplits()).isEmpty();
assertThat(context.getSplitAssignments().get(3).getAssignedSplits()).contains(split);
}
@Test
void testSplitRequestForNonRegisteredReader() throws Exception {
final TestingSplitEnumeratorContext<FileSourceSplit> context =
new TestingSplitEnumeratorContext<>(4);
final FileSourceSplit split = createRandomSplit();
final StaticFileSplitEnumerator enumerator = createEnumerator(context, split);
enumerator.handleSplitRequest(3, "somehost");
assertThat(context.getSplitAssignments()).doesNotContainKey(3);
assertThat(enumerator.snapshotState(1L).getSplits()).contains(split);
}
@Test
void testNoMoreSplits() throws Exception {
final TestingSplitEnumeratorContext<FileSourceSplit> context =
new TestingSplitEnumeratorContext<>(4);
final FileSourceSplit split = createRandomSplit();
final StaticFileSplitEnumerator enumerator = createEnumerator(context, split);
// first split assignment
context.registerReader(1, "somehost");
enumerator.addReader(1);
enumerator.handleSplitRequest(1, "somehost");
// second request has no more split
enumerator.handleSplitRequest(1, "somehost");
assertThat(context.getSplitAssignments().get(1).getAssignedSplits()).contains(split);
assertThat(context.getSplitAssignments().get(1).hasReceivedNoMoreSplitsSignal()).isTrue();
}
// ------------------------------------------------------------------------
// test setup helpers
// ------------------------------------------------------------------------
private static FileSourceSplit createRandomSplit() {
return new FileSourceSplit(
String.valueOf(splitId++),
Path.fromLocalFile(new File(TMP_DIR, "foo")),
0L,
0L,
0L,
0L);
}
private static StaticFileSplitEnumerator createEnumerator(
final SplitEnumeratorContext<FileSourceSplit> context,
final FileSourceSplit... splits) {
return new StaticFileSplitEnumerator(
context, new SimpleSplitAssigner(Arrays.asList(splits)));
}
}
|
StaticFileSplitEnumeratorTest
|
java
|
apache__camel
|
components/camel-google/camel-google-drive/src/generated/java/org/apache/camel/component/google/drive/internal/DriveRevisionsApiMethod.java
|
{
"start": 664,
"end": 2329
}
|
enum ____ implements ApiMethod {
DELETE(
com.google.api.services.drive.Drive.Revisions.Delete.class,
"delete",
arg("fileId", String.class),
arg("revisionId", String.class)),
GET(
com.google.api.services.drive.Drive.Revisions.Get.class,
"get",
arg("fileId", String.class),
arg("revisionId", String.class),
setter("acknowledgeAbuse", Boolean.class)),
LIST(
com.google.api.services.drive.Drive.Revisions.List.class,
"list",
arg("fileId", String.class),
setter("pageSize", Integer.class),
setter("pageToken", String.class)),
UPDATE(
com.google.api.services.drive.Drive.Revisions.Update.class,
"update",
arg("fileId", String.class),
arg("revisionId", String.class),
arg("content", com.google.api.services.drive.model.Revision.class));
private final ApiMethod apiMethod;
DriveRevisionsApiMethod(Class<?> resultType, String name, ApiMethodArg... args) {
this.apiMethod = new ApiMethodImpl(Revisions.class, resultType, name, args);
}
@Override
public String getName() { return apiMethod.getName(); }
@Override
public Class<?> getResultType() { return apiMethod.getResultType(); }
@Override
public List<String> getArgNames() { return apiMethod.getArgNames(); }
@Override
public List<String> getSetterArgNames() { return apiMethod.getSetterArgNames(); }
@Override
public List<Class<?>> getArgTypes() { return apiMethod.getArgTypes(); }
@Override
public Method getMethod() { return apiMethod.getMethod(); }
}
|
DriveRevisionsApiMethod
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/common/BackoffPolicy.java
|
{
"start": 6282,
"end": 7182
}
|
class ____ implements Iterator<TimeValue> {
private final int numberOfElements;
private final int start;
private int currentlyConsumed;
private ExponentialBackoffIterator(int start, int numberOfElements) {
this.start = start;
this.numberOfElements = numberOfElements;
}
@Override
public boolean hasNext() {
return currentlyConsumed < numberOfElements;
}
@Override
public TimeValue next() {
if (hasNext() == false) {
throw new NoSuchElementException("Only up to " + numberOfElements + " elements");
}
int result = start + 10 * ((int) Math.exp(0.8d * (currentlyConsumed)) - 1);
currentlyConsumed++;
return TimeValue.timeValueMillis(result);
}
}
private static final
|
ExponentialBackoffIterator
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/simple/InterfaceResourceImpl.java
|
{
"start": 96,
"end": 290
}
|
class ____ implements InterfaceResource {
@Inject
HelloService helloService;
@Override
public String hello() {
return helloService.sayHello();
}
}
|
InterfaceResourceImpl
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/NamedLikeContextualKeywordTest.java
|
{
"start": 1250,
"end": 1706
}
|
class ____ {
static Throwable foo;
public Test() {}
// BUG: Diagnostic contains: [NamedLikeContextualKeyword]
public void yield() {
foo = new NullPointerException("uh oh");
}
}
""")
.doTest();
}
@Test
public void staticMethodName_error() {
helper
.addSourceLines(
"Test.java",
"""
|
Test
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicenseTests.java
|
{
"start": 1315,
"end": 18207
}
|
class ____ extends ESTestCase {
public void testFromXContentForGoldLicenseWithVersion2Signature() throws Exception {
String licenseString = """
{
"license": {
"uid": "4056779d-b823-4c12-a9cb-efa4a8d8c422",
"type": "gold",
"issue_date_in_millis": 1546589020459,
"expiry_date_in_millis": 1546596340459,
"max_nodes": 5,
"issued_to": "customer",
"issuer": "elasticsearch",
"signature": "AAAAAgAAAA34V2kfTJVtvdL2LttwAAABmFJ6NGRnbEM3WVQrZVQwNkdKQmR1VytlMTMyM1J0dTZ1WGwyY2ZCVFhqMGtJU2gzZ3pnNTVp\
OWF5Y1NaUkwyN2VsTEtCYnlZR2c5WWtjQ0phaDlhRjlDUXViUmUwMWhjSkE2TFcwSGdneTJHbUV4N2RHUWJxV20ybjRsZHRzV2xkN0ZmdDlYblJmNVcxMl\
BWeU81V1hLUm1EK0V1dmF3cFdlSGZzTU5SZE1qUmFra3JkS1hCanBWVmVTaFFwV3BVZERzeG9Sci9rYnlJK2toODZXY09tNmFHUVNUL3IyUHExV3VSTlBn\
eWNJcFQ0bXl0cmhNNnRwbE1CWE4zWjJ5eGFuWFo0NGhsb3B5WFd1eTdYbFFWQkxFVFFPSlBERlB0eVVJYXVSZ0lsR2JpRS9rN1h4MSsvNUpOcGN6cU1NOH\
N1cHNtSTFIUGN1bWNGNEcxekhrblhNOXZ2VEQvYmRzQUFwbytUZEpRR3l6QU5oS2ZFSFdSbGxxNDZyZ0xvUHIwRjdBL2JqcnJnNGFlK09Cek9pYlJ5Umc9\
PQAAAQAth77fQLF7CCEL7wA6Z0/UuRm/weECcsjW/50kBnPLO8yEs+9/bPa5LSU0bF6byEXOVeO0ebUQfztpjulbXh8TrBDSG+6VdxGtohPo2IYPBaXzGs\
3LOOor6An/lhptxBWdwYmfbcp0m8mnXZh1vN9rmbTsZXnhBIoPTaRDwUBi3vJ3Ms3iLaEm4S8Slrfmtht2jUjgGZ2vAeZ9OHU2YsGtrSpz6f"
}
}""";
License license = License.fromSource(new BytesArray(licenseString.getBytes(StandardCharsets.UTF_8)), XContentType.JSON);
assertThat(license.type(), equalTo("gold"));
assertThat(license.uid(), equalTo("4056779d-b823-4c12-a9cb-efa4a8d8c422"));
assertThat(license.issuer(), equalTo("elasticsearch"));
assertThat(license.issuedTo(), equalTo("customer"));
assertThat(license.expiryDate(), equalTo(1546596340459L));
assertThat(license.issueDate(), equalTo(1546589020459L));
assertThat(license.maxNodes(), equalTo(5));
assertThat(license.maxResourceUnits(), equalTo(-1));
assertThat(license.version(), equalTo(2));
}
public void testFromXContentForGoldLicenseWithVersion4Signature() throws Exception {
String licenseString = """
{
"license": {
"uid": "4056779d-b823-4c12-a9cb-efa4a8d8c422",
"type": "gold",
"issue_date_in_millis": 1546589020459,
"expiry_date_in_millis": 1546596340459,
"max_nodes": 5,
"issued_to": "customer",
"issuer": "elasticsearch",
"signature": "AAAABAAAAA22vXffI41oM4jLCwZ6AAAAIAo5/x6hrsGh1GqqrJmy4qgmEC7gK0U4zQ6q5ZEMhm4jAAABAH3oL4weubwYGjLGNZsz90EerX6y\
OX3Dh6wswG9EfqCiyv6lcjuC7aeKKuOkqhMRTHZ9vHnfMuakHWVlpuGC14WyGqaMwSmgTZ9jVAzt/W3sIotRxM/3rtlCXUc1rOUXNFcii1i3KkrckTzhENTKjdkOmU\
N3qZlTEmHkp93eYpx8++iIukHYU9K9Vm2VKgydFfxvYaN/Qr+iPfJSbHJB8+DmS2ywdrmdqW+ScE+1ZNouPNhnP3RKTleNvixXPG9l5BqZ2So1IlCrxVDByA1E6JH5\
AvjbOucpcGiWCm7IzvfpkzphKHMyxhUaIByoHl9UAf4AdPLhowWAQk0eHMRDDlo=",
"start_date_in_millis": -1
}
}""";
License license = License.fromSource(new BytesArray(licenseString.getBytes(StandardCharsets.UTF_8)), XContentType.JSON);
assertThat(license.type(), equalTo("gold"));
assertThat(license.uid(), equalTo("4056779d-b823-4c12-a9cb-efa4a8d8c422"));
assertThat(license.issuer(), equalTo("elasticsearch"));
assertThat(license.issuedTo(), equalTo("customer"));
assertThat(license.expiryDate(), equalTo(1546596340459L));
assertThat(license.issueDate(), equalTo(1546589020459L));
assertThat(license.maxNodes(), equalTo(5));
assertThat(license.maxResourceUnits(), equalTo(-1));
assertThat(license.version(), equalTo(4));
}
public void testFromXContentForEnterpriseLicenseWithV5Signature() throws Exception {
String licenseString = """
{
"license": {
"uid": "4056779d-b823-4c12-a9cb-efa4a8d8c422",
"type": "enterprise",
"issue_date_in_millis": 1546589020459,
"expiry_date_in_millis": 1546596340459,
"max_nodes": null,
"max_resource_units": 15,
"issued_to": "customer",
"issuer": "elasticsearch",
"signature": "AAAABQAAAA2MUoEqXb9K9Ie5d6JJAAAAIAo5/x6hrsGh1GqqrJmy4qgmEC7gK0U4zQ6q5ZEMhm4jAAABAAAwVZKGAmDELUlS5PScBkhQsZaD\
aQTtJ4ZP5EnZ/nLpmCt9Dj7d/FRsgMtHmSJLrr2CdrIo4Vx5VuhmbwzZvXMttLz2lrJzG7770PX3TnC9e7F9GdnE9ec0FP2U0ZlLOBOtPuirX0q+j6GfB+DLyE5D+L\
o1NQ3eLJGvbd3DBYPWJxkb+EBVHczCH2OrIEVWnN/TafmkdZCPX5PcultkNOs3j7d3s7b51EXHKoye8UTcB/RGmzZwMah+E6I/VJkqu7UHL8bB01wJeqo6WxI4LC/9\
+f5kpmHrUu3CHe5pHbmMGDk7O6/cwt1pw/hnJXKIFCi36IGaKcHLgORxQdN0uzE=",
"start_date_in_millis": -1
}
}""";
License license = License.fromSource(new BytesArray(licenseString.getBytes(StandardCharsets.UTF_8)), XContentType.JSON);
assertThat(license.type(), equalTo("enterprise"));
assertThat(license.uid(), equalTo("4056779d-b823-4c12-a9cb-efa4a8d8c422"));
assertThat(license.issuer(), equalTo("elasticsearch"));
assertThat(license.issuedTo(), equalTo("customer"));
assertThat(license.expiryDate(), equalTo(1546596340459L));
assertThat(license.issueDate(), equalTo(1546589020459L));
assertThat(license.maxNodes(), equalTo(-1));
assertThat(license.maxResourceUnits(), equalTo(15));
assertThat(license.version(), equalTo(5));
}
public void testThatEnterpriseLicenseMayNotHaveMaxNodes() throws Exception {
License.Builder builder = randomLicense(License.LicenseType.ENTERPRISE).maxNodes(randomIntBetween(1, 50))
.maxResourceUnits(randomIntBetween(10, 500));
final IllegalStateException ex = expectThrows(IllegalStateException.class, builder::build);
assertThat(ex, TestMatchers.throwableWithMessage("maxNodes may not be set for enterprise licenses (type=[enterprise])"));
}
public void testThatEnterpriseLicenseMustHaveMaxResourceUnits() throws Exception {
License.Builder builder = randomLicense(License.LicenseType.ENTERPRISE).maxResourceUnits(-1);
final IllegalStateException ex = expectThrows(IllegalStateException.class, builder::build);
assertThat(ex, TestMatchers.throwableWithMessage("maxResourceUnits must be set for enterprise licenses (type=[enterprise])"));
}
public void testThatRegularLicensesMustHaveMaxNodes() throws Exception {
License.LicenseType type = randomValueOtherThan(License.LicenseType.ENTERPRISE, () -> randomFrom(License.LicenseType.values()));
License.Builder builder = randomLicense(type).maxNodes(-1);
final IllegalStateException ex = expectThrows(IllegalStateException.class, builder::build);
assertThat(ex, TestMatchers.throwableWithMessage("maxNodes has to be set"));
}
public void testThatRegularLicensesMayNotHaveMaxResourceUnits() throws Exception {
License.LicenseType type = randomValueOtherThan(License.LicenseType.ENTERPRISE, () -> randomFrom(License.LicenseType.values()));
License.Builder builder = randomLicense(type).maxResourceUnits(randomIntBetween(10, 500)).maxNodes(randomIntBetween(1, 50));
final IllegalStateException ex = expectThrows(IllegalStateException.class, builder::build);
assertThat(
ex,
TestMatchers.throwableWithMessage(
"maxResourceUnits may only be set for enterprise licenses (not permitted " + "for type=[" + type.getTypeName() + "])"
)
);
}
public void testLicenseToAndFromXContentForEveryLicenseType() throws Exception {
for (License.LicenseType type : License.LicenseType.values()) {
final License license1 = randomLicense(type)
// We need a signature that parses correctly, but it doesn't need to verify
.signature(
"AAAABQAAAA2MUoEqXb9K9Ie5d6JJAAAAIAo5/x6hrsGh1GqqrJmy4qgmEC7gK0U4zQ6q5ZEMhm4jAAABAAAwVZKGAmDELUlS5PScBkhQsZa"
+ "DaQTtJ4ZP5EnZ/nLpmCt9Dj7d/FRsgMtHmSJLrr2CdrIo4Vx5VuhmbwzZvXMttLz2lrJzG7770PX3TnC9e7F9GdnE9ec0FP2U0ZlL"
+ "OBOtPuirX0q+j6GfB+DLyE5D+Lo1NQ3eLJGvbd3DBYPWJxkb+EBVHczCH2OrIEVWnN/TafmkdZCPX5PcultkNOs3j7d3s7b51EXHK"
+ "oye8UTcB/RGmzZwMah+E6I/VJkqu7UHL8bB01wJeqo6WxI4LC/9+f5kpmHrUu3CHe5pHbmMGDk7O6/cwt1pw/hnJXKIFCi36IGaKc"
+ "HLgORxQdN0uzE="
)
.build();
XContentParser parser = XContentType.JSON.xContent()
.createParser(XContentParserConfiguration.EMPTY, Strings.toString(license1));
License license2 = License.fromXContent(parser);
assertThat(license2, notNullValue());
assertThat(license2.type(), equalTo(type.getTypeName()));
assertThat(license2.uid(), equalTo(license1.uid()));
assertThat(license2.issuer(), equalTo(license1.issuer()));
assertThat(license2.issuedTo(), equalTo(license1.issuedTo()));
assertThat(license2.expiryDate(), equalTo(license1.expiryDate()));
assertThat(license2.issueDate(), equalTo(license1.issueDate()));
assertThat(license2.maxNodes(), equalTo(license1.maxNodes()));
assertThat(license2.maxResourceUnits(), equalTo(license1.maxResourceUnits()));
}
}
public void testSerializationOfLicenseForEveryLicenseType() throws Exception {
for (License.LicenseType type : License.LicenseType.values()) {
final String signature = randomBoolean()
? null
: "AAAABQAAAA2MUoEqXb9K9Ie5d6JJAAAAIAo5/x6hrsGh1GqqrJmy4qgmEC7gK0U4zQ6q5ZEM"
+ "hm4jAAABAAAwVZKGAmDELUlS5PScBkhQsZaDaQTtJ4ZP5EnZ/nLpmCt9Dj7d/FRsgMtH"
+ "mSJLrr2CdrIo4Vx5VuhmbwzZvXMttLz2lrJzG7770PX3TnC9e7F9GdnE9ec0FP2U0ZlL"
+ "OBOtPuirX0q+j6GfB+DLyE5D+Lo1NQ3eLJGvbd3DBYPWJxkb+EBVHczCH2OrIEVWnN/T"
+ "afmkdZCPX5PcultkNOs3j7d3s7b51EXHKoye8UTcB/RGmzZwMah+E6I/VJkqu7UHL8bB"
+ "01wJeqo6WxI4LC/9+f5kpmHrUu3CHe5pHbmMGDk7O6/cwt1pw/hnJXKIFCi36IGaKcHL"
+ "gORxQdN0uzE=";
final int version;
if (type == License.LicenseType.ENTERPRISE) {
version = randomIntBetween(License.VERSION_ENTERPRISE, License.VERSION_CURRENT);
} else {
version = randomIntBetween(License.VERSION_NO_FEATURE_TYPE, License.VERSION_CURRENT);
}
final License license1 = randomLicense(type).signature(signature).version(version).build();
final BytesStreamOutput out = new BytesStreamOutput();
out.setTransportVersion(TransportVersion.current());
license1.writeTo(out);
final StreamInput in = out.bytes().streamInput();
in.setTransportVersion(TransportVersion.current());
final License license2 = License.readLicense(in);
assertThat(in.read(), Matchers.equalTo(-1));
assertThat(license2, notNullValue());
assertThat(license2.type(), equalTo(type.getTypeName()));
assertThat(license2.version(), equalTo(version));
assertThat(license2.signature(), equalTo(signature));
assertThat(license2.uid(), equalTo(license1.uid()));
assertThat(license2.issuer(), equalTo(license1.issuer()));
assertThat(license2.issuedTo(), equalTo(license1.issuedTo()));
assertThat(license2.expiryDate(), equalTo(license1.expiryDate()));
assertThat(license2.issueDate(), equalTo(license1.issueDate()));
assertThat(license2.maxNodes(), equalTo(license1.maxNodes()));
assertThat(license2.maxResourceUnits(), equalTo(license1.maxResourceUnits()));
}
}
public void testNotEnoughBytesFromXContent() throws Exception {
String licenseString = """
{
"license": {
"uid": "4056779d-b823-4c12-a9cb-efa4a8d8c422",
"type": "gold",
"issue_date_in_millis": 1546589020459,
"expiry_date_in_millis": 1546596340459,
"max_nodes": 5,
"issued_to": "customer",
"issuer": "elasticsearch",
"signature": "AA"
}
}""";
ElasticsearchException exception = expectThrows(ElasticsearchException.class, () -> {
License.fromSource(new BytesArray(licenseString.getBytes(StandardCharsets.UTF_8)), XContentType.JSON);
});
assertThat(exception.getMessage(), containsString("malformed signature for license [4056779d-b823-4c12-a9cb-efa4a8d8c422]"));
assertThat(exception.getCause(), instanceOf(BufferUnderflowException.class));
}
public void testMalformedSignatureFromXContent() throws Exception {
String licenseString = Strings.format("""
{
"license": {
"uid": "4056779d-b823-4c12-a9cb-efa4a8d8c422",
"type": "gold",
"issue_date_in_millis": 1546589020459,
"expiry_date_in_millis": 1546596340459,
"max_nodes": 5,
"issued_to": "customer",
"issuer": "elasticsearch",
"signature": "%s"
}
}""", randomAlphaOfLength(10));
ElasticsearchException exception = expectThrows(ElasticsearchException.class, () -> {
License.fromSource(new BytesArray(licenseString.getBytes(StandardCharsets.UTF_8)), XContentType.JSON);
});
// When parsing a license, we read the signature bytes to verify the _version_.
// Random alphabetic sig bytes will generate a bad version
assertThat(exception, throwableWithMessage(containsString("Unknown license version found")));
}
public void testUnableToBase64DecodeFromXContent() throws Exception {
String licenseString = """
{
"license": {
"uid": "4056779d-b823-4c12-a9cb-efa4a8d8c422",
"type": "gold",
"issue_date_in_millis": 1546589020459,
"expiry_date_in_millis": 1546596340459,
"max_nodes": 5,
"issued_to": "customer",
"issuer": "elasticsearch",
"signature": "AAAAAgAAAA34V2kfTJVtvdL2LttwAAABmFJ6NGRnbEM3WVQrZVQwNkdKQmR1VytlMTMyM1J0dTZ1WGwyY2ZCVFhqMGtJU2gzZ3pnNTVpOWF5\
Y1NaUkwyN2VsTEtCYnlZR2c5WWtjQ0phaDlhRjlDUXViUmUwMWhjSkE2TFcwSGdneTJHbUV4N2RHUWJxV20ybjRsZHRzV2xkN0ZmdDlYblJmNVcxMlBWeU81V1hLUm\
1EK0V1dmF3cFdlSGZzTU5SZE1qUmFra3JkS1hCanBWVmVTaFFwV3BVZERzeG9Sci9rYnlJK2toODZXY09tNmFHUVNUL3IyUHExV3VSTlBneWNJcFQ0bXl0cmhNNnRw\
bE1CWE4zWjJ5eGFuWFo0NGhsb3B5WFd1eTdYbFFWQkxFVFFPSlBERlB0eVVJYXVSZ0lsR2JpRS9rN1h4MSsvNUpOcGN6cU1NOHN1cHNtSTFIUGN1bWNGNEcxekhrbl\
hNOXZ2VEQvYmRzQUFwbytUZEpRR3l6QU5oS2ZFSFdSbGxxNDZyZ0xvUHIwRjdBL2JqcnJnNGFlK09Cek9pYlJ5Umc9PQAAAQAth77fQLF7CCEL7wA6Z0/UuRm/weEC\
csjW/50kBnPLO8yEs+9/bPa5LSU0bF6byEXOVeO0ebUQfztpjulbXh8TrBDSG+6VdxGtohPo2IYPBaXzGs3LOOor6An/lhptxBWdwYmf+xHAQ8tyvRqP5G+PRU7til\
uEwR/eyHGZV2exdJNzmoGzdPSWwueBM5HK2GexORICH+UFI4cuGz444/hL2MMM1RdpVWQkT0SJ6D9x/VuSmHuYPdtX59Pp41LXvlbcp0m8mnXZh1vN9rmbTsZXnhBI\
oPTaRDwUBi3vJ3Ms3iLaEm4S8Slrfmtht2jUjgGZ2vAeZ9OHU2YsGtrSpz6fd"
}""";
ElasticsearchException exception = expectThrows(ElasticsearchException.class, () -> {
License.fromSource(new BytesArray(licenseString.getBytes(StandardCharsets.UTF_8)), XContentType.JSON);
});
assertThat(exception.getMessage(), containsString("malformed signature for license [4056779d-b823-4c12-a9cb-efa4a8d8c422]"));
assertThat(exception.getCause(), instanceOf(IllegalArgumentException.class));
}
private License.Builder randomLicense(License.LicenseType type) {
return License.builder()
.uid(UUIDs.randomBase64UUID(random()))
.type(type)
.issueDate(System.currentTimeMillis() - TimeUnit.DAYS.toMillis(randomIntBetween(1, 10)))
.expiryDate(System.currentTimeMillis() + TimeUnit.DAYS.toMillis(randomIntBetween(1, 1000)))
.maxNodes(type == License.LicenseType.ENTERPRISE ? -1 : randomIntBetween(1, 100))
.maxResourceUnits(type == License.LicenseType.ENTERPRISE ? randomIntBetween(1, 100) : -1)
.issuedTo(randomAlphaOfLengthBetween(5, 50))
.issuer(randomAlphaOfLengthBetween(5, 50));
}
}
|
LicenseTests
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/event/AnnotationDrivenEventListenerTests.java
|
{
"start": 26816,
"end": 26964
}
|
class ____ {
public boolean valid(Double ratio) {
return Double.valueOf(42).equals(ratio);
}
}
}
abstract static
|
TestConditionEvaluator
|
java
|
quarkusio__quarkus
|
integration-tests/grpc-test-random-port/src/test/java/io/quarkus/grpc/examples/hello/RandomPortSeparateServerPlainTestBase.java
|
{
"start": 214,
"end": 621
}
|
class ____ implements QuarkusTestProfile {
@Override
public Map<String, String> getConfigOverrides() {
return Map.of(
"quarkus.grpc.server.test-port", "0",
"quarkus.grpc.clients.hello.host", "localhost");
}
}
@Override
protected String serverPortProperty() {
return "quarkus.grpc.server.test-port";
}
}
|
Profile
|
java
|
apache__flink
|
flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/util/collections/LongHashSet.java
|
{
"start": 945,
"end": 4408
}
|
class ____ extends OptimizableHashSet {
private long[] key;
private long min = Long.MAX_VALUE;
private long max = Long.MIN_VALUE;
public LongHashSet(final int expected, final float f) {
super(expected, f);
this.key = new long[this.n + 1];
}
public LongHashSet(final int expected) {
this(expected, DEFAULT_LOAD_FACTOR);
}
public LongHashSet() {
this(DEFAULT_INITIAL_SIZE, DEFAULT_LOAD_FACTOR);
}
public boolean add(final long k) {
if (k == 0L) {
if (this.containsZero) {
return false;
}
this.containsZero = true;
} else {
long[] key = this.key;
int pos;
long curr;
if ((curr = key[pos = (int) MurmurHashUtil.fmix(k) & this.mask]) != 0L) {
if (curr == k) {
return false;
}
while ((curr = key[pos = pos + 1 & this.mask]) != 0L) {
if (curr == k) {
return false;
}
}
}
key[pos] = k;
}
if (this.size++ >= this.maxFill) {
this.rehash(OptimizableHashSet.arraySize(this.size + 1, this.f));
}
if (k < min) {
min = k;
}
if (k > max) {
max = k;
}
return true;
}
public boolean contains(final long k) {
if (isDense) {
return k >= min && k <= max && used[(int) (k - min)];
} else {
if (k == 0L) {
return this.containsZero;
} else {
long[] key = this.key;
long curr;
int pos;
if ((curr = key[pos = (int) MurmurHashUtil.fmix(k) & this.mask]) == 0L) {
return false;
} else if (k == curr) {
return true;
} else {
while ((curr = key[pos = pos + 1 & this.mask]) != 0L) {
if (k == curr) {
return true;
}
}
return false;
}
}
}
}
private void rehash(final int newN) {
long[] key = this.key;
int mask = newN - 1;
long[] newKey = new long[newN + 1];
int i = this.n;
int pos;
for (int j = this.realSize(); j-- != 0; newKey[pos] = key[i]) {
do {
--i;
} while (key[i] == 0L);
if (newKey[pos = (int) MurmurHashUtil.fmix(key[i]) & mask] != 0L) {
while (newKey[pos = pos + 1 & mask] != 0L) {}
}
}
this.n = newN;
this.mask = mask;
this.maxFill = OptimizableHashSet.maxFill(this.n, this.f);
this.key = newKey;
}
@Override
public void optimize() {
long range = max - min;
if (range >= 0 && (range < key.length || range < OptimizableHashSet.DENSE_THRESHOLD)) {
this.used = new boolean[(int) (max - min + 1)];
for (long v : key) {
if (v != 0) {
used[(int) (v - min)] = true;
}
}
if (containsZero) {
used[(int) (-min)] = true;
}
isDense = true;
key = null;
}
}
}
|
LongHashSet
|
java
|
apache__kafka
|
group-coordinator/src/test/java/org/apache/kafka/coordinator/group/modern/UnionSetTest.java
|
{
"start": 1336,
"end": 4001
}
|
class ____ {
@Test
public void testSetsCannotBeNull() {
assertThrows(NullPointerException.class, () -> new UnionSet<String>(Set.of(), null));
assertThrows(NullPointerException.class, () -> new UnionSet<String>(null, Set.of()));
}
@Test
public void testUnion() {
UnionSet<Integer> union = new UnionSet<>(
Set.of(1, 2, 3),
Set.of(2, 3, 4, 5)
);
List<Integer> result = new ArrayList<>(union);
result.sort(Integer::compareTo);
assertEquals(List.of(1, 2, 3, 4, 5), result);
}
@Test
public void testSize() {
UnionSet<Integer> union = new UnionSet<>(
Set.of(1, 2, 3),
Set.of(2, 3, 4, 5)
);
assertEquals(5, union.size());
}
@Test
public void testIsEmpty() {
UnionSet<Integer> union = new UnionSet<>(
Set.of(1, 2, 3),
Set.of(2, 3, 4, 5)
);
assertFalse(union.isEmpty());
union = new UnionSet<>(
Set.of(1, 2, 3),
Set.of()
);
assertFalse(union.isEmpty());
union = new UnionSet<>(
Set.of(),
Set.of(2, 3, 4, 5)
);
assertFalse(union.isEmpty());
union = new UnionSet<>(
Set.of(),
Set.of()
);
assertTrue(union.isEmpty());
}
@Test
public void testContains() {
UnionSet<Integer> union = new UnionSet<>(
Set.of(1, 2, 3),
Set.of(2, 3, 4, 5)
);
IntStream.range(1, 6).forEach(item -> assertTrue(union.contains(item)));
assertFalse(union.contains(0));
assertFalse(union.contains(6));
}
@Test
public void testToArray() {
UnionSet<Integer> union = new UnionSet<>(
Set.of(1, 2, 3),
Set.of(2, 3, 4, 5)
);
Object[] expected = {1, 2, 3, 4, 5};
Object[] actual = union.toArray();
Arrays.sort(actual);
assertArrayEquals(expected, actual);
}
@Test
public void testToArrayWithArrayParameter() {
UnionSet<Integer> union = new UnionSet<>(
Set.of(1, 2, 3),
Set.of(2, 3, 4, 5)
);
Integer[] input = new Integer[5];
Integer[] expected = {1, 2, 3, 4, 5};
union.toArray(input);
Arrays.sort(input);
assertArrayEquals(expected, input);
}
@Test
public void testEquals() {
UnionSet<Integer> union = new UnionSet<>(
Set.of(1, 2, 3),
Set.of(2, 3, 4, 5)
);
assertEquals(Set.of(1, 2, 3, 4, 5), union);
}
}
|
UnionSetTest
|
java
|
quarkusio__quarkus
|
extensions/oidc-client-filter/deployment/src/test/java/io/quarkus/oidc/client/filter/OidcClientFilterRevokedAccessTokenDevModeTest.java
|
{
"start": 4113,
"end": 4335
}
|
interface ____ {
@OidcClientFilter
@POST
String revokeAccessTokenAndRespond(String named);
}
@RegisterRestClient
@Path(MY_SERVER_RESOURCE_PATH)
public
|
MyDefaultClient_AnnotationOnMethod
|
java
|
quarkusio__quarkus
|
independent-projects/qute/debug/src/test/java/io/quarkus/qute/debug/breakpoints/ConditionalBreakpointTest.java
|
{
"start": 722,
"end": 7246
}
|
class ____ {
private static final String TEMPLATE_ID = "hello.qute";
@Test
public void debuggingTemplate() throws Exception {
int port = DebuggerUtils.findAvailableSocketPort();
// Server side :
// - create a Qute engine and set the debugging port as 1234
Engine engine = Engine.builder() //
.enableTracing(true) // enable tracing required by debugger
.addEngineListener(new RegisterDebugServerAdapter(port, false)) // debug engine on the given port
.addDefaults().addValueResolver(new ReflectionValueResolver()).build();
// - create a Qute template
Template template = engine.parse("<html>\n" + //
" Hello {name}!\n" + //
" {#for item in items}\n" + //
" {item}\n" + //
" {item_count}\n" + //
" {/for}\n" + //
"</html>", null, TEMPLATE_ID);
// Client side
// - connect the remote debugger client on the given port
DAPClient client = new DAPClient();
client.connectToServer(port) //
.get(10000, TimeUnit.MILLISECONDS);
// Render template without breakpoint
final StringBuilder renderResult = new StringBuilder(1028);
var renderThread = new RenderTemplateInThread(template, renderResult, instance -> {
instance.data("name", "Quarkus") //
.data("items", List.of("foo", "bar", "baz"));
});
assertEquals("<html>\n" + //
" Hello Quarkus!\n" + //
" foo\n" + //
" 1\n" + //
" bar\n" + //
" 2\n" + //
" baz\n" + //
" 3\n" + //
"</html>", renderResult.toString());
// Set a breakpoint on line 5: --> {item_count}
client.setBreakpoint("src/main/resources/templates/" + TEMPLATE_ID, 5);
// Render template with a breakpoint
renderResult.setLength(0);
renderThread.render();
// Result here is empty
assertEquals("", renderResult.toString());
// Collect debuggee Thread (one thread)
var threads = client.getThreads();
assertEquals(1, threads.length);
var thread = threads[0];
int threadId = thread.getId();
assertEquals("Qute render thread", thread.getName());
// Get stack trace of the debuggee Thread
StackFrame[] stackFrames = client.getStackFrames(threadId);
StackFrame currentFrame = stackFrames[0];
int frameId = currentFrame.getId();
String frameName = currentFrame.getName();
assertEquals("ExpressionNode [expression=Expression [namespace=null, parts=[item_count], literal=null]]",
frameName);
// Render template with breakpoint on line 4
renderResult.setLength(0);
renderThread.render();
// Result here is empty
assertEquals("", renderResult.toString());
// Collect debuggee Thread (two threads)
threads = client.getThreads();
assertEquals(2, threads.length);
thread = threads[1];
threadId = thread.getId();
assertEquals("Qute render thread", thread.getName());
// Get stack trace of the debuggee Thread
stackFrames = client.getStackFrames(threadId);
currentFrame = stackFrames[0];
// Stack frame on item_count
frameId = currentFrame.getId();
frameName = currentFrame.getName();
assertEquals(
"ExpressionNode [expression=Expression [namespace=null, parts=[item_count], literal=null]]",
frameName);
// Evaluate item_count
var evalResult = client.evaluateSync(frameId, "item_count");
assertEquals("1", evalResult.getResult());
// Get scope (Globals, Locals, etc) of the current stack frame
Scope[] scopes = client.getScopes(frameId);
assertFalse(scopes.length == 0);
Scope globalsScope = scopes[1];
assertEquals("Globals", globalsScope.getName());
// Get variables of the Globals scope
// [name=Quarkus, ..]
int variablesReference = globalsScope.getVariablesReference();
Variable[] variables = client.getVariables(variablesReference);
assertEquals(2, variables.length);
Variable firstVar = variables[0];
assertEquals("name", firstVar.getName());
assertEquals("Quarkus", firstVar.getValue());
assertEquals("java.lang.String", firstVar.getType());
Variable secondVar = variables[1];
assertEquals("items", secondVar.getName());
assertEquals("[foo, bar, baz]", secondVar.getValue());
assertEquals("java.util.ImmutableCollections$ListN", secondVar.getType());
// Set a breakpoint on line 5: --> {item_count} with condition
client.setBreakpoint("src/main/resources/templates/" + TEMPLATE_ID, 5, "item_count > 2");
// Resume (_continue) the breakpoint
client.resume(threadId);
java.lang.Thread.sleep(1000);
stackFrames = client.getStackFrames(threadId);
currentFrame = stackFrames[0];
// Stack frame on item_count
frameId = currentFrame.getId();
frameName = currentFrame.getName();
assertEquals(
"ExpressionNode [expression=Expression [namespace=null, parts=[item_count], literal=null]]",
frameName);
// Evaluate item_count
evalResult = client.evaluateSync(frameId, "item_count");
assertEquals("3", evalResult.getResult());
client.resume(threadId);
java.lang.Thread.sleep(1000);
// Result here is:
// <!DOCTYPE html>
// <html>
// <body>
// <h1>Hello <b>Qute</b></h1>
// </body>
// </html>
java.lang.Thread.sleep(1000);
assertEquals("<html>\n" + //
" Hello Quarkus!\n" + //
" foo\n" + //
" 1\n" + //
" bar\n" + //
" 2\n" + //
" baz\n" + //
" 3\n" + //
"</html>", renderResult.toString());
// On client side, disconnect the client
client.terminate();
// On server side, terminate the server
// server.terminate();
}
}
|
ConditionalBreakpointTest
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/monitor/jvm/HotThreads.java
|
{
"start": 6831,
"end": 7103
}
|
enum ____ {
TOTAL("total"),
CPU("cpu");
private final String order;
SortOrder(String order) {
this.order = order;
}
public String getOrderValue() {
return order;
}
// Custom
|
SortOrder
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundToDouble7Evaluator.java
|
{
"start": 4589,
"end": 5758
}
|
class ____ implements EvalOperator.ExpressionEvaluator.Factory {
private final Source source;
private final EvalOperator.ExpressionEvaluator.Factory field;
private final double p0;
private final double p1;
private final double p2;
private final double p3;
private final double p4;
private final double p5;
private final double p6;
public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory field, double p0,
double p1, double p2, double p3, double p4, double p5, double p6) {
this.source = source;
this.field = field;
this.p0 = p0;
this.p1 = p1;
this.p2 = p2;
this.p3 = p3;
this.p4 = p4;
this.p5 = p5;
this.p6 = p6;
}
@Override
public RoundToDouble7Evaluator get(DriverContext context) {
return new RoundToDouble7Evaluator(source, field.get(context), p0, p1, p2, p3, p4, p5, p6, context);
}
@Override
public String toString() {
return "RoundToDouble7Evaluator[" + "field=" + field + ", p0=" + p0 + ", p1=" + p1 + ", p2=" + p2 + ", p3=" + p3 + ", p4=" + p4 + ", p5=" + p5 + ", p6=" + p6 + "]";
}
}
}
|
Factory
|
java
|
apache__commons-lang
|
src/test/java/org/apache/commons/lang3/reflect/testbed/PublicSubBeanOtherPackage.java
|
{
"start": 1193,
"end": 1638
}
|
class ____ extends PackageBeanOtherPackage {
/**
* A directly implemented property.
*/
private String foo = "This is foo";
/**
* Package private constructor, can only use factory method to create beans.
*/
public PublicSubBeanOtherPackage() {
}
public String getFoo() {
return this.foo;
}
public void setFoo(final String foo) {
this.foo = foo;
}
}
|
PublicSubBeanOtherPackage
|
java
|
apache__kafka
|
streams/src/test/java/org/apache/kafka/streams/kstream/JoinWindowsTest.java
|
{
"start": 1508,
"end": 9965
}
|
class ____ {
private static final long ANY_SIZE = 123L;
private static final long ANY_OTHER_SIZE = 456L; // should be larger than anySize
private static final long ANY_GRACE = 1024L;
@Test
public void validWindows() {
JoinWindows.ofTimeDifferenceWithNoGrace(ofMillis(ANY_OTHER_SIZE)) // [ -anyOtherSize ; anyOtherSize ]
.before(ofMillis(ANY_SIZE)) // [ -anySize ; anyOtherSize ]
.before(ofMillis(0)) // [ 0 ; anyOtherSize ]
.before(ofMillis(-ANY_SIZE)) // [ anySize ; anyOtherSize ]
.before(ofMillis(-ANY_OTHER_SIZE)); // [ anyOtherSize ; anyOtherSize ]
JoinWindows.ofTimeDifferenceWithNoGrace(ofMillis(ANY_OTHER_SIZE)) // [ -anyOtherSize ; anyOtherSize ]
.after(ofMillis(ANY_SIZE)) // [ -anyOtherSize ; anySize ]
.after(ofMillis(0)) // [ -anyOtherSize ; 0 ]
.after(ofMillis(-ANY_SIZE)) // [ -anyOtherSize ; -anySize ]
.after(ofMillis(-ANY_OTHER_SIZE)); // [ -anyOtherSize ; -anyOtherSize ]
}
@Test
public void beforeShouldNotModifyGrace() {
final JoinWindows joinWindows = JoinWindows.ofTimeDifferenceAndGrace(ofMillis(ANY_SIZE), ofMillis(ANY_OTHER_SIZE))
.before(ofSeconds(ANY_SIZE));
assertThat(joinWindows.gracePeriodMs(), equalTo(ANY_OTHER_SIZE));
}
@Test
public void afterShouldNotModifyGrace() {
final JoinWindows joinWindows = JoinWindows.ofTimeDifferenceAndGrace(ofMillis(ANY_SIZE), ofMillis(ANY_OTHER_SIZE))
.after(ofSeconds(ANY_SIZE));
assertThat(joinWindows.gracePeriodMs(), equalTo(ANY_OTHER_SIZE));
}
@Test
public void timeDifferenceMustNotBeNegative() {
assertThrows(IllegalArgumentException.class, () -> JoinWindows.ofTimeDifferenceWithNoGrace(ofMillis(-1)));
assertThrows(IllegalArgumentException.class, () -> JoinWindows.ofTimeDifferenceAndGrace(ofMillis(-1), ofMillis(ANY_GRACE)));
}
@SuppressWarnings("deprecation")
@Test
public void graceShouldNotCalledAfterGraceSet() {
assertThrows(IllegalStateException.class, () -> JoinWindows.ofTimeDifferenceAndGrace(ofMillis(10), ofMillis(10)).grace(ofMillis(10)));
assertThrows(IllegalStateException.class, () -> JoinWindows.ofTimeDifferenceWithNoGrace(ofMillis(10)).grace(ofMillis(10)));
}
@Test
public void endTimeShouldNotBeBeforeStart() {
final JoinWindows windowSpec = JoinWindows.ofTimeDifferenceWithNoGrace(ofMillis(ANY_SIZE));
try {
windowSpec.after(ofMillis(-ANY_SIZE - 1));
fail("window end time should not be before window start time");
} catch (final IllegalArgumentException e) {
// expected
}
}
@Test
public void startTimeShouldNotBeAfterEnd() {
final JoinWindows windowSpec = JoinWindows.ofTimeDifferenceWithNoGrace(ofMillis(ANY_SIZE));
try {
windowSpec.before(ofMillis(-ANY_SIZE - 1));
fail("window start time should not be after window end time");
} catch (final IllegalArgumentException e) {
// expected
}
}
@SuppressWarnings("deprecation")
@Test
public void untilShouldSetGraceDuration() {
final JoinWindows windowSpec = JoinWindows.of(ofMillis(ANY_SIZE));
final long windowSize = windowSpec.size();
assertEquals(windowSize, windowSpec.grace(ofMillis(windowSize)).gracePeriodMs());
}
@Test
public void gracePeriodShouldEnforceBoundaries() {
JoinWindows.ofTimeDifferenceAndGrace(ofMillis(3L), ofMillis(0L));
try {
JoinWindows.ofTimeDifferenceAndGrace(ofMillis(3L), ofMillis(-1L));
fail("should not accept negatives");
} catch (final IllegalArgumentException e) {
//expected
}
}
@SuppressWarnings("deprecation")
@Test
public void oldAPIShouldSetDefaultGracePeriod() {
assertEquals(Duration.ofDays(1).toMillis(), DEPRECATED_DEFAULT_24_HR_GRACE_PERIOD);
assertEquals(DEPRECATED_DEFAULT_24_HR_GRACE_PERIOD - 6L, JoinWindows.of(ofMillis(3L)).gracePeriodMs());
assertEquals(0L, JoinWindows.of(ofMillis(DEPRECATED_DEFAULT_24_HR_GRACE_PERIOD)).gracePeriodMs());
assertEquals(0L, JoinWindows.of(ofMillis(DEPRECATED_DEFAULT_24_HR_GRACE_PERIOD + 1L)).gracePeriodMs());
}
@Test
public void noGraceAPIShouldNotSetGracePeriod() {
assertEquals(0L, JoinWindows.ofTimeDifferenceWithNoGrace(ofMillis(3L)).gracePeriodMs());
assertEquals(0L, JoinWindows.ofTimeDifferenceWithNoGrace(ofMillis(ANY_SIZE)).gracePeriodMs());
assertEquals(0L, JoinWindows.ofTimeDifferenceWithNoGrace(ofMillis(ANY_OTHER_SIZE)).gracePeriodMs());
}
@Test
public void withGraceAPIShouldSetGracePeriod() {
assertEquals(ANY_GRACE, JoinWindows.ofTimeDifferenceAndGrace(ofMillis(3L), ofMillis(ANY_GRACE)).gracePeriodMs());
assertEquals(ANY_GRACE, JoinWindows.ofTimeDifferenceAndGrace(ofMillis(ANY_SIZE), ofMillis(ANY_GRACE)).gracePeriodMs());
assertEquals(ANY_GRACE, JoinWindows.ofTimeDifferenceAndGrace(ofMillis(ANY_OTHER_SIZE), ofMillis(ANY_GRACE)).gracePeriodMs());
}
@Test
public void equalsAndHashcodeShouldBeValidForPositiveCases() {
verifyEquality(
JoinWindows.ofTimeDifferenceWithNoGrace(ofMillis(3)),
JoinWindows.ofTimeDifferenceWithNoGrace(ofMillis(3))
);
verifyEquality(
JoinWindows.ofTimeDifferenceAndGrace(ofMillis(3), ofMillis(2)),
JoinWindows.ofTimeDifferenceAndGrace(ofMillis(3), ofMillis(2))
);
verifyEquality(
JoinWindows.ofTimeDifferenceWithNoGrace(ofMillis(3)).after(ofMillis(2)),
JoinWindows.ofTimeDifferenceWithNoGrace(ofMillis(3)).after(ofMillis(2))
);
verifyEquality(
JoinWindows.ofTimeDifferenceWithNoGrace(ofMillis(3)).before(ofMillis(2)),
JoinWindows.ofTimeDifferenceWithNoGrace(ofMillis(3)).before(ofMillis(2))
);
verifyEquality(
JoinWindows.ofTimeDifferenceAndGrace(ofMillis(3), ofMillis(2)).after(ofMillis(4)),
JoinWindows.ofTimeDifferenceAndGrace(ofMillis(3), ofMillis(2)).after(ofMillis(4))
);
verifyEquality(
JoinWindows.ofTimeDifferenceAndGrace(ofMillis(3), ofMillis(2)).before(ofMillis(4)),
JoinWindows.ofTimeDifferenceAndGrace(ofMillis(3), ofMillis(2)).before(ofMillis(4))
);
}
@Test
public void equalsAndHashcodeShouldBeValidForNegativeCases() {
verifyInEquality(
JoinWindows.ofTimeDifferenceWithNoGrace(ofMillis(9)),
JoinWindows.ofTimeDifferenceWithNoGrace(ofMillis(3))
);
verifyInEquality(
JoinWindows.ofTimeDifferenceAndGrace(ofMillis(3), ofMillis(9)),
JoinWindows.ofTimeDifferenceAndGrace(ofMillis(3), ofMillis(2))
);
verifyInEquality(
JoinWindows.ofTimeDifferenceWithNoGrace(ofMillis(3)).after(ofMillis(9)),
JoinWindows.ofTimeDifferenceWithNoGrace(ofMillis(3)).after(ofMillis(2))
);
verifyInEquality(
JoinWindows.ofTimeDifferenceWithNoGrace(ofMillis(3)).before(ofMillis(9)),
JoinWindows.ofTimeDifferenceWithNoGrace(ofMillis(3)).before(ofMillis(2))
);
verifyInEquality(
JoinWindows.ofTimeDifferenceAndGrace(ofMillis(3), ofMillis(3)).before(ofMillis(9)).after(ofMillis(2)),
JoinWindows.ofTimeDifferenceAndGrace(ofMillis(3), ofMillis(3)).before(ofMillis(1)).after(ofMillis(2))
);
verifyInEquality(
JoinWindows.ofTimeDifferenceAndGrace(ofMillis(3), ofMillis(3)).before(ofMillis(1)).after(ofMillis(9)),
JoinWindows.ofTimeDifferenceAndGrace(ofMillis(3), ofMillis(3)).before(ofMillis(1)).after(ofMillis(2))
);
verifyInEquality(
JoinWindows.ofTimeDifferenceAndGrace(ofMillis(3), ofMillis(9)).before(ofMillis(1)).after(ofMillis(2)),
JoinWindows.ofTimeDifferenceAndGrace(ofMillis(3), ofMillis(3)).before(ofMillis(1)).after(ofMillis(2))
);
}
}
|
JoinWindowsTest
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/component/file/FileConsumerPollStrategyRollbackThrowExceptionTest.java
|
{
"start": 1404,
"end": 2504
}
|
class ____ extends ContextTestSupport {
private static volatile String event = "";
private static final CountDownLatch LATCH = new CountDownLatch(1);
@Override
protected Registry createCamelRegistry() throws Exception {
Registry jndi = super.createCamelRegistry();
jndi.bind("myPoll", new MyPollStrategy());
return jndi;
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from(fileUri("?pollStrategy=#myPoll&initialDelay=0&delay=10"))
.convertBodyTo(String.class).to("mock:result");
}
};
}
@Test
public void testRollbackThrowException() {
template.sendBodyAndHeader(fileUri(), "Hello World", Exchange.FILE_NAME, "hello.txt");
await().atMost(2, TimeUnit.SECONDS).until(() -> LATCH.getCount() == 0);
// and we should rollback X number of times
assertTrue(event.startsWith("rollback"));
}
private static
|
FileConsumerPollStrategyRollbackThrowExceptionTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/ql/JoinTableOptimizationTest.java
|
{
"start": 13380,
"end": 13545
}
|
class ____ {
@Id
Long id;
String name;
}
@Entity(name = "File")
@Table(name = "file_tbl")
@Inheritance(strategy = InheritanceType.JOINED)
public static
|
Person
|
java
|
google__dagger
|
dagger-compiler/main/java/dagger/internal/codegen/bindinggraphvalidation/DependsOnProductionExecutorValidator.java
|
{
"start": 1506,
"end": 3026
}
|
class ____ extends ValidationBindingGraphPlugin {
private final XProcessingEnv processingEnv;
private final KeyFactory keyFactory;
@Inject
DependsOnProductionExecutorValidator(XProcessingEnv processingEnv, KeyFactory keyFactory) {
this.processingEnv = processingEnv;
this.keyFactory = keyFactory;
}
@Override
public String pluginName() {
return "Dagger/DependsOnProductionExecutor";
}
@Override
public void visitGraph(BindingGraph bindingGraph, DiagnosticReporter diagnosticReporter) {
if (!usesProducers()) {
return;
}
Key productionImplementationExecutorKey = keyFactory.forProductionImplementationExecutor();
Key productionExecutorKey = keyFactory.forProductionExecutor();
bindingGraph.network().nodes().stream()
.flatMap(instancesOf(MaybeBinding.class))
.filter(node -> node.key().equals(productionExecutorKey))
.flatMap(productionExecutor -> bindingGraph.requestingBindings(productionExecutor).stream())
.filter(binding -> !binding.key().equals(productionImplementationExecutorKey))
.forEach(binding -> reportError(diagnosticReporter, binding));
}
private void reportError(DiagnosticReporter diagnosticReporter, Binding binding) {
diagnosticReporter.reportBinding(
ERROR, binding, "%s may not depend on the production executor", binding.key());
}
private boolean usesProducers() {
return processingEnv.findTypeElement(XTypeNames.PRODUCES) != null;
}
}
|
DependsOnProductionExecutorValidator
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianLongEvaluator.java
|
{
"start": 857,
"end": 4864
}
|
class ____ extends AbstractMultivalueFunction.AbstractEvaluator {
private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvMedianLongEvaluator.class);
public MvMedianLongEvaluator(EvalOperator.ExpressionEvaluator field,
DriverContext driverContext) {
super(driverContext, field);
}
@Override
public String name() {
return "MvMedian";
}
/**
* Evaluate blocks containing at least one multivalued field.
*/
@Override
public Block evalNullable(Block fieldVal) {
if (fieldVal.mvSortedAscending()) {
return evalAscendingNullable(fieldVal);
}
LongBlock v = (LongBlock) fieldVal;
int positionCount = v.getPositionCount();
try (LongBlock.Builder builder = driverContext.blockFactory().newLongBlockBuilder(positionCount)) {
MvMedian.Longs work = new MvMedian.Longs();
for (int p = 0; p < positionCount; p++) {
int valueCount = v.getValueCount(p);
if (valueCount == 0) {
builder.appendNull();
continue;
}
int first = v.getFirstValueIndex(p);
int end = first + valueCount;
for (int i = first; i < end; i++) {
long value = v.getLong(i);
MvMedian.process(work, value);
}
long result = MvMedian.finish(work);
builder.appendLong(result);
}
return builder.build();
}
}
/**
* Evaluate blocks containing at least one multivalued field.
*/
@Override
public Block evalNotNullable(Block fieldVal) {
if (fieldVal.mvSortedAscending()) {
return evalAscendingNotNullable(fieldVal);
}
LongBlock v = (LongBlock) fieldVal;
int positionCount = v.getPositionCount();
try (LongVector.FixedBuilder builder = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) {
MvMedian.Longs work = new MvMedian.Longs();
for (int p = 0; p < positionCount; p++) {
int valueCount = v.getValueCount(p);
int first = v.getFirstValueIndex(p);
int end = first + valueCount;
for (int i = first; i < end; i++) {
long value = v.getLong(i);
MvMedian.process(work, value);
}
long result = MvMedian.finish(work);
builder.appendLong(result);
}
return builder.build().asBlock();
}
}
/**
* Evaluate blocks containing at least one multivalued field and all multivalued fields are in ascending order.
*/
private Block evalAscendingNullable(Block fieldVal) {
LongBlock v = (LongBlock) fieldVal;
int positionCount = v.getPositionCount();
try (LongBlock.Builder builder = driverContext.blockFactory().newLongBlockBuilder(positionCount)) {
MvMedian.Longs work = new MvMedian.Longs();
for (int p = 0; p < positionCount; p++) {
int valueCount = v.getValueCount(p);
if (valueCount == 0) {
builder.appendNull();
continue;
}
int first = v.getFirstValueIndex(p);
long result = MvMedian.ascending(v, first, valueCount);
builder.appendLong(result);
}
return builder.build();
}
}
/**
* Evaluate blocks containing at least one multivalued field and all multivalued fields are in ascending order.
*/
private Block evalAscendingNotNullable(Block fieldVal) {
LongBlock v = (LongBlock) fieldVal;
int positionCount = v.getPositionCount();
try (LongVector.FixedBuilder builder = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) {
MvMedian.Longs work = new MvMedian.Longs();
for (int p = 0; p < positionCount; p++) {
int valueCount = v.getValueCount(p);
int first = v.getFirstValueIndex(p);
long result = MvMedian.ascending(v, first, valueCount);
builder.appendLong(result);
}
return builder.build().asBlock();
}
}
@Override
public long baseRamBytesUsed() {
return BASE_RAM_BYTES_USED + field.baseRamBytesUsed();
}
public static
|
MvMedianLongEvaluator
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/collection/spi/AbstractPersistentCollection.java
|
{
"start": 27339,
"end": 28907
}
|
class ____<E> implements java.util.Set<E> {
protected final Collection<E> set;
public SetProxy(Collection<E> set) {
this.set = set;
}
@Override
public boolean add(E o) {
write();
return set.add( o );
}
@Override
public boolean addAll(Collection<? extends E> c) {
write();
return set.addAll( c );
}
@Override
public void clear() {
write();
set.clear();
}
@Override
public boolean contains(Object o) {
return set.contains( o );
}
@Override
public boolean containsAll(Collection<?> c) {
return set.containsAll( c );
}
@Override
public boolean isEmpty() {
return set.isEmpty();
}
@Override
public Iterator<E> iterator() {
return new IteratorProxy<>( set.iterator() );
}
@Override
public boolean remove(Object o) {
write();
return set.remove( o );
}
@Override
public boolean removeAll(Collection<?> c) {
write();
return set.removeAll( c );
}
@Override
public boolean retainAll(Collection<?> c) {
write();
return set.retainAll( c );
}
@Override
public int size() {
return set.size();
}
@Override
public Object[] toArray() {
return set.toArray();
}
@Override
public <A> A[] toArray(A[] array) {
return set.toArray( array );
}
@Override
public final boolean equals(Object object) {
return object == this
|| object instanceof Set<?> that
&& that.size() == this.size()
&& containsAll( that );
}
@Override
public int hashCode() {
return Objects.hashCode( set );
}
}
protected final
|
SetProxy
|
java
|
apache__spark
|
sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedValuesReader.java
|
{
"start": 1139,
"end": 2701
}
|
interface ____ {
boolean readBoolean();
byte readByte();
short readShort();
int readInteger();
long readLong();
float readFloat();
double readDouble();
Binary readBinary(int len);
/*
* Reads `total` values into `c` start at `c[rowId]`
*/
void readBooleans(int total, WritableColumnVector c, int rowId);
void readBytes(int total, WritableColumnVector c, int rowId);
void readShorts(int total, WritableColumnVector c, int rowId);
void readIntegers(int total, WritableColumnVector c, int rowId);
void readIntegersWithRebase(int total, WritableColumnVector c, int rowId, boolean failIfRebase);
void readUnsignedIntegers(int total, WritableColumnVector c, int rowId);
void readUnsignedLongs(int total, WritableColumnVector c, int rowId);
void readLongs(int total, WritableColumnVector c, int rowId);
void readLongsWithRebase(
int total,
WritableColumnVector c,
int rowId,
boolean failIfRebase,
String timeZone);
void readFloats(int total, WritableColumnVector c, int rowId);
void readDoubles(int total, WritableColumnVector c, int rowId);
void readBinary(int total, WritableColumnVector c, int rowId);
/*
* Skips `total` values
*/
void skipBooleans(int total);
void skipBytes(int total);
void skipShorts(int total);
void skipIntegers(int total);
void skipLongs(int total);
void skipFloats(int total);
void skipDoubles(int total);
void skipBinary(int total);
void skipFixedLenByteArray(int total, int len);
/**
* A functional
|
VectorizedValuesReader
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/annotation/method/configuration/MethodSecurityService.java
|
{
"start": 11560,
"end": 11854
}
|
interface ____ {
String value() default "";
String expression() default "";
}
@Target({ ElementType.METHOD, ElementType.TYPE })
@Retention(RetentionPolicy.RUNTIME)
@Inherited
@PostAuthorize("hasRole('{role}')")
@HandleAuthorizationDenied(handlerClass = NullPostProcessor.class)
@
|
Mask
|
java
|
resilience4j__resilience4j
|
resilience4j-test/src/main/java/io/github/resilience4j/test/TestContextPropagators.java
|
{
"start": 2611,
"end": 3499
}
|
class ____ {
private static final ThreadLocal<Object> threadLocal = new ThreadLocal<>();
private TestThreadLocalContextHolder() {
}
public static void put(Object context) {
if (threadLocal.get() != null) {
clear();
}
threadLocal.set(context);
}
public static Map<String, String> getMDCContext() {
return Optional.ofNullable(MDC.getCopyOfContextMap()).orElse(Collections.emptyMap());
}
public static void clear() {
if (threadLocal.get() != null) {
threadLocal.remove();
}
}
public static Optional<?> get() {
return Optional.ofNullable(threadLocal.get());
}
}
}
}
|
TestThreadLocalContextHolder
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/callbacks/PrivateConstructorEnhancerTest.java
|
{
"start": 3932,
"end": 4392
}
|
class ____ {
@Id
@GeneratedValue
private int id;
private String name;
@ManyToOne(fetch = FetchType.LAZY)
private Country country;
public Person() {
}
private Person(String name, Country country) {
this.name = name;
this.country = country;
}
public int getId() {
return id;
}
public String getName() {
return name;
}
public Country getCountry() {
return country;
}
}
@Entity(name = "Country")
static
|
Person
|
java
|
spring-projects__spring-boot
|
module/spring-boot-hibernate/src/test/java/org/springframework/boot/hibernate/autoconfigure/HibernateJpaAutoConfigurationTests.java
|
{
"start": 45362,
"end": 45740
}
|
class ____ extends TestConfiguration {
@Bean
LocalContainerEntityManagerFactoryBean entityManagerFactoryBean(EntityManagerFactoryBuilder builder,
DataSource dataSource) {
return builder.dataSource(dataSource).properties(Map.of("configured", "manually")).build();
}
}
@Configuration(proxyBeanMethods = false)
static
|
TestConfigurationWithEntityManagerFactoryBuilder
|
java
|
apache__dubbo
|
dubbo-common/src/main/java/org/apache/dubbo/rpc/model/Pack.java
|
{
"start": 846,
"end": 1024
}
|
interface ____ {
/**
* @param obj instance
* @return byte array
* @throws Exception when error occurs
*/
byte[] pack(Object obj) throws Exception;
}
|
Pack
|
java
|
apache__kafka
|
streams/src/test/java/org/apache/kafka/streams/processor/internals/tasks/DefaultTaskExecutorTest.java
|
{
"start": 2176,
"end": 10949
}
|
class ____ {
private static final long VERIFICATION_TIMEOUT = 15000;
private final Time time = new MockTime(1L);
private final StreamTask task = mock(StreamTask.class);
private final TaskManager taskManager = mock(TaskManager.class);
private final TaskExecutionMetadata taskExecutionMetadata = mock(TaskExecutionMetadata.class);
private final DefaultTaskExecutor taskExecutor = new DefaultTaskExecutor(taskManager, "TaskExecutor", time, taskExecutionMetadata);
@BeforeEach
public void setUp() {
// only assign a task for the first time
when(taskManager.assignNextTask(taskExecutor)).thenReturn(task).thenReturn(null);
when(taskExecutionMetadata.canProcessTask(eq(task), anyLong())).thenReturn(true);
when(task.isProcessable(anyLong())).thenReturn(true);
when(task.id()).thenReturn(new TaskId(0, 0, "A"));
when(task.process(anyLong())).thenReturn(true);
when(task.prepareCommit(true)).thenReturn(Collections.emptyMap());
}
@AfterEach
public void tearDown() {
taskExecutor.requestShutdown();
taskExecutor.awaitShutdown(Duration.ofMinutes(1));
}
@Test
public void shouldShutdownTaskExecutor() {
assertNull(taskExecutor.currentTask(), "Have task assigned before startup");
assertFalse(taskExecutor.isRunning());
taskExecutor.start();
assertTrue(taskExecutor.isRunning());
verify(taskManager, timeout(VERIFICATION_TIMEOUT)).assignNextTask(taskExecutor);
taskExecutor.requestShutdown();
taskExecutor.awaitShutdown(Duration.ofMinutes(1));
verify(task).flush();
verify(taskManager).unassignTask(task, taskExecutor);
assertNull(taskExecutor.currentTask(), "Have task assigned after shutdown");
assertFalse(taskExecutor.isRunning());
}
@Test
public void shouldClearTaskReleaseFutureOnShutdown() throws InterruptedException {
assertNull(taskExecutor.currentTask(), "Have task assigned before startup");
taskExecutor.start();
verify(taskManager, timeout(VERIFICATION_TIMEOUT)).assignNextTask(taskExecutor);
final KafkaFuture<StreamTask> future = taskExecutor.unassign();
taskExecutor.requestShutdown();
taskExecutor.awaitShutdown(Duration.ofMinutes(1));
waitForCondition(future::isDone, "Await for unassign future to complete");
assertNull(taskExecutor.currentTask(), "Have task assigned after shutdown");
}
@Test
public void shouldAwaitProcessableTasksIfNoneAssignable() throws InterruptedException {
assertNull(taskExecutor.currentTask(), "Have task assigned before startup");
when(taskManager.assignNextTask(taskExecutor)).thenReturn(null);
taskExecutor.start();
verify(taskManager, timeout(VERIFICATION_TIMEOUT).atLeastOnce()).awaitProcessableTasks(any());
}
@Test
public void shouldUnassignTaskWhenNotProgressing() {
when(task.isProcessable(anyLong())).thenReturn(false);
when(task.maybePunctuateStreamTime()).thenReturn(false);
when(task.maybePunctuateSystemTime()).thenReturn(false);
taskExecutor.start();
verify(taskManager, timeout(VERIFICATION_TIMEOUT)).unassignTask(task, taskExecutor);
verify(task).flush();
assertNull(taskExecutor.currentTask());
}
@Test
public void shouldProcessTasks() {
when(taskExecutionMetadata.canProcessTask(any(), anyLong())).thenReturn(true);
when(task.isProcessable(anyLong())).thenReturn(true);
taskExecutor.start();
verify(task, timeout(VERIFICATION_TIMEOUT).atLeast(2)).process(anyLong());
verify(task, timeout(VERIFICATION_TIMEOUT).atLeastOnce()).recordProcessBatchTime(anyLong());
}
@Test
public void shouldClearTaskTimeoutOnProcessed() {
when(taskExecutionMetadata.canProcessTask(any(), anyLong())).thenReturn(true);
when(task.isProcessable(anyLong())).thenReturn(true);
when(task.process(anyLong())).thenReturn(true);
taskExecutor.start();
verify(task, timeout(VERIFICATION_TIMEOUT).atLeastOnce()).clearTaskTimeout();
}
@Test
public void shouldSetTaskTimeoutOnTimeoutException() {
final TimeoutException e = new TimeoutException();
when(taskExecutionMetadata.canProcessTask(any(), anyLong())).thenReturn(true);
when(task.isProcessable(anyLong())).thenReturn(true);
when(task.process(anyLong())).thenReturn(true).thenThrow(e);
taskExecutor.start();
verify(task, timeout(VERIFICATION_TIMEOUT).atLeastOnce()).process(anyLong());
verify(task, timeout(VERIFICATION_TIMEOUT).atLeastOnce()).maybeInitTaskTimeoutOrThrow(anyLong(), eq(e));
}
@Test
public void shouldPunctuateStreamTime() {
when(taskExecutionMetadata.canProcessTask(eq(task), anyLong())).thenReturn(false);
when(taskExecutionMetadata.canPunctuateTask(task)).thenReturn(true);
when(task.maybePunctuateStreamTime()).thenReturn(true);
taskExecutor.start();
verify(task, timeout(VERIFICATION_TIMEOUT).atLeast(2)).maybePunctuateStreamTime();
}
@Test
public void shouldPunctuateSystemTime() {
when(taskExecutionMetadata.canProcessTask(eq(task), anyLong())).thenReturn(false);
when(taskExecutionMetadata.canPunctuateTask(task)).thenReturn(true);
when(task.maybePunctuateSystemTime()).thenReturn(true);
taskExecutor.start();
verify(task, timeout(VERIFICATION_TIMEOUT).atLeast(2)).maybePunctuateSystemTime();
}
@Test
public void shouldRespectPunctuationDisabledByTaskExecutionMetadata() {
when(taskExecutionMetadata.canProcessTask(eq(task), anyLong())).thenReturn(true);
when(taskExecutionMetadata.canPunctuateTask(task)).thenReturn(false);
when(task.isProcessable(anyLong())).thenReturn(true);
taskExecutor.start();
verify(task, timeout(VERIFICATION_TIMEOUT).atLeast(2)).process(anyLong());
taskExecutor.unassign();
verify(taskManager, timeout(VERIFICATION_TIMEOUT)).unassignTask(task, taskExecutor);
verify(task, never()).maybePunctuateStreamTime();
verify(task, never()).maybePunctuateSystemTime();
}
@Test
public void shouldRespectProcessingDisabledByTaskExecutionMetadata() {
when(taskExecutionMetadata.canProcessTask(eq(task), anyLong())).thenReturn(false);
when(taskExecutionMetadata.canPunctuateTask(task)).thenReturn(true);
when(task.isProcessable(anyLong())).thenReturn(true);
taskExecutor.start();
verify(task, timeout(VERIFICATION_TIMEOUT)).maybePunctuateSystemTime();
verify(task, timeout(VERIFICATION_TIMEOUT)).maybePunctuateStreamTime();
taskExecutor.unassign();
verify(taskManager, timeout(VERIFICATION_TIMEOUT)).unassignTask(task, taskExecutor);
verify(task, never()).process(anyLong());
}
@Test
public void shouldUnassignTaskWhenRequired() throws Exception {
taskExecutor.start();
verify(taskManager, timeout(VERIFICATION_TIMEOUT)).assignNextTask(taskExecutor);
TestUtils.waitForCondition(() -> taskExecutor.currentTask() != null,
VERIFICATION_TIMEOUT,
"Task reassign take too much time");
final KafkaFuture<StreamTask> future = taskExecutor.unassign();
verify(taskManager, timeout(VERIFICATION_TIMEOUT)).unassignTask(task, taskExecutor);
verify(task).flush();
assertNull(taskExecutor.currentTask());
assertTrue(future.isDone(), "Unassign is not completed");
assertEquals(task, future.get(), "Unexpected task was unassigned");
}
@Test
public void shouldSetUncaughtStreamsException() {
final StreamsException exception = mock(StreamsException.class);
when(task.process(anyLong())).thenThrow(exception);
taskExecutor.start();
verify(taskManager, timeout(VERIFICATION_TIMEOUT)).setUncaughtException(exception, task.id());
verify(taskManager, timeout(VERIFICATION_TIMEOUT)).unassignTask(task, taskExecutor);
assertNull(taskExecutor.currentTask());
assertTrue(taskExecutor.isRunning(), "should not shut down upon exception");
}
@Test
public void shouldNotFlushOnException() {
final StreamsException exception = mock(StreamsException.class);
when(task.process(anyLong())).thenThrow(exception);
when(taskManager.hasUncaughtException(task.id())).thenReturn(true);
taskExecutor.start();
verify(taskManager, timeout(VERIFICATION_TIMEOUT)).unassignTask(task, taskExecutor);
verify(task, never()).flush();
}
}
|
DefaultTaskExecutorTest
|
java
|
apache__camel
|
core/camel-api/src/main/java/org/apache/camel/spi/HasCamelContext.java
|
{
"start": 967,
"end": 1125
}
|
interface ____ {
/**
* Returns the camel context.
*
* @return the camel context.
*/
CamelContext getCamelContext();
}
|
HasCamelContext
|
java
|
spring-projects__spring-security
|
oauth2/oauth2-authorization-server/src/main/java/org/springframework/security/oauth2/server/authorization/http/converter/OAuth2AuthorizationServerMetadataHttpMessageConverter.java
|
{
"start": 2175,
"end": 6125
}
|
class ____
extends AbstractHttpMessageConverter<OAuth2AuthorizationServerMetadata> {
private static final ParameterizedTypeReference<Map<String, Object>> STRING_OBJECT_MAP = new ParameterizedTypeReference<>() {
};
private final GenericHttpMessageConverter<Object> jsonMessageConverter = HttpMessageConverters
.getJsonMessageConverter();
private Converter<Map<String, Object>, OAuth2AuthorizationServerMetadata> authorizationServerMetadataConverter = new OAuth2AuthorizationServerMetadataConverter();
private Converter<OAuth2AuthorizationServerMetadata, Map<String, Object>> authorizationServerMetadataParametersConverter = OAuth2AuthorizationServerMetadata::getClaims;
public OAuth2AuthorizationServerMetadataHttpMessageConverter() {
super(MediaType.APPLICATION_JSON, new MediaType("application", "*+json"));
}
@Override
protected boolean supports(Class<?> clazz) {
return OAuth2AuthorizationServerMetadata.class.isAssignableFrom(clazz);
}
@Override
@SuppressWarnings("unchecked")
protected OAuth2AuthorizationServerMetadata readInternal(Class<? extends OAuth2AuthorizationServerMetadata> clazz,
HttpInputMessage inputMessage) throws HttpMessageNotReadableException {
try {
Map<String, Object> authorizationServerMetadataParameters = (Map<String, Object>) this.jsonMessageConverter
.read(STRING_OBJECT_MAP.getType(), null, inputMessage);
return this.authorizationServerMetadataConverter.convert(authorizationServerMetadataParameters);
}
catch (Exception ex) {
throw new HttpMessageNotReadableException(
"An error occurred reading the OAuth 2.0 Authorization Server Metadata: " + ex.getMessage(), ex,
inputMessage);
}
}
@Override
protected void writeInternal(OAuth2AuthorizationServerMetadata authorizationServerMetadata,
HttpOutputMessage outputMessage) throws HttpMessageNotWritableException {
try {
Map<String, Object> authorizationServerMetadataResponseParameters = this.authorizationServerMetadataParametersConverter
.convert(authorizationServerMetadata);
this.jsonMessageConverter.write(authorizationServerMetadataResponseParameters, STRING_OBJECT_MAP.getType(),
MediaType.APPLICATION_JSON, outputMessage);
}
catch (Exception ex) {
throw new HttpMessageNotWritableException(
"An error occurred writing the OAuth 2.0 Authorization Server Metadata: " + ex.getMessage(), ex);
}
}
/**
* Sets the {@link Converter} used for converting the OAuth 2.0 Authorization Server
* Metadata parameters to an {@link OAuth2AuthorizationServerMetadata}.
* @param authorizationServerMetadataConverter the {@link Converter} used for
* converting to an {@link OAuth2AuthorizationServerMetadata}.
*/
public final void setAuthorizationServerMetadataConverter(
Converter<Map<String, Object>, OAuth2AuthorizationServerMetadata> authorizationServerMetadataConverter) {
Assert.notNull(authorizationServerMetadataConverter, "authorizationServerMetadataConverter cannot be null");
this.authorizationServerMetadataConverter = authorizationServerMetadataConverter;
}
/**
* Sets the {@link Converter} used for converting the
* {@link OAuth2AuthorizationServerMetadata} to a {@code Map} representation of the
* OAuth 2.0 Authorization Server Metadata.
* @param authorizationServerMetadataParametersConverter the {@link Converter} used
* for converting to a {@code Map} representation of the OAuth 2.0 Authorization
* Server Metadata.
*/
public final void setAuthorizationServerMetadataParametersConverter(
Converter<OAuth2AuthorizationServerMetadata, Map<String, Object>> authorizationServerMetadataParametersConverter) {
Assert.notNull(authorizationServerMetadataParametersConverter,
"authorizationServerMetadataParametersConverter cannot be null");
this.authorizationServerMetadataParametersConverter = authorizationServerMetadataParametersConverter;
}
private static final
|
OAuth2AuthorizationServerMetadataHttpMessageConverter
|
java
|
apache__camel
|
components/camel-huawei/camel-huaweicloud-dms/src/main/java/org/apache/camel/component/huaweicloud/dms/constants/DMSOperations.java
|
{
"start": 937,
"end": 1329
}
|
class ____ {
public static final String CREATE_INSTANCE = "createInstance";
public static final String DELETE_INSTANCE = "deleteInstance";
public static final String LIST_INSTANCES = "listInstances";
public static final String QUERY_INSTANCE = "queryInstance";
public static final String UPDATE_INSTANCE = "updateInstance";
private DMSOperations() {
}
}
|
DMSOperations
|
java
|
apache__flink
|
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/extraction/FunctionTemplate.java
|
{
"start": 2505,
"end": 8630
}
|
class ____ {
private final @Nullable FunctionSignatureTemplate signatureTemplate;
private final @Nullable FunctionStateTemplate stateTemplate;
private final @Nullable FunctionOutputTemplate outputTemplate;
private FunctionTemplate(
@Nullable FunctionSignatureTemplate signatureTemplate,
@Nullable FunctionStateTemplate stateTemplate,
@Nullable FunctionOutputTemplate outputTemplate) {
this.signatureTemplate = signatureTemplate;
this.stateTemplate = stateTemplate;
this.outputTemplate = outputTemplate;
}
/**
* Creates an instance using the given {@link FunctionHint}. It resolves explicitly defined data
* types.
*/
@SuppressWarnings("deprecation")
static FunctionTemplate fromAnnotation(DataTypeFactory typeFactory, FunctionHint hint) {
return new FunctionTemplate(
createSignatureTemplate(
typeFactory,
defaultAsNull(hint, FunctionHint::input),
defaultAsNull(hint, FunctionHint::argumentNames),
defaultAsNull(hint, FunctionHint::argument),
defaultAsNull(hint, FunctionHint::arguments),
hint.isVarArgs()),
createStateTemplate(
typeFactory,
defaultAsNull(hint, FunctionHint::accumulator),
defaultAsNull(hint, FunctionHint::state)),
createOutputTemplate(typeFactory, defaultAsNull(hint, FunctionHint::output)));
}
/**
* Creates an instance using the given {@link ProcedureHint}. It resolves explicitly defined
* data types.
*/
@SuppressWarnings("deprecation")
static FunctionTemplate fromAnnotation(DataTypeFactory typeFactory, ProcedureHint hint) {
return new FunctionTemplate(
createSignatureTemplate(
typeFactory,
defaultAsNull(hint, ProcedureHint::input),
defaultAsNull(hint, ProcedureHint::argumentNames),
defaultAsNull(hint, ProcedureHint::argument),
defaultAsNull(hint, ProcedureHint::arguments),
hint.isVarArgs()),
createStateTemplate(typeFactory, null, null),
createOutputTemplate(typeFactory, defaultAsNull(hint, ProcedureHint::output)));
}
/** Creates an instance of {@link FunctionResultTemplate} from a {@link DataTypeHint}. */
static @Nullable FunctionOutputTemplate createOutputTemplate(
DataTypeFactory typeFactory, @Nullable DataTypeHint hint) {
if (hint == null) {
return null;
}
final DataTypeTemplate template;
try {
template = DataTypeTemplate.fromAnnotation(typeFactory, hint);
} catch (Throwable t) {
throw extractionError(t, "Error in data type hint annotation.");
}
if (template.dataType != null) {
return FunctionResultTemplate.ofOutput(template.dataType);
}
throw extractionError(
"Data type hint does not specify a data type for use as function result.");
}
/** Creates a {@link FunctionStateTemplate}s from {@link StateHint}s or accumulator. */
static @Nullable FunctionStateTemplate createStateTemplate(
DataTypeFactory typeFactory,
@Nullable DataTypeHint accumulatorHint,
@Nullable StateHint[] stateHints) {
if (accumulatorHint == null && stateHints == null) {
return null;
}
if (accumulatorHint != null && stateHints != null) {
throw extractionError(
"State hints and accumulator cannot be declared in the same function hint. "
+ "Use either one or the other.");
}
final LinkedHashMap<String, StateInfoTemplate> state = new LinkedHashMap<>();
if (accumulatorHint != null) {
state.put(
UserDefinedFunctionHelper.DEFAULT_ACCUMULATOR_NAME,
StateInfoTemplate.of(
createStateDataType(typeFactory, accumulatorHint, "accumulator"),
null));
return FunctionResultTemplate.ofState(state);
}
IntStream.range(0, stateHints.length)
.forEach(
pos -> {
final StateHint hint = stateHints[pos];
state.put(
hint.name(),
StateInfoTemplate.of(
createStateDataType(
typeFactory, hint.type(), "state entry"),
hint));
});
return FunctionResultTemplate.ofState(state);
}
@Nullable
FunctionSignatureTemplate getSignatureTemplate() {
return signatureTemplate;
}
@Nullable
FunctionResultTemplate getStateTemplate() {
return stateTemplate;
}
@Nullable
FunctionResultTemplate getOutputTemplate() {
return outputTemplate;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
FunctionTemplate template = (FunctionTemplate) o;
return Objects.equals(signatureTemplate, template.signatureTemplate)
&& Objects.equals(stateTemplate, template.stateTemplate)
&& Objects.equals(outputTemplate, template.outputTemplate);
}
@Override
public int hashCode() {
return Objects.hash(signatureTemplate, stateTemplate, outputTemplate);
}
// --------------------------------------------------------------------------------------------
@ProcedureHint
@FunctionHint
@ArgumentHint
private static
|
FunctionTemplate
|
java
|
apache__flink
|
flink-state-backends/flink-statebackend-rocksdb/src/main/java/org/apache/flink/state/rocksdb/iterator/RocksQueueIterator.java
|
{
"start": 1374,
"end": 4827
}
|
class ____ implements SingleStateIterator {
private static final byte[] EMPTY_BYTE_ARRAY = new byte[0];
private final DataOutputSerializer keyOut = new DataOutputSerializer(128);
private final HeapPriorityQueueStateSnapshot<?> queueSnapshot;
private final Iterator<Integer> keyGroupRangeIterator;
private final int kvStateId;
private final int keyGroupPrefixBytes;
private final TypeSerializer<Object> elementSerializer;
private Iterator<Object> elementsForKeyGroup;
private int afterKeyMark = 0;
private boolean isValid;
private byte[] currentKey;
public RocksQueueIterator(
HeapPriorityQueueStateSnapshot<?> queuesSnapshot,
KeyGroupRange keyGroupRange,
int keyGroupPrefixBytes,
int kvStateId) {
this.queueSnapshot = queuesSnapshot;
this.elementSerializer = castToType(queuesSnapshot.getMetaInfo().getElementSerializer());
this.keyGroupRangeIterator = keyGroupRange.iterator();
this.keyGroupPrefixBytes = keyGroupPrefixBytes;
this.kvStateId = kvStateId;
if (keyGroupRangeIterator.hasNext()) {
try {
if (moveToNextNonEmptyKeyGroup()) {
isValid = true;
next();
} else {
isValid = false;
}
} catch (IOException e) {
throw new FlinkRuntimeException(e);
}
}
}
@Override
public void next() {
try {
if (!elementsForKeyGroup.hasNext()) {
boolean hasElement = moveToNextNonEmptyKeyGroup();
if (!hasElement) {
isValid = false;
return;
}
}
keyOut.setPosition(afterKeyMark);
elementSerializer.serialize(elementsForKeyGroup.next(), keyOut);
this.currentKey = keyOut.getCopyOfBuffer();
} catch (IOException e) {
throw new FlinkRuntimeException(e);
}
}
private boolean moveToNextNonEmptyKeyGroup() throws IOException {
while (keyGroupRangeIterator.hasNext()) {
Integer keyGroupId = keyGroupRangeIterator.next();
elementsForKeyGroup = castToType(queueSnapshot.getIteratorForKeyGroup(keyGroupId));
if (elementsForKeyGroup.hasNext()) {
writeKeyGroupId(keyGroupId);
return true;
}
}
return false;
}
private void writeKeyGroupId(Integer keyGroupId) throws IOException {
keyOut.clear();
CompositeKeySerializationUtils.writeKeyGroup(keyGroupId, keyGroupPrefixBytes, keyOut);
afterKeyMark = keyOut.length();
}
@SuppressWarnings("unchecked")
private static <T> TypeSerializer<T> castToType(TypeSerializer<?> typeSerializer) {
return (TypeSerializer<T>) typeSerializer;
}
@SuppressWarnings("unchecked")
private static <T> Iterator<T> castToType(Iterator<?> iterator) {
return (Iterator<T>) iterator;
}
@Override
public boolean isValid() {
return isValid;
}
@Override
public byte[] key() {
return currentKey;
}
@Override
public byte[] value() {
return EMPTY_BYTE_ARRAY;
}
@Override
public int getKvStateId() {
return kvStateId;
}
@Override
public void close() {}
}
|
RocksQueueIterator
|
java
|
spring-projects__spring-boot
|
module/spring-boot-devtools/src/intTest/java/org/springframework/boot/devtools/tests/JvmLauncher.java
|
{
"start": 2446,
"end": 3045
}
|
class ____ {
private final Process process;
private final Instant launchTime = Instant.now();
private final File standardOut;
private final File standardError;
LaunchedJvm(Process process, File standardOut, File standardError) {
this.process = process;
this.standardOut = standardOut;
this.standardError = standardError;
}
Process getProcess() {
return this.process;
}
Instant getLaunchTime() {
return this.launchTime;
}
File getStandardOut() {
return this.standardOut;
}
File getStandardError() {
return this.standardError;
}
}
}
|
LaunchedJvm
|
java
|
grpc__grpc-java
|
xds/src/main/java/io/grpc/xds/EnvoyServerProtoData.java
|
{
"start": 13680,
"end": 14322
}
|
class ____ {
@Nullable
abstract Integer stdevFactor();
@Nullable
abstract Integer enforcementPercentage();
@Nullable
abstract Integer minimumHosts();
@Nullable
abstract Integer requestVolume();
static SuccessRateEjection create(
@Nullable Integer stdevFactor,
@Nullable Integer enforcementPercentage,
@Nullable Integer minimumHosts,
@Nullable Integer requestVolume) {
return new AutoValue_EnvoyServerProtoData_SuccessRateEjection(stdevFactor,
enforcementPercentage, minimumHosts, requestVolume);
}
}
@AutoValue
abstract static
|
SuccessRateEjection
|
java
|
apache__camel
|
core/camel-core-model/src/main/java/org/apache/camel/model/transformer/DataFormatTransformerDefinition.java
|
{
"start": 4268,
"end": 8449
}
|
class ____ extends TransformerDefinition {
@XmlElements({
@XmlElement(name = "asn1", type = ASN1DataFormat.class),
@XmlElement(name = "avro", type = AvroDataFormat.class),
@XmlElement(name = "barcode", type = BarcodeDataFormat.class),
@XmlElement(name = "base64", type = Base64DataFormat.class),
@XmlElement(name = "beanio", type = BeanioDataFormat.class),
@XmlElement(name = "bindy", type = BindyDataFormat.class),
@XmlElement(name = "cbor", type = CBORDataFormat.class),
@XmlElement(name = "crypto", type = CryptoDataFormat.class),
@XmlElement(name = "csv", type = CsvDataFormat.class),
@XmlElement(name = "custom", type = CustomDataFormat.class),
@XmlElement(name = "dfdl", type = DfdlDataFormat.class),
@XmlElement(name = "fhirJson", type = FhirJsonDataFormat.class),
@XmlElement(name = "fhirXml", type = FhirXmlDataFormat.class),
@XmlElement(name = "flatpack", type = FlatpackDataFormat.class),
@XmlElement(name = "fory", type = ForyDataFormat.class),
@XmlElement(name = "grok", type = GrokDataFormat.class),
@XmlElement(name = "groovyXml", type = GroovyXmlDataFormat.class),
@XmlElement(name = "gzipDeflater", type = GzipDeflaterDataFormat.class),
@XmlElement(name = "hl7", type = HL7DataFormat.class),
@XmlElement(name = "ical", type = IcalDataFormat.class),
@XmlElement(name = "iso8583", type = Iso8583DataFormat.class),
@XmlElement(name = "jacksonXml", type = JacksonXMLDataFormat.class),
@XmlElement(name = "jaxb", type = JaxbDataFormat.class),
@XmlElement(name = "json", type = JsonDataFormat.class),
@XmlElement(name = "jsonApi", type = JsonApiDataFormat.class),
@XmlElement(name = "lzf", type = LZFDataFormat.class),
@XmlElement(name = "mimeMultipart", type = MimeMultipartDataFormat.class),
@XmlElement(name = "parquetAvro", type = ParquetAvroDataFormat.class),
@XmlElement(name = "protobuf", type = ProtobufDataFormat.class),
@XmlElement(name = "rss", type = RssDataFormat.class),
@XmlElement(name = "smooks", type = SmooksDataFormat.class),
@XmlElement(name = "soap", type = SoapDataFormat.class),
@XmlElement(name = "swiftMt", type = SwiftMtDataFormat.class),
@XmlElement(name = "swiftMx", type = SwiftMxDataFormat.class),
@XmlElement(name = "syslog", type = SyslogDataFormat.class),
@XmlElement(name = "tarFile", type = TarFileDataFormat.class),
@XmlElement(name = "thrift", type = ThriftDataFormat.class),
@XmlElement(name = "univocityCsv", type = UniVocityCsvDataFormat.class),
@XmlElement(name = "univocityFixed", type = UniVocityFixedDataFormat.class),
@XmlElement(name = "univocityTsv", type = UniVocityTsvDataFormat.class),
@XmlElement(name = "xmlSecurity", type = XMLSecurityDataFormat.class),
@XmlElement(name = "pgp", type = PGPDataFormat.class),
@XmlElement(name = "yaml", type = YAMLDataFormat.class),
@XmlElement(name = "zipDeflater", type = ZipDeflaterDataFormat.class),
@XmlElement(name = "zipFile", type = ZipFileDataFormat.class) })
private DataFormatDefinition dataFormatType;
public DataFormatTransformerDefinition() {
}
protected DataFormatTransformerDefinition(DataFormatTransformerDefinition source) {
super(source);
this.dataFormatType = source.dataFormatType != null ? source.dataFormatType.copyDefinition() : null;
}
@Override
public DataFormatTransformerDefinition copyDefinition() {
return new DataFormatTransformerDefinition(this);
}
public DataFormatDefinition getDataFormatType() {
return dataFormatType;
}
/**
* The data format to be used
*/
public void setDataFormatType(DataFormatDefinition dataFormatType) {
this.dataFormatType = dataFormatType;
}
}
|
DataFormatTransformerDefinition
|
java
|
grpc__grpc-java
|
testing/src/main/java/io/grpc/testing/GrpcCleanupRule.java
|
{
"start": 7129,
"end": 7752
}
|
class ____ implements Resource {
final ManagedChannel channel;
ManagedChannelResource(ManagedChannel channel) {
this.channel = channel;
}
@Override
public void cleanUp() {
channel.shutdown();
}
@Override
public void forceCleanUp() {
channel.shutdownNow();
}
@Override
public boolean awaitReleased(long duration, TimeUnit timeUnit) throws InterruptedException {
return channel.awaitTermination(duration, timeUnit);
}
@Override
public String toString() {
return channel.toString();
}
}
private static final
|
ManagedChannelResource
|
java
|
spring-projects__spring-framework
|
spring-beans/src/main/java/org/springframework/beans/ExtendedBeanInfo.java
|
{
"start": 1637,
"end": 2834
}
|
class ____ {
*
* private Foo foo;
*
* public Foo getFoo() {
* return this.foo;
* }
*
* public Bean setFoo(Foo foo) {
* this.foo = foo;
* return this;
* }
* }</pre>
*
* The standard JavaBeans {@code Introspector} will discover the {@code getFoo} read
* method, but will bypass the {@code #setFoo(Foo)} write method, because its non-void
* returning signature does not comply with the JavaBeans specification.
* {@code ExtendedBeanInfo}, on the other hand, will recognize and include it. This is
* designed to allow APIs with "builder" or method-chaining style setter signatures to be
* used within Spring {@code <beans>} XML. {@link #getPropertyDescriptors()} returns all
* existing property descriptors from the wrapped {@code BeanInfo} as well any added for
* non-void returning setters. Both standard ("non-indexed") and
* <a href="https://docs.oracle.com/javase/tutorial/javabeans/writing/properties.html">
* indexed properties</a> are fully supported.
*
* @author Chris Beams
* @author Juergen Hoeller
* @since 3.1
* @see #ExtendedBeanInfo(BeanInfo)
* @see ExtendedBeanInfoFactory
* @see CachedIntrospectionResults
*/
|
Bean
|
java
|
apache__kafka
|
group-coordinator/src/test/java/org/apache/kafka/coordinator/group/streams/TargetAssignmentBuilderTest.java
|
{
"start": 3252,
"end": 25606
}
|
class ____ {
@Test
public void testBuildEmptyAssignmentWhenTopologyNotReady() {
String groupId = "test-group";
int groupEpoch = 1;
TaskAssignor assignor = mock(TaskAssignor.class);
ConfiguredTopology topology = mock(ConfiguredTopology.class);
Map<String, String> assignmentConfigs = new HashMap<>();
when(topology.isReady()).thenReturn(false);
TargetAssignmentBuilder builder = new TargetAssignmentBuilder(groupId, groupEpoch, assignor, assignmentConfigs)
.withTopology(topology);
TargetAssignmentBuilder.TargetAssignmentResult result = builder.build();
List<CoordinatorRecord> expectedRecords = List.of(
StreamsCoordinatorRecordHelpers.newStreamsGroupTargetAssignmentEpochRecord(groupId, groupEpoch)
);
assertEquals(expectedRecords, result.records());
assertEquals(Map.of(), result.targetAssignment());
}
@ParameterizedTest
@EnumSource(TaskRole.class)
public void testCreateAssignmentMemberSpec(TaskRole taskRole) {
String fooSubtopologyId = Uuid.randomUuid().toString();
String barSubtopologyId = Uuid.randomUuid().toString();
final Map<String, String> clientTags = mkMap(mkEntry("tag1", "value1"), mkEntry("tag2", "value2"));
StreamsGroupMember member = new StreamsGroupMember.Builder("member-id")
.setRackId("rackId")
.setInstanceId("instanceId")
.setProcessId("processId")
.setClientTags(clientTags)
.build();
TasksTuple assignment = mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 1, 2, 3),
mkTasks(barSubtopologyId, 1, 2, 3)
);
AssignmentMemberSpec assignmentMemberSpec = createAssignmentMemberSpec(
member,
assignment
);
assertEquals(new AssignmentMemberSpec(
Optional.of("instanceId"),
Optional.of("rackId"),
assignment.activeTasks(),
assignment.standbyTasks(),
assignment.warmupTasks(),
"processId",
clientTags,
Map.of(),
Map.of()
), assignmentMemberSpec);
}
@Test
public void testEmpty() {
TargetAssignmentBuilderTestContext context = new TargetAssignmentBuilderTestContext(
"my-group",
20
);
org.apache.kafka.coordinator.group.streams.TargetAssignmentBuilder.TargetAssignmentResult result = context.build();
assertEquals(List.of(newStreamsGroupTargetAssignmentEpochRecord(
"my-group",
20
)), result.records());
assertEquals(Map.of(), result.targetAssignment());
}
@ParameterizedTest
@EnumSource(TaskRole.class)
public void testAssignmentHasNotChanged(TaskRole taskRole) {
TargetAssignmentBuilderTestContext context = new TargetAssignmentBuilderTestContext(
"my-group",
20
);
String fooSubtopologyId = context.addSubtopologyWithSingleSourceTopic("foo", 6);
String barSubtopologyId = context.addSubtopologyWithSingleSourceTopic("bar", 6);
context.addGroupMember("member-1", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 1, 2, 3),
mkTasks(barSubtopologyId, 1, 2, 3)
));
context.addGroupMember("member-2", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 4, 5, 6),
mkTasks(barSubtopologyId, 4, 5, 6)
));
context.prepareMemberAssignment("member-1", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 1, 2, 3),
mkTasks(barSubtopologyId, 1, 2, 3)
));
context.prepareMemberAssignment("member-2", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 4, 5, 6),
mkTasks(barSubtopologyId, 4, 5, 6)
));
org.apache.kafka.coordinator.group.streams.TargetAssignmentBuilder.TargetAssignmentResult result = context.build();
assertEquals(List.of(newStreamsGroupTargetAssignmentEpochRecord(
"my-group",
20
)), result.records());
Map<String, TasksTuple> expectedAssignment = new HashMap<>();
expectedAssignment.put("member-1", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 1, 2, 3),
mkTasks(barSubtopologyId, 1, 2, 3)
));
expectedAssignment.put("member-2", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 4, 5, 6),
mkTasks(barSubtopologyId, 4, 5, 6)
));
assertEquals(expectedAssignment, result.targetAssignment());
}
@ParameterizedTest
@EnumSource(TaskRole.class)
public void testAssignmentSwapped(TaskRole taskRole) {
TargetAssignmentBuilderTestContext context = new TargetAssignmentBuilderTestContext(
"my-group",
20
);
String fooSubtopologyId = context.addSubtopologyWithSingleSourceTopic("foo", 6);
String barSubtopologyId = context.addSubtopologyWithSingleSourceTopic("bar", 6);
context.addGroupMember("member-1", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 1, 2, 3),
mkTasks(barSubtopologyId, 1, 2, 3)
));
context.addGroupMember("member-2", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 4, 5, 6),
mkTasks(barSubtopologyId, 4, 5, 6)
));
context.prepareMemberAssignment("member-2", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 1, 2, 3),
mkTasks(barSubtopologyId, 1, 2, 3)
));
context.prepareMemberAssignment("member-1", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 4, 5, 6),
mkTasks(barSubtopologyId, 4, 5, 6)
));
org.apache.kafka.coordinator.group.streams.TargetAssignmentBuilder.TargetAssignmentResult result = context.build();
assertEquals(3, result.records().size());
assertUnorderedRecordsEquals(List.of(List.of(
newStreamsGroupTargetAssignmentRecord("my-group", "member-1", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 4, 5, 6),
mkTasks(barSubtopologyId, 4, 5, 6)
)),
newStreamsGroupTargetAssignmentRecord("my-group", "member-2", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 1, 2, 3),
mkTasks(barSubtopologyId, 1, 2, 3)
))
)), result.records().subList(0, 2));
assertEquals(newStreamsGroupTargetAssignmentEpochRecord(
"my-group",
20
), result.records().get(2));
Map<String, TasksTuple> expectedAssignment = new HashMap<>();
expectedAssignment.put("member-2", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 1, 2, 3),
mkTasks(barSubtopologyId, 1, 2, 3)
));
expectedAssignment.put("member-1", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 4, 5, 6),
mkTasks(barSubtopologyId, 4, 5, 6)
));
assertEquals(expectedAssignment, result.targetAssignment());
}
@ParameterizedTest
@EnumSource(TaskRole.class)
public void testNewMember(TaskRole taskRole) {
TargetAssignmentBuilderTestContext context = new TargetAssignmentBuilderTestContext(
"my-group",
20
);
String fooSubtopologyId = context.addSubtopologyWithSingleSourceTopic("foo", 6);
String barSubtopologyId = context.addSubtopologyWithSingleSourceTopic("bar", 6);
context.addGroupMember("member-1", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 1, 2, 3),
mkTasks(barSubtopologyId, 1, 2, 3)
));
context.addGroupMember("member-2", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 4, 5, 6),
mkTasks(barSubtopologyId, 4, 5, 6)
));
context.updateMemberMetadata("member-3");
context.prepareMemberAssignment("member-1", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 1, 2),
mkTasks(barSubtopologyId, 1, 2)
));
context.prepareMemberAssignment("member-2", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 3, 4),
mkTasks(barSubtopologyId, 3, 4)
));
context.prepareMemberAssignment("member-3", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 5, 6),
mkTasks(barSubtopologyId, 5, 6)
));
org.apache.kafka.coordinator.group.streams.TargetAssignmentBuilder.TargetAssignmentResult result = context.build();
assertEquals(4, result.records().size());
assertUnorderedRecordsEquals(List.of(List.of(
newStreamsGroupTargetAssignmentRecord("my-group", "member-1", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 1, 2),
mkTasks(barSubtopologyId, 1, 2)
)),
newStreamsGroupTargetAssignmentRecord("my-group", "member-2", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 3, 4),
mkTasks(barSubtopologyId, 3, 4)
)),
newStreamsGroupTargetAssignmentRecord("my-group", "member-3", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 5, 6),
mkTasks(barSubtopologyId, 5, 6)
))
)), result.records().subList(0, 3));
assertEquals(newStreamsGroupTargetAssignmentEpochRecord(
"my-group",
20
), result.records().get(3));
Map<String, TasksTuple> expectedAssignment = new HashMap<>();
expectedAssignment.put("member-1", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 1, 2),
mkTasks(barSubtopologyId, 1, 2)
));
expectedAssignment.put("member-2", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 3, 4),
mkTasks(barSubtopologyId, 3, 4)
));
expectedAssignment.put("member-3", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 5, 6),
mkTasks(barSubtopologyId, 5, 6)
));
assertEquals(expectedAssignment, result.targetAssignment());
}
@ParameterizedTest
@EnumSource(TaskRole.class)
public void testUpdateMember(TaskRole taskRole) {
TargetAssignmentBuilderTestContext context = new TargetAssignmentBuilderTestContext(
"my-group",
20
);
String fooSubtopologyId = context.addSubtopologyWithSingleSourceTopic("foo", 6);
String barSubtopologyId = context.addSubtopologyWithSingleSourceTopic("bar", 6);
context.addGroupMember("member-1", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 1, 2, 3),
mkTasks(barSubtopologyId, 1, 2)
));
context.addGroupMember("member-2", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 4, 5, 6),
mkTasks(barSubtopologyId, 3, 4)
));
context.addGroupMember("member-3", mkTasksTuple(taskRole,
mkTasks(barSubtopologyId, 5, 6)
));
context.updateMemberMetadata(
"member-3",
Optional.of("instance-id-3"),
Optional.of("rack-0")
);
context.prepareMemberAssignment("member-1", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 1, 2),
mkTasks(barSubtopologyId, 1, 2)
));
context.prepareMemberAssignment("member-2", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 3, 4),
mkTasks(barSubtopologyId, 3, 4)
));
context.prepareMemberAssignment("member-3", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 5, 6),
mkTasks(barSubtopologyId, 5, 6)
));
org.apache.kafka.coordinator.group.streams.TargetAssignmentBuilder.TargetAssignmentResult result = context.build();
assertEquals(4, result.records().size());
assertUnorderedRecordsEquals(List.of(List.of(
newStreamsGroupTargetAssignmentRecord("my-group", "member-1", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 1, 2),
mkTasks(barSubtopologyId, 1, 2)
)),
newStreamsGroupTargetAssignmentRecord("my-group", "member-2", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 3, 4),
mkTasks(barSubtopologyId, 3, 4)
)),
newStreamsGroupTargetAssignmentRecord("my-group", "member-3", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 5, 6),
mkTasks(barSubtopologyId, 5, 6)
))
)), result.records().subList(0, 3));
assertEquals(newStreamsGroupTargetAssignmentEpochRecord(
"my-group",
20
), result.records().get(3));
Map<String, TasksTuple> expectedAssignment = new HashMap<>();
expectedAssignment.put("member-1", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 1, 2),
mkTasks(barSubtopologyId, 1, 2)
));
expectedAssignment.put("member-2", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 3, 4),
mkTasks(barSubtopologyId, 3, 4)
));
expectedAssignment.put("member-3", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 5, 6),
mkTasks(barSubtopologyId, 5, 6)
));
assertEquals(expectedAssignment, result.targetAssignment());
}
@ParameterizedTest
@EnumSource(TaskRole.class)
public void testPartialAssignmentUpdate(TaskRole taskRole) {
TargetAssignmentBuilderTestContext context = new TargetAssignmentBuilderTestContext(
"my-group",
20
);
String fooSubtopologyId = context.addSubtopologyWithSingleSourceTopic("foo", 6);
String barSubtopologyId = context.addSubtopologyWithSingleSourceTopic("bar", 6);
context.addGroupMember("member-1", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 1, 2),
mkTasks(barSubtopologyId, 1, 2)
));
context.addGroupMember("member-2", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 3, 4),
mkTasks(barSubtopologyId, 3, 4)
));
context.addGroupMember("member-3", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 5, 6),
mkTasks(barSubtopologyId, 5, 6)
));
context.prepareMemberAssignment("member-1", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 1, 2),
mkTasks(barSubtopologyId, 1, 2)
));
context.prepareMemberAssignment("member-2", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 3, 4, 5),
mkTasks(barSubtopologyId, 3, 4, 5)
));
context.prepareMemberAssignment("member-3", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 6),
mkTasks(barSubtopologyId, 6)
));
org.apache.kafka.coordinator.group.streams.TargetAssignmentBuilder.TargetAssignmentResult result = context.build();
assertEquals(3, result.records().size());
// Member 1 has no record because its assignment did not change.
assertUnorderedRecordsEquals(List.of(List.of(
newStreamsGroupTargetAssignmentRecord("my-group", "member-2", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 3, 4, 5),
mkTasks(barSubtopologyId, 3, 4, 5)
)),
newStreamsGroupTargetAssignmentRecord("my-group", "member-3", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 6),
mkTasks(barSubtopologyId, 6)
))
)), result.records().subList(0, 2));
assertEquals(newStreamsGroupTargetAssignmentEpochRecord(
"my-group",
20
), result.records().get(2));
Map<String, TasksTuple> expectedAssignment = new HashMap<>();
expectedAssignment.put("member-1", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 1, 2),
mkTasks(barSubtopologyId, 1, 2)
));
expectedAssignment.put("member-2", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 3, 4, 5),
mkTasks(barSubtopologyId, 3, 4, 5)
));
expectedAssignment.put("member-3", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 6),
mkTasks(barSubtopologyId, 6)
));
assertEquals(expectedAssignment, result.targetAssignment());
}
@ParameterizedTest
@EnumSource(TaskRole.class)
public void testDeleteMember(TaskRole taskRole) {
TargetAssignmentBuilderTestContext context = new TargetAssignmentBuilderTestContext(
"my-group",
20
);
String fooSubtopologyId = context.addSubtopologyWithSingleSourceTopic("foo", 6);
String barSubtopologyId = context.addSubtopologyWithSingleSourceTopic("bar", 6);
context.addGroupMember("member-1", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 1, 2),
mkTasks(barSubtopologyId, 1, 2)
));
context.addGroupMember("member-2", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 3, 4),
mkTasks(barSubtopologyId, 3, 4)
));
context.addGroupMember("member-3", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 5, 6),
mkTasks(barSubtopologyId, 5, 6)
));
context.removeMember("member-3");
context.prepareMemberAssignment("member-1", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 1, 2, 3),
mkTasks(barSubtopologyId, 1, 2, 3)
));
context.prepareMemberAssignment("member-2", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 4, 5, 6),
mkTasks(barSubtopologyId, 4, 5, 6)
));
org.apache.kafka.coordinator.group.streams.TargetAssignmentBuilder.TargetAssignmentResult result = context.build();
assertEquals(3, result.records().size());
assertUnorderedRecordsEquals(List.of(List.of(
newStreamsGroupTargetAssignmentRecord("my-group", "member-1", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 1, 2, 3),
mkTasks(barSubtopologyId, 1, 2, 3)
)),
newStreamsGroupTargetAssignmentRecord("my-group", "member-2", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 4, 5, 6),
mkTasks(barSubtopologyId, 4, 5, 6)
))
)), result.records().subList(0, 2));
assertEquals(newStreamsGroupTargetAssignmentEpochRecord(
"my-group",
20
), result.records().get(2));
Map<String, TasksTuple> expectedAssignment = new HashMap<>();
expectedAssignment.put("member-1", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 1, 2, 3),
mkTasks(barSubtopologyId, 1, 2, 3)
));
expectedAssignment.put("member-2", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 4, 5, 6),
mkTasks(barSubtopologyId, 4, 5, 6)
));
assertEquals(expectedAssignment, result.targetAssignment());
}
@ParameterizedTest
@EnumSource(TaskRole.class)
public void testReplaceStaticMember(TaskRole taskRole) {
TargetAssignmentBuilderTestContext context = new TargetAssignmentBuilderTestContext(
"my-group",
20
);
String fooSubtopologyId = context.addSubtopologyWithSingleSourceTopic("foo", 6);
String barSubtopologyId = context.addSubtopologyWithSingleSourceTopic("bar", 6);
context.addGroupMember("member-1", "instance-member-1", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 1, 2),
mkTasks(barSubtopologyId, 1, 2)
));
context.addGroupMember("member-2", "instance-member-2", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 3, 4),
mkTasks(barSubtopologyId, 3, 4)
));
context.addGroupMember("member-3", "instance-member-3", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 5, 6),
mkTasks(barSubtopologyId, 5, 6)
));
// Static member 3 leaves
context.removeMember("member-3");
// Another static member joins with the same instance id as the departed one
context.updateMemberMetadata("member-3-a", Optional.of("instance-member-3"),
Optional.empty());
context.prepareMemberAssignment("member-1", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 1, 2),
mkTasks(barSubtopologyId, 1, 2)
));
context.prepareMemberAssignment("member-2", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 3, 4),
mkTasks(barSubtopologyId, 3, 4)
));
context.prepareMemberAssignment("member-3-a", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 5, 6),
mkTasks(barSubtopologyId, 5, 6)
));
TargetAssignmentBuilder.TargetAssignmentResult result = context.build();
assertEquals(2, result.records().size());
assertUnorderedRecordsEquals(List.of(List.of(
newStreamsGroupTargetAssignmentRecord("my-group", "member-3-a", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 5, 6),
mkTasks(barSubtopologyId, 5, 6)
))
)), result.records().subList(0, 1));
assertEquals(newStreamsGroupTargetAssignmentEpochRecord(
"my-group",
20
), result.records().get(1));
Map<String, TasksTuple> expectedAssignment = new HashMap<>();
expectedAssignment.put("member-1", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 1, 2),
mkTasks(barSubtopologyId, 1, 2)
));
expectedAssignment.put("member-2", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 3, 4),
mkTasks(barSubtopologyId, 3, 4)
));
expectedAssignment.put("member-3-a", mkTasksTuple(taskRole,
mkTasks(fooSubtopologyId, 5, 6),
mkTasks(barSubtopologyId, 5, 6)
));
assertEquals(expectedAssignment, result.targetAssignment());
}
public static
|
TargetAssignmentBuilderTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ResetJobAction.java
|
{
"start": 1303,
"end": 4602
}
|
class ____ extends AcknowledgedRequest<Request> {
private String jobId;
/**
* Internal parameter that allows resetting an open job
* when a job is reallocated to a new node.
*/
private boolean skipJobStateValidation;
/**
* Should this task store its result?
*/
private boolean shouldStoreResult;
/**
* Should user added annotations be removed when the job is reset?
*/
private boolean deleteUserAnnotations;
public Request(String jobId) {
super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT);
this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID);
}
public Request(StreamInput in) throws IOException {
super(in);
jobId = in.readString();
skipJobStateValidation = in.readBoolean();
if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_7_0)) {
deleteUserAnnotations = in.readBoolean();
} else {
deleteUserAnnotations = false;
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(jobId);
out.writeBoolean(skipJobStateValidation);
if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_7_0)) {
out.writeBoolean(deleteUserAnnotations);
}
}
public void setSkipJobStateValidation(boolean skipJobStateValidation) {
this.skipJobStateValidation = skipJobStateValidation;
}
public boolean isSkipJobStateValidation() {
return skipJobStateValidation;
}
/**
* Should this task store its result after it has finished?
*/
public void setShouldStoreResult(boolean shouldStoreResult) {
this.shouldStoreResult = shouldStoreResult;
}
@Override
public boolean getShouldStoreResult() {
return shouldStoreResult;
}
public void setDeleteUserAnnotations(boolean deleteUserAnnotations) {
this.deleteUserAnnotations = deleteUserAnnotations;
}
public boolean getDeleteUserAnnotations() {
return deleteUserAnnotations;
}
@Override
public Task createTask(long id, String type, String action, TaskId parentTaskId, Map<String, String> headers) {
return new CancellableTask(id, type, action, MlTasks.JOB_TASK_ID_PREFIX + jobId, parentTaskId, headers);
}
public String getJobId() {
return jobId;
}
@Override
public int hashCode() {
return Objects.hash(jobId, skipJobStateValidation, deleteUserAnnotations);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || o.getClass() != getClass()) return false;
Request that = (Request) o;
return Objects.equals(jobId, that.jobId)
&& skipJobStateValidation == that.skipJobStateValidation
&& deleteUserAnnotations == that.deleteUserAnnotations;
}
}
}
|
Request
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java
|
{
"start": 1858,
"end": 34708
}
|
class ____ extends ESTestCase {
public static BlockFactory blockFactory(ByteSizeValue size) {
BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, size).withCircuitBreaking();
return new BlockFactory(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST), bigArrays);
}
final CircuitBreaker breaker;
final BigArrays bigArrays;
final BlockFactory blockFactory;
@ParametersFactory
public static List<Object[]> params() {
List<Supplier<BlockFactory>> l = List.of(new Supplier<>() {
@Override
public BlockFactory get() {
CircuitBreaker breaker = new MockBigArrays.LimitedBreaker("esql-test-breaker", ByteSizeValue.ofGb(1));
BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, mockBreakerService(breaker));
return BlockFactory.getInstance(breaker, bigArrays);
}
@Override
public String toString() {
return "1gb";
}
});
return l.stream().map(s -> new Object[] { s }).toList();
}
public BlockFactoryTests(@Name("blockFactorySupplier") Supplier<BlockFactory> blockFactorySupplier) {
this.blockFactory = blockFactorySupplier.get();
this.breaker = blockFactory.breaker();
this.bigArrays = blockFactory.bigArrays();
}
@Before
@After
public void checkBreaker() {
assertThat(breaker.getUsed(), is(0L));
}
public void testPreAdjusters() {
for (int i = 0; i < 1000; i++) {
int positions = randomIntBetween(1, 16384);
long preAdjustBytes = blockFactory.preAdjustBreakerForBoolean(positions);
assertThat(preAdjustBytes, is((long) positions));
blockFactory.adjustBreaker(-preAdjustBytes);
preAdjustBytes = blockFactory.preAdjustBreakerForInt(positions);
assertThat(preAdjustBytes, is((long) positions * 4));
blockFactory.adjustBreaker(-preAdjustBytes);
preAdjustBytes = blockFactory.preAdjustBreakerForLong(positions);
assertThat(preAdjustBytes, is((long) positions * 8));
blockFactory.adjustBreaker(-preAdjustBytes);
preAdjustBytes = blockFactory.preAdjustBreakerForDouble(positions);
assertThat(preAdjustBytes, is((long) positions * 8));
blockFactory.adjustBreaker(-preAdjustBytes);
}
}
public void testIntBlockBuilderWithPossiblyLargeEstimateEmpty() {
var builder = blockFactory.newIntBlockBuilder(randomIntBetween(0, 2048));
assertThat(breaker.getUsed(), greaterThan(0L));
var block = builder.build();
releaseAndAssertBreaker(block);
block = blockFactory.newIntArrayBlock(new int[] {}, 0, new int[] { 0 }, new BitSet(), randomOrdering());
assertThat(breaker.getUsed(), greaterThan(0L));
releaseAndAssertBreaker(block);
}
public void testIntBlockBuilderWithPossiblyLargeEstimateSingle() {
var builder = blockFactory.newIntBlockBuilder(randomIntBetween(0, 2048));
builder.appendInt(randomInt());
assertThat(breaker.getUsed(), greaterThan(0L));
var block = builder.build();
releaseAndAssertBreaker(block);
block = blockFactory.newIntArrayBlock(new int[] { randomInt() }, 1, new int[] { 0, 1 }, new BitSet(), randomOrdering());
assertThat(breaker.getUsed(), greaterThan(0L));
releaseAndAssertBreaker(block);
block = blockFactory.newConstantIntBlockWith(randomInt(), randomIntBetween(1, 2048));
assertThat(breaker.getUsed(), greaterThan(0L));
releaseAndAssertBreaker(block);
}
public void testIntBlockBuilderWithPossiblyLargeEstimateRandom() {
for (int i = 0; i < 1000; i++) {
assertThat(breaker.getUsed(), is(0L));
var builder = blockFactory.newIntBlockBuilder(randomIntBetween(0, 2048));
builder.appendInt(randomInt());
if (randomBoolean()) { // null-ness
builder.appendNull();
}
if (randomBoolean()) { // mv-ness
builder.beginPositionEntry();
builder.appendInt(randomInt());
builder.appendInt(randomInt());
builder.endPositionEntry();
}
builder.appendInt(randomInt());
assertThat(breaker.getUsed(), greaterThan(0L));
var block = builder.build();
releaseAndAssertBreaker(block);
}
}
public void testIntVectorBuilderWithPossiblyLargeEstimateEmpty() {
var builder = blockFactory.newIntVectorBuilder(randomIntBetween(0, 2048));
assertThat(breaker.getUsed(), greaterThan(0L));
var vector = builder.build();
releaseAndAssertBreaker(vector);
vector = blockFactory.newIntArrayVector(new int[] {}, 0);
assertThat(breaker.getUsed(), greaterThan(0L));
releaseAndAssertBreaker(vector);
}
public void testIntVectorBuilderWithPossiblyLargeEstimateSingle() {
var builder = blockFactory.newIntVectorBuilder(randomIntBetween(0, 2048));
builder.appendInt(randomInt());
assertThat(breaker.getUsed(), greaterThan(0L));
var vector = builder.build();
releaseAndAssertBreaker(vector);
vector = blockFactory.newIntArrayVector(new int[] { randomInt() }, 1);
assertThat(breaker.getUsed(), greaterThan(0L));
releaseAndAssertBreaker(vector);
vector = blockFactory.newConstantIntBlockWith(randomInt(), randomIntBetween(1, 2048)).asVector();
assertThat(breaker.getUsed(), greaterThan(0L));
releaseAndAssertBreaker(vector);
}
public void testIntVectorBuilderWithPossiblyLargeEstimateRandom() {
for (int i = 0; i < 1000; i++) {
assertThat(breaker.getUsed(), is(0L));
var builder = blockFactory.newIntVectorBuilder(randomIntBetween(0, 2048));
builder.appendInt(randomInt());
if (randomBoolean()) { // constant-ness or not
builder.appendInt(randomInt());
}
assertThat(breaker.getUsed(), greaterThan(0L));
var vector = builder.build();
releaseAndAssertBreaker(vector);
}
}
public void testLongBlockBuilderWithPossiblyLargeEstimateEmpty() {
var builder = blockFactory.newLongBlockBuilder(randomIntBetween(0, 2048));
assertThat(breaker.getUsed(), greaterThan(0L));
var block = builder.build();
releaseAndAssertBreaker(block);
block = blockFactory.newLongArrayBlock(new long[] {}, 0, new int[] { 0 }, new BitSet(), randomOrdering());
assertThat(breaker.getUsed(), greaterThan(0L));
releaseAndAssertBreaker(block);
}
public void testLongBlockBuilderWithPossiblyLargeEstimateSingle() {
var builder = blockFactory.newLongBlockBuilder(randomIntBetween(0, 2048));
builder.appendLong(randomLong());
assertThat(breaker.getUsed(), greaterThan(0L));
var block = builder.build();
releaseAndAssertBreaker(block);
block = blockFactory.newLongArrayBlock(new long[] { randomLong() }, 1, new int[] { 0, 1 }, new BitSet(), randomOrdering());
assertThat(breaker.getUsed(), greaterThan(0L));
releaseAndAssertBreaker(block);
block = blockFactory.newConstantLongBlockWith(randomLong(), randomIntBetween(1, 2048));
assertThat(breaker.getUsed(), greaterThan(0L));
releaseAndAssertBreaker(block);
}
public void testLongBlockBuilderWithPossiblyLargeEstimateRandom() {
for (int i = 0; i < 1000; i++) {
assertThat(breaker.getUsed(), is(0L));
var builder = blockFactory.newLongBlockBuilder(randomIntBetween(0, 2048));
builder.appendLong(randomLong());
if (randomBoolean()) { // null-ness
builder.appendNull();
}
if (randomBoolean()) { // mv-ness
builder.beginPositionEntry();
builder.appendLong(randomInt());
builder.appendLong(randomInt());
builder.endPositionEntry();
}
builder.appendLong(randomLong());
assertThat(breaker.getUsed(), greaterThan(0L));
var block = builder.build();
releaseAndAssertBreaker(block);
}
}
public void testLongVectorBuilderWithPossiblyLargeEstimateEmpty() {
var builder = blockFactory.newLongVectorBuilder(randomIntBetween(0, 2048));
assertThat(breaker.getUsed(), greaterThan(0L));
var vector = builder.build();
releaseAndAssertBreaker(vector);
vector = blockFactory.newLongArrayVector(new long[] {}, 0);
assertThat(breaker.getUsed(), greaterThan(0L));
releaseAndAssertBreaker(vector);
}
public void testLongVectorBuilderWithPossiblyLargeEstimateSingle() {
var builder = blockFactory.newLongVectorBuilder(randomIntBetween(0, 2048));
builder.appendLong(randomLong());
assertThat(breaker.getUsed(), greaterThan(0L));
var vector = builder.build();
releaseAndAssertBreaker(vector);
vector = blockFactory.newLongArrayVector(new long[] { randomLong() }, 1);
assertThat(breaker.getUsed(), greaterThan(0L));
releaseAndAssertBreaker(vector);
vector = blockFactory.newConstantLongBlockWith(randomLong(), randomIntBetween(1, 2048)).asVector();
assertThat(breaker.getUsed(), greaterThan(0L));
releaseAndAssertBreaker(vector);
}
public void testLongVectorBuilderWithPossiblyLargeEstimateRandom() {
for (int i = 0; i < 1000; i++) {
assertThat(breaker.getUsed(), is(0L));
var builder = blockFactory.newLongVectorBuilder(randomIntBetween(0, 2048));
builder.appendLong(randomLong());
if (randomBoolean()) { // constant-ness or not
builder.appendLong(randomLong());
}
assertThat(breaker.getUsed(), greaterThan(0L));
var vector = builder.build();
releaseAndAssertBreaker(vector);
}
}
public void testDoubleBlockBuilderWithPossiblyLargeEstimateEmpty() {
var builder = blockFactory.newDoubleBlockBuilder(randomIntBetween(0, 2048));
assertThat(breaker.getUsed(), greaterThan(0L));
var block = builder.build();
releaseAndAssertBreaker(block);
block = blockFactory.newDoubleArrayBlock(new double[] {}, 0, new int[] { 0 }, new BitSet(), randomOrdering());
assertThat(breaker.getUsed(), greaterThan(0L));
releaseAndAssertBreaker(block);
}
public void testDoubleBlockBuilderWithPossiblyLargeEstimateSingle() {
var builder = blockFactory.newDoubleBlockBuilder(randomIntBetween(0, 2048));
builder.appendDouble(randomDouble());
assertThat(breaker.getUsed(), greaterThan(0L));
var block = builder.build();
releaseAndAssertBreaker(block);
block = blockFactory.newDoubleArrayBlock(new double[] { randomDouble() }, 1, new int[] { 0, 1 }, new BitSet(), randomOrdering());
assertThat(breaker.getUsed(), greaterThan(0L));
releaseAndAssertBreaker(block);
block = blockFactory.newConstantDoubleBlockWith(randomDouble(), randomIntBetween(1, 2048));
assertThat(breaker.getUsed(), greaterThan(0L));
releaseAndAssertBreaker(block);
}
public void testDoubleBlockBuilderWithPossiblyLargeEstimateRandom() {
for (int i = 0; i < 1000; i++) {
assertThat(breaker.getUsed(), is(0L));
var builder = blockFactory.newDoubleBlockBuilder(randomIntBetween(0, 2048));
builder.appendDouble(randomDouble());
if (randomBoolean()) { // null-ness
builder.appendNull();
}
if (randomBoolean()) { // mv-ness
builder.beginPositionEntry();
builder.appendDouble(randomDouble());
builder.appendDouble(randomDouble());
builder.endPositionEntry();
}
builder.appendDouble(randomDouble());
assertThat(breaker.getUsed(), greaterThan(0L));
var block = builder.build();
releaseAndAssertBreaker(block);
}
}
public void testDoubleVectorBuilderWithPossiblyLargeEstimateEmpty() {
var builder = blockFactory.newDoubleVectorBuilder(randomIntBetween(0, 2048));
assertThat(breaker.getUsed(), greaterThan(0L));
var vector = builder.build();
releaseAndAssertBreaker(vector);
vector = blockFactory.newDoubleArrayVector(new double[] {}, 0);
assertThat(breaker.getUsed(), greaterThan(0L));
releaseAndAssertBreaker(vector);
}
public void testDoubleVectorBuilderWithPossiblyLargeEstimateSingle() {
var builder = blockFactory.newDoubleVectorBuilder(randomIntBetween(0, 2048));
builder.appendDouble(randomDouble());
assertThat(breaker.getUsed(), greaterThan(0L));
var vector = builder.build();
releaseAndAssertBreaker(vector);
vector = blockFactory.newDoubleArrayVector(new double[] { randomDouble() }, 1);
assertThat(breaker.getUsed(), greaterThan(0L));
releaseAndAssertBreaker(vector);
vector = blockFactory.newConstantDoubleBlockWith(randomDouble(), randomIntBetween(1, 2048)).asVector();
assertThat(breaker.getUsed(), greaterThan(0L));
releaseAndAssertBreaker(vector);
}
public void testDoubleVectorBuilderWithPossiblyLargeEstimateRandom() {
for (int i = 0; i < 1000; i++) {
assertThat(breaker.getUsed(), is(0L));
var builder = blockFactory.newDoubleVectorBuilder(randomIntBetween(0, 2048));
builder.appendDouble(randomDouble());
if (randomBoolean()) { // constant-ness or not
builder.appendDouble(randomDouble());
}
assertThat(breaker.getUsed(), greaterThan(0L));
var vector = builder.build();
releaseAndAssertBreaker(vector);
}
}
public void testFloatBlockBuilderWithPossiblyLargeEstimateEmpty() {
var builder = blockFactory.newFloatBlockBuilder(randomIntBetween(0, 2048));
assertThat(breaker.getUsed(), greaterThan(0L));
var block = builder.build();
releaseAndAssertBreaker(block);
block = blockFactory.newFloatArrayBlock(new float[] {}, 0, new int[] { 0 }, new BitSet(), randomOrdering());
assertThat(breaker.getUsed(), greaterThan(0L));
releaseAndAssertBreaker(block);
}
public void testFloatBlockBuilderWithPossiblyLargeEstimateSingle() {
var builder = blockFactory.newFloatBlockBuilder(randomIntBetween(0, 2048));
builder.appendFloat(randomFloat());
assertThat(breaker.getUsed(), greaterThan(0L));
var block = builder.build();
releaseAndAssertBreaker(block);
block = blockFactory.newFloatArrayBlock(new float[] { randomFloat() }, 1, new int[] { 0, 1 }, new BitSet(), randomOrdering());
assertThat(breaker.getUsed(), greaterThan(0L));
releaseAndAssertBreaker(block);
block = blockFactory.newConstantFloatBlockWith(randomFloat(), randomIntBetween(1, 2048));
assertThat(breaker.getUsed(), greaterThan(0L));
releaseAndAssertBreaker(block);
}
public void testFloatBlockBuilderWithPossiblyLargeEstimateRandom() {
for (int i = 0; i < 1000; i++) {
assertThat(breaker.getUsed(), is(0L));
var builder = blockFactory.newFloatBlockBuilder(randomIntBetween(0, 2048));
builder.appendFloat(randomFloat());
if (randomBoolean()) { // null-ness
builder.appendNull();
}
if (randomBoolean()) { // mv-ness
builder.beginPositionEntry();
builder.appendFloat(randomFloat());
builder.appendFloat(randomFloat());
builder.endPositionEntry();
}
builder.appendFloat(randomFloat());
assertThat(breaker.getUsed(), greaterThan(0L));
var block = builder.build();
releaseAndAssertBreaker(block);
}
}
public void testFloatVectorBuilderWithPossiblyLargeEstimateEmpty() {
var builder = blockFactory.newFloatVectorBuilder(randomIntBetween(0, 2048));
assertThat(breaker.getUsed(), greaterThan(0L));
var vector = builder.build();
releaseAndAssertBreaker(vector);
vector = blockFactory.newFloatArrayVector(new float[] {}, 0);
assertThat(breaker.getUsed(), greaterThan(0L));
releaseAndAssertBreaker(vector);
}
public void testFloatVectorBuilderWithPossiblyLargeEstimateSingle() {
var builder = blockFactory.newFloatVectorBuilder(randomIntBetween(0, 2048));
builder.appendFloat(randomFloat());
assertThat(breaker.getUsed(), greaterThan(0L));
var vector = builder.build();
releaseAndAssertBreaker(vector);
vector = blockFactory.newFloatArrayVector(new float[] { randomFloat() }, 1);
assertThat(breaker.getUsed(), greaterThan(0L));
releaseAndAssertBreaker(vector);
vector = blockFactory.newConstantFloatBlockWith(randomFloat(), randomIntBetween(1, 2048)).asVector();
assertThat(breaker.getUsed(), greaterThan(0L));
releaseAndAssertBreaker(vector);
}
public void testFloatVectorBuilderWithPossiblyLargeEstimateRandom() {
for (int i = 0; i < 1000; i++) {
assertThat(breaker.getUsed(), is(0L));
var builder = blockFactory.newFloatVectorBuilder(randomIntBetween(0, 2048));
builder.appendFloat(randomFloat());
if (randomBoolean()) { // constant-ness or not
builder.appendFloat(randomFloat());
}
assertThat(breaker.getUsed(), greaterThan(0L));
var vector = builder.build();
releaseAndAssertBreaker(vector);
}
}
public void testBooleanBlockBuilderWithPossiblyLargeEstimateEmpty() {
var builder = blockFactory.newBooleanBlockBuilder(randomIntBetween(0, 2048));
assertThat(breaker.getUsed(), greaterThan(0L));
var block = builder.build();
releaseAndAssertBreaker(block);
block = blockFactory.newBooleanArrayBlock(new boolean[] {}, 0, new int[] { 0 }, new BitSet(), randomOrdering());
assertThat(breaker.getUsed(), greaterThan(0L));
releaseAndAssertBreaker(block);
}
public void testBooleanBlockBuilderWithPossiblyLargeEstimateSingle() {
var builder = blockFactory.newBooleanBlockBuilder(randomIntBetween(0, 2048));
builder.appendBoolean(randomBoolean());
assertThat(breaker.getUsed(), greaterThan(0L));
var block = builder.build();
releaseAndAssertBreaker(block);
block = blockFactory.newBooleanArrayBlock(new boolean[] { randomBoolean() }, 1, new int[] { 0, 1 }, new BitSet(), randomOrdering());
assertThat(breaker.getUsed(), greaterThan(0L));
releaseAndAssertBreaker(block);
block = blockFactory.newConstantBooleanBlockWith(randomBoolean(), randomIntBetween(1, 2048));
assertThat(breaker.getUsed(), greaterThan(0L));
releaseAndAssertBreaker(block);
}
public void testBooleanBlockBuilderWithPossiblyLargeEstimateRandom() {
for (int i = 0; i < 1000; i++) {
assertThat(breaker.getUsed(), is(0L));
var builder = blockFactory.newBooleanBlockBuilder(randomIntBetween(0, 2048));
builder.appendBoolean(randomBoolean());
if (randomBoolean()) { // null-ness
builder.appendNull();
}
if (randomBoolean()) { // mv-ness
builder.beginPositionEntry();
builder.appendBoolean(randomBoolean());
builder.appendBoolean(randomBoolean());
builder.endPositionEntry();
}
builder.appendBoolean(randomBoolean());
assertThat(breaker.getUsed(), greaterThan(0L));
var block = builder.build();
releaseAndAssertBreaker(block);
}
}
public void testBooleanVectorBuilderWithPossiblyLargeEstimateEmpty() {
var builder = blockFactory.newBooleanVectorBuilder(randomIntBetween(0, 2048));
assertThat(breaker.getUsed(), greaterThan(0L));
var vector = builder.build();
releaseAndAssertBreaker(vector);
vector = blockFactory.newBooleanArrayVector(new boolean[] {}, 0);
assertThat(breaker.getUsed(), greaterThan(0L));
releaseAndAssertBreaker(vector);
}
public void testBooleanVectorBuilderWithPossiblyLargeEstimateSingle() {
var builder = blockFactory.newBooleanVectorBuilder(randomIntBetween(0, 2048));
builder.appendBoolean(randomBoolean());
assertThat(breaker.getUsed(), greaterThan(0L));
var vector = builder.build();
releaseAndAssertBreaker(vector);
vector = blockFactory.newBooleanArrayVector(new boolean[] { randomBoolean() }, 1);
assertThat(breaker.getUsed(), greaterThan(0L));
releaseAndAssertBreaker(vector);
vector = blockFactory.newConstantBooleanBlockWith(randomBoolean(), randomIntBetween(1, 2048)).asVector();
assertThat(breaker.getUsed(), greaterThan(0L));
releaseAndAssertBreaker(vector);
}
public void testBooleanVectorBuilderWithPossiblyLargeEstimateRandom() {
for (int i = 0; i < 1000; i++) {
assertThat(breaker.getUsed(), is(0L));
var builder = blockFactory.newBooleanVectorBuilder(randomIntBetween(0, 2048));
builder.appendBoolean(randomBoolean());
if (randomBoolean()) { // constant-ness or not
builder.appendBoolean(randomBoolean());
}
assertThat(breaker.getUsed(), greaterThan(0L));
var vector = builder.build();
releaseAndAssertBreaker(vector);
}
}
public void testBytesRefBlockBuilderWithPossiblyLargeEstimateEmpty() {
var builder = blockFactory.newBytesRefBlockBuilder(randomIntBetween(0, 2048));
assertThat(breaker.getUsed(), greaterThan(0L));
var block = builder.build();
releaseAndAssertBreaker(block);
var emptyArray = new BytesRefArray(0, bigArrays);
block = blockFactory.newBytesRefArrayBlock(emptyArray, 0, new int[] { 0 }, new BitSet(), randomOrdering());
assertThat(breaker.getUsed(), greaterThan(0L));
releaseAndAssertBreaker(block);
}
public void testBytesRefBlockBuilderWithPossiblyLargeEstimateSingle() {
var builder = blockFactory.newBytesRefBlockBuilder(randomIntBetween(0, 2048));
builder.appendBytesRef(randomBytesRef());
assertThat(breaker.getUsed(), greaterThan(0L));
var block = builder.build();
releaseAndAssertBreaker(block);
var array = new BytesRefArray(1, bigArrays);
array.append(randomBytesRef());
block = blockFactory.newBytesRefArrayBlock(array, 1, new int[] { 0, 1 }, new BitSet(), randomOrdering());
assertThat(breaker.getUsed(), greaterThan(0L));
releaseAndAssertBreaker(block);
block = blockFactory.newConstantBytesRefBlockWith(randomBytesRef(), randomIntBetween(1, 2048));
assertThat(breaker.getUsed(), greaterThan(0L));
releaseAndAssertBreaker(block);
}
public void testBytesRefBlockBuilderWithPossiblyLargeEstimateRandom() {
for (int i = 0; i < 1000; i++) {
assertThat(breaker.getUsed(), is(0L));
var builder = blockFactory.newBytesRefBlockBuilder(randomIntBetween(0, 2048));
builder.appendBytesRef(randomBytesRef());
if (randomBoolean()) { // null-ness
builder.appendNull();
}
if (randomBoolean()) { // mv-ness
builder.beginPositionEntry();
builder.appendBytesRef(randomBytesRef());
builder.appendBytesRef(randomBytesRef());
builder.endPositionEntry();
}
builder.appendBytesRef(randomBytesRef());
assertThat(breaker.getUsed(), greaterThan(0L));
var block = builder.build();
releaseAndAssertBreaker(block);
}
}
public void testBytesRefVectorBuilderWithPossiblyLargeEstimateEmpty() {
var builder = blockFactory.newBytesRefVectorBuilder(randomIntBetween(0, 2048));
assertThat(breaker.getUsed(), greaterThan(0L));
var vector = builder.build();
releaseAndAssertBreaker(vector);
var emptyArray = new BytesRefArray(0, bigArrays);
vector = blockFactory.newBytesRefArrayVector(emptyArray, 0);
assertThat(breaker.getUsed(), greaterThan(0L));
releaseAndAssertBreaker(vector);
}
public void testBytesRefVectorBuilderWithPossiblyLargeEstimateSingle() {
var builder = blockFactory.newBytesRefVectorBuilder(randomIntBetween(0, 2048));
builder.appendBytesRef(randomBytesRef());
assertThat(breaker.getUsed(), greaterThan(0L));
var vector = builder.build();
releaseAndAssertBreaker(vector);
var array = new BytesRefArray(1, bigArrays);
array.append(randomBytesRef());
vector = blockFactory.newBytesRefArrayVector(array, 0);
assertThat(breaker.getUsed(), greaterThan(0L));
releaseAndAssertBreaker(vector);
vector = blockFactory.newConstantBytesRefBlockWith(randomBytesRef(), randomIntBetween(1, 2048)).asVector();
assertThat(breaker.getUsed(), greaterThan(0L));
releaseAndAssertBreaker(vector);
}
public void testBytesRefVectorBuilderWithPossiblyLargeEstimateRandom() {
for (int i = 0; i < 1000; i++) {
assertThat(breaker.getUsed(), is(0L));
var builder = blockFactory.newBytesRefVectorBuilder(randomIntBetween(0, 2048));
builder.appendBytesRef(randomBytesRef());
if (randomBoolean()) { // constant-ness or not
builder.appendBytesRef(randomBytesRef());
}
assertThat(breaker.getUsed(), greaterThan(0L));
var vector = builder.build();
releaseAndAssertBreaker(vector);
}
}
public void testReleaseVector() {
int positionCount = randomIntBetween(1, 10);
IntVector vector = blockFactory.newIntArrayVector(new int[positionCount], positionCount);
if (randomBoolean()) {
vector.asBlock().close();
} else {
vector.close();
}
assertTrue(vector.isReleased());
assertThat(breaker.getUsed(), equalTo(0L));
}
public void testParent() {
long overLimit = between(1, 10);
long maxOverLimit = randomLongBetween(overLimit, 1000);
LocalCircuitBreaker localBreaker = new LocalCircuitBreaker(blockFactory.breaker(), overLimit, maxOverLimit);
BlockFactory childFactory = blockFactory.newChildFactory(localBreaker);
assertThat(childFactory.parent(), sameInstance(blockFactory));
assertThat(blockFactory.parent(), sameInstance(blockFactory));
localBreaker.close();
}
private Block randomBlock(BlockFactory blockFactory, int positionCount) {
return RandomBlock.randomBlock(
blockFactory,
randomFrom(ElementType.BYTES_REF, ElementType.LONG, ElementType.BOOLEAN),
positionCount,
randomBoolean(),
between(0, 1),
between(1, 3),
between(0, 1),
between(1, 3)
).block();
}
public void testAllowPassingBlockToDifferentContext() throws Exception {
long overLimit1 = between(0, 10 * 1024);
long maxOverLimit1 = randomLongBetween(overLimit1, 100 * 1024);
LocalCircuitBreaker localBreaker1 = new LocalCircuitBreaker(blockFactory.breaker(), overLimit1, maxOverLimit1);
long overLimit2 = between(0, 10 * 1024);
long maxOverLimit2 = randomLongBetween(overLimit1, 100 * 1024);
LocalCircuitBreaker localBreaker2 = new LocalCircuitBreaker(blockFactory.breaker(), overLimit2, maxOverLimit2);
BlockFactory childFactory1 = blockFactory.newChildFactory(localBreaker1);
BlockFactory childFactory2 = blockFactory.newChildFactory(localBreaker2);
Thread[] releasingThreads = new Thread[between(1, 4)];
Page[] passedPages = new Page[releasingThreads.length];
for (int i = 0; i < passedPages.length; i++) {
int positionCount = between(1, 100);
Block[] blocks = new Block[between(1, 10)];
for (int b = 0; b < blocks.length; b++) {
blocks[b] = randomBlock(randomFrom(childFactory1, childFactory2), positionCount);
blocks[b].allowPassingToDifferentDriver();
assertThat(blocks[b].blockFactory(), equalTo(blockFactory));
}
passedPages[i] = new Page(blocks);
}
Block[] localBlocks = new Block[between(1, 100)];
for (int i = 0; i < localBlocks.length; i++) {
BlockFactory childFactory = randomFrom(childFactory1, childFactory2);
localBlocks[i] = randomBlock(childFactory, between(1, 100));
assertThat(localBlocks[i].blockFactory(), equalTo(childFactory));
}
CyclicBarrier barrier = new CyclicBarrier(releasingThreads.length + 1);
for (int i = 0; i < releasingThreads.length; i++) {
int threadIndex = i;
releasingThreads[threadIndex] = new Thread(() -> {
try {
barrier.await(30, TimeUnit.SECONDS);
passedPages[threadIndex].releaseBlocks();
} catch (Exception e) {
throw new AssertionError(e);
}
});
releasingThreads[threadIndex].start();
}
barrier.await(30, TimeUnit.SECONDS);
for (Block block : localBlocks) {
block.close();
}
for (Thread releasingThread : releasingThreads) {
releasingThread.join();
}
assertThat(localBreaker1.getReservedBytes(), lessThanOrEqualTo(maxOverLimit1));
assertThat(localBreaker2.getReservedBytes(), lessThanOrEqualTo(maxOverLimit2));
localBreaker1.close();
localBreaker2.close();
}
public void testOwningFactoryOfVectorBlock() {
BlockFactory parentFactory = blockFactory(ByteSizeValue.ofBytes(between(1024, 4096)));
LocalCircuitBreaker localBreaker = new LocalCircuitBreaker(parentFactory.breaker(), between(0, 1024), between(0, 1024));
BlockFactory localFactory = parentFactory.newChildFactory(localBreaker);
int numValues = between(2, 10);
try (var builder = localFactory.newIntVectorBuilder(numValues)) {
for (int i = 0; i < numValues; i++) {
builder.appendInt(randomInt());
}
IntBlock block = builder.build().asBlock();
assertThat(block.blockFactory(), equalTo(localFactory));
block.allowPassingToDifferentDriver();
assertThat(block.blockFactory(), equalTo(parentFactory));
}
}
static BytesRef randomBytesRef() {
return new BytesRef(randomByteArrayOfLength(between(1, 20)));
}
static Block.MvOrdering randomOrdering() {
return randomFrom(Block.MvOrdering.values());
}
<T extends Releasable & Accountable> void releaseAndAssertBreaker(T data) {
Page page = data instanceof Block block ? new Page(block) : null;
assertThat(breaker.getUsed(), greaterThan(0L));
Releasables.closeExpectNoException(data);
if (data instanceof Block block) {
assertThat(block.isReleased(), is(true));
Exception e = expectThrows(IllegalStateException.class, () -> page.getBlock(0));
assertThat(e.getMessage(), containsString("can't read released block"));
e = expectThrows(IllegalArgumentException.class, () -> new Page(block));
assertThat(e.getMessage(), containsString("can't build page out of released blocks"));
}
assertThat(breaker.getUsed(), is(0L));
}
// A breaker service that always returns the given breaker for getBreaker(CircuitBreaker.REQUEST)
static CircuitBreakerService mockBreakerService(CircuitBreaker breaker) {
CircuitBreakerService breakerService = mock(CircuitBreakerService.class);
when(breakerService.getBreaker(CircuitBreaker.REQUEST)).thenReturn(breaker);
return breakerService;
}
}
|
BlockFactoryTests
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/state/filesystem/FsSegmentDataInputStreamTest.java
|
{
"start": 1475,
"end": 5415
}
|
class ____ {
@Rule public final TemporaryFolder tmp = new TemporaryFolder();
private static final CloseableRegistry closeableRegistry = new CloseableRegistry();
private static final Random random = new Random();
@Test
public void testReadFromFileSegments() throws IOException {
Path dirPath = new Path(tmp.getRoot().getPath());
Path filePath = new Path(dirPath, UUID.randomUUID().toString());
byte[] fileContent = prepareFileToRead(filePath, 512);
int startPos, segmentSize;
// 1. whole file as one segment
startPos = 0;
segmentSize = 512;
FsSegmentDataInputStream inputStream = openSegment(filePath, startPos, segmentSize);
closeableRegistry.registerCloseable(inputStream);
byte[] readResult = new byte[segmentSize];
int readLen = inputStream.read(readResult);
assertThat(readLen).isEqualTo(segmentSize);
assertBytesContentEqual(fileContent, startPos, segmentSize, readResult);
assertThat(inputStream.read()).isEqualTo(-1);
// 2. read with a file segment
startPos = 26;
segmentSize = 483;
inputStream = openSegment(filePath, startPos, segmentSize);
readResult = new byte[segmentSize];
readLen = inputStream.read(readResult);
assertThat(readLen).isEqualTo(segmentSize);
assertBytesContentEqual(fileContent, startPos, segmentSize, readResult);
assertThat(inputStream.read()).isEqualTo(-1);
// 3. seek to a relative position
startPos = 56;
segmentSize = 123;
inputStream = openSegment(filePath, startPos, segmentSize);
int readBufferSize = 32;
readResult = new byte[readBufferSize];
int seekPos = 74;
inputStream.seek(seekPos);
assertThat(inputStream.getPos()).isEqualTo(seekPos);
readLen = inputStream.read(readResult);
assertThat(readLen).isEqualTo(readBufferSize);
assertBytesContentEqual(fileContent, startPos + seekPos, readLen, readResult);
assertThat(inputStream.getPos()).isEqualTo(seekPos + readBufferSize);
// current relative position is (74 + 32 = 106)
// reading another 32 bytes will cross the segment boundary
assertThat(inputStream.read(readResult)).isEqualTo(segmentSize - seekPos - readBufferSize);
assertThat(inputStream.read()).isEqualTo(-1);
assertThat(inputStream.read(new byte[10])).isEqualTo(-1);
assertThat(inputStream.read(new byte[10], 0, 1)).isEqualTo(-1);
}
private byte[] prepareFileToRead(Path filePath, int fileSize) throws IOException {
OutputStreamAndPath streamAndPath =
EntropyInjector.createEntropyAware(
filePath.getFileSystem(), filePath, FileSystem.WriteMode.NO_OVERWRITE);
FSDataOutputStream outputStream = streamAndPath.stream();
byte[] fileContent = randomBytes(fileSize);
outputStream.write(fileContent);
outputStream.close();
return fileContent;
}
private FsSegmentDataInputStream openSegment(
Path filePath, long startPosition, long segmentSize) throws IOException {
FSDataInputStream inputStream = filePath.getFileSystem().open(filePath);
return new FsSegmentDataInputStream(inputStream, startPosition, segmentSize);
}
private byte[] randomBytes(int len) {
byte[] bytes = new byte[len];
random.nextBytes(bytes);
return bytes;
}
private void assertBytesContentEqual(
byte[] expected, int startPosInExpected, int sizeInExpected, byte[] actual) {
assertThat(actual.length).isEqualTo(sizeInExpected);
byte[] expectedSegment = new byte[sizeInExpected];
System.arraycopy(expected, startPosInExpected, expectedSegment, 0, sizeInExpected);
assertThat(actual).isEqualTo(expectedSegment);
}
}
|
FsSegmentDataInputStreamTest
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/LoadBalancingKMSClientProvider.java
|
{
"start": 3071,
"end": 20247
}
|
class ____ extends RuntimeException {
public WrapperException(Throwable cause) {
super(cause);
}
}
private final KMSClientProvider[] providers;
private final AtomicInteger currentIdx;
private final Text dtService; // service in token.
private final Text canonicalService; // credentials alias for token.
private RetryPolicy retryPolicy = null;
public LoadBalancingKMSClientProvider(URI providerUri,
KMSClientProvider[] providers, Configuration conf) {
this(providerUri, providers, Time.monotonicNow(), conf);
}
@VisibleForTesting
LoadBalancingKMSClientProvider(KMSClientProvider[] providers, long seed,
Configuration conf) {
this(URI.create("kms://testing"), providers, seed, conf);
}
private LoadBalancingKMSClientProvider(URI uri,
KMSClientProvider[] providers, long seed, Configuration conf) {
super(conf);
// uri is the token service so it can be instantiated for renew/cancel.
dtService = KMSClientProvider.getDtService(uri);
// if provider not in conf, new client will alias on uri else addr.
if (KMSUtil.getKeyProviderUri(conf) == null) {
canonicalService = dtService;
} else {
// canonical service (credentials alias) will be the first underlying
// provider's service. must be deterministic before shuffle so multiple
// calls for a token do not obtain another unnecessary token.
canonicalService = new Text(providers[0].getCanonicalServiceName());
}
// shuffle unless seed is 0 which is used by tests for determinism.
this.providers = (seed != 0) ? shuffle(providers) : providers;
for (KMSClientProvider provider : providers) {
provider.setClientTokenProvider(this);
}
this.currentIdx = new AtomicInteger((int)(seed % providers.length));
int maxNumRetries = conf.getInt(CommonConfigurationKeysPublic.
KMS_CLIENT_FAILOVER_MAX_RETRIES_KEY, providers.length);
int sleepBaseMillis = conf.getInt(CommonConfigurationKeysPublic.
KMS_CLIENT_FAILOVER_SLEEP_BASE_MILLIS_KEY,
CommonConfigurationKeysPublic.
KMS_CLIENT_FAILOVER_SLEEP_BASE_MILLIS_DEFAULT);
int sleepMaxMillis = conf.getInt(CommonConfigurationKeysPublic.
KMS_CLIENT_FAILOVER_SLEEP_MAX_MILLIS_KEY,
CommonConfigurationKeysPublic.
KMS_CLIENT_FAILOVER_SLEEP_MAX_MILLIS_DEFAULT);
Preconditions.checkState(maxNumRetries >= 0);
Preconditions.checkState(sleepBaseMillis >= 0);
Preconditions.checkState(sleepMaxMillis >= 0);
this.retryPolicy = RetryPolicies.failoverOnNetworkException(
RetryPolicies.TRY_ONCE_THEN_FAIL, maxNumRetries, 0, sleepBaseMillis,
sleepMaxMillis);
LOG.debug("Created LoadBalancingKMSClientProvider for KMS url: {} with {} "
+ "providers. delegation token service: {}, canonical service: {}",
uri, providers.length, dtService, canonicalService);
}
@VisibleForTesting
public KMSClientProvider[] getProviders() {
return providers;
}
@Override
public org.apache.hadoop.security.token.Token<? extends TokenIdentifier>
selectDelegationToken(Credentials creds) {
Token<? extends TokenIdentifier> token =
KMSClientProvider.selectDelegationToken(creds, canonicalService);
if (token == null) {
token = KMSClientProvider.selectDelegationToken(creds, dtService);
}
// fallback to querying each sub-provider.
if (token == null) {
for (KMSClientProvider provider : getProviders()) {
token = provider.selectDelegationToken(creds);
if (token != null) {
break;
}
}
}
return token;
}
private <T> T doOp(ProviderCallable<T> op, int currPos,
boolean isIdempotent) throws IOException {
if (providers.length == 0) {
throw new IOException("No providers configured !");
}
int numFailovers = 0;
for (int i = 0;; i++, numFailovers++) {
KMSClientProvider provider = providers[(currPos + i) % providers.length];
try {
return op.call(provider);
} catch (AccessControlException ace) {
// No need to retry on AccessControlException
// and AuthorizationException.
// This assumes all the servers are configured with identical
// permissions and identical key acls.
throw ace;
} catch (IOException ioe) {
LOG.warn("KMS provider at [{}] threw an IOException: ",
provider.getKMSUrl(), ioe);
// SSLException can occur here because of lost connection
// with the KMS server, creating a ConnectException from it,
// so that the FailoverOnNetworkExceptionRetry policy will retry
if (ioe instanceof SSLException || ioe instanceof SocketException) {
Exception cause = ioe;
ioe = new ConnectException("SSLHandshakeException: "
+ cause.getMessage());
ioe.initCause(cause);
}
RetryAction action = null;
try {
action = retryPolicy.shouldRetry(ioe, 0, numFailovers, isIdempotent);
} catch (Exception e) {
if (e instanceof IOException) {
throw (IOException)e;
}
throw new IOException(e);
}
// make sure each provider is tried at least once, to keep behavior
// compatible with earlier versions of LBKMSCP
if (action.action == RetryAction.RetryDecision.FAIL
&& numFailovers >= providers.length - 1) {
LOG.error("Aborting since the Request has failed with all KMS"
+ " providers(depending on {}={} setting and numProviders={})"
+ " in the group OR the exception is not recoverable",
CommonConfigurationKeysPublic.KMS_CLIENT_FAILOVER_MAX_RETRIES_KEY,
getConf().getInt(
CommonConfigurationKeysPublic.
KMS_CLIENT_FAILOVER_MAX_RETRIES_KEY, providers.length),
providers.length);
throw ioe;
}
if (((numFailovers + 1) % providers.length) == 0) {
// Sleep only after we try all the providers for every cycle.
try {
Thread.sleep(action.delayMillis);
} catch (InterruptedException e) {
throw new InterruptedIOException("Thread Interrupted");
}
}
} catch (Exception e) {
if (e instanceof RuntimeException) {
throw (RuntimeException)e;
} else {
throw new WrapperException(e);
}
}
}
}
private int nextIdx() {
while (true) {
int current = currentIdx.get();
int next = (current + 1) % providers.length;
if (currentIdx.compareAndSet(current, next)) {
return current;
}
}
}
@Override
public String getCanonicalServiceName() {
return canonicalService.toString();
}
@Override
public Token<?> getDelegationToken(String renewer) throws IOException {
return doOp(new ProviderCallable<Token<?>>() {
@Override
public Token<?> call(KMSClientProvider provider) throws IOException {
Token<?> token = provider.getDelegationToken(renewer);
// override sub-providers service with our own so it can be used
// across all providers.
token.setService(dtService);
LOG.debug("New token service set. Token: ({})", token);
return token;
}
}, nextIdx(), false);
}
@Override
public long renewDelegationToken(final Token<?> token) throws IOException {
return doOp(new ProviderCallable<Long>() {
@Override
public Long call(KMSClientProvider provider) throws IOException {
return provider.renewDelegationToken(token);
}
}, nextIdx(), false);
}
@Override
public Void cancelDelegationToken(final Token<?> token) throws IOException {
return doOp(new ProviderCallable<Void>() {
@Override
public Void call(KMSClientProvider provider) throws IOException {
provider.cancelDelegationToken(token);
return null;
}
}, nextIdx(), false);
}
// This request is sent to all providers in the load-balancing group
@Override
public void warmUpEncryptedKeys(String... keyNames) throws IOException {
Preconditions.checkArgument(providers.length > 0,
"No providers are configured");
boolean success = false;
IOException e = null;
for (KMSClientProvider provider : providers) {
try {
provider.warmUpEncryptedKeys(keyNames);
success = true;
} catch (IOException ioe) {
e = ioe;
LOG.error(
"Error warming up keys for provider with url"
+ "[" + provider.getKMSUrl() + "]", ioe);
}
}
if (!success && e != null) {
throw e;
}
}
// This request is sent to all providers in the load-balancing group
@Override
public void drain(String keyName) {
for (KMSClientProvider provider : providers) {
provider.drain(keyName);
}
}
// This request is sent to all providers in the load-balancing group
@Override
public void invalidateCache(String keyName) throws IOException {
for (KMSClientProvider provider : providers) {
provider.invalidateCache(keyName);
}
}
@Override
public EncryptedKeyVersion
generateEncryptedKey(final String encryptionKeyName)
throws IOException, GeneralSecurityException {
try {
return doOp(new ProviderCallable<EncryptedKeyVersion>() {
@Override
public EncryptedKeyVersion call(KMSClientProvider provider)
throws IOException, GeneralSecurityException {
return provider.generateEncryptedKey(encryptionKeyName);
}
}, nextIdx(), true);
} catch (WrapperException we) {
if (we.getCause() instanceof GeneralSecurityException) {
throw (GeneralSecurityException) we.getCause();
}
throw new IOException(we.getCause());
}
}
@Override
public KeyVersion
decryptEncryptedKey(final EncryptedKeyVersion encryptedKeyVersion)
throws IOException, GeneralSecurityException {
try {
return doOp(new ProviderCallable<KeyVersion>() {
@Override
public KeyVersion call(KMSClientProvider provider)
throws IOException, GeneralSecurityException {
return provider.decryptEncryptedKey(encryptedKeyVersion);
}
}, nextIdx(), true);
} catch (WrapperException we) {
if (we.getCause() instanceof GeneralSecurityException) {
throw (GeneralSecurityException) we.getCause();
}
throw new IOException(we.getCause());
}
}
@Override
public EncryptedKeyVersion reencryptEncryptedKey(
final EncryptedKeyVersion ekv)
throws IOException, GeneralSecurityException {
try {
return doOp(new ProviderCallable<EncryptedKeyVersion>() {
@Override
public EncryptedKeyVersion call(KMSClientProvider provider)
throws IOException, GeneralSecurityException {
return provider.reencryptEncryptedKey(ekv);
}
}, nextIdx(), true);
} catch (WrapperException we) {
if (we.getCause() instanceof GeneralSecurityException) {
throw (GeneralSecurityException) we.getCause();
}
throw new IOException(we.getCause());
}
}
@Override
public void reencryptEncryptedKeys(final List<EncryptedKeyVersion> ekvs)
throws IOException, GeneralSecurityException {
try {
doOp(new ProviderCallable<Void>() {
@Override
public Void call(KMSClientProvider provider)
throws IOException, GeneralSecurityException {
provider.reencryptEncryptedKeys(ekvs);
return null;
}
}, nextIdx(), true);
} catch (WrapperException we) {
if (we.getCause() instanceof GeneralSecurityException) {
throw (GeneralSecurityException) we.getCause();
}
throw new IOException(we.getCause());
}
}
@Override
public KeyVersion getKeyVersion(final String versionName) throws IOException {
return doOp(new ProviderCallable<KeyVersion>() {
@Override
public KeyVersion call(KMSClientProvider provider) throws IOException {
return provider.getKeyVersion(versionName);
}
}, nextIdx(), true);
}
@Override
public List<String> getKeys() throws IOException {
return doOp(new ProviderCallable<List<String>>() {
@Override
public List<String> call(KMSClientProvider provider) throws IOException {
return provider.getKeys();
}
}, nextIdx(), true);
}
@Override
public Metadata[] getKeysMetadata(final String... names) throws IOException {
return doOp(new ProviderCallable<Metadata[]>() {
@Override
public Metadata[] call(KMSClientProvider provider) throws IOException {
return provider.getKeysMetadata(names);
}
}, nextIdx(), true);
}
@Override
public List<KeyVersion> getKeyVersions(final String name) throws IOException {
return doOp(new ProviderCallable<List<KeyVersion>>() {
@Override
public List<KeyVersion> call(KMSClientProvider provider)
throws IOException {
return provider.getKeyVersions(name);
}
}, nextIdx(), true);
}
@Override
public KeyVersion getCurrentKey(final String name) throws IOException {
return doOp(new ProviderCallable<KeyVersion>() {
@Override
public KeyVersion call(KMSClientProvider provider) throws IOException {
return provider.getCurrentKey(name);
}
}, nextIdx(), true);
}
@Override
public Metadata getMetadata(final String name) throws IOException {
return doOp(new ProviderCallable<Metadata>() {
@Override
public Metadata call(KMSClientProvider provider) throws IOException {
return provider.getMetadata(name);
}
}, nextIdx(), true);
}
@Override
public KeyVersion createKey(final String name, final byte[] material,
final Options options) throws IOException {
return doOp(new ProviderCallable<KeyVersion>() {
@Override
public KeyVersion call(KMSClientProvider provider) throws IOException {
return provider.createKey(name, material, options);
}
}, nextIdx(), false);
}
@Override
public KeyVersion createKey(final String name, final Options options)
throws NoSuchAlgorithmException, IOException {
try {
return doOp(new ProviderCallable<KeyVersion>() {
@Override
public KeyVersion call(KMSClientProvider provider) throws IOException,
NoSuchAlgorithmException {
return provider.createKey(name, options);
}
}, nextIdx(), false);
} catch (WrapperException e) {
if (e.getCause() instanceof GeneralSecurityException) {
throw (NoSuchAlgorithmException) e.getCause();
}
throw new IOException(e.getCause());
}
}
@Override
public void deleteKey(final String name) throws IOException {
doOp(new ProviderCallable<Void>() {
@Override
public Void call(KMSClientProvider provider) throws IOException {
provider.deleteKey(name);
return null;
}
}, nextIdx(), false);
invalidateCache(name);
}
@Override
public KeyVersion rollNewVersion(final String name, final byte[] material)
throws IOException {
final KeyVersion newVersion = doOp(new ProviderCallable<KeyVersion>() {
@Override
public KeyVersion call(KMSClientProvider provider) throws IOException {
return provider.rollNewVersion(name, material);
}
}, nextIdx(), false);
invalidateCache(name);
return newVersion;
}
@Override
public KeyVersion rollNewVersion(final String name)
throws NoSuchAlgorithmException, IOException {
try {
final KeyVersion newVersion = doOp(new ProviderCallable<KeyVersion>() {
@Override
public KeyVersion call(KMSClientProvider provider) throws IOException,
NoSuchAlgorithmException {
return provider.rollNewVersion(name);
}
}, nextIdx(), false);
invalidateCache(name);
return newVersion;
} catch (WrapperException e) {
if (e.getCause() instanceof GeneralSecurityException) {
throw (NoSuchAlgorithmException) e.getCause();
}
throw new IOException(e.getCause());
}
}
// Close all providers in the LB group
@Override
public void close() throws IOException {
for (KMSClientProvider provider : providers) {
try {
provider.close();
} catch (IOException ioe) {
LOG.error("Error closing provider with url"
+ "[" + provider.getKMSUrl() + "]");
}
}
}
@Override
public void flush() throws IOException {
for (KMSClientProvider provider : providers) {
try {
provider.flush();
} catch (IOException ioe) {
LOG.error("Error flushing provider with url"
+ "[" + provider.getKMSUrl() + "]");
}
}
}
private static KMSClientProvider[] shuffle(KMSClientProvider[] providers) {
List<KMSClientProvider> list = Arrays.asList(providers);
Collections.shuffle(list);
return list.toArray(providers);
}
}
|
WrapperException
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/delegation/AbstractDelegationTokenBinding.java
|
{
"start": 11248,
"end": 11939
}
|
class ____
extends SecretManager<AbstractS3ATokenIdentifier> {
@Override
protected byte[] createPassword(AbstractS3ATokenIdentifier identifier) {
return getSecretManagerPasssword();
}
@Override
public byte[] retrievePassword(AbstractS3ATokenIdentifier identifier)
throws InvalidToken {
return getSecretManagerPasssword();
}
@Override
public AbstractS3ATokenIdentifier createIdentifier() {
try (DurationInfo ignored = new DurationInfo(LOG, DURATION_LOG_AT_INFO,
"Creating Delegation Token Identifier")) {
return AbstractDelegationTokenBinding.this.createEmptyIdentifier();
}
}
}
}
|
TokenSecretManager
|
java
|
spring-projects__spring-framework
|
spring-core/src/main/java/org/springframework/core/type/AnnotatedTypeMetadata.java
|
{
"start": 9993,
"end": 12616
}
|
class ____ for exposure as values in the returned {@code AnnotationAttributes},
* instead of {@code Class} references which might potentially have to be loaded
* first
* @param sortByReversedMetaDistance {@code true} if the results should be
* sorted in reversed order based on each annotation's meta distance
* @return the set of all merged repeatable {@code AnnotationAttributes} found,
* or an empty set if none were found
* @since 6.1
* @see #getMergedRepeatableAnnotationAttributes(Class, Class, boolean)
* @see #getMergedRepeatableAnnotationAttributes(Class, Class, Predicate, boolean, boolean)
*/
default Set<AnnotationAttributes> getMergedRepeatableAnnotationAttributes(
Class<? extends Annotation> annotationType, Class<? extends Annotation> containerType,
boolean classValuesAsString, boolean sortByReversedMetaDistance) {
return getMergedRepeatableAnnotationAttributes(annotationType, containerType,
mergedAnnotation -> true, classValuesAsString, sortByReversedMetaDistance);
}
/**
* Retrieve all <em>repeatable annotations</em> of the given type within the
* annotation hierarchy <em>above</em> the underlying element (as direct
* annotation or meta-annotation); and for each annotation found, merge that
* annotation's attributes with <em>matching</em> attributes from annotations
* in lower levels of the annotation hierarchy and store the results in an
* instance of {@link AnnotationAttributes}.
* <p>{@link org.springframework.core.annotation.AliasFor @AliasFor} semantics
* are fully supported, both within a single annotation and within annotation
* hierarchies.
* <p>The supplied {@link Predicate} will be used to filter the results. For
* example, supply {@code mergedAnnotation -> true} to include all annotations
* in the results; supply {@code MergedAnnotation::isDirectlyPresent} to limit
* the results to directly declared annotations, etc.
* <p>If the {@code sortByReversedMetaDistance} flag is set to {@code true},
* the results will be sorted in {@link Comparator#reversed() reversed} order
* based on each annotation's {@linkplain MergedAnnotation#getDistance()
* meta distance}, which effectively orders meta-annotations before annotations
* that are declared directly on the underlying element.
* @param annotationType the annotation type to find
* @param containerType the type of the container that holds the annotations
* @param predicate a {@code Predicate} to apply to each {@code MergedAnnotation}
* to determine if it should be included in the results
* @param classValuesAsString whether to convert
|
names
|
java
|
apache__camel
|
dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/SagaComponentBuilderFactory.java
|
{
"start": 1830,
"end": 3924
}
|
interface ____ extends ComponentBuilder<SagaComponent> {
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default SagaComponentBuilder lazyStartProducer(boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether autowiring is enabled. This is used for automatic autowiring
* options (the option must be marked as autowired) by looking up in the
* registry to find if there is a single instance of matching type,
* which then gets configured on the component. This can be used for
* automatic configuring JDBC data sources, JMS connection factories,
* AWS Clients, etc.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: advanced
*
* @param autowiredEnabled the value to set
* @return the dsl builder
*/
default SagaComponentBuilder autowiredEnabled(boolean autowiredEnabled) {
doSetProperty("autowiredEnabled", autowiredEnabled);
return this;
}
}
|
SagaComponentBuilder
|
java
|
spring-projects__spring-framework
|
spring-jdbc/src/main/java/org/springframework/jdbc/core/InterruptibleBatchPreparedStatementSetter.java
|
{
"start": 1878,
"end": 2799
}
|
interface ____ extends BatchPreparedStatementSetter {
/**
* Return whether the batch is complete, that is, whether there were no
* additional values added during the last {@code setValues} call.
* <p><b>NOTE:</b> If this method returns {@code true}, any parameters
* that might have been set during the last {@code setValues} call will
* be ignored! Make sure that you set a corresponding internal flag if you
* detect exhaustion <i>at the beginning</i> of your {@code setValues}
* implementation, letting this method return {@code true} based on the flag.
* @param i index of the statement we're issuing in the batch, starting from 0
* @return whether the batch is already exhausted
* @see #setValues
* @see org.springframework.jdbc.core.support.AbstractInterruptibleBatchPreparedStatementSetter#setValuesIfAvailable
*/
boolean isBatchExhausted(int i);
}
|
InterruptibleBatchPreparedStatementSetter
|
java
|
apache__camel
|
components/camel-undertow/src/test/java/org/apache/camel/component/undertow/rest/RestUndertowProducerPutTest.java
|
{
"start": 1057,
"end": 2029
}
|
class ____ extends BaseUndertowTest {
@Test
public void testUndertowProducerPut() throws Exception {
getMockEndpoint("mock:input").expectedMessageCount(1);
fluentTemplate.withBody("Donald Duck").withHeader("id", "123").to("direct:start").send();
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
// configure to use localhost with the given port
restConfiguration().component("undertow").host("localhost").port(getPort());
from("direct:start")
.to("rest:put:users/{id}");
// use the rest DSL to define the rest services
rest("/users/")
.put("{id}")
.to("mock:input");
}
};
}
}
|
RestUndertowProducerPutTest
|
java
|
spring-projects__spring-framework
|
spring-core/src/test/java/org/springframework/util/ReflectionUtilsTests.java
|
{
"start": 12949,
"end": 13121
}
|
class ____ {
@SuppressWarnings({ "unused", "RedundantThrows" })
private void foo(Integer i) throws RemoteException {
}
}
@SuppressWarnings("unused")
private static
|
A
|
java
|
apache__camel
|
components/camel-netty/src/test/java/org/apache/camel/component/netty/NettyUDPByteArrayProviderTest.java
|
{
"start": 1627,
"end": 3647
}
|
class ____ extends BaseNettyTest {
private static final String SEND_STRING
= "ef3e00559f5faf0262f5ff0962d9008daa91001cd46b0fa9330ef0f3030fff250e46f72444d1cc501678c351e04b8004c"
+ "4000002080000fe850bbe011030000008031b031bfe9251305441593830354720020800050440ff";
private static final int SEND_COUNT = 10;
private volatile int receivedCount;
private EventLoopGroup group;
private Bootstrap bootstrap;
public void createNettyUdpReceiver() {
group = new NioEventLoopGroup();
bootstrap = new Bootstrap();
bootstrap.group(group)
.channel(NioDatagramChannel.class)
.handler(new ChannelInitializer<Channel>() {
@Override
protected void initChannel(Channel channel) {
channel.pipeline().addLast(new UdpHandler());
channel.pipeline().addLast(new ByteArrayDecoder());
channel.pipeline().addLast(new ContentHandler());
}
}).localAddress(new InetSocketAddress(getPort()));
}
public void bind() {
bootstrap.bind().syncUninterruptibly();
}
public void stop() {
group.shutdownGracefully().syncUninterruptibly();
}
@Test
public void testSendingRawByteMessage() {
createNettyUdpReceiver();
bind();
for (int i = 0; i < SEND_COUNT; ++i) {
template.sendBody("direct:in", fromHexString(SEND_STRING));
}
stop();
assertTrue(receivedCount > 0, "We should have received some datagrams");
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:in")
.to("netty:udp://localhost:{{port}}?sync=false&udpByteArrayCodec=true&udpConnectionlessSending=true");
}
};
}
public
|
NettyUDPByteArrayProviderTest
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutComponentTemplateAction.java
|
{
"start": 1226,
"end": 1663
}
|
class ____ extends ActionType<AcknowledgedResponse> {
public static final PutComponentTemplateAction INSTANCE = new PutComponentTemplateAction();
public static final String NAME = "cluster:admin/component_template/put";
private PutComponentTemplateAction() {
super(NAME);
}
/**
* A request for putting a single component template into the cluster state
*/
public static
|
PutComponentTemplateAction
|
java
|
google__error-prone
|
docgen/src/main/java/com/google/errorprone/DocGenTool.java
|
{
"start": 2723,
"end": 2785
}
|
enum ____ {
INTERNAL,
EXTERNAL
}
public static
|
Target
|
java
|
apache__camel
|
components/camel-ai/camel-langchain4j-tools/src/test/java/org/apache/camel/component/langchain4j/tools/LangChain4jToolParameterValueTypeConversionTest.java
|
{
"start": 1502,
"end": 4934
}
|
class ____ extends CamelTestSupport {
protected ChatModel chatModel;
@RegisterExtension
static OpenAIMock openAIMock = new OpenAIMock().builder()
.when("A test user message\n")
.invokeTool("TestTool")
.withParam("int", 1)
.withParam("intNumeric", 2)
.withParam("long", Long.MIN_VALUE)
.withParam("double", 1.0)
.withParam("boolean", true)
.withParam("string", "1")
.build();
@Override
protected void setupResources() throws Exception {
super.setupResources();
chatModel = ToolsHelper.createModel(openAIMock.getBaseUrl());
}
@Override
protected CamelContext createCamelContext() throws Exception {
CamelContext context = super.createCamelContext();
LangChain4jToolsComponent component
= context.getComponent(LangChain4jTools.SCHEME, LangChain4jToolsComponent.class);
component.getConfiguration().setChatModel(chatModel);
return context;
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("direct:test")
.to("langchain4j-tools:test?tags=test")
.log("response is: ${body}");
from("langchain4j-tools:test?tags=test&name=TestTool&description=Test Tool¶meter.int=integer¶meter.intNumeric=number¶meter.long=number¶meter.double=number¶meter.boolean=boolean¶meter.string=string")
.setBody(simple("{\"content\": \"fake response\"}"));
}
};
}
@Test
void parameterValueTypeConversion() {
List<ChatMessage> messages = new ArrayList<>();
messages.add(new SystemMessage(
"""
You provide the requested information using the functions you hava available. You can invoke the functions to obtain the information you need to complete the answer.
"""));
messages.add(new UserMessage("""
A test user message
"""));
Exchange exchange = fluentTemplate.to("direct:test").withBody(messages).request(Exchange.class);
Assertions.assertThat(exchange).isNotNull();
Message message = exchange.getMessage();
Assertions.assertThat(message.getHeader("int")).isInstanceOf(Integer.class);
Assertions.assertThat(message.getHeader("int")).isEqualTo(1);
Assertions.assertThat(message.getHeader("intNumeric")).isInstanceOf(Integer.class);
Assertions.assertThat(message.getHeader("intNumeric")).isEqualTo(2);
Assertions.assertThat(message.getHeader("long")).isInstanceOf(Long.class);
Assertions.assertThat(message.getHeader("long")).isEqualTo(Long.MIN_VALUE);
Assertions.assertThat(message.getHeader("double")).isInstanceOf(Double.class);
Assertions.assertThat(message.getHeader("double")).isEqualTo(1.0);
Assertions.assertThat(message.getHeader("boolean")).isInstanceOf(Boolean.class);
Assertions.assertThat(message.getHeader("boolean")).isEqualTo(true);
Assertions.assertThat(message.getHeader("string")).isInstanceOf(String.class);
Assertions.assertThat(message.getHeader("string")).isEqualTo("1");
}
}
|
LangChain4jToolParameterValueTypeConversionTest
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/clients/consumer/internals/ShareConsumerDelegateCreator.java
|
{
"start": 1982,
"end": 3996
}
|
class ____ {
public <K, V> ShareConsumerDelegate<K, V> create(final ConsumerConfig config,
final Deserializer<K> keyDeserializer,
final Deserializer<V> valueDeserializer) {
try {
return new ShareConsumerImpl<>(config, keyDeserializer, valueDeserializer);
} catch (KafkaException e) {
throw e;
} catch (Throwable t) {
throw new KafkaException("Failed to construct Kafka share consumer", t);
}
}
public <K, V> ShareConsumerDelegate<K, V> create(final LogContext logContext,
final String clientId,
final String groupId,
final ConsumerConfig config,
final Deserializer<K> keyDeserializer,
final Deserializer<V> valueDeserializer,
final Time time,
final KafkaClient client,
final SubscriptionState subscriptions,
final ShareConsumerMetadata metadata) {
try {
return new ShareConsumerImpl<>(
logContext,
clientId,
groupId,
config,
keyDeserializer,
valueDeserializer,
time,
client,
subscriptions,
metadata
);
} catch (KafkaException e) {
throw e;
} catch (Throwable t) {
throw new KafkaException("Failed to construct Kafka share consumer", t);
}
}
}
|
ShareConsumerDelegateCreator
|
java
|
google__guava
|
android/guava-testlib/src/com/google/common/collect/testing/AbstractTester.java
|
{
"start": 1402,
"end": 3414
}
|
class ____<G> extends TestCase {
private G subjectGenerator;
private String suiteName;
private @Nullable Runnable setUp;
private @Nullable Runnable tearDown;
// public so that it can be referenced in generated GWT tests.
@Override
public void setUp() throws Exception {
if (setUp != null) {
setUp.run();
}
}
// public so that it can be referenced in generated GWT tests.
@Override
public void tearDown() throws Exception {
if (tearDown != null) {
tearDown.run();
}
}
// public so that it can be referenced in generated GWT tests.
public final void init(
G subjectGenerator, String suiteName, @Nullable Runnable setUp, @Nullable Runnable tearDown) {
this.subjectGenerator = subjectGenerator;
this.suiteName = suiteName;
this.setUp = setUp;
this.tearDown = tearDown;
}
// public so that it can be referenced in generated GWT tests.
public final void init(G subjectGenerator, String suiteName) {
init(subjectGenerator, suiteName, null, null);
}
public G getSubjectGenerator() {
return subjectGenerator;
}
/** Returns the name of the test method invoked by this test instance. */
@J2ktIncompatible
@GwtIncompatible // not used under GWT, and super.getName() is not available under J2CL
public final String getTestMethodName() {
return super.getName();
}
@J2ktIncompatible
@GwtIncompatible // not used under GWT, and super.getName() is not available under J2CL
@Override
public String getName() {
return super.getName() + '[' + suiteName + ']';
}
/**
* Asserts that the given object is non-null, with a better failure message than {@link
* TestCase#assertNull(String, Object)}.
*
* <p>The {@link TestCase} version (which is from JUnit 3) produces a failure message that does
* not include the value of the object.
*
* @since 33.4.0
*/
public static void assertNull(String message, Object object) {
assertEquals(message, null, object);
}
}
|
AbstractTester
|
java
|
apache__camel
|
dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/DebeziumSqlserverComponentBuilderFactory.java
|
{
"start": 1958,
"end": 5300
}
|
interface ____ extends ComponentBuilder<DebeziumSqlserverComponent> {
/**
* Additional properties for debezium components in case they can't be
* set directly on the camel configurations (e.g: setting Kafka Connect
* properties needed by Debezium engine, for example setting
* KafkaOffsetBackingStore), the properties have to be prefixed with
* additionalProperties.. E.g:
* additionalProperties.transactional.id=12345&additionalProperties.schema.registry.url=http://localhost:8811/avro. This is a multi-value option with prefix: additionalProperties.
*
* The option is a: <code>java.util.Map&lt;java.lang.String,
* java.lang.Object&gt;</code> type.
*
* Group: common
*
* @param additionalProperties the value to set
* @return the dsl builder
*/
default DebeziumSqlserverComponentBuilder additionalProperties(java.util.Map<java.lang.String, java.lang.Object> additionalProperties) {
doSetProperty("additionalProperties", additionalProperties);
return this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default DebeziumSqlserverComponentBuilder bridgeErrorHandler(boolean bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* Allow pre-configured Configurations to be set.
*
* The option is a:
* <code>org.apache.camel.component.debezium.sqlserver.configuration.SqlServerConnectorEmbeddedDebeziumConfiguration</code> type.
*
* Group: consumer
*
* @param configuration the value to set
* @return the dsl builder
*/
default DebeziumSqlserverComponentBuilder configuration(org.apache.camel.component.debezium.sqlserver.configuration.SqlServerConnectorEmbeddedDebeziumConfiguration configuration) {
doSetProperty("configuration", configuration);
return this;
}
/**
* The Converter
|
DebeziumSqlserverComponentBuilder
|
java
|
spring-projects__spring-framework
|
spring-test/src/main/java/org/springframework/test/web/servlet/setup/ConfigurableMockMvcBuilder.java
|
{
"start": 1474,
"end": 6945
}
|
interface ____<B extends ConfigurableMockMvcBuilder<B>> extends MockMvcBuilder {
/**
* Add filters mapped to all requests. Filters are invoked in the same order.
* <p>Note: if you need the filter to be initialized with {@link Filter#init(FilterConfig)},
* please use {@link #addFilter(Filter, String, Map, EnumSet, String...)} instead.
* @param filters the filters to add
*/
<T extends B> T addFilters(Filter... filters);
/**
* Add a filter mapped to specific patterns.
* <p>Note: if you need the filter to be initialized with {@link Filter#init(FilterConfig)},
* please use {@link #addFilter(Filter, String, Map, EnumSet, String...)} instead.
* @param filter the filter to add
* @param urlPatterns the URL patterns to map to; if empty, matches all requests
*/
<T extends B> T addFilter(Filter filter, String... urlPatterns);
/**
* Add a filter that will be initialized via {@link Filter#init(FilterConfig)}
* with the given init parameters, and will also apply only to requests that
* match the given dispatcher types and URL patterns.
* @param filter the filter to add
* @param filterName the name to use for the filter; if {@code null}, then
* {@link org.springframework.mock.web.MockFilterConfig} is created without
* a name, which defaults to an empty String for the name
* @param initParams the init parameters to initialize the filter with
* @param dispatcherTypes dispatcher types the filter applies to
* @param urlPatterns the URL patterns to map to; if empty, matches all requests
* @since 6.1
* @see org.springframework.mock.web.MockFilterConfig
*/
<T extends B> T addFilter(
Filter filter, @Nullable String filterName, Map<String, String> initParams,
EnumSet<DispatcherType> dispatcherTypes, String... urlPatterns);
/**
* Set the {@link ApiVersionInserter} to use to apply to versions specified via
* {@link org.springframework.test.web.servlet.request.AbstractMockHttpServletRequestBuilder#apiVersion(Object)}.
* <p>{@code ApiVersionInserter} exposes shortcut methods for several
* built-in inserter implementation types. See the class-level Javadoc
* of {@link ApiVersionInserter} for a list of choices.
* @param versionInserter the inserter to use
* @since 7.0
*/
<T extends B> T apiVersionInserter(@Nullable ApiVersionInserter versionInserter);
/**
* Define default request properties that should be merged into all
* performed requests. In effect this provides a mechanism for defining
* common initialization for all requests such as the content type, request
* parameters, session attributes, and any other request property.
*
* <p>Properties specified at the time of performing a request override the
* default properties defined here.
* @param requestBuilder a RequestBuilder; see static factory methods in
* {@link org.springframework.test.web.servlet.request.MockMvcRequestBuilders}
*/
<T extends B> T defaultRequest(RequestBuilder requestBuilder);
/**
* Define the default character encoding to be applied to every response.
* <p>The default implementation of this method throws an
* {@link UnsupportedOperationException}. Concrete implementations are therefore
* encouraged to override this method.
* @param defaultResponseCharacterEncoding the default response character encoding
* @since 5.3.10
*/
default <T extends B> T defaultResponseCharacterEncoding(Charset defaultResponseCharacterEncoding) {
throw new UnsupportedOperationException("defaultResponseCharacterEncoding is not supported by this MockMvcBuilder");
}
/**
* Define a global expectation that should <em>always</em> be applied to
* every response. For example, status code 200 (OK), content type
* {@code "application/json"}, etc.
* @param resultMatcher a ResultMatcher; see static factory methods in
* {@link org.springframework.test.web.servlet.result.MockMvcResultMatchers}
*/
<T extends B> T alwaysExpect(ResultMatcher resultMatcher);
/**
* Define a global action that should <em>always</em> be applied to every
* response. For example, writing detailed information about the performed
* request and resulting response to {@code System.out}.
* @param resultHandler a ResultHandler; see static factory methods in
* {@link org.springframework.test.web.servlet.result.MockMvcResultHandlers}
*/
<T extends B> T alwaysDo(ResultHandler resultHandler);
/**
* Whether to enable the DispatcherServlet property
* {@link org.springframework.web.servlet.DispatcherServlet#setDispatchOptionsRequest
* dispatchOptionsRequest} which allows processing of HTTP OPTIONS requests.
*/
<T extends B> T dispatchOptions(boolean dispatchOptions);
/**
* A more advanced variant of {@link #dispatchOptions(boolean)} that allows
* customizing any {@link org.springframework.web.servlet.DispatcherServlet}
* property.
* @since 5.3
*/
<T extends B> T addDispatcherServletCustomizer(DispatcherServletCustomizer customizer);
/**
* Add a {@code MockMvcConfigurer} that automates MockMvc setup and
* configures it for some specific purpose (for example, security).
* <p>There is a built-in {@link SharedHttpSessionConfigurer} that can be
* used to re-use the HTTP session across requests. 3rd party frameworks
* like Spring Security also use this mechanism to provide configuration
* shortcuts.
* @see SharedHttpSessionConfigurer
*/
<T extends B> T apply(MockMvcConfigurer configurer);
}
|
ConfigurableMockMvcBuilder
|
java
|
spring-projects__spring-framework
|
spring-expression/src/test/java/org/springframework/expression/spel/support/ReflectionHelperTests.java
|
{
"start": 2370,
"end": 20083
}
|
class ____ extends AbstractExpressionTests {
private final StandardTypeConverter tc = new StandardTypeConverter();
@Test
void utilities() throws ParseException {
SpelExpression expr = (SpelExpression)parser.parseExpression("3+4+5+6+7-2");
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintStream ps = new PrintStream(baos);
SpelUtilities.printAbstractSyntaxTree(ps, expr);
ps.flush();
String s = baos.toString();
// ===> Expression '3+4+5+6+7-2' - AST start
// OperatorMinus value:(((((3 + 4) + 5) + 6) + 7) - 2) #children:2
// OperatorPlus value:((((3 + 4) + 5) + 6) + 7) #children:2
// OperatorPlus value:(((3 + 4) + 5) + 6) #children:2
// OperatorPlus value:((3 + 4) + 5) #children:2
// OperatorPlus value:(3 + 4) #children:2
// CompoundExpression value:3
// IntLiteral value:3
// CompoundExpression value:4
// IntLiteral value:4
// CompoundExpression value:5
// IntLiteral value:5
// CompoundExpression value:6
// IntLiteral value:6
// CompoundExpression value:7
// IntLiteral value:7
// CompoundExpression value:2
// IntLiteral value:2
// ===> Expression '3+4+5+6+7-2' - AST end
assertThat(s).contains("===> Expression '3+4+5+6+7-2' - AST start");
assertThat(s).contains(" OpPlus value:((((3 + 4) + 5) + 6) + 7) #children:2");
}
@Test
void typedValue() {
TypedValue tv1 = new TypedValue("hello");
TypedValue tv2 = new TypedValue("hello");
TypedValue tv3 = new TypedValue("bye");
assertThat(tv1.getTypeDescriptor().getType()).isEqualTo(String.class);
assertThat(tv1.toString()).isEqualTo("TypedValue: 'hello' of [java.lang.String]");
assertThat(tv2).isEqualTo(tv1);
assertThat(tv1).isEqualTo(tv2);
assertThat(tv3).isNotEqualTo(tv1);
assertThat(tv3).isNotEqualTo(tv2);
assertThat(tv1).isNotEqualTo(tv3);
assertThat(tv2).isNotEqualTo(tv3);
assertThat(tv2).hasSameHashCodeAs(tv1);
assertThat(tv3).doesNotHaveSameHashCodeAs(tv1);
assertThat(tv3).doesNotHaveSameHashCodeAs(tv2);
}
@Test
void reflectionHelperCompareArguments_ExactMatching() {
// Calling foo(String) with (String) is exact match
checkMatch(new Class<?>[] {String.class}, new Class<?>[] {String.class}, tc, EXACT);
// Calling foo(String,Integer) with (String,Integer) is exact match
checkMatch(new Class<?>[] {String.class, Integer.class}, new Class<?>[] {String.class, Integer.class}, tc, EXACT);
}
@Test
void reflectionHelperCompareArguments_CloseMatching() {
// Calling foo(List) with (ArrayList) is close match (no conversion required)
checkMatch(new Class<?>[] {ArrayList.class}, new Class<?>[] {List.class}, tc, CLOSE);
// Passing (Sub,String) on call to foo(Super,String) is close match
checkMatch(new Class<?>[] {Sub.class, String.class}, new Class<?>[] {Super.class, String.class}, tc, CLOSE);
// Passing (String,Sub) on call to foo(String,Super) is close match
checkMatch(new Class<?>[] {String.class, Sub.class}, new Class<?>[] {String.class, Super.class}, tc, CLOSE);
}
@Test
void reflectionHelperCompareArguments_CloseMatching_WithAutoBoxing() {
// Calling foo(String,int) with (String,Integer) requires boxing conversion of argument one
checkMatch(new Class<?>[] {String.class, int.class}, new Class<?>[] {String.class, Integer.class},tc, CLOSE);
// Passing (int,String) on call to foo(Integer,String) requires boxing conversion of argument zero
checkMatch(new Class<?>[] {int.class, String.class}, new Class<?>[] {Integer.class, String.class},tc, CLOSE);
// Passing (int,Sub) on call to foo(Integer,Super) requires boxing conversion of argument zero
checkMatch(new Class<?>[] {int.class, Sub.class}, new Class<?>[] {Integer.class, Super.class}, tc, CLOSE);
// Passing (int,Sub,boolean) on call to foo(Integer,Super,Boolean) requires boxing conversion of arguments zero and two
checkMatch(new Class<?>[] {int.class, Sub.class, boolean.class}, new Class<?>[] {Integer.class, Super.class, Boolean.class}, tc, CLOSE);
}
@Test
void reflectionHelperCompareArguments_NotAMatch() {
StandardTypeConverter typeConverter = new StandardTypeConverter();
// Passing (Super,String) on call to foo(Sub,String) is not a match
checkMatch(new Class<?>[] {Super.class,String.class}, new Class<?>[] {Sub.class,String.class}, typeConverter, null);
}
@Test
void reflectionHelperCompareArguments_Varargs() {
// Passing (String[]) on call to (String[]) is exact match
checkMatchVarargs(new Class<?>[] {String[].class}, new Class<?>[] {String[].class}, tc, EXACT);
// Passing (Integer, String[]) on call to (Integer, String[]) is exact match
checkMatchVarargs(new Class<?>[] {Integer.class, String[].class}, new Class<?>[] {Integer.class, String[].class}, tc, EXACT);
// Passing (String, Integer, String[]) on call to (String, String, String[]) is exact match
checkMatchVarargs(new Class<?>[] {String.class, Integer.class, String[].class}, new Class<?>[] {String.class,Integer.class, String[].class}, tc, EXACT);
// Passing (Sub, String[]) on call to (Super, String[]) is exact match
checkMatchVarargs(new Class<?>[] {Sub.class, String[].class}, new Class<?>[] {Super.class,String[].class}, tc, CLOSE);
// Passing (Integer, String[]) on call to (String, String[]) is exact match
checkMatchVarargs(new Class<?>[] {Integer.class, String[].class}, new Class<?>[] {String.class, String[].class}, tc, REQUIRES_CONVERSION);
// Passing (Integer, Sub, String[]) on call to (String, Super, String[]) is exact match
checkMatchVarargs(new Class<?>[] {Integer.class, Sub.class, String[].class}, new Class<?>[] {String.class, Super.class, String[].class}, tc, REQUIRES_CONVERSION);
// Passing (String) on call to (String[]) is exact match
checkMatchVarargs(new Class<?>[] {String.class}, new Class<?>[] {String[].class}, tc, EXACT);
// Passing (Integer,String) on call to (Integer,String[]) is exact match
checkMatchVarargs(new Class<?>[] {Integer.class, String.class}, new Class<?>[] {Integer.class, String[].class}, tc, EXACT);
// Passing (String) on call to (Integer[]) is conversion match (String to Integer)
checkMatchVarargs(new Class<?>[] {String.class}, new Class<?>[] {Integer[].class}, tc, REQUIRES_CONVERSION);
// Passing (Sub) on call to (Super[]) is close match
checkMatchVarargs(new Class<?>[] {Sub.class}, new Class<?>[] {Super[].class}, tc, CLOSE);
// Passing (Super) on call to (Sub[]) is not a match
checkMatchVarargs(new Class<?>[] {Super.class}, new Class<?>[] {Sub[].class}, tc, null);
checkMatchVarargs(new Class<?>[] {Unconvertable.class, String.class}, new Class<?>[] {Sub.class, Super[].class}, tc, null);
checkMatchVarargs(new Class<?>[] {Integer.class, Integer.class, String.class}, new Class<?>[] {String.class, String.class, Super[].class}, tc, null);
checkMatchVarargs(new Class<?>[] {Unconvertable.class, String.class}, new Class<?>[] {Sub.class, Super[].class}, tc, null);
checkMatchVarargs(new Class<?>[] {Integer.class, Integer.class, String.class}, new Class<?>[] {String.class, String.class, Super[].class}, tc, null);
checkMatchVarargs(new Class<?>[] {Integer.class, Integer.class, Sub.class}, new Class<?>[] {String.class, String.class, Super[].class}, tc, REQUIRES_CONVERSION);
checkMatchVarargs(new Class<?>[] {Integer.class, Integer.class, Integer.class}, new Class<?>[] {Integer.class, String[].class}, tc, REQUIRES_CONVERSION);
// what happens on (Integer,String) passed to (Integer[]) ?
}
@Test
void convertArguments() throws Exception {
Method oneArg = TestInterface.class.getMethod("oneArg", String.class);
Method twoArg = TestInterface.class.getMethod("twoArg", String.class, String[].class);
// basic conversion int>String
Object[] args = new Object[] {3};
ReflectionHelper.convertArguments(tc, args, oneArg, null);
checkArguments(args, "3");
// varargs but nothing to convert
args = new Object[] {3};
ReflectionHelper.convertArguments(tc, args, twoArg, 1);
checkArguments(args, "3");
// varargs with nothing needing conversion
args = new Object[] {3, "abc", "abc"};
ReflectionHelper.convertArguments(tc, args, twoArg, 1);
checkArguments(args, "3", "abc", "abc");
// varargs with conversion required
args = new Object[] {3, false ,3.0d};
ReflectionHelper.convertArguments(tc, args, twoArg, 1);
checkArguments(args, "3", "false", "3.0");
}
@Test
void convertAllArguments() throws Exception {
Method oneArg = TestInterface.class.getMethod("oneArg", String.class);
Method twoArg = TestInterface.class.getMethod("twoArg", String.class, String[].class);
// Simple conversion: int to string
Object[] args = new Object[] {3};
ReflectionHelper.convertAllArguments(tc, args, oneArg);
checkArguments(args, "3");
// varargs conversion
args = new Object[] {3, false, 3.0f};
ReflectionHelper.convertAllArguments(tc, args, twoArg);
checkArguments(args, "3", "false", "3.0");
// varargs conversion but no varargs
args = new Object[] {3};
ReflectionHelper.convertAllArguments(tc, args, twoArg);
checkArguments(args, "3");
// null value
args = new Object[] {3, null, 3.0f};
ReflectionHelper.convertAllArguments(tc, args, twoArg);
checkArguments(args, "3", null, "3.0");
}
@Test
void setupArgumentsForVarargsInvocationPreconditions() {
assertThatIllegalArgumentException()
.isThrownBy(() -> ReflectionHelper.setupArgumentsForVarargsInvocation(new Class[] {}, "a"))
.withMessage("Required parameter types array must not be empty");
assertThatIllegalArgumentException()
.isThrownBy(() -> ReflectionHelper.setupArgumentsForVarargsInvocation(
new Class<?>[] { Integer.class, Integer.class }, 123))
.withMessage("The last required parameter type must be an array to support varargs invocation");
}
@Test
void setupArgumentsForVarargsInvocation() {
Object[] newArray;
newArray = ReflectionHelper.setupArgumentsForVarargsInvocation(new Class<?>[] { String[].class }, "a", "b", "c");
assertThat(newArray)
.singleElement()
.asInstanceOf(array(String[].class))
.containsExactly("a", "b", "c");
newArray = ReflectionHelper.setupArgumentsForVarargsInvocation(new Class<?>[] { Object[].class }, "a", "b", "c");
assertThat(newArray)
.singleElement()
.asInstanceOf(array(Object[].class))
.containsExactly("a", "b", "c");
newArray = ReflectionHelper.setupArgumentsForVarargsInvocation(
new Class<?>[] { Integer.class, Integer.class, String[].class }, 123, 456, "a", "b", "c");
assertThat(newArray).satisfiesExactly(
one -> assertThat(one).isEqualTo(123),
two -> assertThat(two).isEqualTo(456),
three -> assertThat(three).asInstanceOf(array(String[].class)).containsExactly("a", "b", "c"));
newArray = ReflectionHelper.setupArgumentsForVarargsInvocation(new Class<?>[] { String[].class });
assertThat(newArray)
.singleElement()
.asInstanceOf(array(String[].class))
.isEmpty();
newArray = ReflectionHelper.setupArgumentsForVarargsInvocation(
new Class<?>[] { String[].class }, new Object[] { new String[] { "a", "b", "c" } });
assertThat(newArray)
.singleElement()
.asInstanceOf(array(String[].class))
.containsExactly("a", "b", "c");
newArray = ReflectionHelper.setupArgumentsForVarargsInvocation(
new Class<?>[] { Object[].class }, new Object[] { new String[] { "a", "b", "c" } });
assertThat(newArray)
.singleElement()
.asInstanceOf(array(Object[].class))
.containsExactly("a", "b", "c");
newArray = ReflectionHelper.setupArgumentsForVarargsInvocation(new Class<?>[] { String[].class }, "a");
assertThat(newArray)
.singleElement()
.asInstanceOf(array(String[].class))
.containsExactly("a");
newArray = ReflectionHelper.setupArgumentsForVarargsInvocation(new Class<?>[] { String[].class }, new Object[] { null });
assertThat(newArray)
.singleElement()
.asInstanceOf(array(String[].class))
.singleElement()
.isNull();
}
@Test
void reflectivePropertyAccessor() throws Exception {
ReflectivePropertyAccessor rpa = new ReflectivePropertyAccessor();
Tester t = new Tester();
t.setProperty("hello");
EvaluationContext ctx = new StandardEvaluationContext(t);
assertThat(rpa.canRead(ctx, t, "property")).isTrue();
assertThat(rpa.read(ctx, t, "property").getValue()).isEqualTo("hello");
// cached accessor used
assertThat(rpa.read(ctx, t, "property").getValue()).isEqualTo("hello");
assertThat(rpa.canRead(ctx, t, "field")).isTrue();
assertThat(rpa.read(ctx, t, "field").getValue()).isEqualTo(3);
// cached accessor used
assertThat(rpa.read(ctx, t, "field").getValue()).isEqualTo(3);
assertThat(rpa.canWrite(ctx, t, "property")).isTrue();
rpa.write(ctx, t, "property", "goodbye");
rpa.write(ctx, t, "property", "goodbye"); // cached accessor used
assertThat(rpa.canWrite(ctx, t, "field")).isTrue();
rpa.write(ctx, t, "field", 12);
rpa.write(ctx, t, "field", 12);
// Attempted write as first activity on this field and property to drive testing
// of populating type descriptor cache
rpa.write(ctx, t, "field2", 3);
rpa.write(ctx, t, "property2", "doodoo");
assertThat(rpa.read(ctx, t, "field2").getValue()).isEqualTo(3);
// Attempted read as first activity on this field and property (no canRead before them)
assertThat(rpa.read(ctx, t, "field3").getValue()).isEqualTo(0);
assertThat(rpa.read(ctx, t, "property3").getValue()).isEqualTo("doodoo");
// Access through is method
assertThat(rpa.read(ctx, t, "field3").getValue()).isEqualTo(0);
assertThat(rpa.read(ctx, t, "property4").getValue()).isEqualTo(false);
assertThat(rpa.canRead(ctx, t, "property4")).isTrue();
// repro SPR-9123, ReflectivePropertyAccessor JavaBean property names compliance tests
assertThat(rpa.read(ctx, t, "iD").getValue()).isEqualTo("iD");
assertThat(rpa.canRead(ctx, t, "iD")).isTrue();
assertThat(rpa.read(ctx, t, "id").getValue()).isEqualTo("id");
assertThat(rpa.canRead(ctx, t, "id")).isTrue();
assertThat(rpa.read(ctx, t, "ID").getValue()).isEqualTo("ID");
assertThat(rpa.canRead(ctx, t, "ID")).isTrue();
// note: "Id" is not a valid JavaBean name, nevertheless it is treated as "id"
assertThat(rpa.read(ctx, t, "Id").getValue()).isEqualTo("id");
assertThat(rpa.canRead(ctx, t, "Id")).isTrue();
// repro SPR-10994
assertThat(rpa.read(ctx, t, "xyZ").getValue()).isEqualTo("xyZ");
assertThat(rpa.canRead(ctx, t, "xyZ")).isTrue();
assertThat(rpa.read(ctx, t, "xY").getValue()).isEqualTo("xY");
assertThat(rpa.canRead(ctx, t, "xY")).isTrue();
// SPR-10122, ReflectivePropertyAccessor JavaBean property names compliance tests - setters
rpa.write(ctx, t, "pEBS", "Test String");
assertThat(rpa.read(ctx, t, "pEBS").getValue()).isEqualTo("Test String");
}
@Test
void optimalReflectivePropertyAccessor() throws Exception {
ReflectivePropertyAccessor reflective = new ReflectivePropertyAccessor();
Tester tester = new Tester();
tester.setProperty("hello");
EvaluationContext ctx = new StandardEvaluationContext(tester);
assertThat(reflective.canRead(ctx, tester, "property")).isTrue();
assertThat(reflective.read(ctx, tester, "property").getValue()).isEqualTo("hello");
// cached accessor used
assertThat(reflective.read(ctx, tester, "property").getValue()).isEqualTo("hello");
PropertyAccessor property = reflective.createOptimalAccessor(ctx, tester, "property");
assertThat(property.canRead(ctx, tester, "property")).isTrue();
assertThat(property.canRead(ctx, tester, "property2")).isFalse();
assertThatExceptionOfType(UnsupportedOperationException.class).isThrownBy(() ->
property.canWrite(ctx, tester, "property"));
assertThatExceptionOfType(UnsupportedOperationException.class).isThrownBy(() ->
property.canWrite(ctx, tester, "property2"));
assertThat(property.read(ctx, tester, "property").getValue()).isEqualTo("hello");
// cached accessor used
assertThat(property.read(ctx, tester, "property").getValue()).isEqualTo("hello");
assertThatExceptionOfType(UnsupportedOperationException.class).isThrownBy(property::getSpecificTargetClasses);
assertThatExceptionOfType(UnsupportedOperationException.class).isThrownBy(() ->
property.write(ctx, tester, "property", null));
PropertyAccessor field = reflective.createOptimalAccessor(ctx, tester, "field");
assertThat(field.canRead(ctx, tester, "field")).isTrue();
assertThat(field.canRead(ctx, tester, "field2")).isFalse();
assertThatExceptionOfType(UnsupportedOperationException.class).isThrownBy(() ->
field.canWrite(ctx, tester, "field"));
assertThatExceptionOfType(UnsupportedOperationException.class).isThrownBy(() ->
field.canWrite(ctx, tester, "field2"));
assertThat(field.read(ctx, tester, "field").getValue()).isEqualTo(3);
// cached accessor used
assertThat(field.read(ctx, tester, "field").getValue()).isEqualTo(3);
assertThatExceptionOfType(UnsupportedOperationException.class).isThrownBy(field::getSpecificTargetClasses);
assertThatExceptionOfType(UnsupportedOperationException.class).isThrownBy(() ->
field.write(ctx, tester, "field", null));
}
@Test
void reflectiveMethodResolverForJdkProxies() throws Exception {
Object proxy = Proxy.newProxyInstance(getClass().getClassLoader(), new Class<?>[] { Runnable.class }, (p, m, args) -> null);
MethodResolver resolver = new ReflectiveMethodResolver();
StandardEvaluationContext evaluationContext = new StandardEvaluationContext();
// Nonexistent method
MethodExecutor bogus = resolver.resolve(evaluationContext, proxy, "bogus", List.of());
assertThat(bogus).as("MethodExecutor for bogus()").isNull();
// Method in
|
ReflectionHelperTests
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/sql/ast/statement/SQLAlterTableAddColumn.java
|
{
"start": 864,
"end": 2813
}
|
class ____ extends SQLObjectImpl implements SQLAlterTableItem {
private final List<SQLColumnDefinition> columns = new ArrayList<SQLColumnDefinition>();
// for mysql
private SQLName firstColumn;
private SQLName afterColumn;
private boolean first;
private Boolean restrict;
private boolean cascade;
private boolean ifNotExists;
public SQLAlterTableAddColumn() {
}
@Override
protected void accept0(SQLASTVisitor visitor) {
if (visitor.visit(this)) {
acceptChild(visitor, columns);
}
visitor.endVisit(this);
}
public boolean isIfNotExists() {
return ifNotExists;
}
public void setIfNotExists(boolean ifNotExists) {
this.ifNotExists = ifNotExists;
}
public List<SQLColumnDefinition> getColumns() {
return columns;
}
public void addColumn(SQLColumnDefinition column) {
if (column != null) {
column.setParent(this);
}
this.columns.add(column);
}
public SQLName getFirstColumn() {
return firstColumn;
}
public void setFirstColumn(SQLName first) {
this.firstColumn = first;
}
public boolean isFirst() {
return first;
}
public void setFirst(boolean first) {
this.first = first;
}
public SQLName getAfterColumn() {
return afterColumn;
}
public void setAfterColumn(SQLName after) {
this.afterColumn = after;
}
public Boolean getRestrict() {
return restrict;
}
public boolean isRestrict() {
if (restrict == null) {
return !cascade;
}
return restrict;
}
public void setRestrict(boolean restrict) {
this.restrict = restrict;
}
public boolean isCascade() {
return cascade;
}
public void setCascade(boolean cascade) {
this.cascade = cascade;
}
}
|
SQLAlterTableAddColumn
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/query/JoinReuseInCorrelatedSubqueryTest.java
|
{
"start": 4209,
"end": 4455
}
|
class ____ {
@Id
@GeneratedValue
private Integer id;
private Integer foo;
public ReferencedEntity() {
}
public ReferencedEntity(Integer foo) {
this.foo = foo;
}
public Integer getId() {
return id;
}
}
}
|
ReferencedEntity
|
java
|
apache__logging-log4j2
|
log4j-to-slf4j/src/test/java/org/apache/logging/slf4j/TestUtil.java
|
{
"start": 1118,
"end": 1953
}
|
class ____ {
public static StringListAppender<ILoggingEvent> getListAppender(final SLF4JLogger slf4jLogger, final String name) {
final Logger logger = slf4jLogger.getLogger();
if (!(logger instanceof AppenderAttachable)) {
throw new AssertionError("SLF4JLogger.getLogger() did not return an instance of AppenderAttachable");
}
@SuppressWarnings("unchecked")
final AppenderAttachable<ILoggingEvent> attachable = (AppenderAttachable<ILoggingEvent>) logger;
return getListAppender(attachable, name);
}
public static StringListAppender<ILoggingEvent> getListAppender(
final AppenderAttachable<ILoggingEvent> logger, final String name) {
return (StringListAppender<ILoggingEvent>) logger.getAppender(name);
}
private TestUtil() {}
}
|
TestUtil
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/ProducerModuleFactoryGeneratorTest.java
|
{
"start": 12193,
"end": 12674
}
|
class ____ {",
" @Produces",
" int produceInt() {",
" return 42;",
" }",
"}");
Source okNonPublicModuleFile =
CompilerTests.javaSource(
"test.OkNonPublicModule",
"package test;",
"",
"import dagger.producers.ProducerModule;",
"import dagger.producers.Produces;",
"",
"@ProducerModule",
"final
|
BadNonPublicModule
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/state/ttl/TtlTimeProvider.java
|
{
"start": 926,
"end": 1043
}
|
interface ____ {
TtlTimeProvider DEFAULT = System::currentTimeMillis;
long currentTimestamp();
}
|
TtlTimeProvider
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/main/java/org/apache/calcite/sql/type/SqlTypeFactoryImpl.java
|
{
"start": 1468,
"end": 1794
}
|
class ____ copied over because of
* FLINK-31350.
*
* <p>FLINK modifications are at lines
*
* <ol>
* <li>Should be removed after fixing CALCITE-6342: Lines 100-102
* <li>Should be removed after fixing CALCITE-6342: Lines 484-496
* <li>Should be removed after fix of FLINK-31350: Lines 563-575.
* </ol>
*/
public
|
was
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/issue_1200/Issue1299.java
|
{
"start": 1127,
"end": 1499
}
|
class ____<T> extends BaseResultDo implements Serializable {
@JSONField(
name = "data"
)
private T data;
public ServiceResult() {
}
public T getData() {
return this.data;
}
public void setData(T data) {
this.data = data;
}
}
public static
|
ServiceResult
|
java
|
resilience4j__resilience4j
|
resilience4j-spring6/src/main/java/io/github/resilience4j/spring6/timelimiter/configure/TimeLimiterAspect.java
|
{
"start": 1694,
"end": 6700
}
|
class ____ implements Ordered, AutoCloseable {
private static final Logger logger = LoggerFactory.getLogger(TimeLimiterAspect.class);
private final TimeLimiterRegistry timeLimiterRegistry;
private final TimeLimiterConfigurationProperties properties;
private final ScheduledExecutorService timeLimiterExecutorService;
@Nullable
private final List<TimeLimiterAspectExt> timeLimiterAspectExtList;
private final FallbackExecutor fallbackExecutor;
private final SpelResolver spelResolver;
public TimeLimiterAspect(TimeLimiterRegistry timeLimiterRegistry,
TimeLimiterConfigurationProperties properties,
@Nullable List<TimeLimiterAspectExt> timeLimiterAspectExtList,
FallbackExecutor fallbackExecutor,
SpelResolver spelResolver,
@Nullable ContextAwareScheduledThreadPoolExecutor contextAwareScheduledThreadPoolExecutor) {
this.timeLimiterRegistry = timeLimiterRegistry;
this.properties = properties;
this.timeLimiterAspectExtList = timeLimiterAspectExtList;
this.fallbackExecutor = fallbackExecutor;
this.spelResolver = spelResolver;
this.timeLimiterExecutorService = contextAwareScheduledThreadPoolExecutor != null ?
contextAwareScheduledThreadPoolExecutor :
Executors.newScheduledThreadPool(Runtime.getRuntime().availableProcessors());
}
@Pointcut(value = "@within(timeLimiter) || @annotation(timeLimiter)", argNames = "timeLimiter")
public void matchAnnotatedClassOrMethod(TimeLimiter timeLimiter) {
// a marker method
}
@Around(value = "matchAnnotatedClassOrMethod(timeLimiterAnnotation)", argNames = "proceedingJoinPoint, timeLimiterAnnotation")
public Object timeLimiterAroundAdvice(ProceedingJoinPoint proceedingJoinPoint,
@Nullable TimeLimiter timeLimiterAnnotation) throws Throwable {
Method method = ((MethodSignature) proceedingJoinPoint.getSignature()).getMethod();
String methodName = method.getDeclaringClass().getName() + "#" + method.getName();
if (timeLimiterAnnotation == null) {
timeLimiterAnnotation = getTimeLimiterAnnotation(proceedingJoinPoint);
}
if(timeLimiterAnnotation == null) {
return proceedingJoinPoint.proceed();
}
String name = spelResolver.resolve(method, proceedingJoinPoint.getArgs(), timeLimiterAnnotation.name());
String configKey = timeLimiterAnnotation.configuration().isEmpty() ? name : timeLimiterAnnotation.configuration();
var timeLimiter = getOrCreateTimeLimiter(methodName, name, configKey);
Class<?> returnType = method.getReturnType();
final CheckedSupplier<Object> timeLimiterExecution = () -> proceed(proceedingJoinPoint, methodName, timeLimiter, returnType);
return fallbackExecutor.execute(proceedingJoinPoint, method, timeLimiterAnnotation.fallbackMethod(), timeLimiterExecution);
}
private Object proceed(ProceedingJoinPoint proceedingJoinPoint, String methodName,
io.github.resilience4j.timelimiter.TimeLimiter timeLimiter, Class<?> returnType)
throws Throwable {
if (timeLimiterAspectExtList != null && !timeLimiterAspectExtList.isEmpty()) {
for (TimeLimiterAspectExt timeLimiterAspectExt : timeLimiterAspectExtList) {
if (timeLimiterAspectExt.canHandleReturnType(returnType)) {
return timeLimiterAspectExt.handle(proceedingJoinPoint, timeLimiter, methodName);
}
}
}
if (!CompletionStage.class.isAssignableFrom(returnType)) {
throw new IllegalReturnTypeException(returnType, methodName,
"CompletionStage expected.");
}
return handleJoinPointCompletableFuture(proceedingJoinPoint, timeLimiter);
}
private io.github.resilience4j.timelimiter.TimeLimiter getOrCreateTimeLimiter(String methodName, String name, String configKey) {
TimeLimiterConfig config = timeLimiterRegistry.getConfiguration(configKey).orElseGet(timeLimiterRegistry::getDefaultConfig);
var timeLimiter = timeLimiterRegistry.timeLimiter(name, config);
if (logger.isDebugEnabled()) {
TimeLimiterConfig timeLimiterConfig = timeLimiter.getTimeLimiterConfig();
logger.debug(
"Created or retrieved time limiter '{}' with timeout duration '{}' and cancelRunningFuture '{}' for method: '{}'",
name, timeLimiterConfig.getTimeoutDuration(), timeLimiterConfig.shouldCancelRunningFuture(), methodName
);
}
return timeLimiter;
}
@Nullable
private static TimeLimiter getTimeLimiterAnnotation(ProceedingJoinPoint proceedingJoinPoint) {
if (proceedingJoinPoint.getTarget() instanceof Proxy) {
logger.debug("The TimeLimiter annotation is kept on a
|
TimeLimiterAspect
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/common/io/stream/CountingFilterInputStream.java
|
{
"start": 612,
"end": 1718
}
|
class ____ extends FilterInputStream {
private int bytesRead = 0;
public CountingFilterInputStream(InputStream in) {
super(in);
}
@Override
public int read() throws IOException {
assert assertInvariant();
final int result = super.read();
if (result != -1) {
bytesRead += 1;
}
return result;
}
// Not overriding read(byte[]) because FilterInputStream delegates to read(byte[], int, int)
@Override
public int read(byte[] b, int off, int len) throws IOException {
assert assertInvariant();
final int n = super.read(b, off, len);
if (n != -1) {
bytesRead += n;
}
return n;
}
@Override
public long skip(long n) throws IOException {
assert assertInvariant();
final long skipped = super.skip(n);
bytesRead += Math.toIntExact(skipped);
return skipped;
}
public int getBytesRead() {
return bytesRead;
}
protected boolean assertInvariant() {
return true;
}
}
|
CountingFilterInputStream
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/orphan/one2one/fk/reversed/bidirectional/DeleteOneToOneOrphansTest.java
|
{
"start": 878,
"end": 3074
}
|
class ____ {
@BeforeEach
public void createData(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
Employee emp = new Employee();
emp.setInfo( new EmployeeInfo( emp ) );
session.persist( emp );
}
);
}
@AfterEach
public void cleanupData(SessionFactoryScope scope) {
scope.getSessionFactory().getSchemaManager().truncate();
}
@Test
public void testOrphanedWhileManaged(SessionFactoryScope scope) {
Employee e = scope.fromTransaction(
session -> {
List results = session.createQuery( "from EmployeeInfo" ).list();
assertEquals( 1, results.size() );
results = session.createQuery( "from Employee" ).list();
assertEquals( 1, results.size() );
Employee emp = (Employee) results.get( 0 );
assertNotNull( emp.getInfo() );
emp.setInfo( null );
return emp;
}
);
scope.inTransaction(
session -> {
Employee emp = session.get( Employee.class, e.getId() );
assertNull( emp.getInfo() );
List results = session.createQuery( "from EmployeeInfo" ).list();
assertEquals( 0, results.size() );
results = session.createQuery( "from Employee" ).list();
assertEquals( 1, results.size() );
}
);
}
@Test
@JiraKey(value = "HHH-6484")
public void testReplacedWhileManaged(SessionFactoryScope scope) {
Employee e = scope.fromTransaction(
session -> {
List results = session.createQuery( "from EmployeeInfo" ).list();
assertEquals( 1, results.size() );
results = session.createQuery( "from Employee" ).list();
assertEquals( 1, results.size() );
Employee emp = (Employee) results.get( 0 );
assertNotNull( emp.getInfo() );
// Replace with a new EmployeeInfo instance
emp.setInfo( new EmployeeInfo( emp ) );
return emp;
}
);
scope.inTransaction(
session -> {
Employee emp = session.get( Employee.class, e.getId() );
assertNotNull( emp.getInfo() );
List results = session.createQuery( "from EmployeeInfo" ).list();
assertEquals( 1, results.size() );
results = session.createQuery( "from Employee" ).list();
assertEquals( 1, results.size() );
}
);
}
}
|
DeleteOneToOneOrphansTest
|
java
|
qos-ch__slf4j
|
slf4j-ext/src/main/java/org/slf4j/instrumentation/LogTransformer.java
|
{
"start": 6790,
"end": 7475
}
|
class ____.");
}
return bytes;
}
} catch (ClassNotFoundException e) {
if (verbose) {
System.err.println("Skipping " + className + " as slf4j is not available to it");
}
return bytes;
}
if (verbose) {
System.err.println("Processing " + className);
}
return doClass(className, clazz, bytes);
} catch (Throwable e) {
System.out.println("e = " + e);
return bytes;
}
}
private String loggerName;
/**
* doClass() process a single
|
loader
|
java
|
google__guice
|
extensions/grapher/src/com/google/inject/grapher/graphviz/GraphvizEdge.java
|
{
"start": 914,
"end": 2829
}
|
class ____ {
private final NodeId headNodeId;
private String headPortId;
private CompassPoint headCompassPoint;
private List<ArrowType> arrowHead = ImmutableList.of(ArrowType.NORMAL);
private final NodeId tailNodeId;
private String tailPortId;
private CompassPoint tailCompassPoint;
private List<ArrowType> arrowTail = ImmutableList.of(ArrowType.NONE);
private EdgeStyle style = EdgeStyle.SOLID;
public GraphvizEdge(NodeId tailNodeId, NodeId headNodeId) {
this.tailNodeId = tailNodeId;
this.headNodeId = headNodeId;
}
/** @since 4.0 */
public NodeId getHeadNodeId() {
return headNodeId;
}
public String getHeadPortId() {
return headPortId;
}
public void setHeadPortId(String headPortId) {
this.headPortId = headPortId;
}
public CompassPoint getHeadCompassPoint() {
return headCompassPoint;
}
public void setHeadCompassPoint(CompassPoint headCompassPoint) {
this.headCompassPoint = headCompassPoint;
}
public List<ArrowType> getArrowHead() {
return arrowHead;
}
public void setArrowHead(List<ArrowType> arrowHead) {
this.arrowHead = ImmutableList.copyOf(arrowHead);
}
/** @since 4.0 */
public NodeId getTailNodeId() {
return tailNodeId;
}
public String getTailPortId() {
return tailPortId;
}
public void setTailPortId(String tailPortId) {
this.tailPortId = tailPortId;
}
public CompassPoint getTailCompassPoint() {
return tailCompassPoint;
}
public void setTailCompassPoint(CompassPoint tailCompassPoint) {
this.tailCompassPoint = tailCompassPoint;
}
public List<ArrowType> getArrowTail() {
return arrowTail;
}
public void setArrowTail(List<ArrowType> arrowTail) {
this.arrowTail = ImmutableList.copyOf(arrowTail);
}
public EdgeStyle getStyle() {
return style;
}
public void setStyle(EdgeStyle style) {
this.style = style;
}
}
|
GraphvizEdge
|
java
|
assertj__assertj-core
|
assertj-core/src/main/java/org/assertj/core/api/AbstractOptionalAssert.java
|
{
"start": 12537,
"end": 22142
}
|
class ____ the value inside the {@link Optional}.
* @return this assertion object.
*/
public SELF containsInstanceOf(Class<?> clazz) {
assertValueIsPresent();
// noinspection OptionalGetWithoutIsPresent
if (!clazz.isInstance(actual.get())) throwAssertionError(shouldContainInstanceOf(actual, clazz));
return myself;
}
/**
* Use given custom comparator instead of relying on actual type A <code>equals</code> method to compare the
* {@link Optional} value's object for incoming assertion checks.
* <p>
* Custom comparator is bound to assertion instance, meaning that if a new assertion is created, it will use default
* comparison strategy.
* <p>
* Examples :
*
* <pre><code class='java'> TolkienCharacter frodo = new TolkienCharacter("Frodo", 33, HOBBIT);
* TolkienCharacter frodoClone = new TolkienCharacter("Frodo", 33, HOBBIT);
*
* // Fail if equals has not been overridden in TolkienCharacter as equals default implementation only compares references
* assertThat(Optional.of(frodo)).contains(frodoClone);
*
* // frodo and frodoClone are equals when doing a field by field comparison.
* assertThat(Optional.of(frodo)).usingValueComparator(new FieldByFieldComparator()).contains(frodoClone);</code></pre>
*
* @param customComparator the comparator to use for incoming assertion checks.
* @throws NullPointerException if the given comparator is {@code null}.
* @return {@code this} assertion object.
*/
@CheckReturnValue
public SELF usingValueComparator(Comparator<? super VALUE> customComparator) {
optionalValueComparisonStrategy = new ComparatorBasedComparisonStrategy(customComparator);
return myself;
}
/**
* Revert to standard comparison for incoming assertion {@link Optional} value checks.
* <p>
* This method should be used to disable a custom comparison strategy set by calling
* {@link #usingValueComparator(Comparator)}.
*
* @return {@code this} assertion object.
*/
@CheckReturnValue
public SELF usingDefaultValueComparator() {
// fall back to default strategy to compare actual with other objects.
optionalValueComparisonStrategy = StandardComparisonStrategy.instance();
return myself;
}
/**
* Verifies that the actual {@link java.util.Optional} contains the instance given as an argument (i.e. it must be the
* same instance).
* <p>
* Assertion will pass :
*
* <pre><code class='java'> String someString = "something";
* assertThat(Optional.of(someString)).containsSame(someString);
*
* // Java will create the same 'Integer' instance when boxing small ints
* assertThat(Optional.of(10)).containsSame(10);</code></pre>
*
* Assertion will fail :
*
* <pre><code class='java'> // not even equal:
* assertThat(Optional.of("something")).containsSame("something else");
* assertThat(Optional.of(20)).containsSame(10);
*
* // equal but not the same:
* assertThat(Optional.of(new String("something"))).containsSame(new String("something"));
* assertThat(Optional.of(new Integer(10))).containsSame(new Integer(10));</code></pre>
*
* @param expectedValue the expected value inside the {@link java.util.Optional}.
* @return this assertion object.
*/
public SELF containsSame(VALUE expectedValue) {
isNotNull();
checkNotNull(expectedValue);
if (actual.isEmpty()) throwAssertionError(shouldContain(expectedValue));
// noinspection OptionalGetWithoutIsPresent
if (actual.get() != expectedValue) throwAssertionError(shouldContainSame(actual, expectedValue));
return myself;
}
/**
* Call {@link Optional#flatMap(Function) flatMap} on the {@code Optional} under test, assertions chained afterward are performed on the {@code Optional} resulting from the flatMap call.
* <p>
* Examples:
* <pre><code class='java'> Function<String, Optional<String>> UPPER_CASE_OPTIONAL_STRING =
* s -> s == null ? Optional.empty() : Optional.of(s.toUpperCase());
*
* // assertions succeed
* assertThat(Optional.of("something")).contains("something")
* .flatMap(UPPER_CASE_OPTIONAL_STRING)
* .contains("SOMETHING");
*
* assertThat(Optional.<String>empty()).flatMap(UPPER_CASE_OPTIONAL_STRING)
* .isEmpty();
*
* assertThat(Optional.<String>ofNullable(null)).flatMap(UPPER_CASE_OPTIONAL_STRING)
* .isEmpty();
*
* // assertion fails
* assertThat(Optional.of("something")).flatMap(UPPER_CASE_OPTIONAL_STRING)
* .contains("something");</code></pre>
*
* @param <U> the type wrapped in the {@link Optional} after the {@link Optional#flatMap(Function) flatMap} operation.
* @param mapper the {@link Function} to use in the {@link Optional#flatMap(Function) flatMap} operation.
* @return a new {@link AbstractOptionalAssert} for assertions chaining on the flatMap of the Optional.
* @throws AssertionError if the actual {@link Optional} is null.
* @since 3.6.0
*/
@CheckReturnValue
public <U> AbstractOptionalAssert<?, U> flatMap(Function<? super VALUE, Optional<U>> mapper) {
isNotNull();
return assertThat(actual.flatMap(mapper));
}
/**
* Call {@link Optional#map(Function) map} on the {@code Optional} under test, assertions chained afterward are performed on the {@code Optional} resulting from the map call.
* <p>
* Examples:
* <pre><code class='java'> // assertions succeed
* assertThat(Optional.<String>empty()).map(String::length)
* .isEmpty();
*
* assertThat(Optional.of("42")).contains("42")
* .map(String::length)
* .contains(2);
*
* // assertion fails
* assertThat(Optional.of("42")).map(String::length)
* .contains(3);</code></pre>
*
* @param <U> the type wrapped in the {@link Optional} after the {@link Optional#map(Function) map} operation.
* @param mapper the {@link Function} to use in the {@link Optional#map(Function) map} operation.
* @return a new {@link AbstractOptionalAssert} for assertions chaining on the map of the Optional.
* @throws AssertionError if the actual {@link Optional} is null.
* @since 3.6.0
*/
@CheckReturnValue
public <U> AbstractOptionalAssert<?, U> map(Function<? super VALUE, ? extends U> mapper) {
isNotNull();
return assertThat(actual.map(mapper));
}
/**
* Verifies that the actual {@link Optional} is not {@code null} and not empty and returns an Object assertion
* that allows chaining (object) assertions on the optional value.
* <p>
* Note that it is only possible to return Object assertions after calling this method due to java generics limitations.
* <p>
* Example:
* <pre><code class='java'> TolkienCharacter frodo = new TolkienCharacter("Frodo", 33, HOBBIT);
* TolkienCharacter sam = new TolkienCharacter("Sam", 38, null);
*
* // assertion succeeds since all frodo's fields are set
* assertThat(Optional.of(frodo)).get().hasNoNullFieldsOrProperties();
*
* // assertion does not succeed because sam does not have its race set
* assertThat(Optional.of(sam)).get().hasNoNullFieldsOrProperties();</code></pre>
*
* @return a new {@link AbstractObjectAssert} for assertions chaining on the value of the Optional.
* @throws AssertionError if the actual {@link Optional} is null.
* @throws AssertionError if the actual {@link Optional} is empty.
* @since 3.9.0
* @see #get(InstanceOfAssertFactory)
*/
@CheckReturnValue
public AbstractObjectAssert<?, VALUE> get() {
return internalGet();
}
/**
* Verifies that the actual {@link Optional} is not {@code null} and not empty and returns a new assertion instance
* to chain assertions on the optional value.
* <p>
* The {@code assertFactory} parameter allows to specify an {@link InstanceOfAssertFactory}, which is used to get the
* assertions narrowed to the factory type.
* <p>
* Wrapping the given {@link InstanceOfAssertFactory} with {@link Assertions#as(InstanceOfAssertFactory)} makes the
* assertion more readable.
* <p>
* Example:
* <pre><code class='java'> // assertion succeeds
* assertThat(Optional.of("frodo")).get(as(InstanceOfAssertFactories.STRING)).startsWith("fro");
*
* // assertion does not succeed because frodo is not an Integer
* assertThat(Optional.of("frodo")).get(as(InstanceOfAssertFactories.INTEGER)).isZero();</code></pre>
*
* @param <ASSERT> the type of the resulting {@code Assert}
* @param assertFactory the factory which verifies the type and creates the new {@code Assert}
* @return a new narrowed {@link Assert} instance for assertions chaining on the value of the Optional
* @throws NullPointerException if the given factory is {@code null}
* @throws AssertionError if the actual {@link Optional} is null
* @throws AssertionError if the actual {@link Optional} is empty
* @since 3.14.0
*/
@CheckReturnValue
public <ASSERT extends AbstractAssert<?, ?>> ASSERT get(InstanceOfAssertFactory<?, ASSERT> assertFactory) {
return internalGet().asInstanceOf(assertFactory);
}
/**
* Enable using a recursive field by field comparison strategy when calling the chained {@link RecursiveComparisonAssert},
* <p>
* Example:
* <pre><code class='java'> public
|
of
|
java
|
micronaut-projects__micronaut-core
|
core/src/main/java/io/micronaut/core/io/service/ServiceScanner.java
|
{
"start": 1492,
"end": 1610
}
|
class ____ the tasks required to find services of a particular type.
*
* @param <S> service type
*/
@Internal
final
|
for
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/bugs/_1124/Issue1124Mapper.java
|
{
"start": 362,
"end": 748
}
|
class ____ {
private Long id;
private Entity entity;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public Entity getEntity() {
return entity;
}
public void setEntity(Entity entity) {
this.entity = entity;
}
}
|
Entity
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java
|
{
"start": 3473,
"end": 34668
}
|
class ____ extends ESTestCase {
private final EsqlParser parser = new EsqlParser();
public void testBooleanLiterals() {
assertEquals(Literal.TRUE, whereExpression("true"));
assertEquals(Literal.FALSE, whereExpression("false"));
assertEquals(Literal.NULL, whereExpression("null"));
}
public void testNumberLiterals() {
assertEquals(l(123, INTEGER), whereExpression("123"));
assertEquals(l(123, INTEGER), whereExpression("+123"));
assertEquals(l(-123, INTEGER), whereExpression("-123"));
assertEquals(l(123.123, DOUBLE), whereExpression("123.123"));
assertEquals(l(123.123, DOUBLE), whereExpression("+123.123"));
assertEquals(l(-123.123, DOUBLE), whereExpression("-123.123"));
assertEquals(l(0.123, DOUBLE), whereExpression(".123"));
assertEquals(l(0.123, DOUBLE), whereExpression("0.123"));
assertEquals(l(0.123, DOUBLE), whereExpression("+0.123"));
assertEquals(l(-0.123, DOUBLE), whereExpression("-0.123"));
assertEquals(l(12345678901L, LONG), whereExpression("12345678901"));
assertEquals(l(12345678901L, LONG), whereExpression("+12345678901"));
assertEquals(l(-12345678901L, LONG), whereExpression("-12345678901"));
assertEquals(l(123e12, DOUBLE), whereExpression("123e12"));
assertEquals(l(123e-12, DOUBLE), whereExpression("123e-12"));
assertEquals(l(123E12, DOUBLE), whereExpression("123E12"));
assertEquals(l(123E-12, DOUBLE), whereExpression("123E-12"));
}
public void testMinusSign() {
assertEquals(l(-123, INTEGER), whereExpression("+(-123)"));
assertEquals(l(-123, INTEGER), whereExpression("+(+(-123))"));
// we could do better here. ES SQL is smarter and accounts for the number of minuses
assertEquals(new Neg(EMPTY, l(-123, INTEGER)), whereExpression("-(-123)"));
}
public void testStringLiterals() {
assertEquals(l("abc", KEYWORD), whereExpression("\"abc\""));
assertEquals(l("123.123", KEYWORD), whereExpression("\"123.123\""));
assertEquals(l("hello\"world", KEYWORD), whereExpression("\"hello\\\"world\""));
assertEquals(l("hello'world", KEYWORD), whereExpression("\"hello'world\""));
assertEquals(l("\"hello\"world\"", KEYWORD), whereExpression("\"\\\"hello\\\"world\\\"\""));
assertEquals(l("\"hello\nworld\"", KEYWORD), whereExpression("\"\\\"hello\\nworld\\\"\""));
assertEquals(l("hello\nworld", KEYWORD), whereExpression("\"hello\\nworld\""));
assertEquals(l("hello\\world", KEYWORD), whereExpression("\"hello\\\\world\""));
assertEquals(l("hello\rworld", KEYWORD), whereExpression("\"hello\\rworld\""));
assertEquals(l("hello\tworld", KEYWORD), whereExpression("\"hello\\tworld\""));
assertEquals(l("C:\\Program Files\\Elastic", KEYWORD), whereExpression("\"C:\\\\Program Files\\\\Elastic\""));
assertEquals(l("C:\\Program Files\\Elastic", KEYWORD), whereExpression("\"\"\"C:\\Program Files\\Elastic\"\"\""));
assertEquals(l("\"\"hello world\"\"", KEYWORD), whereExpression("\"\"\"\"\"hello world\"\"\"\"\""));
assertEquals(l("hello \"\"\" world", KEYWORD), whereExpression("\"hello \\\"\\\"\\\" world\""));
assertEquals(l("hello\\nworld", KEYWORD), whereExpression("\"\"\"hello\\nworld\"\"\""));
assertEquals(l("hello\\tworld", KEYWORD), whereExpression("\"\"\"hello\\tworld\"\"\""));
assertEquals(l("hello world\\", KEYWORD), whereExpression("\"\"\"hello world\\\"\"\""));
assertEquals(l("hello world\\", KEYWORD), whereExpression("\"\"\"hello world\\\"\"\""));
assertEquals(l("\t \n \r \" \\ ", KEYWORD), whereExpression("\"\\t \\n \\r \\\" \\\\ \""));
}
public void testStringLiteralsExceptions() {
assertParsingException(() -> whereExpression("\"\"\"\"\"\"foo\"\""), "line 1:22: mismatched input 'foo' expecting {<EOF>,");
assertParsingException(
() -> whereExpression("\"foo\" == \"\"\"\"\"\"bar\"\"\""),
"line 1:31: mismatched input 'bar' expecting {<EOF>,"
);
assertParsingException(
() -> whereExpression("\"\"\"\"\"\\\"foo\"\"\"\"\"\" != \"\"\"bar\"\"\""),
"line 1:31: mismatched input '\" != \"' expecting {<EOF>,"
);
assertParsingException(
() -> whereExpression("\"\"\"\"\"\\\"foo\"\"\\\"\"\"\" == \"\"\"\"\"\\\"bar\\\"\\\"\"\"\"\"\""),
"line 1:55: token recognition error at: '\"'"
);
assertParsingException(
() -> whereExpression("\"\"\"\"\"\" foo \"\"\"\" == abc"),
"line 1:23: mismatched input 'foo' expecting {<EOF>,"
);
var number = "1" + IntStream.range(0, 309).mapToObj(ignored -> "0").collect(Collectors.joining());
assertParsingException(() -> parse("row foo == " + number), "line 1:12: Number [" + number + "] is too large");
}
public void testBooleanLiteralsCondition() {
Expression expression = whereExpression("true and false");
assertThat(expression, instanceOf(And.class));
And and = (And) expression;
assertThat(and.left(), equalTo(Literal.TRUE));
assertThat(and.right(), equalTo(Literal.FALSE));
}
public void testArithmeticOperationCondition() {
Expression expression = whereExpression("-a-b*c == 123");
assertThat(expression, instanceOf(Equals.class));
Equals eq = (Equals) expression;
assertThat(eq.right(), instanceOf(Literal.class));
assertThat(((Literal) eq.right()).value(), equalTo(123));
assertThat(eq.left(), instanceOf(Sub.class));
Sub sub = (Sub) eq.left();
assertThat(sub.left(), instanceOf(Neg.class));
Neg subLeftNeg = (Neg) sub.left();
assertThat(subLeftNeg.field(), instanceOf(UnresolvedAttribute.class));
assertThat(((UnresolvedAttribute) subLeftNeg.field()).name(), equalTo("a"));
Mul mul = (Mul) sub.right();
assertThat(mul.left(), instanceOf(UnresolvedAttribute.class));
assertThat(mul.right(), instanceOf(UnresolvedAttribute.class));
assertThat(((UnresolvedAttribute) mul.left()).name(), equalTo("b"));
assertThat(((UnresolvedAttribute) mul.right()).name(), equalTo("c"));
}
public void testConjunctionDisjunctionCondition() {
Expression expression = whereExpression("not aaa and b or c");
assertThat(expression, instanceOf(Or.class));
Or or = (Or) expression;
assertThat(or.right(), instanceOf(UnresolvedAttribute.class));
assertThat(((UnresolvedAttribute) or.right()).name(), equalTo("c"));
assertThat(or.left(), instanceOf(And.class));
And and = (And) or.left();
assertThat(and.right(), instanceOf(UnresolvedAttribute.class));
assertThat(((UnresolvedAttribute) and.right()).name(), equalTo("b"));
assertThat(and.left(), instanceOf(Not.class));
Not not = (Not) and.left();
assertThat(not.children().size(), equalTo(1));
assertThat(not.children().get(0), instanceOf(UnresolvedAttribute.class));
assertThat(((UnresolvedAttribute) not.children().get(0)).name(), equalTo("aaa"));
}
public void testParenthesizedExpression() {
Expression expression = whereExpression("((a and ((b and c))) or (((x or y))))");
assertThat(expression, instanceOf(Or.class));
Or or = (Or) expression;
assertThat(or.right(), instanceOf(Or.class));
Or orRight = (Or) or.right();
assertThat(orRight.right(), instanceOf(UnresolvedAttribute.class));
assertThat(((UnresolvedAttribute) orRight.right()).name(), equalTo("y"));
assertThat(orRight.left(), instanceOf(UnresolvedAttribute.class));
assertThat(orRight.left(), instanceOf(UnresolvedAttribute.class));
assertThat(((UnresolvedAttribute) orRight.left()).name(), equalTo("x"));
assertThat(or.left(), instanceOf(And.class));
And and = (And) or.left();
assertThat(and.right(), instanceOf(And.class));
And andRight = (And) and.right();
assertThat(andRight.right(), instanceOf(UnresolvedAttribute.class));
assertThat(((UnresolvedAttribute) andRight.right()).name(), equalTo("c"));
assertThat(andRight.left(), instanceOf(UnresolvedAttribute.class));
assertThat(((UnresolvedAttribute) andRight.left()).name(), equalTo("b"));
assertThat(and.left(), instanceOf(UnresolvedAttribute.class));
assertThat(((UnresolvedAttribute) and.left()).name(), equalTo("a"));
}
public void testCommandNamesAsIdentifiers() {
for (var commandName : List.of("dissect", "drop", "enrich", "eval", "keep", "limit", "sort")) {
Expression expr = whereExpression("from and " + commandName);
assertThat(expr, instanceOf(And.class));
And and = (And) expr;
assertThat(and.left(), instanceOf(UnresolvedAttribute.class));
assertThat(((UnresolvedAttribute) and.left()).name(), equalTo("from"));
assertThat(and.right(), instanceOf(UnresolvedAttribute.class));
assertThat(((UnresolvedAttribute) and.right()).name(), equalTo(commandName));
}
}
public void testIdentifiersCaseSensitive() {
Expression expr = whereExpression("hElLo");
assertThat(expr, instanceOf(UnresolvedAttribute.class));
assertThat(((UnresolvedAttribute) expr).name(), equalTo("hElLo"));
}
/*
* a > 1 and b > 1 + 2 => (a > 1) and (b > (1 + 2))
*/
public void testOperatorsPrecedenceWithConjunction() {
Expression expression = whereExpression("a > 1 and b > 1 + 2");
assertThat(expression, instanceOf(And.class));
And and = (And) expression;
assertThat(and.left(), instanceOf(GreaterThan.class));
GreaterThan gt = (GreaterThan) and.left();
assertThat(gt.left(), instanceOf(UnresolvedAttribute.class));
assertThat(((UnresolvedAttribute) gt.left()).name(), equalTo("a"));
assertThat(gt.right(), instanceOf(Literal.class));
assertThat(((Literal) gt.right()).value(), equalTo(1));
assertThat(and.right(), instanceOf(GreaterThan.class));
gt = (GreaterThan) and.right();
assertThat(gt.left(), instanceOf(UnresolvedAttribute.class));
assertThat(((UnresolvedAttribute) gt.left()).name(), equalTo("b"));
assertThat(gt.right(), instanceOf(Add.class));
Add add = (Add) gt.right();
assertThat(((Literal) add.right()).value(), equalTo(2));
assertThat(((Literal) add.left()).value(), equalTo(1));
}
/*
* a <= 1 or b >= 5 / 2 and c != 5 => (a <= 1) or (b >= (5 / 2) and not(c == 5))
*/
public void testOperatorsPrecedenceWithDisjunction() {
Expression expression = whereExpression("a <= 1 or b >= 5 / 2 and c != 5");
assertThat(expression, instanceOf(Or.class));
Or or = (Or) expression;
assertThat(or.left(), instanceOf(LessThanOrEqual.class));
LessThanOrEqual lte = (LessThanOrEqual) or.left();
assertThat(lte.left(), instanceOf(UnresolvedAttribute.class));
assertThat(((UnresolvedAttribute) lte.left()).name(), equalTo("a"));
assertThat(lte.right(), instanceOf(Literal.class));
assertThat(((Literal) lte.right()).value(), equalTo(1));
assertThat(or.right(), instanceOf(And.class));
And and = (And) or.right();
assertThat(and.left(), instanceOf(GreaterThanOrEqual.class));
GreaterThanOrEqual gte = (GreaterThanOrEqual) and.left();
assertThat(gte.left(), instanceOf(UnresolvedAttribute.class));
assertThat(((UnresolvedAttribute) gte.left()).name(), equalTo("b"));
assertThat(gte.right(), instanceOf(Div.class));
Div div = (Div) gte.right();
assertThat(div.right(), instanceOf(Literal.class));
assertThat(((Literal) div.right()).value(), equalTo(2));
assertThat(div.left(), instanceOf(Literal.class));
assertThat(((Literal) div.left()).value(), equalTo(5));
assertThat(and.right(), instanceOf(Not.class));
assertThat(((Not) and.right()).field(), instanceOf(Equals.class));
Equals e = (Equals) ((Not) and.right()).field();
assertThat(e.left(), instanceOf(UnresolvedAttribute.class));
assertThat(((UnresolvedAttribute) e.left()).name(), equalTo("c"));
assertThat(e.right(), instanceOf(Literal.class));
assertThat(((Literal) e.right()).value(), equalTo(5));
}
/*
* not a == 1 or not b >= 5 and c == 5 => (not (a == 1)) or ((not (b >= 5)) and c == 5)
*/
/**
 * Verifies that NOT binds tighter than AND/OR but looser than comparisons:
 * "not a == 1 or not b >= 5 and c == 5" parses as
 * "(not (a == 1)) or ((not (b >= 5)) and (c == 5))".
 */
public void testOperatorsPrecedenceWithNegation() {
Expression expression = whereExpression("not a == 1 or not b >= 5 and c == 5");
// OR is still the root since it binds weakest.
assertThat(expression, instanceOf(Or.class));
Or or = (Or) expression;
// Left branch: NOT wraps the whole comparison, i.e. not(a == 1).
assertThat(or.left(), instanceOf(Not.class));
assertThat(((Not) or.left()).field(), instanceOf(Equals.class));
Equals e = (Equals) ((Not) or.left()).field();
assertThat(e.left(), instanceOf(UnresolvedAttribute.class));
assertThat(((UnresolvedAttribute) e.left()).name(), equalTo("a"));
assertThat(e.right(), instanceOf(Literal.class));
assertThat(((Literal) e.right()).value(), equalTo(1));
// Right branch: AND of a negated comparison and a plain equality.
assertThat(or.right(), instanceOf(And.class));
And and = (And) or.right();
assertThat(and.left(), instanceOf(Not.class));
assertThat(((Not) and.left()).field(), instanceOf(GreaterThanOrEqual.class));
GreaterThanOrEqual gte = (GreaterThanOrEqual) ((Not) and.left()).field();
assertThat(gte.right(), instanceOf(Literal.class));
assertThat(((Literal) gte.right()).value(), equalTo(5));
// c == 5 is NOT negated, so it stays a bare Equals.
assertThat(and.right(), instanceOf(Equals.class));
e = (Equals) and.right();
assertThat(e.left(), instanceOf(UnresolvedAttribute.class));
assertThat(((UnresolvedAttribute) e.left()).name(), equalTo("c"));
assertThat(e.right(), instanceOf(Literal.class));
assertThat(((Literal) e.right()).value(), equalTo(5));
}
/**
 * Cross-checks precedence by parsing each expression twice — once without
 * parentheses and once fully parenthesized according to the expected precedence —
 * and asserting both forms produce equal trees (ignoring node ids).
 * Covers arithmetic vs comparison vs boolean operators, and temporal quantities
 * in both lower- and upper-case unit spellings.
 */
public void testOperatorsPrecedenceExpressionsEquality() {
assertThat(whereExpression("a-1>2 or b>=5 and c-1>=5"), equalToIgnoringIds(whereExpression("((a-1)>2 or (b>=5 and (c-1)>=5))")));
assertThat(
whereExpression("a*5==25 and b>5 and c%4>=1 or true or false"),
equalToIgnoringIds(whereExpression("(((((a*5)==25) and (b>5) and ((c%4)>=1)) or true) or false)"))
);
assertThat(
whereExpression("a*4-b*5<100 and b/2+c*6>=50 or c%5+x>=5"),
equalToIgnoringIds(whereExpression("((((a*4)-(b*5))<100) and (((b/2)+(c*6))>=50)) or (((c%5)+x)>=5)"))
);
assertThat(
whereExpression("true and false or true and c/12+x*5-y%2>=50"),
equalToIgnoringIds(whereExpression("((true and false) or (true and (((c/12)+(x*5)-(y%2))>=50)))"))
);
// Temporal quantities: the unit binds directly to the preceding number.
assertThat(
whereExpression("10 days > 5 hours and 1/5 minutes > 8 seconds * 3 and -1 minutes > foo"),
equalToIgnoringIds(whereExpression("((10 days) > (5 hours)) and ((1/(5 minutes) > ((8 seconds) * 3))) and (-1 minute > foo)"))
);
// Unit keywords are case-insensitive.
assertThat(
whereExpression("10 DAYS > 5 HOURS and 1/5 MINUTES > 8 SECONDS * 3 and -1 MINUTES > foo"),
equalToIgnoringIds(whereExpression("((10 days) > (5 hours)) and ((1/(5 minutes) > ((8 seconds) * 3))) and (-1 minute > foo)"))
);
}
/**
 * Verifies parsing of function calls: zero-argument calls, calls with attribute and
 * arithmetic arguments, redundant parentheses around arguments, and functions used
 * as operands of arithmetic operators.
 */
public void testFunctionExpressions() {
// Zero-arg call resolves to an UnresolvedFunction with an empty argument list.
assertEquals(new UnresolvedFunction(EMPTY, "fn", DEFAULT, new ArrayList<>()), whereExpression("fn()"));
assertEqualsIgnoringIds(
new UnresolvedFunction(
EMPTY,
"invoke",
DEFAULT,
new ArrayList<>(
List.of(
new UnresolvedAttribute(EMPTY, "a"),
new Add(EMPTY, new UnresolvedAttribute(EMPTY, "b"), new UnresolvedAttribute(EMPTY, "c"))
)
)
),
whereExpression("invoke(a, b + c)")
);
// Extra parentheses around the call and its argument do not change the tree.
assertEqualsIgnoringIds(whereExpression("(invoke((a + b)))"), whereExpression("invoke(a+b)"));
// Function calls compose as operands and as arguments of other calls.
assertEqualsIgnoringIds(whereExpression("((fn()) + fn(fn()))"), whereExpression("fn() + fn(fn())"));
}
/**
 * Checks that valid unquoted identifiers (letters, digits, underscores, and a
 * leading {@code @} or {@code _}) parse to an UnresolvedAttribute of the same name.
 */
public void testUnquotedIdentifiers() {
    String[] identifiers = { "a", "_a", "a_b", "a9", "abc123", "a_____9", "__a_b", "@a", "_1", "@2" };
    for (String id : identifiers) {
        assertEqualsIgnoringIds(new UnresolvedAttribute(EMPTY, id), whereExpression(id));
    }
}
/**
 * Verifies duration literals (TIME_DURATION type) for milliseconds, seconds,
 * minutes and hours: zero values, singular/plural unit names, abbreviated units
 * (sec/s, min/m, h), values with no space before the unit, and negated values.
 */
public void testDurationLiterals() {
int value = randomInt(Integer.MAX_VALUE);
assertEquals(l(Duration.ZERO, TIME_DURATION), whereExpression("0 millisecond"));
// No whitespace between the number and the unit is accepted.
assertEquals(l(Duration.ofMillis(value), TIME_DURATION), whereExpression(value + "millisecond"));
assertEquals(l(Duration.ofMillis(value), TIME_DURATION), whereExpression(value + " milliseconds"));
assertEquals(l(Duration.ZERO, TIME_DURATION), whereExpression("0 second"));
assertEquals(l(Duration.ofSeconds(value), TIME_DURATION), whereExpression(value + "second"));
assertEquals(l(Duration.ofSeconds(value), TIME_DURATION), whereExpression(value + " seconds"));
assertEquals(l(Duration.ofSeconds(value), TIME_DURATION), whereExpression(value + " sec"));
assertEquals(l(Duration.ofSeconds(value), TIME_DURATION), whereExpression(value + " s"));
assertEquals(l(Duration.ZERO, TIME_DURATION), whereExpression("0 minute"));
assertEquals(l(Duration.ofMinutes(value), TIME_DURATION), whereExpression(value + "minute"));
assertEquals(l(Duration.ofMinutes(value), TIME_DURATION), whereExpression(value + " minutes"));
assertEquals(l(Duration.ofMinutes(value), TIME_DURATION), whereExpression(value + " min"));
assertEquals(l(Duration.ofMinutes(value), TIME_DURATION), whereExpression(value + " m"));
assertEquals(l(Duration.ZERO, TIME_DURATION), whereExpression("0 hour"));
assertEquals(l(Duration.ofHours(value), TIME_DURATION), whereExpression(value + "hour"));
assertEquals(l(Duration.ofHours(value), TIME_DURATION), whereExpression(value + " hours"));
assertEquals(l(Duration.ofHours(value), TIME_DURATION), whereExpression(value + " h"));
// A leading minus sign negates the duration.
assertEquals(l(Duration.ofHours(-value), TIME_DURATION), whereExpression("-" + value + " hours"));
}
/**
 * Verifies date-period literals (DATE_PERIOD type) for days, weeks, months,
 * quarters and years. Weeks are normalized to days (x7) and quarters to months
 * (x3), so their random inputs are capped to avoid int overflow during scaling.
 */
public void testDatePeriodLiterals() {
int value = randomInt(Integer.MAX_VALUE);
// Capped so that weeksValue * 7 and quartersValue * 3 stay within int range.
int weeksValue = randomInt(Integer.MAX_VALUE / 7);
int quartersValue = randomInt(Integer.MAX_VALUE / 3);
assertEquals(l(Period.ZERO, DATE_PERIOD), whereExpression("0 day"));
assertEquals(l(Period.ofDays(value), DATE_PERIOD), whereExpression(value + "day"));
assertEquals(l(Period.ofDays(value), DATE_PERIOD), whereExpression(value + " days"));
assertEquals(l(Period.ofDays(value), DATE_PERIOD), whereExpression(value + " d"));
assertEquals(l(Period.ZERO, DATE_PERIOD), whereExpression("0week"));
// Weeks are stored as days.
assertEquals(l(Period.ofDays(weeksValue * 7), DATE_PERIOD), whereExpression(weeksValue + "week"));
assertEquals(l(Period.ofDays(weeksValue * 7), DATE_PERIOD), whereExpression(weeksValue + " weeks"));
assertEquals(l(Period.ofDays(weeksValue * 7), DATE_PERIOD), whereExpression(weeksValue + " w"));
assertEquals(l(Period.ZERO, DATE_PERIOD), whereExpression("0 month"));
assertEquals(l(Period.ofMonths(value), DATE_PERIOD), whereExpression(value + "month"));
assertEquals(l(Period.ofMonths(value), DATE_PERIOD), whereExpression(value + " months"));
assertEquals(l(Period.ofMonths(value), DATE_PERIOD), whereExpression(value + " mo"));
assertEquals(l(Period.ZERO, DATE_PERIOD), whereExpression("0 quarter"));
// Quarters are stored as months.
assertEquals(l(Period.ofMonths(Math.multiplyExact(quartersValue, 3)), DATE_PERIOD), whereExpression(quartersValue + " quarter"));
assertEquals(l(Period.ofMonths(Math.multiplyExact(quartersValue, 3)), DATE_PERIOD), whereExpression(quartersValue + " quarters"));
assertEquals(l(Period.ofMonths(Math.multiplyExact(quartersValue, 3)), DATE_PERIOD), whereExpression(quartersValue + " q"));
assertEquals(l(Period.ZERO, DATE_PERIOD), whereExpression("0year"));
assertEquals(l(Period.ofYears(value), DATE_PERIOD), whereExpression(value + "year"));
assertEquals(l(Period.ofYears(value), DATE_PERIOD), whereExpression(value + " years"));
assertEquals(l(Period.ofYears(value), DATE_PERIOD), whereExpression(value + " yr"));
assertEquals(l(Period.ofYears(value), DATE_PERIOD), whereExpression(value + " y"));
// A leading minus sign negates the period.
assertEquals(l(Period.ofYears(-value), DATE_PERIOD), whereExpression("-" + value + " years"));
}
/** An unrecognized temporal unit after a number must be rejected with a clear error. */
public void testUnknownNumericQualifier() {
    ThrowingRunnable parseDecade = () -> whereExpression("1 decade");
    assertParsingException(parseDecade, "Unexpected temporal unit: 'decade'");
}
/** Temporal units only attach to integer literals; a decimal literal must fail to parse. */
public void testQualifiedDecimalLiteral() {
    ThrowingRunnable parseDecimalDuration = () -> whereExpression("1.1 hours");
    assertParsingException(parseDecimalDuration, "extraneous input 'hours' expecting <EOF>");
}
/**
 * Duration values that overflow when converted to their base unit must be rejected
 * with a range error. Covers values past Long.MAX_VALUE / unsigned long max for every
 * unit, plus unit-specific caps for minutes (x60) and hours (x3600).
 */
public void testOverflowingValueForDuration() {
for (String unit : List.of("milliseconds", "seconds", "minutes", "hours")) {
assertParsingException(
() -> parse("row x = 9223372036854775808 " + unit), // unsigned_long (Long.MAX_VALUE + 1)
"line 1:9: Number [9223372036854775808] outside of [" + unit + "] range"
);
assertParsingException(
() -> parse("row x = 18446744073709551616 " + unit), // double (UNSIGNED_LONG_MAX + 1)
"line 1:9: Number [18446744073709551616] outside of [" + unit + "] range"
);
}
// Values that fit in a long but overflow once scaled to seconds.
assertParsingException(
() -> parse("row x = 153722867280912931 minutes"), // Long.MAX_VALUE / 60 + 1
"line 1:9: Number [153722867280912931] outside of [minutes] range"
);
assertParsingException(
() -> parse("row x = 2562047788015216 hours"), // Long.MAX_VALUE / 3600 + 1
"line 1:9: Number [2562047788015216] outside of [hours] range"
);
}
/**
 * Date-period values that overflow an int (or overflow when weeks are scaled to days)
 * must be rejected with a range error.
 */
public void testOverflowingValueForPeriod() {
for (String unit : List.of("days", "weeks", "months", "years")) {
assertParsingException(
() -> parse("row x = 2147483648 " + unit), // long (Integer.MAX_VALUE + 1)
"line 1:9: Number [2147483648] outside of [" + unit + "] range"
);
}
// Fits in an int but overflows once weeks are multiplied by 7.
assertParsingException(
() -> parse("row x = 306783379 weeks"), // Integer.MAX_VALUE / 7 + 1
"line 1:9: Number [306783379] outside of [weeks] range"
);
}
/**
 * KEEP projections containing a {@code *} wildcard anywhere in the name (leading,
 * trailing, interior, across dotted segments) must parse to a single
 * UnresolvedNamePattern preserving the pattern text verbatim.
 */
public void testWildcardProjectKeepPatterns() {
String[] exp = new String[] {
"a*",
"*a",
"a.*",
"a.a.*.*.a",
"*.a.a.a.*",
"*abc.*",
"a*b*c",
"*a*",
"*a*b",
"a*b*",
"*a*b*c*",
"a*b*c*",
"*a*b*c",
"a*b*c*a.b*",
"a*b*c*a.b.*",
"*a.b.c*b*c*a.b.*" };
List<?> projections;
Project p;
for (String e : exp) {
p = projectExpression(e);
projections = p.projections();
assertThat(projections.size(), equalTo(1));
assertThat("Projection [" + e + "] has an unexpected type", projections.get(0), instanceOf(UnresolvedNamePattern.class));
UnresolvedNamePattern ua = (UnresolvedNamePattern) projections.get(0);
// The original pattern text is preserved in both the name and the message.
assertThat(ua.name(), equalTo(e));
assertThat(ua.unresolvedMessage(), equalTo("Unresolved pattern [" + e + "]"));
}
}
/** A bare {@code *} in KEEP parses to a single unqualified UnresolvedStar. */
public void testWildcardProjectKeep() {
    List<?> projections = projectExpression("*").projections();
    assertThat(projections.size(), equalTo(1));
    Object only = projections.get(0);
    assertThat(only, instanceOf(UnresolvedStar.class));
    UnresolvedStar star = (UnresolvedStar) only;
    assertThat(star.qualifier(), equalTo(null));
    assertThat(star.unresolvedMessage(), equalTo("Cannot determine columns for [*]"));
}
/**
 * DROP removals containing a {@code *} wildcard must parse to a single
 * UnresolvedNamePattern preserving the pattern text verbatim — mirroring
 * {@code testWildcardProjectKeepPatterns} but for the DROP command.
 */
public void testWildcardProjectAwayPatterns() {
String[] exp = new String[] {
"a*",
"*a",
"a.*",
"a.a.*.*.a",
"*.a.a.a.*",
"*abc.*",
"a*b*c",
"*a*",
"*a*b",
"a*b*",
"*a*b*c*",
"a*b*c*",
"*a*b*c",
"a*b*c*a.b*",
"a*b*c*a.b.*",
"*a.b.c*b*c*a.b.*" };
List<?> removals;
for (String e : exp) {
Drop d = dropExpression(e);
removals = d.removals();
assertThat(removals.size(), equalTo(1));
assertThat("Projection [" + e + "] has an unexpected type", removals.get(0), instanceOf(UnresolvedNamePattern.class));
UnresolvedNamePattern ursa = (UnresolvedNamePattern) removals.get(0);
// The original pattern text is preserved in both the name and the message.
assertThat(ursa.name(), equalTo(e));
assertThat(ursa.unresolvedMessage(), equalTo("Unresolved pattern [" + e + "]"));
}
}
/** DROP must reject a bare {@code *} since it would remove every field. */
public void testForbidWildcardProjectAway() {
    ThrowingRunnable dropEverything = () -> dropExpression("foo, *");
    assertParsingException(dropEverything, "line 1:20: Removing all fields is not allowed [*]");
}
/** KEEP must reject projection lists that mention {@code *} more than once. */
public void testForbidMultipleIncludeStar() {
    var errorMsg = "Cannot specify [*] more than once";
    for (String projections : new String[] { "a, *, *, b", "a, *, b, *, c", "a, b, *, c, d, *" }) {
        assertParsingException(() -> projectExpression(projections), errorMsg);
    }
}
/** Wildcard-free KEEP names (including dotted ones) parse to plain UnresolvedAttributes. */
public void testProjectKeepPatterns() {
    for (String name : new String[] { "abc", "abc.xyz", "a.b.c.d.e" }) {
        List<?> projections = projectExpression(name).projections();
        assertThat(projections.size(), equalTo(1));
        Object only = projections.get(0);
        assertThat(only, instanceOf(UnresolvedAttribute.class));
        assertThat(((UnresolvedAttribute) only).name(), equalTo(name));
    }
}
/** Wildcard-free DROP names (including dotted ones) parse to plain UnresolvedAttributes. */
public void testProjectAwayPatterns() {
    for (String name : new String[] { "abc", "abc.xyz", "a.b.c.d.e" }) {
        List<?> removals = dropExpression(name).removals();
        assertThat(removals.size(), equalTo(1));
        Object only = removals.get(0);
        assertThat(only, instanceOf(UnresolvedAttribute.class));
        assertThat(((UnresolvedAttribute) only).name(), equalTo(name));
    }
}
/**
 * RENAME supports two equivalent syntaxes — "old AS new" and "new = old" — and this
 * test randomly picks one per pair. Each renaming parses to an Alias whose name is
 * the new name and whose child is an UnresolvedAttribute for the old name.
 */
public void testProjectRename() {
String[] newName = new String[] { "a", "a.b", "a", "x.y" };
String[] oldName = new String[] { "b", "a.c", "x.y", "a" };
List<?> renamings;
for (int i = 0; i < newName.length; i++) {
// Randomly exercise either the AS syntax or the assignment syntax.
Rename r = renameExpression(randomBoolean() ? (oldName[i] + " AS " + newName[i]) : (newName[i] + " = " + oldName[i]));
renamings = r.renamings();
assertThat(renamings.size(), equalTo(1));
assertThat(renamings.get(0), instanceOf(Alias.class));
Alias a = (Alias) renamings.get(0);
assertThat(a.child(), instanceOf(UnresolvedAttribute.class));
UnresolvedAttribute ua = (UnresolvedAttribute) a.child();
assertThat(a.name(), equalTo(newName[i]));
assertThat(ua.name(), equalTo(oldName[i]));
}
}
/**
 * A KEEP mixing plain names, a wildcard pattern, and a bare star keeps each item's
 * distinct representation and order: attribute, name pattern, attribute, star.
 */
public void testMultipleProjectPatterns() {
LogicalPlan plan = parse("from a | rename y as x | keep abc, xyz*, x, *");
Project p = as(plan, Project.class);
List<?> projections = p.projections();
assertThat(projections.size(), equalTo(4));
assertThat(projections.get(0), instanceOf(UnresolvedAttribute.class));
assertThat(((UnresolvedAttribute) projections.get(0)).name(), equalTo("abc"));
// The wildcard item becomes a name pattern, not a plain attribute.
assertThat(projections.get(1), instanceOf(UnresolvedNamePattern.class));
assertThat(((UnresolvedNamePattern) projections.get(1)).name(), equalTo("xyz*"));
assertThat(projections.get(2), instanceOf(UnresolvedAttribute.class));
assertThat(projections.get(3), instanceOf(UnresolvedStar.class));
}
/** RENAME must reject wildcards in either the AS or the assignment syntax. */
public void testForbidWildcardProjectRename() {
    ThrowingRunnable asSyntax = () -> renameExpression("b* AS a*");
    assertParsingException(asSyntax, "line 1:17: Using wildcards [*] in RENAME is not allowed [b* AS a*]");
    ThrowingRunnable assignSyntax = () -> renameExpression("a* = b*");
    assertParsingException(assignSyntax, "line 1:17: Using wildcards [*] in RENAME is not allowed [a* = b*]");
}
/**
 * An IN with a single-element list is simplified by the parser to a plain equality,
 * and NOT IN to a negated equality, regardless of which side holds the attribute.
 */
public void testSimplifyInWithSingleElementList() {
// Attribute on the left: a IN (1) becomes a == 1.
Expression e = whereExpression("a IN (1)");
assertThat(e, instanceOf(Equals.class));
Equals eq = (Equals) e;
assertThat(eq.left(), instanceOf(UnresolvedAttribute.class));
assertThat(((UnresolvedAttribute) eq.left()).name(), equalTo("a"));
assertThat(as(eq.right(), Literal.class).value(), equalTo(1));
// Literal on the left: 1 IN (a) becomes 1 == a.
e = whereExpression("1 IN (a)");
assertThat(e, instanceOf(Equals.class));
eq = (Equals) e;
assertThat(eq.right(), instanceOf(UnresolvedAttribute.class));
assertThat(((UnresolvedAttribute) eq.right()).name(), equalTo("a"));
assertThat(eq.left().fold(FoldContext.small()), equalTo(1));
// NOT IN with a single element becomes Not(Equals).
e = whereExpression("1 NOT IN (a)");
assertThat(e, instanceOf(Not.class));
e = e.children().get(0);
assertThat(e, instanceOf(Equals.class));
eq = (Equals) e;
assertThat(eq.right(), instanceOf(UnresolvedAttribute.class));
assertThat(((UnresolvedAttribute) eq.right()).name(), equalTo("a"));
assertThat(eq.left().fold(FoldContext.small()), equalTo(1));
}
/** Parses the given expression inside a minimal WHERE query and returns its condition. */
private Expression whereExpression(String e) {
    Filter filter = (Filter) parse("from a | where " + e);
    return filter.condition();
}
/** Parses the given removal list inside a minimal DROP query. */
private Drop dropExpression(String e) {
    LogicalPlan plan = parse("from a | drop " + e);
    return (Drop) plan;
}
/** Parses the given renaming list inside a minimal RENAME query. */
private Rename renameExpression(String e) {
    LogicalPlan plan = parse("from a | rename " + e);
    return (Rename) plan;
}
/** Parses the given projection list inside a minimal KEEP query. */
private Project projectExpression(String e) {
    LogicalPlan plan = parse("from a | keep " + e);
    return (Project) plan;
}
/** Parses a full ESQL statement through the shared parser. */
private LogicalPlan parse(String query) {
    return parser.createStatement(query);
}
/**
 * Builds the Literal a parsed constant is expected to equal: String values for
 * string-typed (TEXT/KEYWORD) literals are converted to BytesRef, matching the
 * parser's internal representation.
 */
private Literal l(Object value, DataType type) {
if (value instanceof String && (type == TEXT || type == KEYWORD)) {
value = BytesRefs.toBytesRef(value);
}
// NOTE(review): source is passed as null rather than an EMPTY source — presumably
// acceptable because the equality assertions ignore source; confirm against Literal.
return new Literal(null, value, type);
}
/** Asserts that running {@code expression} throws a ParsingException containing {@code expectedError}. */
private void assertParsingException(ThrowingRunnable expression, String expectedError) {
    ParsingException thrown = expectThrows(ParsingException.class, "Expected syntax error", expression);
    String message = thrown.getMessage();
    assertThat(message, containsString(expectedError));
}
}
|
ExpressionTests
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/proxy/jdbc/CallableStatementProxy.java
|
{
"start": 747,
"end": 872
}
|
interface ____ extends CallableStatement, PreparedStatementProxy {
CallableStatement getRawObject();
}
|
CallableStatementProxy
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/aot/RuntimeHintsBeanFactoryInitializationAotProcessorTests.java
|
{
"start": 6481,
"end": 6671
}
|
class ____ {
@Bean
@ImportRuntimeHints(SampleRuntimeHintsRegistrar.class)
SampleBean sampleBean() {
return new SampleBean();
}
}
public static
|
ConfigurationWithBeanDeclaringHints
|
java
|
spring-projects__spring-framework
|
spring-jdbc/src/main/java/org/springframework/jdbc/core/support/JdbcDaoSupport.java
|
{
"start": 1969,
"end": 5407
}
|
class ____ extends DaoSupport {
private @Nullable JdbcTemplate jdbcTemplate;
/**
* Set the JDBC DataSource to be used by this DAO.
*/
public final void setDataSource(DataSource dataSource) {
if (this.jdbcTemplate == null || dataSource != this.jdbcTemplate.getDataSource()) {
this.jdbcTemplate = createJdbcTemplate(dataSource);
initTemplateConfig();
}
}
/**
* Create a JdbcTemplate for the given DataSource.
* Only invoked if populating the DAO with a DataSource reference!
* <p>Can be overridden in subclasses to provide a JdbcTemplate instance
* with different configuration, or a custom JdbcTemplate subclass.
* @param dataSource the JDBC DataSource to create a JdbcTemplate for
* @return the new JdbcTemplate instance
* @see #setDataSource
*/
protected JdbcTemplate createJdbcTemplate(DataSource dataSource) {
return new JdbcTemplate(dataSource);
}
/**
* Return the JDBC DataSource used by this DAO.
*/
public final @Nullable DataSource getDataSource() {
return (this.jdbcTemplate != null ? this.jdbcTemplate.getDataSource() : null);
}
/**
* Set the JdbcTemplate for this DAO explicitly,
* as an alternative to specifying a DataSource.
*/
public final void setJdbcTemplate(@Nullable JdbcTemplate jdbcTemplate) {
this.jdbcTemplate = jdbcTemplate;
initTemplateConfig();
}
/**
* Return the JdbcTemplate for this DAO,
* pre-initialized with the DataSource or set explicitly.
*/
public final @Nullable JdbcTemplate getJdbcTemplate() {
return this.jdbcTemplate;
}
/**
* Initialize the template-based configuration of this DAO.
* Called after a new JdbcTemplate has been set, either directly
* or through a DataSource.
* <p>This implementation is empty. Subclasses may override this
* to configure further objects based on the JdbcTemplate.
* @see #getJdbcTemplate()
*/
protected void initTemplateConfig() {
}
@Override
protected void checkDaoConfig() {
if (this.jdbcTemplate == null) {
throw new IllegalArgumentException("'dataSource' or 'jdbcTemplate' is required");
}
}
/**
* Return the SQLExceptionTranslator of this DAO's JdbcTemplate,
* for translating SQLExceptions in custom JDBC access code.
* @see org.springframework.jdbc.core.JdbcTemplate#getExceptionTranslator()
*/
protected final SQLExceptionTranslator getExceptionTranslator() {
JdbcTemplate jdbcTemplate = getJdbcTemplate();
Assert.state(jdbcTemplate != null, "No JdbcTemplate set");
return jdbcTemplate.getExceptionTranslator();
}
/**
* Get a JDBC Connection, either from the current transaction or a new one.
* @return the JDBC Connection
* @throws CannotGetJdbcConnectionException if the attempt to get a Connection failed
* @see org.springframework.jdbc.datasource.DataSourceUtils#getConnection(javax.sql.DataSource)
*/
protected final Connection getConnection() throws CannotGetJdbcConnectionException {
DataSource dataSource = getDataSource();
Assert.state(dataSource != null, "No DataSource set");
return DataSourceUtils.getConnection(dataSource);
}
/**
* Close the given JDBC Connection, created via this DAO's DataSource,
* if it isn't bound to the thread.
* @param con the Connection to close
* @see org.springframework.jdbc.datasource.DataSourceUtils#releaseConnection
*/
protected final void releaseConnection(Connection con) {
DataSourceUtils.releaseConnection(con, getDataSource());
}
}
|
JdbcDaoSupport
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/ServletAnnotationControllerHandlerMethodTests.java
|
{
"start": 108736,
"end": 109491
}
|
class ____<T> {
@RequestMapping("/myPath2.do")
public void myHandle(@RequestParam("param1") T p1, int param2, @RequestHeader Integer header1,
@CookieValue int cookie1, HttpServletResponse response) throws IOException {
response.getWriter().write("test-" + p1 + "-" + param2 + "-" + header1 + "-" + cookie1);
}
@InitBinder
public void initBinder(@RequestParam("param1") String p1,
@RequestParam(value="paramX", required=false) String px, int param2) {
assertThat(px).isNull();
}
@ModelAttribute
public void modelAttribute(@RequestParam("param1") String p1,
@RequestParam(value="paramX", required=false) String px, int param2) {
assertThat(px).isNull();
}
}
@RequestMapping("/*.do")
static
|
MyAdaptedControllerBase
|
java
|
spring-projects__spring-framework
|
spring-context/src/main/java/org/springframework/scheduling/concurrent/SimpleAsyncTaskScheduler.java
|
{
"start": 5168,
"end": 13899
}
|
class ____ extends SimpleAsyncTaskExecutor implements TaskScheduler,
ApplicationContextAware, SmartLifecycle, ApplicationListener<ContextClosedEvent> {
/**
* The default phase for an executor {@link SmartLifecycle}: {@code Integer.MAX_VALUE / 2}.
* @since 6.2
* @see #getPhase()
* @see ExecutorConfigurationSupport#DEFAULT_PHASE
*/
public static final int DEFAULT_PHASE = ExecutorConfigurationSupport.DEFAULT_PHASE;
private static final TimeUnit NANO = TimeUnit.NANOSECONDS;
private final ScheduledExecutorService triggerExecutor = createScheduledExecutor();
private final ExecutorLifecycleDelegate triggerLifecycle = new ExecutorLifecycleDelegate(this.triggerExecutor);
private final ScheduledExecutorService fixedDelayExecutor = createFixedDelayExecutor();
private final ExecutorLifecycleDelegate fixedDelayLifecycle = new ExecutorLifecycleDelegate(this.fixedDelayExecutor);
private @Nullable ErrorHandler errorHandler;
private Clock clock = Clock.systemDefaultZone();
private int phase = DEFAULT_PHASE;
private @Nullable Executor targetTaskExecutor;
private @Nullable ApplicationContext applicationContext;
/**
* Provide an {@link ErrorHandler} strategy.
* @since 6.2
*/
public void setErrorHandler(ErrorHandler errorHandler) {
Assert.notNull(errorHandler, "ErrorHandler must not be null");
this.errorHandler = errorHandler;
}
/**
* Set the clock to use for scheduling purposes.
* <p>The default clock is the system clock for the default time zone.
* @see Clock#systemDefaultZone()
*/
public void setClock(Clock clock) {
Assert.notNull(clock, "Clock must not be null");
this.clock = clock;
}
@Override
public Clock getClock() {
return this.clock;
}
/**
* Specify the lifecycle phase for pausing and resuming this executor.
* The default is {@link #DEFAULT_PHASE}.
* @see SmartLifecycle#getPhase()
*/
public void setPhase(int phase) {
this.phase = phase;
}
/**
* Return the lifecycle phase for pausing and resuming this executor.
* @see #setPhase
*/
@Override
public int getPhase() {
return this.phase;
}
/**
* Specify a custom target {@link Executor} to delegate to for
* the individual execution of scheduled tasks. This can for example
* be set to a separate thread pool for executing scheduled tasks,
* whereas this scheduler keeps using its single scheduler thread.
* <p>If not set, the regular {@link SimpleAsyncTaskExecutor}
* arrangements kicks in with a new thread per task.
*/
public void setTargetTaskExecutor(Executor targetTaskExecutor) {
this.targetTaskExecutor = (targetTaskExecutor == this ? null : targetTaskExecutor);
}
@Override
public void setApplicationContext(ApplicationContext applicationContext) {
this.applicationContext = applicationContext;
}
private ScheduledExecutorService createScheduledExecutor() {
return new ScheduledThreadPoolExecutor(1, this::newThread) {
@Override
protected void beforeExecute(Thread thread, Runnable task) {
triggerLifecycle.beforeExecute(thread);
}
@Override
protected void afterExecute(Runnable task, Throwable ex) {
triggerLifecycle.afterExecute();
}
};
}
private ScheduledExecutorService createFixedDelayExecutor() {
return new ScheduledThreadPoolExecutor(1, this::newThread) {
@Override
protected void beforeExecute(Thread thread, Runnable task) {
fixedDelayLifecycle.beforeExecute(thread);
}
@Override
protected void afterExecute(Runnable task, Throwable ex) {
fixedDelayLifecycle.afterExecute();
}
};
}
@Override
protected void doExecute(Runnable task) {
if (this.targetTaskExecutor != null) {
this.targetTaskExecutor.execute(task);
}
else {
super.doExecute(task);
}
}
private Runnable taskOnSchedulerThread(Runnable task) {
return new DelegatingErrorHandlingRunnable(task,
(this.errorHandler != null ? this.errorHandler : TaskUtils.getDefaultErrorHandler(true)));
}
private Runnable scheduledTask(Runnable task) {
return () -> execute(new DelegatingErrorHandlingRunnable(task, this::shutdownAwareErrorHandler));
}
private void shutdownAwareErrorHandler(Throwable ex) {
if (this.errorHandler != null) {
this.errorHandler.handleError(ex);
}
else if (this.triggerExecutor.isShutdown()) {
LogFactory.getLog(getClass()).debug("Ignoring scheduled task exception after shutdown", ex);
}
else {
TaskUtils.getDefaultErrorHandler(true).handleError(ex);
}
}
@Override
public void execute(Runnable task) {
super.execute(TaskUtils.decorateTaskWithErrorHandler(task, this.errorHandler, false));
}
@Override
public Future<?> submit(Runnable task) {
return super.submit(TaskUtils.decorateTaskWithErrorHandler(task, this.errorHandler, false));
}
@Override
public <T> Future<T> submit(Callable<T> task) {
return super.submit(new DelegatingErrorHandlingCallable<>(task, this.errorHandler));
}
@Override
public @Nullable ScheduledFuture<?> schedule(Runnable task, Trigger trigger) {
try {
Runnable delegate = scheduledTask(task);
ErrorHandler errorHandler =
(this.errorHandler != null ? this.errorHandler : TaskUtils.getDefaultErrorHandler(true));
return new ReschedulingRunnable(
delegate, trigger, this.clock, this.triggerExecutor, errorHandler).schedule();
}
catch (RejectedExecutionException ex) {
throw new TaskRejectedException(this.triggerExecutor, task, ex);
}
}
@Override
public ScheduledFuture<?> schedule(Runnable task, Instant startTime) {
Duration delay = Duration.between(this.clock.instant(), startTime);
try {
return this.triggerExecutor.schedule(scheduledTask(task), NANO.convert(delay), NANO);
}
catch (RejectedExecutionException ex) {
throw new TaskRejectedException(this.triggerExecutor, task, ex);
}
}
@Override
public ScheduledFuture<?> scheduleAtFixedRate(Runnable task, Instant startTime, Duration period) {
Duration initialDelay = Duration.between(this.clock.instant(), startTime);
try {
return this.triggerExecutor.scheduleAtFixedRate(scheduledTask(task),
NANO.convert(initialDelay), NANO.convert(period), NANO);
}
catch (RejectedExecutionException ex) {
throw new TaskRejectedException(this.triggerExecutor, task, ex);
}
}
@Override
public ScheduledFuture<?> scheduleAtFixedRate(Runnable task, Duration period) {
try {
return this.triggerExecutor.scheduleAtFixedRate(scheduledTask(task),
0, NANO.convert(period), NANO);
}
catch (RejectedExecutionException ex) {
throw new TaskRejectedException(this.triggerExecutor, task, ex);
}
}
@Override
public ScheduledFuture<?> scheduleWithFixedDelay(Runnable task, Instant startTime, Duration delay) {
Duration initialDelay = Duration.between(this.clock.instant(), startTime);
try {
// Blocking task on scheduler thread for fixed delay semantics
return this.fixedDelayExecutor.scheduleWithFixedDelay(taskOnSchedulerThread(task),
NANO.convert(initialDelay), NANO.convert(delay), NANO);
}
catch (RejectedExecutionException ex) {
throw new TaskRejectedException(this.fixedDelayExecutor, task, ex);
}
}
@Override
public ScheduledFuture<?> scheduleWithFixedDelay(Runnable task, Duration delay) {
try {
// Blocking task on scheduler thread for fixed delay semantics
return this.fixedDelayExecutor.scheduleWithFixedDelay(taskOnSchedulerThread(task),
0, NANO.convert(delay), NANO);
}
catch (RejectedExecutionException ex) {
throw new TaskRejectedException(this.fixedDelayExecutor, task, ex);
}
}
@Override
public void start() {
this.triggerLifecycle.start();
this.fixedDelayLifecycle.start();
}
@Override
public void stop() {
this.triggerLifecycle.stop();
this.fixedDelayLifecycle.stop();
}
@Override
public void stop(Runnable callback) {
this.triggerLifecycle.stop(); // no callback necessary since it's just triggers with hand-offs
this.fixedDelayLifecycle.stop(callback); // callback for currently executing fixed-delay tasks
}
@Override
public boolean isRunning() {
return (this.triggerLifecycle.isRunning() || this.fixedDelayLifecycle.isRunning());
}
@Override
public void onApplicationEvent(ContextClosedEvent event) {
if (event.getApplicationContext() == this.applicationContext) {
this.triggerExecutor.shutdown();
this.fixedDelayExecutor.shutdown();
}
}
@Override
public void close() {
for (Runnable remainingTask : this.triggerExecutor.shutdownNow()) {
if (remainingTask instanceof Future<?> future) {
future.cancel(true);
}
}
for (Runnable remainingTask : this.fixedDelayExecutor.shutdownNow()) {
if (remainingTask instanceof Future<?> future) {
future.cancel(true);
}
}
super.close();
}
}
|
SimpleAsyncTaskScheduler
|
java
|
apache__flink
|
flink-core/src/test/java/org/apache/flink/api/java/typeutils/TypeExtractionUtilsTest.java
|
{
"start": 1283,
"end": 2807
}
|
class ____ {
@Test
void testIsGeneric() throws Exception {
Method method = getMethod(IsGeneric.class, "m1");
Type firstParam = method.getGenericParameterTypes()[0];
assertThat(TypeExtractionUtils.isGenericOfClass(List.class, firstParam)).isTrue();
method = getMethod(IsGeneric.class, "m2");
firstParam = method.getGenericParameterTypes()[0];
assertThat(TypeExtractionUtils.isGenericOfClass(List.class, firstParam)).isTrue();
}
@Test
void testGetParameterizedType() throws Exception {
Method method = getMethod(IsGeneric.class, "m1");
Type firstParam = method.getGenericParameterTypes()[0];
Optional<ParameterizedType> parameterizedType =
TypeExtractionUtils.getParameterizedType(firstParam);
assertThat(parameterizedType).isPresent();
assertThat(parameterizedType.get().getRawType()).isEqualTo(List.class);
assertThat(parameterizedType.get().getActualTypeArguments()[0]).isEqualTo(Integer.class);
method = getMethod(IsGeneric.class, "m2");
firstParam = method.getGenericParameterTypes()[0];
assertThat(TypeExtractionUtils.getParameterizedType(firstParam)).isEmpty();
}
private Method getMethod(Class<?> clazz, String name) throws Exception {
return getAllDeclaredMethods(clazz).stream()
.filter(m -> m.getName().equals(name))
.findFirst()
.orElseThrow();
}
public static
|
TypeExtractionUtilsTest
|
java
|
apache__kafka
|
tools/src/main/java/org/apache/kafka/tools/consumer/group/CsvUtils.java
|
{
"start": 1123,
"end": 1902
}
|
class ____ {
private static final CsvMapper MAPPER = new CsvMapper();
public static ObjectReader readerFor(Class<?> clazz) {
return MAPPER.readerFor(clazz).with(getSchema(clazz));
}
public static ObjectWriter writerFor(Class<?> clazz) {
return MAPPER.writerFor(clazz).with(getSchema(clazz));
}
private static CsvSchema getSchema(Class<?> clazz) {
String[] fields;
if (CsvRecordWithGroup.class == clazz)
fields = CsvRecordWithGroup.FIELDS;
else if (CsvRecordNoGroup.class == clazz)
fields = CsvRecordNoGroup.FIELDS;
else
throw new IllegalStateException("Unhandled class " + clazz);
return MAPPER.schemaFor(clazz).sortedBy(fields);
}
public static
|
CsvUtils
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/cluster/api/NodeSelectionSupport.java
|
{
"start": 369,
"end": 465
}
|
interface ____ to invoke multi-node operations.
* @author Mark Paluch
* @since 4.0
*/
public
|
type
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/async/utils/AsyncCatchFunction.java
|
{
"start": 1419,
"end": 1709
}
|
interface ____ part of the asynchronous utilities provided by the Hadoop
* Distributed File System (HDFS) Federation router. It is used in conjunction
* with other asynchronous interfaces such as AsyncRun to build complex,
* non-blocking operations.</p>
*
* <p>An implementation of this
|
is
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownConjunctionsToKnnPrefilters.java
|
{
"start": 1205,
"end": 5814
}
|
class ____ extends OptimizerRules.OptimizerRule<Filter> {
@Override
protected LogicalPlan rule(Filter filter) {
Stack<Expression> filters = new Stack<>();
Expression condition = filter.condition();
Expression newCondition = pushConjunctionsToKnn(condition, filters, null);
return condition.equals(newCondition) ? filter : filter.with(newCondition);
}
/**
* Updates knn function prefilters. This method processes conjunctions so knn functions on one side of the conjunction receive
* the other side of the conjunction as a prefilter
*
* @param expression expression to process recursively
* @param filters current filters to apply to the expression. They contain expressions on the other side of the traversed conjunctions
* @param addedFilter a new filter to add to the list of filters for the processing
* @return the updated expression, or the original expression if it doesn't need to be updated
*/
private static Expression pushConjunctionsToKnn(Expression expression, Stack<Expression> filters, Expression addedFilter) {
if (addedFilter != null) {
filters.push(addedFilter);
}
Expression result = switch (expression) {
case And and:
// Traverse both sides of the And, using the other side as the added filter
Expression newLeft = pushConjunctionsToKnn(and.left(), filters, and.right());
Expression newRight = pushConjunctionsToKnn(and.right(), filters, and.left());
if (newLeft.equals(and.left()) && newRight.equals(and.right())) {
yield and;
}
yield and.replaceChildrenSameSize(List.of(newLeft, newRight));
case Knn knn:
// We don't want knn expressions to have other knn expressions as a prefilter to avoid circular dependencies
List<Expression> newFilters = filters.stream()
.map(PushDownConjunctionsToKnnPrefilters::removeKnn)
.filter(Objects::nonNull)
.toList();
if (newFilters.equals(knn.filterExpressions())) {
yield knn;
}
yield knn.withFilters(newFilters);
default:
List<Expression> children = expression.children();
boolean childrenChanged = false;
// This copies transformChildren algorithm to avoid unnecessary changes
List<Expression> transformedChildren = null;
for (int i = 0, s = children.size(); i < s; i++) {
Expression child = children.get(i);
Expression next = pushConjunctionsToKnn(child, filters, null);
if (child.equals(next) == false) {
// lazy copy + replacement in place
if (childrenChanged == false) {
childrenChanged = true;
transformedChildren = new ArrayList<>(children);
}
transformedChildren.set(i, next);
}
}
yield (childrenChanged ? expression.replaceChildrenSameSize(transformedChildren) : expression);
};
if (addedFilter != null) {
filters.pop();
}
return result;
}
/**
* Removes knn functions from the expression tree
* @param expression expression to process
* @return expression without knn functions, or null if the expression is a knn function
*/
private static Expression removeKnn(Expression expression) {
if (expression.children().isEmpty()) {
return expression;
}
if (expression instanceof Knn) {
return null;
}
List<Expression> filteredChildren = expression.children()
.stream()
.map(PushDownConjunctionsToKnnPrefilters::removeKnn)
.filter(Objects::nonNull)
.toList();
if (filteredChildren.equals(expression.children())) {
return expression;
} else if (filteredChildren.isEmpty()) {
return null;
} else if (expression instanceof BinaryLogic && filteredChildren.size() == 1) {
// Simplify an AND / OR expression to a single child
return filteredChildren.getFirst();
} else {
return expression.replaceChildrenSameSize(filteredChildren);
}
}
}
|
PushDownConjunctionsToKnnPrefilters
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/configuration/interfaces/ContextConfigurationTestInterface.java
|
{
"start": 1116,
"end": 1209
}
|
class ____ {
@Bean
Employee employee() {
return new Employee("Dilbert");
}
}
}
|
Config
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelDownloadTaskTests.java
|
{
"start": 760,
"end": 1645
}
|
class ____ extends ESTestCase {
public void testStatus() {
var task = testTask();
task.setProgress(100, 0);
var taskInfo = task.taskInfo("node", true);
var status = Strings.toString(taskInfo.status());
assertThat(status, containsString("{\"total_parts\":100,\"downloaded_parts\":0}"));
task.setProgress(100, 1);
taskInfo = task.taskInfo("node", true);
status = Strings.toString(taskInfo.status());
assertThat(status, containsString("{\"total_parts\":100,\"downloaded_parts\":1}"));
}
public static ModelDownloadTask testTask() {
return new ModelDownloadTask(
0L,
MODEL_IMPORT_TASK_TYPE,
MODEL_IMPORT_TASK_ACTION,
downloadModelTaskDescription("foo"),
TaskId.EMPTY_TASK_ID,
Map.of()
);
}
}
|
ModelDownloadTaskTests
|
java
|
spring-projects__spring-security
|
oauth2/oauth2-client/src/test/java/org/springframework/security/oauth2/client/authentication/OAuth2LoginAuthenticationTokenTests.java
|
{
"start": 1808,
"end": 6362
}
|
class ____ {
private OAuth2User principal;
private Collection<? extends GrantedAuthority> authorities;
private ClientRegistration clientRegistration;
private OAuth2AuthorizationExchange authorizationExchange;
private OAuth2AccessToken accessToken;
@BeforeEach
public void setUp() {
this.principal = mock(OAuth2User.class);
this.authorities = Collections.emptyList();
this.clientRegistration = TestClientRegistrations.clientRegistration().build();
this.authorizationExchange = new OAuth2AuthorizationExchange(TestOAuth2AuthorizationRequests.request().build(),
TestOAuth2AuthorizationResponses.success().code("code").build());
this.accessToken = TestOAuth2AccessTokens.noScopes();
}
@Test
public void constructorAuthorizationRequestResponseWhenClientRegistrationIsNullThenThrowIllegalArgumentException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> new OAuth2LoginAuthenticationToken(null, this.authorizationExchange));
}
@Test
public void constructorAuthorizationRequestResponseWhenAuthorizationExchangeIsNullThenThrowIllegalArgumentException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> new OAuth2LoginAuthenticationToken(this.clientRegistration, null));
}
@Test
public void constructorAuthorizationRequestResponseWhenAllParametersProvidedAndValidThenCreated() {
OAuth2LoginAuthenticationToken authentication = new OAuth2LoginAuthenticationToken(this.clientRegistration,
this.authorizationExchange);
assertThat(authentication.getPrincipal()).isNull();
assertThat(authentication.getCredentials()).isEqualTo("");
assertThat(authentication.getAuthorities()).isEqualTo(Collections.emptyList());
assertThat(authentication.getClientRegistration()).isEqualTo(this.clientRegistration);
assertThat(authentication.getAuthorizationExchange()).isEqualTo(this.authorizationExchange);
assertThat(authentication.getAccessToken()).isNull();
assertThat(authentication.isAuthenticated()).isEqualTo(false);
}
@Test
public void constructorTokenRequestResponseWhenClientRegistrationIsNullThenThrowIllegalArgumentException() {
assertThatIllegalArgumentException().isThrownBy(() -> new OAuth2LoginAuthenticationToken(null,
this.authorizationExchange, this.principal, this.authorities, this.accessToken));
}
@Test
public void constructorTokenRequestResponseWhenAuthorizationExchangeIsNullThenThrowIllegalArgumentException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> new OAuth2LoginAuthenticationToken(this.clientRegistration, null, this.principal,
this.authorities, this.accessToken));
}
@Test
public void constructorTokenRequestResponseWhenPrincipalIsNullThenThrowIllegalArgumentException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> new OAuth2LoginAuthenticationToken(this.clientRegistration, this.authorizationExchange,
null, this.authorities, this.accessToken));
}
@Test
public void constructorTokenRequestResponseWhenAuthoritiesIsNullThenCreated() {
new OAuth2LoginAuthenticationToken(this.clientRegistration, this.authorizationExchange, this.principal, null,
this.accessToken);
}
@Test
public void constructorTokenRequestResponseWhenAuthoritiesIsEmptyThenCreated() {
new OAuth2LoginAuthenticationToken(this.clientRegistration, this.authorizationExchange, this.principal,
Collections.emptyList(), this.accessToken);
}
@Test
public void constructorTokenRequestResponseWhenAccessTokenIsNullThenThrowIllegalArgumentException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> new OAuth2LoginAuthenticationToken(this.clientRegistration, this.authorizationExchange,
this.principal, this.authorities, null));
}
@Test
public void constructorTokenRequestResponseWhenAllParametersProvidedAndValidThenCreated() {
OAuth2LoginAuthenticationToken authentication = new OAuth2LoginAuthenticationToken(this.clientRegistration,
this.authorizationExchange, this.principal, this.authorities, this.accessToken);
assertThat(authentication.getPrincipal()).isEqualTo(this.principal);
assertThat(authentication.getCredentials()).isEqualTo("");
assertThat(authentication.getAuthorities()).isEqualTo(this.authorities);
assertThat(authentication.getClientRegistration()).isEqualTo(this.clientRegistration);
assertThat(authentication.getAuthorizationExchange()).isEqualTo(this.authorizationExchange);
assertThat(authentication.getAccessToken()).isEqualTo(this.accessToken);
assertThat(authentication.isAuthenticated()).isEqualTo(true);
}
}
|
OAuth2LoginAuthenticationTokenTests
|
java
|
apache__kafka
|
share-coordinator/src/main/java/org/apache/kafka/coordinator/share/PersisterStateBatchCombiner.java
|
{
"start": 1081,
"end": 16104
}
|
class ____ {
private List<PersisterStateBatch> combinedBatchList; // link between pruning and merging
private final long startOffset;
private TreeSet<PersisterStateBatch> sortedBatches;
private List<PersisterStateBatch> finalBatchList; // final list is built here
public PersisterStateBatchCombiner(
List<PersisterStateBatch> batchesSoFar,
List<PersisterStateBatch> newBatches,
long startOffset
) {
initializeCombinedList(batchesSoFar, newBatches);
int estimatedResultSize = (combinedBatchList.size() * 3) / 2; // heuristic size - 50% overallocation
finalBatchList = new ArrayList<>(estimatedResultSize);
this.startOffset = startOffset;
}
private void initializeCombinedList(List<PersisterStateBatch> batchesSoFar, List<PersisterStateBatch> newBatches) {
boolean soFarEmpty = batchesSoFar == null || batchesSoFar.isEmpty();
boolean newBatchesEmpty = newBatches == null || newBatches.isEmpty();
if (soFarEmpty && newBatchesEmpty) {
combinedBatchList = new ArrayList<>();
} else if (soFarEmpty) {
combinedBatchList = new ArrayList<>(newBatches); // new list as the original one could be unmodifiable
} else if (newBatchesEmpty) {
combinedBatchList = new ArrayList<>(batchesSoFar); // new list as the original one could be unmodifiable
} else {
combinedBatchList = new ArrayList<>(batchesSoFar.size() + newBatches.size());
combinedBatchList.addAll(batchesSoFar);
combinedBatchList.addAll(newBatches);
}
}
/**
* Algorithm: Merge current state batches and new batches into a single non-overlapping batch list.
* Input: batchesSoFar, newBatches, startOffset
* Output: combined list with non-overlapping batches (finalBatchList)
* <p>
* - Add both currentBatches and newBatches into a single list combinedBatchList
* - if combinedBatchList.size() <= 1 return combinedBatchList
* <p>
* - Remove/prune any batches from the combinedBatchList:
* - if batch.lastOffset < startOffset then remove batch from combinedBatchList
* - else if batch.firstOffset > startOffset then we will keep the batch
* - else if batch.firstOffset <= startOffset <= batch.lastOffset then keep [startOffset, batch.lastOffset] part only and discard rest.
* <p>
* - create a treeset sortedBatches using pruned combinedBatchList
* - find first 2 mergeable batches in sortedBatches set, say, prev and candidate.
* - remove any non-overlapping batches from sortedBatches encountered during the find operation and add them to a finalBatchList
* - do repeat until a mergeable pair is not found:
* - based on various conditions of offset overlap and batch state differences combine the batches or
* create new batches, if required, and add to the sortedBatches.
* - find first 2 mergeable batches in sortedBatches set, say, prev and candidate.
* - remove any non-mergeable batches from sortedBatches encountered during the find operation and add them to a finalBatchList
* - done
* - return the finalBatchList
*
* @return list of {@link PersisterStateBatch} representing non-overlapping combined batches
*/
public List<PersisterStateBatch> combineStateBatches() {
pruneBatches();
mergeBatches();
return finalBatchList;
}
private void mergeBatches() {
if (combinedBatchList.size() < 2) {
finalBatchList = combinedBatchList;
return;
}
sortedBatches = new TreeSet<>(combinedBatchList);
MergeCandidatePair overlapState = getMergeCandidatePair();
while (overlapState != MergeCandidatePair.EMPTY) {
PersisterStateBatch prev = overlapState.prev();
PersisterStateBatch candidate = overlapState.candidate();
// remove both previous and candidate for easier
// assessment about adding batches to sortedBatches
sortedBatches.remove(prev);
sortedBatches.remove(candidate);
if (compareBatchDeliveryInfo(candidate, prev) == 0) { // same state and overlap or contiguous
// overlap and same state (prev.firstOffset <= candidate.firstOffset) due to sort
// covers:
// case: 1 2 3 4 5 6 7 (contiguous)
// prev: ------ ------- ------- ------- ------- -------- -------
// candidate: ------ ---- ---------- --- ---- ------- -------
handleSameStateMerge(prev, candidate); // pair can be contiguous or overlapping
} else {
// If we reach here then it is guaranteed that the batch pair is overlapping and
// non-contiguous because getMergeCandidatePair only returns contiguous pair if
// the constituents have the same delivery count and state.
// covers:
// case: 1 2* 3 4 5 6 7*
// prev: ------ ------- ------- ------- ------- -------- ------
// candidate: ------ ---- --------- ---- ---- ------- -------
// max batches: 1 2 2 3 2 2 2
// min batches: 1 1 1 1 1 2 1
// * not possible with treeset
handleDiffStateOverlap(prev, candidate);
}
overlapState = getMergeCandidatePair();
}
finalBatchList.addAll(sortedBatches); // some non overlapping batches might have remained
}
/**
* Compares the non-offset state of 2 batches i.e. the deliveryCount and deliverState.
* <p>
* Uses standard compareTo contract x < y => +int, x > y => -int, x == y => 0
*
* @param b1 - {@link PersisterStateBatch} to compare
* @param b2 - {@link PersisterStateBatch} to compare
* @return int representing comparison result.
*/
private int compareBatchDeliveryInfo(PersisterStateBatch b1, PersisterStateBatch b2) {
int deltaCount = Short.compare(b1.deliveryCount(), b2.deliveryCount());
// Delivery state could be:
// 0 - AVAILABLE (non-terminal)
// 1 - ACQUIRED - should not be persisted yet
// 2 - ACKNOWLEDGED (terminal)
// 3 - ARCHIVING - not implemented in KIP-932 - non-terminal - leads only to ARCHIVED
// 4 - ARCHIVED (terminal)
if (deltaCount == 0) { // same delivery count
return Byte.compare(b1.deliveryState(), b2.deliveryState());
}
return deltaCount;
}
/**
* Accepts a sorted set of state batches and finds the first 2 batches which can be merged.
* Merged implies that they have some offsets in common or, they are contiguous with the same state.
* <p>
* Any non-mergeable batches prefixing a good mergeable pair are removed from the sortedBatches.
* For example:
* ----- ---- ----- ----- -----
* ------
* <---------------> <-------->
* non-overlapping 1st overlapping pair
*
* @return object representing the overlap state
*/
private MergeCandidatePair getMergeCandidatePair() {
if (sortedBatches == null || sortedBatches.isEmpty()) {
return MergeCandidatePair.EMPTY;
}
Iterator<PersisterStateBatch> iter = sortedBatches.iterator();
PersisterStateBatch prev = iter.next();
List<PersisterStateBatch> nonOverlapping = new LinkedList<>();
while (iter.hasNext()) {
PersisterStateBatch candidate = iter.next();
if (candidate.firstOffset() <= prev.lastOffset() || // overlap
prev.lastOffset() + 1 == candidate.firstOffset() && compareBatchDeliveryInfo(prev, candidate) == 0) { // contiguous
updateBatchContainers(nonOverlapping);
return new MergeCandidatePair(
prev,
candidate
);
}
nonOverlapping.add(prev);
prev = candidate;
}
updateBatchContainers(nonOverlapping);
return MergeCandidatePair.EMPTY;
}
private void updateBatchContainers(List<PersisterStateBatch> nonOverlappingBatches) {
nonOverlappingBatches.forEach(sortedBatches::remove);
finalBatchList.addAll(nonOverlappingBatches);
}
/**
* Accepts a list of {@link PersisterStateBatch} and checks:
* - last offset is < start offset => batch is removed
* - first offset > start offset => batch is preserved
* - start offset intersects the batch => part of batch before start offset is removed and
* the part after it is preserved.
*/
private void pruneBatches() {
if (startOffset != -1) {
List<PersisterStateBatch> retainedBatches = new ArrayList<>(combinedBatchList.size());
combinedBatchList.forEach(batch -> {
if (batch.lastOffset() < startOffset) {
// batch is expired, skip current iteration
// -------
// | -> start offset
return;
}
if (batch.firstOffset() >= startOffset) {
// complete batch is valid
// ---------
// | -> start offset
retainedBatches.add(batch);
} else {
// start offset intersects batch
// ---------
// | -> start offset
retainedBatches.add(new PersisterStateBatch(startOffset, batch.lastOffset(), batch.deliveryState(), batch.deliveryCount()));
}
});
// update the instance variable
combinedBatchList = retainedBatches;
}
}
private void handleSameStateMerge(PersisterStateBatch prev, PersisterStateBatch candidate) {
sortedBatches.add(new PersisterStateBatch(
prev.firstOffset(),
// cover cases
// prev: ------ -------- ---------
// candidate: --- ---------- -----
Math.max(candidate.lastOffset(), prev.lastOffset()),
prev.deliveryState(),
prev.deliveryCount()
));
}
private void handleDiffStateOverlap(PersisterStateBatch prev, PersisterStateBatch candidate) {
if (candidate.firstOffset() == prev.firstOffset()) {
handleDiffStateOverlapFirstOffsetAligned(prev, candidate);
} else { // candidate.firstOffset() > prev.firstOffset()
handleDiffStateOverlapFirstOffsetNotAligned(prev, candidate);
}
}
private void handleDiffStateOverlapFirstOffsetAligned(PersisterStateBatch prev, PersisterStateBatch candidate) {
if (candidate.lastOffset() == prev.lastOffset()) { // case 1
// candidate can never have lower or equal priority
// since sortedBatches order takes that into account.
// -------
// -------
sortedBatches.add(candidate);
} else {
// case 2 is not possible with TreeSet. It is symmetric to case 3.
// case 3
// --------
// -----------
if (compareBatchDeliveryInfo(candidate, prev) < 0) {
sortedBatches.add(prev);
sortedBatches.add(new PersisterStateBatch(
prev.lastOffset() + 1,
candidate.lastOffset(),
candidate.deliveryState(),
candidate.deliveryCount()
));
} else {
// candidate priority is >= prev
sortedBatches.add(candidate);
}
}
}
private void handleDiffStateOverlapFirstOffsetNotAligned(PersisterStateBatch prev, PersisterStateBatch candidate) {
if (candidate.lastOffset() < prev.lastOffset()) { // case 4
handleDiffStateOverlapPrevSwallowsCandidate(prev, candidate);
} else if (candidate.lastOffset() == prev.lastOffset()) { // case 5
handleDiffStateOverlapLastOffsetAligned(prev, candidate);
} else { // case 6
handleDiffStateOverlapCandidateOffsetsLarger(prev, candidate);
}
}
private void handleDiffStateOverlapPrevSwallowsCandidate(PersisterStateBatch prev, PersisterStateBatch candidate) {
// --------
// ----
if (compareBatchDeliveryInfo(candidate, prev) < 0) {
sortedBatches.add(prev);
} else {
sortedBatches.add(new PersisterStateBatch(
prev.firstOffset(),
candidate.firstOffset() - 1,
prev.deliveryState(),
prev.deliveryCount()
));
sortedBatches.add(candidate);
sortedBatches.add(new PersisterStateBatch(
candidate.lastOffset() + 1,
prev.lastOffset(),
prev.deliveryState(),
prev.deliveryCount()
));
}
}
private void handleDiffStateOverlapLastOffsetAligned(PersisterStateBatch prev, PersisterStateBatch candidate) {
// --------
// -----
if (compareBatchDeliveryInfo(candidate, prev) < 0) {
sortedBatches.add(prev);
} else {
sortedBatches.add(new PersisterStateBatch(
prev.firstOffset(),
candidate.firstOffset() - 1,
prev.deliveryState(),
prev.deliveryCount()
));
sortedBatches.add(candidate);
}
}
private void handleDiffStateOverlapCandidateOffsetsLarger(PersisterStateBatch prev, PersisterStateBatch candidate) {
// -------
// -------
if (compareBatchDeliveryInfo(candidate, prev) < 0) {
sortedBatches.add(prev);
sortedBatches.add(new PersisterStateBatch(
prev.lastOffset() + 1,
candidate.lastOffset(),
candidate.deliveryState(),
candidate.deliveryCount()
));
} else {
// candidate has higher priority
sortedBatches.add(new PersisterStateBatch(
prev.firstOffset(),
candidate.firstOffset() - 1,
prev.deliveryState(),
prev.deliveryCount()
));
sortedBatches.add(candidate);
}
}
/**
* Holder
|
PersisterStateBatchCombiner
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java
|
{
"start": 4708,
"end": 10653
}
|
class ____ extends HardLinkCommandGetter {
/**
* Build the windows link command. This must not
* use an exception-raising reference to WINUTILS, as
* some tests examine the command.
*/
@SuppressWarnings("deprecation")
static String[] getLinkCountCommand = {
Shell.WINUTILS, "hardlink", "stat", null};
/*
* @see org.apache.hadoop.fs.HardLink.HardLinkCommandGetter#linkCount(java.io.File)
*/
@Override
String[] linkCount(File file) throws IOException {
// trigger the check for winutils
Shell.getWinUtilsFile();
String[] buf = new String[getLinkCountCommand.length];
System.arraycopy(getLinkCountCommand, 0, buf, 0,
getLinkCountCommand.length);
buf[getLinkCountCommand.length - 1] = file.getCanonicalPath();
return buf;
}
}
/*
* ****************************************************
* Complexity is above. User-visible functionality is below
* ****************************************************
*/
/**
* Creates a hardlink.
* @param file - existing source file
* @param linkName - desired target link file
* @throws IOException raised on errors performing I/O.
*/
public static void createHardLink(File file, File linkName)
throws IOException {
if (file == null) {
throw new IOException(
"invalid arguments to createHardLink: source file is null");
}
if (linkName == null) {
throw new IOException(
"invalid arguments to createHardLink: link name is null");
}
createLink(linkName.toPath(), file.toPath());
}
/**
* Creates hardlinks from multiple existing files within one parent
* directory, into one target directory.
* @param parentDir - directory containing source files
* @param fileBaseNames - list of path-less file names, as returned by
* parentDir.list()
* @param linkDir - where the hardlinks should be put. It must already exist.
* @throws IOException raised on errors performing I/O.
*/
public static void createHardLinkMult(File parentDir, String[] fileBaseNames,
File linkDir) throws IOException {
if (parentDir == null) {
throw new IOException(
"invalid arguments to createHardLinkMult: parent directory is null");
}
if (linkDir == null) {
throw new IOException(
"invalid arguments to createHardLinkMult: link directory is null");
}
if (fileBaseNames == null) {
throw new IOException(
"invalid arguments to createHardLinkMult: "
+ "filename list can be empty but not null");
}
if (!linkDir.exists()) {
throw new FileNotFoundException(linkDir + " not found.");
}
for (String name : fileBaseNames) {
createLink(linkDir.toPath().resolve(name),
parentDir.toPath().resolve(name));
}
}
/**
* Determines whether the system supports hardlinks.
* @param f - file to examine
* @return true if hardlinks are supported, false otherwise
*/
public static boolean supportsHardLink(File f) {
try {
FileStore store = Files.getFileStore(f.toPath());
return store.supportsFileAttributeView(FILE_ATTRIBUTE_VIEW);
} catch (IOException e) {
LOG.warn("Failed to determine if hardlink is supported", e);
return false;
}
}
/**
* Retrieves the number of links to the specified file.
*
* @param fileName file name.
* @throws IOException raised on errors performing I/O.
* @return link count.
*/
public static int getLinkCount(File fileName) throws IOException {
if (fileName == null) {
throw new IOException(
"invalid argument to getLinkCount: file name is null");
}
if (!fileName.exists()) {
throw new FileNotFoundException(fileName + " not found.");
}
if (supportsHardLink(fileName)) {
return (int) Files.getAttribute(fileName.toPath(), FILE_ATTRIBUTE);
}
// construct and execute shell command
String[] cmd = getHardLinkCommand.linkCount(fileName);
String inpMsg = null;
String errMsg = null;
int exitValue = -1;
BufferedReader in = null;
ShellCommandExecutor shexec = new ShellCommandExecutor(cmd);
try {
shexec.execute();
in = new BufferedReader(new StringReader(shexec.getOutput()));
inpMsg = in.readLine();
exitValue = shexec.getExitCode();
if (inpMsg == null || exitValue != 0) {
throw createIOException(fileName, inpMsg, errMsg, exitValue, null);
}
if (Shell.SOLARIS) {
String[] result = inpMsg.split("\\s+");
return Integer.parseInt(result[1]);
} else {
return Integer.parseInt(inpMsg);
}
} catch (ExitCodeException e) {
inpMsg = shexec.getOutput();
errMsg = e.getMessage();
exitValue = e.getExitCode();
throw createIOException(fileName, inpMsg, errMsg, exitValue, e);
} catch (NumberFormatException e) {
throw createIOException(fileName, inpMsg, errMsg, exitValue, e);
} finally {
IOUtils.closeStream(in);
}
}
/* Create an IOException for failing to get link count. */
private static IOException createIOException(File f, String message,
String error, int exitvalue, Exception cause) {
final String s = "Failed to get link count on file " + f
+ ": message=" + message
+ "; error=" + error
+ "; exit value=" + exitvalue;
return (cause == null) ? new IOException(s) : new IOException(s, cause);
}
/**
* HardLink statistics counters and methods.
* Not multi-thread safe, obviously.
* Init is called during HardLink instantiation, above.
*
* These are intended for use by knowledgeable clients, not internally,
* because many of the internal methods are static and can't update these
* per-instance counters.
*/
public static
|
HardLinkCGWin
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/scheduler/adapter/DefaultSchedulingPipelinedRegionTest.java
|
{
"start": 2273,
"end": 7617
}
|
class ____ {
@RegisterExtension
private static final TestExecutorExtension<ScheduledExecutorService> EXECUTOR_EXTENSION =
TestingUtils.defaultExecutorExtension();
@Test
void gettingUnknownVertexThrowsException() {
final Map<IntermediateResultPartitionID, DefaultResultPartition> resultPartitionById =
Collections.emptyMap();
final DefaultSchedulingPipelinedRegion pipelinedRegion =
new DefaultSchedulingPipelinedRegion(
Collections.emptySet(), resultPartitionById::get);
final ExecutionVertexID unknownVertexId = new ExecutionVertexID(new JobVertexID(), 0);
assertThatThrownBy(() -> pipelinedRegion.getVertex(unknownVertexId))
.withFailMessage("Expected exception not thrown")
.isInstanceOf(IllegalArgumentException.class)
.hasMessageContaining(unknownVertexId + " not found");
}
@Test
void returnsVertices() {
final DefaultExecutionVertex vertex =
new DefaultExecutionVertex(
new ExecutionVertexID(new JobVertexID(), 0),
Collections.emptyList(),
() -> ExecutionState.CREATED,
Collections.emptyList(),
partitionID -> {
throw new UnsupportedOperationException();
});
final Set<DefaultExecutionVertex> vertices = Collections.singleton(vertex);
final Map<IntermediateResultPartitionID, DefaultResultPartition> resultPartitionById =
Collections.emptyMap();
final DefaultSchedulingPipelinedRegion pipelinedRegion =
new DefaultSchedulingPipelinedRegion(vertices, resultPartitionById::get);
final Iterator<DefaultExecutionVertex> vertexIterator =
pipelinedRegion.getVertices().iterator();
assertThat(vertexIterator).hasNext();
assertThat(vertexIterator.next()).isSameAs(vertex);
assertThat(vertexIterator.hasNext()).isFalse();
}
/**
* Tests if the consumed inputs of the pipelined regions are computed correctly using the Job
* graph below.
*
* <pre>
* c
* / X
* a -+- b e
* \ /
* d
* </pre>
*
* <p>Pipelined regions: {a}, {b, c, d, e}
*/
@Test
void returnsIncidentBlockingPartitions() throws Exception {
final JobVertex a = ExecutionGraphTestUtils.createNoOpVertex(1);
final JobVertex b = ExecutionGraphTestUtils.createNoOpVertex(1);
final JobVertex c = ExecutionGraphTestUtils.createNoOpVertex(1);
final JobVertex d = ExecutionGraphTestUtils.createNoOpVertex(1);
final JobVertex e = ExecutionGraphTestUtils.createNoOpVertex(1);
connectNewDataSetAsInput(b, a, DistributionPattern.POINTWISE, ResultPartitionType.BLOCKING);
connectNewDataSetAsInput(
c, b, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED);
connectNewDataSetAsInput(
d, b, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED);
connectNewDataSetAsInput(e, c, DistributionPattern.POINTWISE, ResultPartitionType.BLOCKING);
connectNewDataSetAsInput(
e, d, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED);
final DefaultExecutionGraph simpleTestGraph =
ExecutionGraphTestUtils.createExecutionGraph(
EXECUTOR_EXTENSION.getExecutor(), a, b, c, d, e);
final DefaultExecutionTopology topology =
DefaultExecutionTopology.fromExecutionGraph(simpleTestGraph);
final DefaultSchedulingPipelinedRegion firstPipelinedRegion =
topology.getPipelinedRegionOfVertex(new ExecutionVertexID(a.getID(), 0));
final DefaultSchedulingPipelinedRegion secondPipelinedRegion =
topology.getPipelinedRegionOfVertex(new ExecutionVertexID(e.getID(), 0));
final DefaultExecutionVertex vertexB0 =
topology.getVertex(new ExecutionVertexID(b.getID(), 0));
final IntermediateResultPartitionID b0ConsumedResultPartition =
Iterables.getOnlyElement(vertexB0.getConsumedResults()).getId();
final Set<IntermediateResultPartitionID> secondPipelinedRegionConsumedResults =
new HashSet<>();
for (ConsumedPartitionGroup consumedPartitionGroup :
secondPipelinedRegion.getAllNonPipelinedConsumedPartitionGroups()) {
for (IntermediateResultPartitionID partitionId : consumedPartitionGroup) {
if (!secondPipelinedRegion.contains(
topology.getResultPartition(partitionId).getProducer().getId())) {
secondPipelinedRegionConsumedResults.add(partitionId);
}
}
}
assertThat(
firstPipelinedRegion
.getAllNonPipelinedConsumedPartitionGroups()
.iterator()
.hasNext())
.isFalse();
assertThat(secondPipelinedRegionConsumedResults).contains(b0ConsumedResultPartition);
}
}
|
DefaultSchedulingPipelinedRegionTest
|
java
|
apache__camel
|
components/camel-jte/src/test/java/org/apache/camel/component/jte/MyModel.java
|
{
"start": 851,
"end": 913
}
|
class ____ {
public String foo;
public int age;
}
|
MyModel
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/action/search/SearchScrollAsyncActionTests.java
|
{
"start": 1559,
"end": 22789
}
|
class ____ extends ESTestCase {
public void testSendRequestsToNodes() throws InterruptedException {
ParsedScrollId scrollId = getParsedScrollId(
new SearchContextIdForNode(null, "node1", new ShardSearchContextId(UUIDs.randomBase64UUID(), 1)),
new SearchContextIdForNode(null, "node2", new ShardSearchContextId(UUIDs.randomBase64UUID(), 2)),
new SearchContextIdForNode(null, "node3", new ShardSearchContextId(UUIDs.randomBase64UUID(), 17)),
new SearchContextIdForNode(null, "node1", new ShardSearchContextId(UUIDs.randomBase64UUID(), 0)),
new SearchContextIdForNode(null, "node3", new ShardSearchContextId(UUIDs.randomBase64UUID(), 0))
);
DiscoveryNodes discoveryNodes = DiscoveryNodes.builder()
.add(DiscoveryNodeUtils.create("node1"))
.add(DiscoveryNodeUtils.create("node2"))
.add(DiscoveryNodeUtils.create("node3"))
.build();
AtomicArray<SearchAsyncActionTests.TestSearchPhaseResult> results = new AtomicArray<>(scrollId.getContext().length);
SearchScrollRequest request = new SearchScrollRequest();
request.scroll(TimeValue.timeValueMinutes(1));
CountDownLatch latch = new CountDownLatch(1);
AtomicInteger movedCounter = new AtomicInteger(0);
SearchScrollAsyncAction<SearchAsyncActionTests.TestSearchPhaseResult> action = new SearchScrollAsyncAction<
SearchAsyncActionTests.TestSearchPhaseResult>(scrollId, logger, discoveryNodes, dummyListener(), request, null) {
@Override
protected void executeInitialPhase(
Transport.Connection connection,
InternalScrollSearchRequest internalRequest,
ActionListener<SearchAsyncActionTests.TestSearchPhaseResult> searchActionListener
) {
new Thread(() -> {
SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult = new SearchAsyncActionTests.TestSearchPhaseResult(
internalRequest.contextId(),
connection.getNode()
);
testSearchPhaseResult.setSearchShardTarget(
new SearchShardTarget(connection.getNode().getId(), new ShardId("test", "_na_", 1), null)
);
searchActionListener.onResponse(testSearchPhaseResult);
}).start();
}
@Override
protected Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) {
return new SearchAsyncActionTests.MockConnection(node);
}
@Override
protected SearchPhase moveToNextPhase(BiFunction<String, String, DiscoveryNode> clusterNodeLookup) {
assertEquals(1, movedCounter.incrementAndGet());
return new SearchPhase("test") {
@Override
protected void run() {
latch.countDown();
}
};
}
@Override
protected void onFirstPhaseResult(int shardId, SearchAsyncActionTests.TestSearchPhaseResult result) {
results.setOnce(shardId, result);
}
};
action.run();
latch.await();
ShardSearchFailure[] shardSearchFailures = action.buildShardFailures();
assertEquals(0, shardSearchFailures.length);
SearchContextIdForNode[] context = scrollId.getContext();
for (int i = 0; i < results.length(); i++) {
assertNotNull(results.get(i));
assertEquals(context[i].getSearchContextId(), results.get(i).getContextId());
assertEquals(context[i].getNode(), results.get(i).node.getId());
}
}
public void testFailNextPhase() throws InterruptedException {
ParsedScrollId scrollId = getParsedScrollId(
new SearchContextIdForNode(null, "node1", new ShardSearchContextId("", 1)),
new SearchContextIdForNode(null, "node2", new ShardSearchContextId("a", 2)),
new SearchContextIdForNode(null, "node3", new ShardSearchContextId("b", 17)),
new SearchContextIdForNode(null, "node1", new ShardSearchContextId("c", 0)),
new SearchContextIdForNode(null, "node3", new ShardSearchContextId("d", 0))
);
DiscoveryNodes discoveryNodes = DiscoveryNodes.builder()
.add(DiscoveryNodeUtils.create("node1"))
.add(DiscoveryNodeUtils.create("node2"))
.add(DiscoveryNodeUtils.create("node3"))
.build();
AtomicArray<SearchAsyncActionTests.TestSearchPhaseResult> results = new AtomicArray<>(scrollId.getContext().length);
SearchScrollRequest request = new SearchScrollRequest();
request.scroll(TimeValue.timeValueMinutes(1));
CountDownLatch latch = new CountDownLatch(1);
AtomicInteger movedCounter = new AtomicInteger(0);
ActionListener<SearchResponse> listener = new ActionListener<SearchResponse>() {
@Override
public void onResponse(SearchResponse o) {
try {
fail("got a result");
} finally {
latch.countDown();
}
}
@Override
public void onFailure(Exception e) {
try {
assertTrue(e instanceof SearchPhaseExecutionException);
SearchPhaseExecutionException ex = (SearchPhaseExecutionException) e;
assertEquals("BOOM", ex.getCause().getMessage());
assertEquals("TEST_PHASE", ex.getPhaseName());
assertEquals("Phase failed", ex.getMessage());
} finally {
latch.countDown();
}
}
};
SearchScrollAsyncAction<SearchAsyncActionTests.TestSearchPhaseResult> action = new SearchScrollAsyncAction<
SearchAsyncActionTests.TestSearchPhaseResult>(scrollId, logger, discoveryNodes, listener, request, null) {
@Override
protected void executeInitialPhase(
Transport.Connection connection,
InternalScrollSearchRequest internalRequest,
ActionListener<SearchAsyncActionTests.TestSearchPhaseResult> searchActionListener
) {
new Thread(() -> {
SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult = new SearchAsyncActionTests.TestSearchPhaseResult(
internalRequest.contextId(),
connection.getNode()
);
testSearchPhaseResult.setSearchShardTarget(
new SearchShardTarget(connection.getNode().getId(), new ShardId("test", "_na_", 1), null)
);
searchActionListener.onResponse(testSearchPhaseResult);
}).start();
}
@Override
protected Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) {
return new SearchAsyncActionTests.MockConnection(node);
}
@Override
protected SearchPhase moveToNextPhase(BiFunction<String, String, DiscoveryNode> clusterNodeLookup) {
assertEquals(1, movedCounter.incrementAndGet());
return new SearchPhase("TEST_PHASE") {
@Override
protected void run() {
throw new IllegalArgumentException("BOOM");
}
};
}
@Override
protected void onFirstPhaseResult(int shardId, SearchAsyncActionTests.TestSearchPhaseResult result) {
results.setOnce(shardId, result);
}
};
action.run();
latch.await();
ShardSearchFailure[] shardSearchFailures = action.buildShardFailures();
assertEquals(0, shardSearchFailures.length);
SearchContextIdForNode[] context = scrollId.getContext();
for (int i = 0; i < results.length(); i++) {
assertNotNull(results.get(i));
assertEquals(context[i].getSearchContextId(), results.get(i).getContextId());
assertEquals(context[i].getNode(), results.get(i).node.getId());
}
}
public void testNodeNotAvailable() throws InterruptedException {
ParsedScrollId scrollId = getParsedScrollId(
new SearchContextIdForNode(null, "node1", new ShardSearchContextId("", 1)),
new SearchContextIdForNode(null, "node2", new ShardSearchContextId("", 2)),
new SearchContextIdForNode(null, "node3", new ShardSearchContextId("", 17)),
new SearchContextIdForNode(null, "node1", new ShardSearchContextId("", 0)),
new SearchContextIdForNode(null, "node3", new ShardSearchContextId("", 0))
);
// node2 is not available
DiscoveryNodes discoveryNodes = DiscoveryNodes.builder()
.add(DiscoveryNodeUtils.create("node1"))
.add(DiscoveryNodeUtils.create("node3"))
.build();
AtomicArray<SearchAsyncActionTests.TestSearchPhaseResult> results = new AtomicArray<>(scrollId.getContext().length);
SearchScrollRequest request = new SearchScrollRequest();
request.scroll(TimeValue.timeValueMinutes(1));
CountDownLatch latch = new CountDownLatch(1);
AtomicInteger movedCounter = new AtomicInteger(0);
SearchScrollAsyncAction<SearchAsyncActionTests.TestSearchPhaseResult> action = new SearchScrollAsyncAction<
SearchAsyncActionTests.TestSearchPhaseResult>(scrollId, logger, discoveryNodes, dummyListener(), request, null) {
@Override
protected void executeInitialPhase(
Transport.Connection connection,
InternalScrollSearchRequest internalRequest,
ActionListener<SearchAsyncActionTests.TestSearchPhaseResult> searchActionListener
) {
try {
assertNotEquals("node2 is not available", "node2", connection.getNode().getId());
} catch (NullPointerException e) {
logger.warn(e);
}
new Thread(() -> {
SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult = new SearchAsyncActionTests.TestSearchPhaseResult(
internalRequest.contextId(),
connection.getNode()
);
testSearchPhaseResult.setSearchShardTarget(
new SearchShardTarget(connection.getNode().getId(), new ShardId("test", "_na_", 1), null)
);
searchActionListener.onResponse(testSearchPhaseResult);
}).start();
}
@Override
protected Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) {
return new SearchAsyncActionTests.MockConnection(node);
}
@Override
protected SearchPhase moveToNextPhase(BiFunction<String, String, DiscoveryNode> clusterNodeLookup) {
assertEquals(1, movedCounter.incrementAndGet());
return new SearchPhase("test") {
@Override
protected void run() {
latch.countDown();
}
};
}
@Override
protected void onFirstPhaseResult(int shardId, SearchAsyncActionTests.TestSearchPhaseResult result) {
results.setOnce(shardId, result);
}
};
action.run();
latch.await();
ShardSearchFailure[] shardSearchFailures = action.buildShardFailures();
assertEquals(1, shardSearchFailures.length);
// .reason() returns the full stack trace
assertThat(shardSearchFailures[0].reason(), startsWith("java.lang.IllegalStateException: node [node2] is not available"));
SearchContextIdForNode[] context = scrollId.getContext();
for (int i = 0; i < results.length(); i++) {
if (context[i].getNode().equals("node2")) {
assertNull(results.get(i));
} else {
assertNotNull(results.get(i));
assertEquals(context[i].getSearchContextId(), results.get(i).getContextId());
assertEquals(context[i].getNode(), results.get(i).node.getId());
}
}
}
public void testShardFailures() throws InterruptedException {
ParsedScrollId scrollId = getParsedScrollId(
new SearchContextIdForNode(null, "node1", new ShardSearchContextId("", 1)),
new SearchContextIdForNode(null, "node2", new ShardSearchContextId("", 2)),
new SearchContextIdForNode(null, "node3", new ShardSearchContextId("", 17)),
new SearchContextIdForNode(null, "node1", new ShardSearchContextId("", 0)),
new SearchContextIdForNode(null, "node3", new ShardSearchContextId("", 0))
);
DiscoveryNodes discoveryNodes = DiscoveryNodes.builder()
.add(DiscoveryNodeUtils.create("node1"))
.add(DiscoveryNodeUtils.create("node2"))
.add(DiscoveryNodeUtils.create("node3"))
.build();
AtomicArray<SearchAsyncActionTests.TestSearchPhaseResult> results = new AtomicArray<>(scrollId.getContext().length);
SearchScrollRequest request = new SearchScrollRequest();
request.scroll(TimeValue.timeValueMinutes(1));
CountDownLatch latch = new CountDownLatch(1);
AtomicInteger movedCounter = new AtomicInteger(0);
SearchScrollAsyncAction<SearchAsyncActionTests.TestSearchPhaseResult> action = new SearchScrollAsyncAction<
SearchAsyncActionTests.TestSearchPhaseResult>(scrollId, logger, discoveryNodes, dummyListener(), request, null) {
@Override
protected void executeInitialPhase(
Transport.Connection connection,
InternalScrollSearchRequest internalRequest,
ActionListener<SearchAsyncActionTests.TestSearchPhaseResult> searchActionListener
) {
new Thread(() -> {
if (internalRequest.contextId().getId() == 17) {
searchActionListener.onFailure(new IllegalArgumentException("BOOM on shard"));
} else {
SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult =
new SearchAsyncActionTests.TestSearchPhaseResult(internalRequest.contextId(), connection.getNode());
testSearchPhaseResult.setSearchShardTarget(
new SearchShardTarget(connection.getNode().getId(), new ShardId("test", "_na_", 1), null)
);
searchActionListener.onResponse(testSearchPhaseResult);
}
}).start();
}
@Override
protected Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) {
return new SearchAsyncActionTests.MockConnection(node);
}
@Override
protected SearchPhase moveToNextPhase(BiFunction<String, String, DiscoveryNode> clusterNodeLookup) {
assertEquals(1, movedCounter.incrementAndGet());
return new SearchPhase("test") {
@Override
protected void run() {
latch.countDown();
}
};
}
@Override
protected void onFirstPhaseResult(int shardId, SearchAsyncActionTests.TestSearchPhaseResult result) {
results.setOnce(shardId, result);
}
};
action.run();
latch.await();
ShardSearchFailure[] shardSearchFailures = action.buildShardFailures();
assertEquals(1, shardSearchFailures.length);
assertThat(shardSearchFailures[0].reason(), containsString("IllegalArgumentException: BOOM on shard"));
SearchContextIdForNode[] context = scrollId.getContext();
for (int i = 0; i < results.length(); i++) {
if (context[i].getSearchContextId().getId() == 17) {
assertNull(results.get(i));
} else {
assertNotNull(results.get(i));
assertEquals(context[i].getSearchContextId(), results.get(i).getContextId());
assertEquals(context[i].getNode(), results.get(i).node.getId());
}
}
}
public void testAllShardsFailed() throws InterruptedException {
ParsedScrollId scrollId = getParsedScrollId(
new SearchContextIdForNode(null, "node1", new ShardSearchContextId("", 1)),
new SearchContextIdForNode(null, "node2", new ShardSearchContextId("", 2)),
new SearchContextIdForNode(null, "node3", new ShardSearchContextId("", 17)),
new SearchContextIdForNode(null, "node1", new ShardSearchContextId("", 0)),
new SearchContextIdForNode(null, "node3", new ShardSearchContextId("", 0))
);
DiscoveryNodes discoveryNodes = DiscoveryNodes.builder()
.add(DiscoveryNodeUtils.create("node1"))
.add(DiscoveryNodeUtils.create("node2"))
.add(DiscoveryNodeUtils.create("node3"))
.build();
AtomicArray<SearchAsyncActionTests.TestSearchPhaseResult> results = new AtomicArray<>(scrollId.getContext().length);
SearchScrollRequest request = new SearchScrollRequest();
request.scroll(TimeValue.timeValueMinutes(1));
CountDownLatch latch = new CountDownLatch(1);
ActionListener<SearchResponse> listener = new ActionListener<SearchResponse>() {
@Override
public void onResponse(SearchResponse o) {
try {
fail("got a result");
} finally {
latch.countDown();
}
}
@Override
public void onFailure(Exception e) {
try {
assertTrue(e instanceof SearchPhaseExecutionException);
SearchPhaseExecutionException ex = (SearchPhaseExecutionException) e;
assertEquals("BOOM on shard", ex.getCause().getMessage());
assertEquals("query", ex.getPhaseName());
assertEquals("all shards failed", ex.getMessage());
} finally {
latch.countDown();
}
}
};
SearchScrollAsyncAction<SearchAsyncActionTests.TestSearchPhaseResult> action = new SearchScrollAsyncAction<
SearchAsyncActionTests.TestSearchPhaseResult>(scrollId, logger, discoveryNodes, listener, request, null) {
@Override
protected void executeInitialPhase(
Transport.Connection connection,
InternalScrollSearchRequest internalRequest,
ActionListener<SearchAsyncActionTests.TestSearchPhaseResult> searchActionListener
) {
new Thread(() -> searchActionListener.onFailure(new IllegalArgumentException("BOOM on shard"))).start();
}
@Override
protected Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) {
return new SearchAsyncActionTests.MockConnection(node);
}
@Override
protected SearchPhase moveToNextPhase(BiFunction<String, String, DiscoveryNode> clusterNodeLookup) {
fail("don't move all shards failed");
return null;
}
@Override
protected void onFirstPhaseResult(int shardId, SearchAsyncActionTests.TestSearchPhaseResult result) {
results.setOnce(shardId, result);
}
};
action.run();
latch.await();
SearchContextIdForNode[] context = scrollId.getContext();
ShardSearchFailure[] shardSearchFailures = action.buildShardFailures();
assertEquals(context.length, shardSearchFailures.length);
assertThat(shardSearchFailures[0].reason(), containsString("IllegalArgumentException: BOOM on shard"));
for (int i = 0; i < results.length(); i++) {
assertNull(results.get(i));
}
}
private static ParsedScrollId getParsedScrollId(SearchContextIdForNode... idsForNodes) {
List<SearchContextIdForNode> searchContextIdForNodes = Arrays.asList(idsForNodes);
Collections.shuffle(searchContextIdForNodes, random());
return new ParsedScrollId("test", searchContextIdForNodes.toArray(new SearchContextIdForNode[0]));
}
private ActionListener<SearchResponse> dummyListener() {
return ActionTestUtils.assertNoFailureListener(response -> fail("dummy"));
}
}
|
SearchScrollAsyncActionTests
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/CacheStats.java
|
{
"start": 1899,
"end": 2442
}
|
class ____ {
private final long osPageSize = NativeIO.POSIX.getCacheManipulator()
.getOperatingSystemPageSize();
/**
* Round up a number to the operating system page size.
*/
public long roundUp(long count) {
return (count + osPageSize - 1) & (~(osPageSize - 1));
}
/**
* Round down a number to the operating system page size.
*/
public long roundDown(long count) {
return count & (~(osPageSize - 1));
}
}
/**
* Counts used bytes for memory.
*/
private
|
PageRounder
|
java
|
quarkusio__quarkus
|
extensions/narayana-lra/runtime/src/main/java/io/quarkus/narayana/lra/runtime/LRAConfiguration.java
|
{
"start": 409,
"end": 1375
}
|
interface ____ {
/**
* The REST endpoint on which a coordinator is running.
* In order for an LRA to begin and end successfully and in order to
* join with an existing LRA, this coordinator must be available
* whenever a service method annotated with @LRA is invoked.
* <p>
* In this version of the extension, a failed coordinator with
* LRAs that have not yet finished must be restarted.
*/
@WithDefault("http://localhost:50000/lra-coordinator")
String coordinatorURL();
/**
* The base URI override for this participant service. This is useful when
* the service runs behind a reverse proxy or load balancer, and the
* coordinator can bypass the proxy with direct access to the service.
* <p>
* The coordinator will use this base URI to call the participant service
* to append complete, compensate, status, etc. endpoints.
*/
Optional<String> baseUri();
}
|
LRAConfiguration
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest-jackson/deployment/src/main/java/io/quarkus/resteasy/reactive/jackson/deployment/processor/JacksonCodeGenerator.java
|
{
"start": 1483,
"end": 6262
}
|
class ____ {
protected final BuildProducer<GeneratedClassBuildItem> generatedClassBuildItemBuildProducer;
protected final IndexView jandexIndex;
protected final Set<String> generatedClassNames = new HashSet<>();
protected final Deque<ClassInfo> toBeGenerated = new ArrayDeque<>();
public JacksonCodeGenerator(BuildProducer<GeneratedClassBuildItem> generatedClassBuildItemBuildProducer,
IndexView jandexIndex) {
this.generatedClassBuildItemBuildProducer = generatedClassBuildItemBuildProducer;
this.jandexIndex = jandexIndex;
}
protected abstract String getSuperClassName();
protected String[] getInterfacesNames(ClassInfo classInfo) {
return new String[0];
}
protected abstract String getClassSuffix();
public Collection<String> create(Collection<ClassInfo> classInfos) {
Set<String> createdClasses = new HashSet<>();
toBeGenerated.addAll(classInfos);
while (!toBeGenerated.isEmpty()) {
create(toBeGenerated.removeFirst()).ifPresent(createdClasses::add);
}
return createdClasses;
}
private Optional<String> create(ClassInfo classInfo) {
String beanClassName = classInfo.name().toString();
if (vetoedClass(classInfo, beanClassName) || !generatedClassNames.add(beanClassName)) {
return Optional.empty();
}
String generatedClassName = beanClassName + getClassSuffix();
try (ClassCreator classCreator = new ClassCreator(
new GeneratedClassGizmoAdaptor(generatedClassBuildItemBuildProducer, true), generatedClassName, null,
getSuperClassName(), getInterfacesNames(classInfo))) {
createConstructor(classCreator, beanClassName);
boolean valid = createSerializationMethod(classInfo, classCreator, beanClassName);
return valid ? Optional.of(generatedClassName) : Optional.empty();
}
}
private void createConstructor(ClassCreator classCreator, String beanClassName) {
MethodCreator constructor = classCreator.getConstructorCreator(new String[0]);
constructor.invokeSpecialMethod(
MethodDescriptor.ofConstructor(getSuperClassName(), "java.lang.Class"),
constructor.getThis(), constructor.loadClass(beanClassName));
constructor.returnVoid();
}
protected abstract boolean createSerializationMethod(ClassInfo classInfo, ClassCreator classCreator, String beanClassName);
protected Collection<FieldInfo> classFields(ClassInfo classInfo) {
Collection<FieldInfo> fields = new ArrayList<>();
classFields(classInfo, fields);
return fields;
}
protected void classFields(ClassInfo classInfo, Collection<FieldInfo> fields) {
fields.addAll(classInfo.fields());
onSuperClass(classInfo, superClassInfo -> {
classFields(superClassInfo, fields);
return null;
});
}
protected <T> T onSuperClass(ClassInfo classInfo, Function<ClassInfo, T> f) {
Type superType = classInfo.superClassType();
if (superType != null && !vetoedClassName(superType.name().toString())) {
ClassInfo superClassInfo = jandexIndex.getClassByName(superType.name());
if (superClassInfo != null) {
return f.apply(superClassInfo);
}
}
return null;
}
protected Collection<MethodInfo> classMethods(ClassInfo classInfo) {
Collection<MethodInfo> methods = new ArrayList<>();
classMethods(classInfo, methods);
return methods;
}
private void classMethods(ClassInfo classInfo, Collection<MethodInfo> methods) {
methods.addAll(classInfo.methods());
onSuperClass(classInfo, superClassInfo -> {
classMethods(superClassInfo, methods);
return null;
});
}
protected MethodInfo findMethod(ClassInfo classInfo, String methodName, Type... parameters) {
MethodInfo method = classInfo.method(methodName, parameters);
return method != null ? method
: onSuperClass(classInfo, superClassInfo -> findMethod(superClassInfo, methodName, parameters));
}
protected static String ucFirst(String name) {
return name.substring(0, 1).toUpperCase() + name.substring(1);
}
protected static boolean vetoedClass(ClassInfo classInfo, String className) {
return classInfo.isAbstract() || classInfo.isInterface() || vetoedClassName(className);
}
private static boolean vetoedClassName(String className) {
return className.startsWith("java.") || className.startsWith("jakarta.") || className.startsWith("io.vertx.core.json.");
}
protected
|
JacksonCodeGenerator
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestAppManager.java
|
{
"start": 10721,
"end": 88993
}
|
class ____ implements
EventHandler<RMAppEvent> {
public TestDispatcher() {
}
@Override
public void handle(RMAppEvent event) {
//RMApp rmApp = this.rmContext.getRMApps().get(appID);
setAppEventType(event.getType());
System.out.println("in handle routine " + getAppEventType().toString());
}
}
protected void addToCompletedApps(TestRMAppManager appMonitor, RMContext rmContext) {
for (RMApp app : rmContext.getRMApps().values()) {
if (app.getState() == RMAppState.FINISHED
|| app.getState() == RMAppState.KILLED
|| app.getState() == RMAppState.FAILED) {
appMonitor.finishApplication(app.getApplicationId());
}
}
}
private RMContext rmContext;
private SystemMetricsPublisher metricsPublisher;
private TestRMAppManager appMonitor;
private ApplicationSubmissionContext asContext;
private ApplicationId appId;
private QueueInfo mockDefaultQueueInfo;
@SuppressWarnings("deprecation")
@BeforeEach
public void setUp() throws IOException {
long now = System.currentTimeMillis();
rmContext = mockRMContext(1, now - 10);
rmContext
.setRMTimelineCollectorManager(mock(RMTimelineCollectorManager.class));
if (shouldUseCs.useCapacityScheduler()) {
scheduler = mockResourceScheduler(CapacityScheduler.class);
} else {
scheduler = mockResourceScheduler();
}
((RMContextImpl)rmContext).setScheduler(scheduler);
Configuration conf = new Configuration();
conf.setBoolean(YarnConfiguration.NODE_LABELS_ENABLED, true);
((RMContextImpl) rmContext).setYarnConfiguration(conf);
ApplicationMasterService masterService =
new ApplicationMasterService(rmContext, scheduler);
appMonitor = new TestRMAppManager(rmContext,
new ClientToAMTokenSecretManagerInRM(), scheduler, masterService,
new ApplicationACLsManager(conf), conf);
appId = MockApps.newAppID(1);
RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
asContext =
recordFactory.newRecordInstance(ApplicationSubmissionContext.class);
asContext.setApplicationId(appId);
asContext.setAMContainerSpec(mockContainerLaunchContext(recordFactory));
asContext.setResource(mockResource());
asContext.setPriority(Priority.newInstance(0));
asContext.setQueue("default");
mockDefaultQueueInfo = mock(QueueInfo.class);
when(scheduler.getQueueInfo("default", false, false))
.thenReturn(mockDefaultQueueInfo);
setupDispatcher(rmContext, conf);
}
public static PlacementManager createMockPlacementManager(
String userRegex, String placementQueue, String placementParentQueue
) throws YarnException {
PlacementManager placementMgr = mock(PlacementManager.class);
doAnswer(new Answer<ApplicationPlacementContext>() {
@Override
public ApplicationPlacementContext answer(InvocationOnMock invocation)
throws Throwable {
return new ApplicationPlacementContext(placementQueue, placementParentQueue);
}
}).when(placementMgr).placeApplication(
any(ApplicationSubmissionContext.class),
matches(userRegex),
any(Boolean.class));
return placementMgr;
}
private TestRMAppManager createAppManager(RMContext context, Configuration configuration) {
ApplicationMasterService masterService = new ApplicationMasterService(context,
context.getScheduler());
return new TestRMAppManager(context,
new ClientToAMTokenSecretManagerInRM(),
context.getScheduler(), masterService,
new ApplicationACLsManager(configuration), configuration);
}
@Test
public void testQueueSubmitWithACLsEnabledWithQueueMapping()
throws YarnException {
YarnConfiguration conf = createYarnACLEnabledConfiguration();
CapacitySchedulerConfiguration csConf = new
CapacitySchedulerConfiguration(conf, false);
csConf.set(PREFIX + "root.queues", "default,test");
csConf.setCapacity(DEFAULT, 50.0f);
csConf.setMaximumCapacity(DEFAULT, 100.0f);
csConf.setCapacity(TEST, 50.0f);
csConf.setMaximumCapacity(TEST, 100.0f);
csConf.set(PREFIX + "root.acl_submit_applications", " ");
csConf.set(PREFIX + "root.acl_administer_queue", " ");
csConf.set(PREFIX + "root.default.acl_submit_applications", " ");
csConf.set(PREFIX + "root.default.acl_administer_queue", " ");
csConf.set(PREFIX + "root.test.acl_submit_applications", "test");
csConf.set(PREFIX + "root.test.acl_administer_queue", "test");
MockRM newMockRM = new MockRM(csConf);
RMContext newMockRMContext = newMockRM.getRMContext();
newMockRMContext.setQueuePlacementManager(
createMockPlacementManager("test", "root.test", null));
TestRMAppManager newAppMonitor = createAppManager(newMockRMContext, conf);
ApplicationSubmissionContext submission = createAppSubmissionContext(MockApps.newAppID(1));
submission.setQueue("oldQueue");
verifyAppSubmission(submission,
newAppMonitor,
newMockRMContext,
"test",
"root.test");
verifyAppSubmissionFailure(newAppMonitor,
createAppSubmissionContext(MockApps.newAppID(2)),
"test1");
}
@Test
public void testQueueSubmitWithLeafQueueName()
throws YarnException {
YarnConfiguration conf = createYarnACLEnabledConfiguration();
CapacitySchedulerConfiguration csConf = new
CapacitySchedulerConfiguration(conf, false);
csConf.set(PREFIX + "root.queues", "default,test");
csConf.setCapacity(DEFAULT, 50.0f);
csConf.setMaximumCapacity(DEFAULT, 100.0f);
csConf.setCapacity(TEST, 50.0f);
csConf.setMaximumCapacity(TEST, 100.0f);
MockRM newMockRM = new MockRM(csConf);
RMContext newMockRMContext = newMockRM.getRMContext();
TestRMAppManager newAppMonitor = createAppManager(newMockRMContext, conf);
ApplicationSubmissionContext submission = createAppSubmissionContext(MockApps.newAppID(1));
submission.setQueue("test");
verifyAppSubmission(submission,
newAppMonitor,
newMockRMContext,
"test",
"root.test");
}
@Test
public void testQueueSubmitWithACLsEnabledWithQueueMappingForLegacyAutoCreatedQueue()
throws IOException, YarnException {
YarnConfiguration conf = createYarnACLEnabledConfiguration();
CapacitySchedulerConfiguration csConf = new CapacitySchedulerConfiguration(
conf, false);
csConf.set(PREFIX + "root.queues", "default,managedparent");
csConf.setCapacity(DEFAULT, 50.0f);
csConf.setMaximumCapacity(DEFAULT, 100.0f);
csConf.setCapacity(MANAGED_PARENT, 50.0f);
csConf.setMaximumCapacity(MANAGED_PARENT, 100.0f);
csConf.set(PREFIX + "root.acl_submit_applications", " ");
csConf.set(PREFIX + "root.acl_administer_queue", " ");
csConf.set(PREFIX + "root.default.acl_submit_applications", " ");
csConf.set(PREFIX + "root.default.acl_administer_queue", " ");
csConf.set(PREFIX + "root.managedparent.acl_administer_queue", "admin");
csConf.set(PREFIX + "root.managedparent.acl_submit_applications", "user1");
csConf.setAutoCreateChildQueueEnabled(MANAGED_PARENT, true);
csConf.setAutoCreatedLeafQueueConfigCapacity(MANAGED_PARENT, 30f);
csConf.setAutoCreatedLeafQueueConfigMaxCapacity(MANAGED_PARENT, 100f);
MockRM newMockRM = new MockRM(csConf);
CapacityScheduler cs =
((CapacityScheduler) newMockRM.getResourceScheduler());
ManagedParentQueue managedParentQueue = new ManagedParentQueue(cs.getQueueContext(),
"managedparent", cs.getQueue("root"), null);
cs.getCapacitySchedulerQueueManager().addQueue("managedparent",
managedParentQueue);
RMContext newMockRMContext = newMockRM.getRMContext();
newMockRMContext.setQueuePlacementManager(createMockPlacementManager(
"user1|user2", "user1", "root.managedparent"));
TestRMAppManager newAppMonitor = createAppManager(newMockRMContext, conf);
ApplicationSubmissionContext submission = createAppSubmissionContext(MockApps.newAppID(1));
submission.setQueue("oldQueue");
verifyAppSubmission(submission,
newAppMonitor,
newMockRMContext,
"user1",
"root.managedparent.user1");
verifyAppSubmissionFailure(newAppMonitor,
createAppSubmissionContext(MockApps.newAppID(2)),
"user2");
}
@Test
public void testLegacyAutoCreatedQueuesWithACLTemplates()
throws IOException, YarnException {
YarnConfiguration conf = createYarnACLEnabledConfiguration();
CapacitySchedulerConfiguration csConf = new CapacitySchedulerConfiguration(
conf, false);
QueuePath parentQueuePath = new QueuePath("root.parent");
QueuePath user1QueuePath = new QueuePath("root.parent.user1");
QueuePath user2QueuePath = new QueuePath("root.parent.user2");
csConf.set(PREFIX + "root.queues", "parent");
csConf.set(PREFIX + "root.acl_submit_applications", " ");
csConf.set(PREFIX + "root.acl_administer_queue", " ");
csConf.setCapacity(parentQueuePath, 100.0f);
csConf.set(PREFIX + "root.parent.acl_administer_queue", "user1,user4");
csConf.set(PREFIX + "root.parent.acl_submit_applications", "user1,user4");
csConf.setAutoCreateChildQueueEnabled(parentQueuePath, true);
csConf.setAutoCreatedLeafQueueConfigCapacity(parentQueuePath, 50f);
csConf.setAutoCreatedLeafQueueConfigMaxCapacity(parentQueuePath, 100f);
String autoCreatedQueuePrefix =
getAutoCreatedQueueTemplateConfPrefix(parentQueuePath);
QueuePath autoCreatedQueuePath = new QueuePath(autoCreatedQueuePrefix);
csConf.set(getQueuePrefix(autoCreatedQueuePath) + "acl_administer_queue", "user2,user4");
csConf.set(getQueuePrefix(autoCreatedQueuePath) + "acl_submit_applications", "user2,user4");
MockRM newMockRM = new MockRM(csConf);
RMContext newMockRMContext = newMockRM.getRMContext();
TestRMAppManager newAppMonitor = createAppManager(newMockRMContext, conf);
// user1 has permission on root.parent so a queue would be created
newMockRMContext.setQueuePlacementManager(createMockPlacementManager(
"user1", "user1", parentQueuePath.getFullPath()));
verifyAppSubmission(createAppSubmissionContext(MockApps.newAppID(1)),
newAppMonitor,
newMockRMContext,
"user1",
user1QueuePath.getFullPath());
newMockRMContext.setQueuePlacementManager(createMockPlacementManager(
"user1|user2|user3|user4", "user2", parentQueuePath.getFullPath()));
// user2 has permission (due to ACL templates)
verifyAppSubmission(createAppSubmissionContext(MockApps.newAppID(2)),
newAppMonitor,
newMockRMContext,
"user2",
user2QueuePath.getFullPath());
// user3 doesn't have permission
verifyAppSubmissionFailure(newAppMonitor,
createAppSubmissionContext(MockApps.newAppID(3)),
"user3");
// user4 has permission on root.parent
verifyAppSubmission(createAppSubmissionContext(MockApps.newAppID(4)),
newAppMonitor,
newMockRMContext,
"user4",
user2QueuePath.getFullPath());
// create the root.parent.user2 manually
CapacityScheduler cs =
((CapacityScheduler) newMockRM.getResourceScheduler());
cs.getCapacitySchedulerQueueManager().createQueue(user2QueuePath);
AutoCreatedLeafQueue autoCreatedLeafQueue = (AutoCreatedLeafQueue) cs.getQueue("user2");
assertNotNull(autoCreatedLeafQueue, "Auto Creation of Queue failed");
ManagedParentQueue parentQueue = (ManagedParentQueue) cs.getQueue("parent");
assertEquals(parentQueue, autoCreatedLeafQueue.getParent());
// reinitialize to load the ACLs for the queue
cs.reinitialize(csConf, newMockRMContext);
// template ACLs do work after reinitialize
verifyAppSubmission(createAppSubmissionContext(MockApps.newAppID(5)),
newAppMonitor,
newMockRMContext,
"user2",
user2QueuePath.getFullPath());
// user3 doesn't have permission for root.parent.user2 queue
verifyAppSubmissionFailure(newAppMonitor,
createAppSubmissionContext(MockApps.newAppID(6)),
"user3");
// user1 doesn't have permission for root.parent.user2 queue, but it has for root.parent
verifyAppSubmission(createAppSubmissionContext(MockApps.newAppID(7)),
newAppMonitor,
newMockRMContext,
"user1",
user2QueuePath.getFullPath());
}
  /**
   * Flexible auto-queue-creation with SPECIALIZED templates: a parent-only
   * template directly under root.parent (user2) plus a wildcard leaf-only
   * template one level deeper (user3). Verifies that template ACLs govern who
   * may trigger dynamic queue creation, and that the ACLs survive a
   * scheduler reinitialize once the queues exist.
   */
  @Test
  public void testFlexibleAutoCreatedQueuesWithSpecializedACLTemplatesAndDynamicParentQueue()
      throws IOException, YarnException {
    YarnConfiguration conf = createYarnACLEnabledConfiguration();
    CapacitySchedulerConfiguration csConf = createFlexibleAQCBaseACLConfiguration(conf);
    // Parent-only template under root.parent: grants user2 ACLs on
    // dynamically created PARENT queues (no leaf template at this level).
    csConf.set(ROOT_PARENT_PATH + AUTO_QUEUE_PARENT_TEMPLATE_PREFIX + "capacity",
        "1w");
    csConf.set(ROOT_PARENT_PATH + AUTO_QUEUE_PARENT_TEMPLATE_PREFIX + "acl_administer_queue",
        "user2");
    csConf.set(ROOT_PARENT_PATH + AUTO_QUEUE_PARENT_TEMPLATE_PREFIX + "acl_submit_applications",
        "user2");
    // Wildcard leaf template one level below root.parent: grants user3 ACLs
    // on dynamically created LEAF queues.
    csConf.set(ROOT_PARENT_PATH + "*." + AUTO_QUEUE_LEAF_TEMPLATE_PREFIX + "capacity",
        "1w");
    csConf.set(ROOT_PARENT_PATH + "*." + AUTO_QUEUE_LEAF_TEMPLATE_PREFIX + "acl_administer_queue",
        "user3");
    csConf.set(ROOT_PARENT_PATH + "*." + AUTO_QUEUE_LEAF_TEMPLATE_PREFIX
        + "acl_submit_applications", "user3");

    MockRM newMockRM = new MockRM(csConf);
    RMContext newMockRMContext = newMockRM.getRMContext();
    TestRMAppManager newAppMonitor = createAppManager(newMockRMContext, conf);

    // user1 has permission on root.parent so a queue would be created
    newMockRMContext.setQueuePlacementManager(createMockPlacementManager(
        "user1", "user1", "root.parent"));
    verifyAppSubmission(createAppSubmissionContext(MockApps.newAppID(1)),
        newAppMonitor,
        newMockRMContext,
        "user1",
        "root.parent.user1");

    // user2 doesn't have permission to create a dynamic leaf queue (parent only template)
    newMockRMContext.setQueuePlacementManager(createMockPlacementManager(
        "user2", "user2", "root.parent"));
    verifyAppSubmissionFailure(newAppMonitor,
        createAppSubmissionContext(MockApps.newAppID(2)),
        "user2");

    // user3 has permission on root.parent.user2.user3 due to ACL templates
    newMockRMContext.setQueuePlacementManager(createMockPlacementManager(
        "user3", "user3", "root.parent.user2"));
    verifyAppSubmission(createAppSubmissionContext(MockApps.newAppID(3)),
        newAppMonitor,
        newMockRMContext,
        "user3",
        "root.parent.user2.user3");

    // user4 doesn't have permission
    newMockRMContext.setQueuePlacementManager(createMockPlacementManager(
        "user4", "user4", "root.parent.user2"));
    verifyAppSubmissionFailure(newAppMonitor,
        createAppSubmissionContext(MockApps.newAppID(4)),
        "user4");

    // create the root.parent.user2.user3 manually
    CapacityScheduler cs =
        ((CapacityScheduler) newMockRM.getResourceScheduler());
    cs.getCapacitySchedulerQueueManager().createQueue(new QueuePath("root.parent.user2.user3"));

    // Verify the dynamic hierarchy parent -> user2 -> user3 was materialized.
    ParentQueue autoCreatedParentQueue = (ParentQueue) cs.getQueue("user2");
    assertNotNull(autoCreatedParentQueue, "Auto Creation of Queue failed");
    ParentQueue parentQueue = (ParentQueue) cs.getQueue("parent");
    assertEquals(parentQueue, autoCreatedParentQueue.getParent());

    LeafQueue autoCreatedLeafQueue = (LeafQueue) cs.getQueue("user3");
    assertNotNull(autoCreatedLeafQueue, "Auto Creation of Queue failed");
    assertEquals(autoCreatedParentQueue, autoCreatedLeafQueue.getParent());

    // reinitialize to load the ACLs for the queue
    cs.reinitialize(csConf, newMockRMContext);

    // template ACLs do work after reinitialize
    newMockRMContext.setQueuePlacementManager(createMockPlacementManager(
        "user3", "user3", "root.parent.user2"));
    verifyAppSubmission(createAppSubmissionContext(MockApps.newAppID(5)),
        newAppMonitor,
        newMockRMContext,
        "user3",
        "root.parent.user2.user3");

    // user4 doesn't have permission
    newMockRMContext.setQueuePlacementManager(createMockPlacementManager(
        "user4", "user4", "root.parent.user2"));
    verifyAppSubmissionFailure(newAppMonitor,
        createAppSubmissionContext(MockApps.newAppID(6)),
        "user4");
  }
@Test
public void testFlexibleAutoCreatedQueuesWithMixedCommonLeafACLTemplatesAndDynamicParentQueue()
throws IOException, YarnException {
YarnConfiguration conf = createYarnACLEnabledConfiguration();
CapacitySchedulerConfiguration csConf = createFlexibleAQCBaseACLConfiguration(conf);
csConf.set(ROOT_PARENT_PATH + AUTO_QUEUE_TEMPLATE_PREFIX + "capacity",
"1w");
csConf.set(ROOT_PARENT_PATH + AUTO_QUEUE_TEMPLATE_PREFIX + "acl_administer_queue",
"user2");
csConf.set(ROOT_PARENT_PATH + AUTO_QUEUE_TEMPLATE_PREFIX + "acl_submit_applications",
"user2");
csConf.set(ROOT_PARENT_PATH + "*." + AUTO_QUEUE_LEAF_TEMPLATE_PREFIX + "capacity",
"1w");
csConf.set(ROOT_PARENT_PATH + "*." + AUTO_QUEUE_LEAF_TEMPLATE_PREFIX + "acl_administer_queue",
"user3");
csConf.set(ROOT_PARENT_PATH + "*." + AUTO_QUEUE_LEAF_TEMPLATE_PREFIX
+ "acl_submit_applications", "user3");
testFlexibleAQCDWithMixedTemplatesDynamicParentACLScenario(conf, csConf);
}
@Test
public void testFlexibleAutoCreatedQueuesWithMixedCommonCommonACLTemplatesAndDynamicParentQueue()
throws IOException, YarnException {
YarnConfiguration conf = createYarnACLEnabledConfiguration();
CapacitySchedulerConfiguration csConf = createFlexibleAQCBaseACLConfiguration(conf);
csConf.set(ROOT_PARENT_PATH + AUTO_QUEUE_TEMPLATE_PREFIX + "capacity",
"1w");
csConf.set(ROOT_PARENT_PATH + AUTO_QUEUE_TEMPLATE_PREFIX + "acl_administer_queue",
"user2");
csConf.set(ROOT_PARENT_PATH + AUTO_QUEUE_TEMPLATE_PREFIX + "acl_submit_applications",
"user2");
csConf.set(ROOT_PARENT_PATH + "*." + AUTO_QUEUE_TEMPLATE_PREFIX + "capacity",
"1w");
csConf.set(ROOT_PARENT_PATH + "*." + AUTO_QUEUE_TEMPLATE_PREFIX + "acl_administer_queue",
"user3");
csConf.set(ROOT_PARENT_PATH + "*." + AUTO_QUEUE_TEMPLATE_PREFIX + "acl_submit_applications",
"user3");
testFlexibleAQCDWithMixedTemplatesDynamicParentACLScenario(conf, csConf);
}
  /**
   * Shared scenario for the mixed-template tests: with templates on both
   * levels below root.parent, verifies that user1/user2 can create
   * first-level dynamic queues, user3 can create a second-level dynamic
   * parent+leaf pair, user4 is denied, and that the template ACLs still hold
   * after the queues are materialized and the scheduler is reinitialized.
   *
   * @param conf   YARN configuration with ACLs enabled
   * @param csConf CapacityScheduler configuration carrying the templates
   */
  private void testFlexibleAQCDWithMixedTemplatesDynamicParentACLScenario(
      YarnConfiguration conf, CapacitySchedulerConfiguration csConf)
      throws YarnException, IOException {
    MockRM newMockRM = new MockRM(csConf);
    RMContext newMockRMContext = newMockRM.getRMContext();
    TestRMAppManager newAppMonitor = createAppManager(newMockRMContext, conf);

    // user1 has permission on root.parent so a queue would be created
    newMockRMContext.setQueuePlacementManager(createMockPlacementManager(
        "user1", "user1", "root.parent"));
    verifyAppSubmission(createAppSubmissionContext(MockApps.newAppID(1)),
        newAppMonitor,
        newMockRMContext,
        "user1",
        "root.parent.user1");

    // user2 has permission on root.parent a dynamic leaf queue would be created
    newMockRMContext.setQueuePlacementManager(createMockPlacementManager(
        "user2", "user2", "root.parent"));
    verifyAppSubmission(createAppSubmissionContext(MockApps.newAppID(2)),
        newAppMonitor,
        newMockRMContext,
        "user2",
        "root.parent.user2");

    // user3 has permission on root.parent.user2.user3 a dynamic parent and leaf queue
    // would be created
    newMockRMContext.setQueuePlacementManager(createMockPlacementManager(
        "user3", "user3", "root.parent.user2"));
    verifyAppSubmission(createAppSubmissionContext(MockApps.newAppID(3)),
        newAppMonitor,
        newMockRMContext,
        "user3",
        "root.parent.user2.user3");

    // user4 doesn't have permission
    newMockRMContext.setQueuePlacementManager(createMockPlacementManager(
        "user4", "user4", "root.parent.user2"));
    verifyAppSubmissionFailure(newAppMonitor,
        createAppSubmissionContext(MockApps.newAppID(4)),
        "user4");

    // create the root.parent.user2.user3 manually
    CapacityScheduler cs =
        ((CapacityScheduler) newMockRM.getResourceScheduler());
    cs.getCapacitySchedulerQueueManager().createQueue(new QueuePath("root.parent.user2.user3"));

    // Verify the dynamic chain parent -> user2 -> user3 exists as expected.
    ParentQueue autoCreatedParentQueue = (ParentQueue) cs.getQueue("user2");
    assertNotNull(autoCreatedParentQueue, "Auto Creation of Queue failed");
    ParentQueue parentQueue = (ParentQueue) cs.getQueue("parent");
    assertEquals(parentQueue, autoCreatedParentQueue.getParent());

    LeafQueue autoCreatedLeafQueue = (LeafQueue) cs.getQueue("user3");
    assertNotNull(autoCreatedLeafQueue, "Auto Creation of Queue failed");
    assertEquals(autoCreatedParentQueue, autoCreatedLeafQueue.getParent());

    // reinitialize to load the ACLs for the queue
    cs.reinitialize(csConf, newMockRMContext);

    // template ACLs do work after reinitialize
    newMockRMContext.setQueuePlacementManager(createMockPlacementManager(
        "user3", "user3", "root.parent.user2"));
    verifyAppSubmission(createAppSubmissionContext(MockApps.newAppID(5)),
        newAppMonitor,
        newMockRMContext,
        "user3",
        "root.parent.user2.user3");

    // user4 doesn't have permission
    newMockRMContext.setQueuePlacementManager(createMockPlacementManager(
        "user4", "user4", "root.parent.user2"));
    verifyAppSubmissionFailure(newAppMonitor,
        createAppSubmissionContext(MockApps.newAppID(6)),
        "user4");
  }
@Test
public void testFlexibleAutoCreatedQueuesWithACLTemplatesALeafOnly()
throws IOException, YarnException {
YarnConfiguration conf = createYarnACLEnabledConfiguration();
CapacitySchedulerConfiguration csConf = createFlexibleAQCBaseACLConfiguration(conf);
csConf.set(ROOT_PARENT_PATH + AUTO_QUEUE_TEMPLATE_PREFIX + "capacity",
"1w");
csConf.set(ROOT_PARENT_PATH + AUTO_QUEUE_TEMPLATE_PREFIX + "acl_administer_queue",
"user2");
csConf.set(ROOT_PARENT_PATH + AUTO_QUEUE_TEMPLATE_PREFIX + "acl_submit_applications",
"user2");
testFlexibleAQCLeafOnly(conf, csConf);
}
@Test
public void testFlexibleAutoCreatedQueuesWithSpecialisedACLTemplatesALeafOnly()
throws IOException, YarnException {
YarnConfiguration conf = createYarnACLEnabledConfiguration();
CapacitySchedulerConfiguration csConf = createFlexibleAQCBaseACLConfiguration(conf);
csConf.set(ROOT_PARENT_PATH + AUTO_QUEUE_LEAF_TEMPLATE_PREFIX + "capacity",
"1w");
csConf.set(ROOT_PARENT_PATH + AUTO_QUEUE_LEAF_TEMPLATE_PREFIX + "acl_administer_queue",
"user2");
csConf.set(ROOT_PARENT_PATH + AUTO_QUEUE_LEAF_TEMPLATE_PREFIX + "acl_submit_applications",
"user2");
testFlexibleAQCLeafOnly(conf, csConf);
}
  /**
   * Shared scenario for the leaf-only template tests: user1 (parent ACL) and
   * user2 (template ACL) can trigger leaf creation under root.parent, user3
   * cannot; after the leaf is materialized manually and the scheduler is
   * reinitialized, the same ACLs still apply.
   *
   * @param conf   YARN configuration with ACLs enabled
   * @param csConf CapacityScheduler configuration carrying the leaf template
   */
  private void testFlexibleAQCLeafOnly(
      YarnConfiguration conf,
      CapacitySchedulerConfiguration csConf)
      throws YarnException, IOException {
    MockRM newMockRM = new MockRM(csConf);
    RMContext newMockRMContext = newMockRM.getRMContext();
    TestRMAppManager newAppMonitor = createAppManager(newMockRMContext, conf);

    // user1 has permission on root.parent so a queue would be created
    newMockRMContext.setQueuePlacementManager(createMockPlacementManager(
        "user1", "user1", "root.parent"));
    verifyAppSubmission(createAppSubmissionContext(MockApps.newAppID(1)),
        newAppMonitor,
        newMockRMContext,
        "user1",
        "root.parent.user1");

    // user2 has permission on root.parent.user2 due to ACL templates
    newMockRMContext.setQueuePlacementManager(createMockPlacementManager(
        "user2", "user2", "root.parent"));
    verifyAppSubmission(createAppSubmissionContext(MockApps.newAppID(2)),
        newAppMonitor,
        newMockRMContext,
        "user2",
        "root.parent.user2");

    // user3 doesn't have permission
    newMockRMContext.setQueuePlacementManager(createMockPlacementManager(
        "user3", "user3", "root.parent"));
    verifyAppSubmissionFailure(newAppMonitor,
        createAppSubmissionContext(MockApps.newAppID(3)),
        "user3");

    // create the root.parent.user2 manually
    CapacityScheduler cs =
        ((CapacityScheduler) newMockRM.getResourceScheduler());
    cs.getCapacitySchedulerQueueManager().createQueue(new QueuePath("root.parent.user2"));

    // Verify the leaf landed under root.parent.
    ParentQueue autoCreatedParentQueue = (ParentQueue) cs.getQueue("parent");
    LeafQueue autoCreatedLeafQueue = (LeafQueue) cs.getQueue("user2");
    assertNotNull(autoCreatedLeafQueue, "Auto Creation of Queue failed");
    assertEquals(autoCreatedParentQueue, autoCreatedLeafQueue.getParent());

    // reinitialize to load the ACLs for the queue
    cs.reinitialize(csConf, newMockRMContext);

    // template ACLs do work after reinitialize
    newMockRMContext.setQueuePlacementManager(createMockPlacementManager(
        "user2", "user2", "root.parent"));
    verifyAppSubmission(createAppSubmissionContext(MockApps.newAppID(4)),
        newAppMonitor,
        newMockRMContext,
        "user2",
        "root.parent.user2");

    // user3 doesn't have permission
    newMockRMContext.setQueuePlacementManager(createMockPlacementManager(
        "user3", "user3", "root.parent"));
    verifyAppSubmissionFailure(newAppMonitor,
        createAppSubmissionContext(MockApps.newAppID(5)),
        "user3");
  }
  /**
   * Same specialized-template scenario as the dynamic-parent test, but rooted
   * directly at root: a parent-only template on root (user2) plus a wildcard
   * leaf template on root.* (user3). Verifies who may trigger creation at
   * each level and that the ACLs survive a scheduler reinitialize.
   */
  @Test
  public void testFlexibleAutoCreatedQueuesWithSpecializedACLTemplatesAndDynamicRootParentQueue()
      throws IOException, YarnException {
    YarnConfiguration conf = createYarnACLEnabledConfiguration();

    CapacitySchedulerConfiguration csConf = new CapacitySchedulerConfiguration(
        conf, false);
    // No static children; only user1 may submit to root, admin1 administers.
    csConf.set(PREFIX + "root.queues", "");
    csConf.set(PREFIX + "root.acl_submit_applications", "user1");
    csConf.set(PREFIX + "root.acl_administer_queue", "admin1");
    csConf.setAutoQueueCreationV2Enabled(ROOT, true);

    // Parent-only template on root grants user2 dynamic-parent ACLs.
    csConf.set(PREFIX + "root." + AUTO_QUEUE_PARENT_TEMPLATE_PREFIX + "capacity",
        "1w");
    csConf.set(PREFIX + "root." + AUTO_QUEUE_PARENT_TEMPLATE_PREFIX + "acl_administer_queue",
        "user2");
    csConf.set(PREFIX + "root." + AUTO_QUEUE_PARENT_TEMPLATE_PREFIX + "acl_submit_applications",
        "user2");
    // Wildcard leaf template on root.* grants user3 dynamic-leaf ACLs.
    csConf.set(PREFIX + "root." + "*." + AUTO_QUEUE_LEAF_TEMPLATE_PREFIX + "capacity",
        "1w");
    csConf.set(PREFIX + "root." + "*." + AUTO_QUEUE_LEAF_TEMPLATE_PREFIX + "acl_administer_queue",
        "user3");
    csConf.set(PREFIX + "root." + "*." + AUTO_QUEUE_LEAF_TEMPLATE_PREFIX +
        "acl_submit_applications",
        "user3");

    MockRM newMockRM = new MockRM(csConf);
    RMContext newMockRMContext = newMockRM.getRMContext();
    TestRMAppManager newAppMonitor = createAppManager(newMockRMContext, conf);

    // user1 has permission on root so a queue would be created
    newMockRMContext.setQueuePlacementManager(createMockPlacementManager(
        "user1", "user1", "root"));
    verifyAppSubmission(createAppSubmissionContext(MockApps.newAppID(1)),
        newAppMonitor,
        newMockRMContext,
        "user1",
        "root.user1");

    // user2 doesn't have permission to create a dynamic leaf queue (parent only template)
    newMockRMContext.setQueuePlacementManager(createMockPlacementManager(
        "user2", "user2", "root"));
    verifyAppSubmissionFailure(newAppMonitor,
        createAppSubmissionContext(MockApps.newAppID(2)),
        "user2");

    // user3 has permission on root.user2.user3 due to ACL templates
    newMockRMContext.setQueuePlacementManager(createMockPlacementManager(
        "user3", "user3", "root.user2"));
    verifyAppSubmission(createAppSubmissionContext(MockApps.newAppID(3)),
        newAppMonitor,
        newMockRMContext,
        "user3",
        "root.user2.user3");

    // user4 doesn't have permission
    newMockRMContext.setQueuePlacementManager(createMockPlacementManager(
        "user4", "user4", "root.user2"));
    verifyAppSubmissionFailure(newAppMonitor,
        createAppSubmissionContext(MockApps.newAppID(4)),
        "user4");

    // create the root.user2.user3 manually
    CapacityScheduler cs =
        ((CapacityScheduler) newMockRM.getResourceScheduler());
    cs.getCapacitySchedulerQueueManager().createQueue(new QueuePath("root.user2.user3"));

    // Verify the dynamic chain root -> user2 -> user3 was materialized.
    ParentQueue autoCreatedParentQueue = (ParentQueue) cs.getQueue("user2");
    assertNotNull(autoCreatedParentQueue, "Auto Creation of Queue failed");
    ParentQueue parentQueue = (ParentQueue) cs.getQueue("root");
    assertEquals(parentQueue, autoCreatedParentQueue.getParent());

    LeafQueue autoCreatedLeafQueue = (LeafQueue) cs.getQueue("user3");
    assertNotNull(autoCreatedLeafQueue, "Auto Creation of Queue failed");
    assertEquals(autoCreatedParentQueue, autoCreatedLeafQueue.getParent());

    // reinitialize to load the ACLs for the queue
    cs.reinitialize(csConf, newMockRMContext);

    // template ACLs do work after reinitialize
    newMockRMContext.setQueuePlacementManager(createMockPlacementManager(
        "user3", "user3", "root.user2"));
    verifyAppSubmission(createAppSubmissionContext(MockApps.newAppID(5)),
        newAppMonitor,
        newMockRMContext,
        "user3",
        "root.user2.user3");

    // user4 doesn't have permission
    newMockRMContext.setQueuePlacementManager(createMockPlacementManager(
        "user4", "user4", "root.user2"));
    verifyAppSubmissionFailure(newAppMonitor,
        createAppSubmissionContext(MockApps.newAppID(6)),
        "user4");
  }
@Test
public void testFlexibleAutoCreatedQueuesMultiLevelDynamicParentACL()
throws IOException, YarnException {
YarnConfiguration conf = createYarnACLEnabledConfiguration();
CapacitySchedulerConfiguration csConf = new CapacitySchedulerConfiguration(
conf, false);
csConf.set(PREFIX + "root.queues", "");
csConf.set(PREFIX + "root.acl_submit_applications", "user1");
csConf.set(PREFIX + "root.acl_administer_queue", "admin1");
csConf.setAutoQueueCreationV2Enabled(ROOT, true);
csConf.set(PREFIX + "root." + AUTO_QUEUE_PARENT_TEMPLATE_PREFIX + "capacity",
"1w");
csConf.set(PREFIX + "root." + AUTO_QUEUE_PARENT_TEMPLATE_PREFIX + "acl_administer_queue",
"user2");
csConf.set(PREFIX + "root." + AUTO_QUEUE_PARENT_TEMPLATE_PREFIX + "acl_submit_applications",
"user2");
csConf.set(PREFIX + "root." + "user2.user3." + AUTO_QUEUE_LEAF_TEMPLATE_PREFIX + "capacity",
"1w");
csConf.set(PREFIX + "root." + "user2.user3." + AUTO_QUEUE_LEAF_TEMPLATE_PREFIX +
"acl_administer_queue",
"user3");
csConf.set(PREFIX + "root." + "user2.user3." + AUTO_QUEUE_LEAF_TEMPLATE_PREFIX +
"acl_submit_applications",
"user3");
csConf.setMaximumAutoCreatedQueueDepth(4);
MockRM newMockRM = new MockRM(csConf);
RMContext newMockRMContext = newMockRM.getRMContext();
TestRMAppManager newAppMonitor = createAppManager(newMockRMContext, conf);
// user3 has permission on root.user2.user3.queue due to ACL templates
newMockRMContext.setQueuePlacementManager(createMockPlacementManager(
"user3", "queue", "root.user2.user3"));
verifyAppSubmission(createAppSubmissionContext(MockApps.newAppID(1)),
newAppMonitor,
newMockRMContext,
"user3",
"root.user2.user3.queue");
// create the root.user2.user3.queue manually
CapacityScheduler cs =
((CapacityScheduler) newMockRM.getResourceScheduler());
cs.getCapacitySchedulerQueueManager().createQueue(new QueuePath("root.user2.user3.queue"));
ParentQueue autoCreatedParentQueue = (ParentQueue) cs.getQueue("user2");
assertNotNull(autoCreatedParentQueue, "Auto Creation of Queue failed");
ParentQueue parentQueue = (ParentQueue) cs.getQueue("root");
assertEquals(parentQueue, autoCreatedParentQueue.getParent());
ParentQueue autoCreatedParentQueue2 = (ParentQueue) cs.getQueue("user3");
assertNotNull(autoCreatedParentQueue2, "Auto Creation of Queue failed");
assertEquals(autoCreatedParentQueue, autoCreatedParentQueue2.getParent());
LeafQueue autoCreatedLeafQueue = (LeafQueue) cs.getQueue("queue");
assertNotNull(autoCreatedLeafQueue, "Auto Creation of Queue failed");
assertEquals(autoCreatedParentQueue, autoCreatedParentQueue2.getParent());
// reinitialize to load the ACLs for the queue
cs.reinitialize(csConf, newMockRMContext);
// template ACLs do work after reinitialize
verifyAppSubmission(createAppSubmissionContext(MockApps.newAppID(2)),
newAppMonitor,
newMockRMContext,
"user3",
"root.user2.user3.queue");
}
private YarnConfiguration createYarnACLEnabledConfiguration() {
YarnConfiguration conf = new YarnConfiguration(new Configuration(false));
conf.set(YarnConfiguration.YARN_ACL_ENABLE, "true");
conf.setClass(YarnConfiguration.RM_SCHEDULER, CapacityScheduler.class,
ResourceScheduler.class);
return conf;
}
private CapacitySchedulerConfiguration createFlexibleAQCBaseACLConfiguration(
YarnConfiguration conf) {
CapacitySchedulerConfiguration csConf = new CapacitySchedulerConfiguration(
conf, false);
csConf.set(PREFIX + "root.queues", "parent");
csConf.set(PREFIX + "root.acl_submit_applications", " ");
csConf.set(PREFIX + "root.acl_administer_queue", " ");
csConf.setCapacity(PARENT, "1w");
csConf.set(PREFIX + "root.parent.acl_administer_queue", "user1");
csConf.set(PREFIX + "root.parent.acl_submit_applications", "user1");
csConf.setAutoQueueCreationV2Enabled(PARENT, true);
return csConf;
}
private static void verifyAppSubmissionFailure(TestRMAppManager appManager,
ApplicationSubmissionContext submission,
String user) {
try {
appManager.submitApplication(submission, user);
fail(
String.format("should fail since %s does not have permission to submit to queue", user));
} catch (YarnException e) {
assertTrue(e.getCause() instanceof AccessControlException);
}
}
private static void verifyAppSubmission(ApplicationSubmissionContext submission,
TestRMAppManager appManager,
RMContext rmContext,
String user,
String expectedQueue) throws YarnException {
appManager.submitApplication(submission, user);
RMApp app = rmContext.getRMApps().get(submission.getApplicationId());
assertNotNull(app, "app should not be null");
assertEquals(expectedQueue, app.getQueue(),
String.format("the queue should be placed on '%s' queue", expectedQueue));
}
private static ApplicationSubmissionContext createAppSubmissionContext(ApplicationId id) {
RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
ApplicationSubmissionContext appSubmission =
recordFactory.newRecordInstance(ApplicationSubmissionContext.class);
appSubmission.setApplicationId(id);
appSubmission.setAMContainerSpec(mockContainerLaunchContext(recordFactory));
appSubmission.setResource(mockResource());
appSubmission.setPriority(Priority.newInstance(0));
appSubmission.setQueue("default");
return appSubmission;
}
@AfterEach
public void tearDown() {
setAppEventType(RMAppEventType.KILL);
((Service)rmContext.getDispatcher()).stop();
UserGroupInformation.reset();
}
@Test
public void testRMAppRetireNone() throws Exception {
long now = System.currentTimeMillis();
// Create such that none of the applications will retire since
// haven't hit max #
RMContext rmContext = mockRMContext(10, now - 10);
Configuration conf = new YarnConfiguration();
conf.setInt(YarnConfiguration.RM_MAX_COMPLETED_APPLICATIONS, 10);
TestRMAppManager appMonitor = new TestRMAppManager(rmContext,conf);
assertEquals(10, rmContext.getRMApps().size(),
"Number of apps incorrect before checkAppTimeLimit");
// add them to completed apps list
addToCompletedApps(appMonitor, rmContext);
// shouldn't have to many apps
appMonitor.checkAppNumCompletedLimit();
assertEquals(10, rmContext.getRMApps().size(),
"Number of apps incorrect after # completed check");
assertEquals(10, appMonitor.getCompletedAppsListSize(),
"Number of completed apps incorrect after check");
verify(rmContext.getStateStore(), never()).removeApplication(
isA(RMApp.class));
}
@Test
public void testQueueSubmitWithNoPermission() throws IOException {
YarnConfiguration conf = new YarnConfiguration();
conf.set(PREFIX + "root.acl_submit_applications", " ");
conf.set(PREFIX + "root.acl_administer_queue", " ");
conf.set(PREFIX + "root.default.acl_submit_applications", " ");
conf.set(PREFIX + "root.default.acl_administer_queue", " ");
conf.set(YarnConfiguration.YARN_ACL_ENABLE, "true");
MockRM mockRM = new MockRM(conf);
ClientRMService rmService = mockRM.getClientRMService();
SubmitApplicationRequest req =
Records.newRecord(SubmitApplicationRequest.class);
ApplicationSubmissionContext sub =
Records.newRecord(ApplicationSubmissionContext.class);
sub.setApplicationId(appId);
ResourceRequest resReg =
ResourceRequest.newInstance(Priority.newInstance(0),
ResourceRequest.ANY, Resource.newInstance(1024, 1), 1);
sub.setAMContainerResourceRequests(Collections.singletonList(resReg));
req.setApplicationSubmissionContext(sub);
sub.setAMContainerSpec(mock(ContainerLaunchContext.class));
try {
rmService.submitApplication(req);
} catch (Exception e) {
e.printStackTrace();
if (e instanceof YarnException) {
assertTrue(e.getCause() instanceof AccessControlException);
} else {
fail("Yarn exception is expected : " + e.getMessage());
}
} finally {
mockRM.close();
}
}
@Test
public void testRMAppRetireSome() throws Exception {
long now = System.currentTimeMillis();
RMContext rmContext = mockRMContext(10, now - 20000);
Configuration conf = new YarnConfiguration();
conf.setInt(YarnConfiguration.RM_STATE_STORE_MAX_COMPLETED_APPLICATIONS, 3);
conf.setInt(YarnConfiguration.RM_MAX_COMPLETED_APPLICATIONS, 3);
TestRMAppManager appMonitor = new TestRMAppManager(rmContext, conf);
assertEquals(10, rmContext
.getRMApps().size(), "Number of apps incorrect before");
// add them to completed apps list
addToCompletedApps(appMonitor, rmContext);
// shouldn't have to many apps
appMonitor.checkAppNumCompletedLimit();
assertEquals(3, rmContext.getRMApps().size(),
"Number of apps incorrect after # completed check");
assertEquals(3, appMonitor.getCompletedAppsListSize(),
"Number of completed apps incorrect after check");
verify(rmContext.getStateStore(), times(7)).removeApplication(
isA(RMApp.class));
}
  /**
   * Retirement must only consider applications in a FINAL state. 6 of the 10
   * apps below are final (KILLED/FAILED/FINISHED) and 4 are still active
   * (RUNNING/NEW/ACCEPTED/SUBMITTED); with a limit of 2, exactly 4 final apps
   * are retired and the active ones are left untouched.
   */
  @Test
  public void testRMAppRetireSomeDifferentStates() throws Exception {
    long now = System.currentTimeMillis();
    // these parameters don't matter, override applications below
    RMContext rmContext = mockRMContext(10, now - 20000);
    Configuration conf = new YarnConfiguration();
    conf.setInt(YarnConfiguration.RM_STATE_STORE_MAX_COMPLETED_APPLICATIONS, 2);
    conf.setInt(YarnConfiguration.RM_MAX_COMPLETED_APPLICATIONS, 2);
    TestRMAppManager appMonitor = new TestRMAppManager(rmContext, conf);

    // clear out applications map
    rmContext.getRMApps().clear();
    assertEquals(0, rmContext.getRMApps().size(), "map isn't empty");

    // 6 applications are in final state, 4 are not in final state.
    // Set with various finished states:
    RMApp app = new MockRMApp(0, now - 20000, RMAppState.KILLED);
    rmContext.getRMApps().put(app.getApplicationId(), app);
    app = new MockRMApp(1, now - 200000, RMAppState.FAILED);
    rmContext.getRMApps().put(app.getApplicationId(), app);
    app = new MockRMApp(2, now - 30000, RMAppState.FINISHED);
    rmContext.getRMApps().put(app.getApplicationId(), app);
    app = new MockRMApp(3, now - 20000, RMAppState.RUNNING);
    rmContext.getRMApps().put(app.getApplicationId(), app);
    app = new MockRMApp(4, now - 20000, RMAppState.NEW);
    rmContext.getRMApps().put(app.getApplicationId(), app);

    // make sure it doesn't expire these since still running
    app = new MockRMApp(5, now - 10001, RMAppState.KILLED);
    rmContext.getRMApps().put(app.getApplicationId(), app);
    app = new MockRMApp(6, now - 30000, RMAppState.ACCEPTED);
    rmContext.getRMApps().put(app.getApplicationId(), app);
    app = new MockRMApp(7, now - 20000, RMAppState.SUBMITTED);
    rmContext.getRMApps().put(app.getApplicationId(), app);
    app = new MockRMApp(8, now - 10001, RMAppState.FAILED);
    rmContext.getRMApps().put(app.getApplicationId(), app);
    app = new MockRMApp(9, now - 20000, RMAppState.FAILED);
    rmContext.getRMApps().put(app.getApplicationId(), app);

    assertEquals(10, rmContext
        .getRMApps().size(), "Number of apps incorrect before");

    // add them to completed apps list
    addToCompletedApps(appMonitor, rmContext);

    // shouldn't have to many apps
    appMonitor.checkAppNumCompletedLimit();

    // 4 active apps + 2 retained completed apps remain in memory.
    assertEquals(6, rmContext.getRMApps().size(),
        "Number of apps incorrect after # completed check");
    assertEquals(2, appMonitor.getCompletedAppsListSize(),
        "Number of completed apps incorrect after check");

    // 6 applications in final state, 4 of them are removed
    verify(rmContext.getStateStore(), times(4)).removeApplication(
        isA(RMApp.class));
  }
@Test
public void testRMAppRetireNullApp() throws Exception {
long now = System.currentTimeMillis();
RMContext rmContext = mockRMContext(10, now - 20000);
TestRMAppManager appMonitor = new TestRMAppManager(rmContext, new Configuration());
assertEquals(10, rmContext
.getRMApps().size(), "Number of apps incorrect before");
appMonitor.finishApplication(null);
assertEquals(0, appMonitor.getCompletedAppsListSize(),
"Number of completed apps incorrect after check");
}
@Test
public void testRMAppRetireZeroSetting() throws Exception {
long now = System.currentTimeMillis();
RMContext rmContext = mockRMContext(10, now - 20000);
Configuration conf = new YarnConfiguration();
conf.setInt(YarnConfiguration.RM_STATE_STORE_MAX_COMPLETED_APPLICATIONS, 0);
conf.setInt(YarnConfiguration.RM_MAX_COMPLETED_APPLICATIONS, 0);
TestRMAppManager appMonitor = new TestRMAppManager(rmContext, conf);
assertEquals(10, rmContext
.getRMApps().size(), "Number of apps incorrect before");
addToCompletedApps(appMonitor, rmContext);
assertEquals(10, appMonitor.getCompletedAppsListSize(),
"Number of completed apps incorrect");
appMonitor.checkAppNumCompletedLimit();
assertEquals(0, rmContext.getRMApps().size(),
"Number of apps incorrect after # completed check");
assertEquals(0, appMonitor.getCompletedAppsListSize(),
"Number of completed apps incorrect after check");
verify(rmContext.getStateStore(), times(10)).removeApplication(
isA(RMApp.class));
}
@Test
public void testStateStoreAppLimitLessThanMemoryAppLimit() {
long now = System.currentTimeMillis();
final int allApps = 10;
RMContext rmContext = mockRMContext(allApps, now - 20000);
Configuration conf = new YarnConfiguration();
int maxAppsInMemory = 8;
int maxAppsInStateStore = 4;
conf.setInt(YarnConfiguration.RM_MAX_COMPLETED_APPLICATIONS, maxAppsInMemory);
conf.setInt(YarnConfiguration.RM_STATE_STORE_MAX_COMPLETED_APPLICATIONS,
maxAppsInStateStore);
TestRMAppManager appMonitor = new TestRMAppManager(rmContext, conf);
addToCompletedApps(appMonitor, rmContext);
assertEquals(allApps, appMonitor.getCompletedAppsListSize(),
"Number of completed apps incorrect");
appMonitor.checkAppNumCompletedLimit();
assertEquals(maxAppsInMemory, rmContext.getRMApps().size(),
"Number of apps incorrect after # completed check");
assertEquals(maxAppsInMemory, appMonitor.getCompletedAppsListSize(),
"Number of completed apps incorrect after check");
int numRemoveAppsFromStateStore = 10 - maxAppsInStateStore;
verify(rmContext.getStateStore(), times(numRemoveAppsFromStateStore))
.removeApplication(isA(RMApp.class));
assertEquals(maxAppsInStateStore,
appMonitor.getNumberOfCompletedAppsInStateStore());
}
@Test
public void testStateStoreAppLimitGreaterThanMemoryAppLimit() {
long now = System.currentTimeMillis();
final int allApps = 10;
RMContext rmContext = mockRMContext(allApps, now - 20000);
Configuration conf = new YarnConfiguration();
int maxAppsInMemory = 8;
conf.setInt(YarnConfiguration.RM_MAX_COMPLETED_APPLICATIONS, maxAppsInMemory);
// greater than maxCompletedAppsInMemory, reset to RM_MAX_COMPLETED_APPLICATIONS.
conf.setInt(YarnConfiguration.RM_STATE_STORE_MAX_COMPLETED_APPLICATIONS, 1000);
TestRMAppManager appMonitor = new TestRMAppManager(rmContext, conf);
addToCompletedApps(appMonitor, rmContext);
assertEquals(allApps, appMonitor.getCompletedAppsListSize(),
"Number of completed apps incorrect");
appMonitor.checkAppNumCompletedLimit();
int numRemoveApps = allApps - maxAppsInMemory;
assertEquals(maxAppsInMemory, rmContext.getRMApps().size(),
"Number of apps incorrect after # completed check");
assertEquals(maxAppsInMemory, appMonitor.getCompletedAppsListSize(),
"Number of completed apps incorrect after check");
verify(rmContext.getStateStore(), times(numRemoveApps)).removeApplication(
isA(RMApp.class));
assertEquals(maxAppsInMemory,
appMonitor.getNumberOfCompletedAppsInStateStore());
}
protected void setupDispatcher(RMContext rmContext, Configuration conf) {
TestDispatcher testDispatcher = new TestDispatcher();
TestAppManagerDispatcher testAppManagerDispatcher =
new TestAppManagerDispatcher();
rmContext.getDispatcher().register(RMAppEventType.class, testDispatcher);
rmContext.getDispatcher().register(RMAppManagerEventType.class, testAppManagerDispatcher);
((Service)rmContext.getDispatcher()).init(conf);
((Service)rmContext.getDispatcher()).start();
assertEquals(RMAppEventType.KILL, appEventType, "app event type is wrong before");
}
  /**
   * When both the deprecated single AM resource request / resource and the
   * request LIST are set, the list wins: it is what the submitted app
   * reports, and the deprecated single-request getter mirrors its first
   * entry.
   */
  @SuppressWarnings("deprecation")
  @Test
  public void testRMAppSubmitAMContainerResourceRequests() throws Exception {
    // Deprecated single-value setters — these should be overridden below.
    asContext.setResource(Resources.createResource(1024));
    asContext.setAMContainerResourceRequest(
        ResourceRequest.newInstance(Priority.newInstance(0),
            ResourceRequest.ANY, Resources.createResource(1024), 1, true));
    // The authoritative request list: ANY, rack, and node-level requests.
    List<ResourceRequest> reqs = new ArrayList<>();
    reqs.add(ResourceRequest.newInstance(Priority.newInstance(0),
        ResourceRequest.ANY, Resources.createResource(1025), 1, false));
    reqs.add(ResourceRequest.newInstance(Priority.newInstance(0),
        "/rack", Resources.createResource(1025), 1, false));
    reqs.add(ResourceRequest.newInstance(Priority.newInstance(0),
        "/rack/node", Resources.createResource(1025), 1, true));
    asContext.setAMContainerResourceRequests(cloneResourceRequests(reqs));
    // getAMContainerResourceRequest uses the first entry of
    // getAMContainerResourceRequests
    assertEquals(reqs.get(0), asContext.getAMContainerResourceRequest());
    assertEquals(reqs, asContext.getAMContainerResourceRequests());
    RMApp app = testRMAppSubmit();
    // Submission normalizes each request's label to NO_LABEL; mirror that on
    // the expected list before comparing.
    for (ResourceRequest req : reqs) {
      req.setNodeLabelExpression(RMNodeLabelsManager.NO_LABEL);
    }

    // setAMContainerResourceRequests has priority over
    // setAMContainerResourceRequest and setResource
    assertEquals(reqs, app.getAMResourceRequests());
  }
@SuppressWarnings("deprecation")
@Test
public void testRMAppSubmitAMContainerResourceRequest() throws Exception {
// Only the deprecated single-request setter is used here (the list is
// explicitly nulled); the single request must still win over setResource.
asContext.setResource(Resources.createResource(1024));
asContext.setAMContainerResourceRequests(null);
ResourceRequest req =
ResourceRequest.newInstance(Priority.newInstance(0),
ResourceRequest.ANY, Resources.createResource(1025), 1, true);
req.setNodeLabelExpression(RMNodeLabelsManager.NO_LABEL);
asContext.setAMContainerResourceRequest(ResourceRequest.clone(req));
// getAMContainerResourceRequests uses a singleton list of
// getAMContainerResourceRequest
assertEquals(req, asContext.getAMContainerResourceRequest());
assertEquals(req, asContext.getAMContainerResourceRequests().get(0));
assertEquals(1, asContext.getAMContainerResourceRequests().size());
RMApp app = testRMAppSubmit();
// setAMContainerResourceRequest has priority over setResource
assertEquals(Collections.singletonList(req),
app.getAMResourceRequests());
}
@Test
public void testRMAppSubmitAMContainerWithNoLabelByRMDefaultAMNodeLabel() throws Exception {
// The AM request carries no label of its own, so the RM-configured
// default AM node label ("core") must be applied on submission, even
// though the context-level label is set to something else ("fixed").
List<ResourceRequest> reqs = new ArrayList<>();
ResourceRequest anyReq = ResourceRequest.newInstance(
Priority.newInstance(1),
ResourceRequest.ANY, Resources.createResource(1024), 1, false, null,
ExecutionTypeRequest.newInstance(ExecutionType.GUARANTEED));
reqs.add(anyReq);
asContext.setAMContainerResourceRequests(cloneResourceRequests(reqs));
asContext.setNodeLabelExpression("fixed");
Configuration conf = new Configuration(false);
String defaultAMNodeLabel = "core";
conf.set(YarnConfiguration.AM_DEFAULT_NODE_LABEL, defaultAMNodeLabel);
// The default queue must be able to access the default label for the
// submission to be accepted.
when(mockDefaultQueueInfo.getAccessibleNodeLabels()).thenReturn(
new HashSet<String>() {{ add("core"); }});
TestRMAppManager newAppMonitor = createAppManager(rmContext, conf);
newAppMonitor.submitApplication(asContext, "test");
RMApp app = rmContext.getRMApps().get(appId);
waitUntilEventProcessed();
assertEquals(defaultAMNodeLabel,
app.getAMResourceRequests().get(0).getNodeLabelExpression());
}
@Test
public void testRMAppSubmitResource() throws Exception {
// Only the plain Resource is set; submission must synthesize a single
// ANY request at AM_CONTAINER_PRIORITY with an empty node label.
asContext.setResource(Resources.createResource(1024));
asContext.setAMContainerResourceRequests(null);
RMApp app = testRMAppSubmit();
// setResource
assertEquals(Collections.singletonList(
ResourceRequest.newInstance(RMAppAttemptImpl.AM_CONTAINER_PRIORITY,
ResourceRequest.ANY, Resources.createResource(1024), 1, true,
"")),
app.getAMResourceRequests());
}
@Test
public void testRMAppSubmitNoResourceRequests() throws Exception {
// Neither a Resource nor any ResourceRequests: submission must be
// rejected with a specific InvalidResourceRequestException message.
asContext.setResource(null);
asContext.setAMContainerResourceRequests(null);
try {
testRMAppSubmit();
fail("Should have failed due to no ResourceRequest");
} catch (InvalidResourceRequestException e) {
assertEquals(
"Invalid resource request, no resources requested",
e.getMessage());
}
}
@Test
public void testRMAppSubmitAMContainerResourceRequestsDisagree()
throws Exception {
// Requests that disagree on capability, execution type, container count
// and priority: submission must force all of them to agree with the
// ANY request's capability and with GUARANTEED/1/priority-0.
asContext.setResource(null);
List<ResourceRequest> reqs = new ArrayList<>();
when(mockDefaultQueueInfo.getAccessibleNodeLabels()).thenReturn
(new HashSet<String>() {{ add("label1"); add(""); }});
ResourceRequest anyReq = ResourceRequest.newInstance(
Priority.newInstance(1),
ResourceRequest.ANY, Resources.createResource(1024), 1, false, "label1",
ExecutionTypeRequest.newInstance(ExecutionType.GUARANTEED));
reqs.add(anyReq);
reqs.add(ResourceRequest.newInstance(Priority.newInstance(2),
"/rack", Resources.createResource(1025), 2, false, "",
ExecutionTypeRequest.newInstance(ExecutionType.OPPORTUNISTIC)));
reqs.add(ResourceRequest.newInstance(Priority.newInstance(3),
"/rack/node", Resources.createResource(1026), 3, true, "",
ExecutionTypeRequest.newInstance(ExecutionType.OPPORTUNISTIC)));
asContext.setAMContainerResourceRequests(cloneResourceRequests(reqs));
RMApp app = testRMAppSubmit();
// It should force the requests to all agree on these points
for (ResourceRequest req : reqs) {
req.setCapability(anyReq.getCapability());
req.setExecutionTypeRequest(
ExecutionTypeRequest.newInstance(ExecutionType.GUARANTEED));
req.setNumContainers(1);
req.setPriority(Priority.newInstance(0));
}
assertEquals(reqs, app.getAMResourceRequests());
}
@Test
public void testRMAppSubmitAMContainerResourceRequestsNoAny()
throws Exception {
// Only rack/node-local requests, no ResourceRequest.ANY ("*") entry:
// submission must be rejected.
asContext.setResource(null);
List<ResourceRequest> reqs = new ArrayList<>();
reqs.add(ResourceRequest.newInstance(Priority.newInstance(1),
"/rack", Resources.createResource(1025), 1, false));
reqs.add(ResourceRequest.newInstance(Priority.newInstance(1),
"/rack/node", Resources.createResource(1025), 1, true));
asContext.setAMContainerResourceRequests(cloneResourceRequests(reqs));
// getAMContainerResourceRequest uses the first entry of
// getAMContainerResourceRequests
assertEquals(reqs, asContext.getAMContainerResourceRequests());
try {
testRMAppSubmit();
fail("Should have failed due to missing ANY ResourceRequest");
} catch (InvalidResourceRequestException e) {
assertEquals(
"Invalid resource request, no resource request specified with *",
e.getMessage());
}
}
// NOTE(review): the method name almost certainly means "TooManyAny";
// kept as-is to avoid changing the public test name.
@Test
public void testRMAppSubmitAMContainerResourceRequestsTwoManyAny()
throws Exception {
// Two ResourceRequest.ANY ("*") entries: only one is allowed, so
// submission must be rejected.
asContext.setResource(null);
List<ResourceRequest> reqs = new ArrayList<>();
reqs.add(ResourceRequest.newInstance(Priority.newInstance(1),
ResourceRequest.ANY, Resources.createResource(1025), 1, false));
reqs.add(ResourceRequest.newInstance(Priority.newInstance(1),
ResourceRequest.ANY, Resources.createResource(1025), 1, false));
asContext.setAMContainerResourceRequests(cloneResourceRequests(reqs));
// getAMContainerResourceRequest uses the first entry of
// getAMContainerResourceRequests
assertEquals(reqs, asContext.getAMContainerResourceRequests());
try {
testRMAppSubmit();
fail("Should have failed due to too many ANY ResourceRequests");
} catch (InvalidResourceRequestException e) {
assertEquals(
"Invalid resource request, only one resource request with * is " +
"allowed", e.getMessage());
}
}
// Submits the shared submission context and blocks until the resulting
// app's START event has been dispatched, returning the RMApp.
private RMApp testRMAppSubmit() throws Exception {
  appMonitor.submitApplication(asContext, "test");
  RMApp submitted = waitUntilEventProcessed();
  return submitted;
}
// Looks up the freshly submitted app, verifies its initial state, then
// polls (up to ~20s) until the dispatcher has replaced the KILL sentinel
// with the START event type produced by the submission.
private RMApp waitUntilEventProcessed() throws InterruptedException {
  RMApp app = rmContext.getRMApps().get(appId);
  assertNotNull(app, "app is null");
  assertEquals(appId, app.getApplicationId(), "app id doesn't match");
  assertEquals(RMAppState.NEW, app.getState(), "app state doesn't match");
  // wait for event to be processed
  for (int waited = 0;
      getAppEventType() == RMAppEventType.KILL && waited < 20; waited++) {
    Thread.sleep(1000);
  }
  assertEquals(RMAppEventType.START, getAppEventType(),
      "app event type sent is wrong");
  return app;
}
@Test
public void testRMAppSubmitWithInvalidTokens() throws Exception {
// Setup invalid security tokens
// An empty DataOutputBuffer yields a zero-length token blob, which
// fails token parsing with an EOFException during submission.
DataOutputBuffer dob = new DataOutputBuffer();
ByteBuffer securityTokens = ByteBuffer.wrap(dob.getData(), 0,
dob.getLength());
Configuration conf = new Configuration();
conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
"kerberos");
UserGroupInformation.setConfiguration(conf);
asContext.getAMContainerSpec().setTokens(securityTokens);
try {
appMonitor.submitApplication(asContext, "test");
fail("Application submission should fail because" +
" Tokens are invalid.");
} catch (YarnException e) {
// Exception is expected
assertTrue(e.getMessage().contains("java.io.EOFException"),
"The thrown exception is not java.io.EOFException");
}
// Poll until the APP_REJECTED event replaces the KILL sentinel.
int timeoutSecs = 0;
while ((getAppEventType() == RMAppEventType.KILL) &&
timeoutSecs++ < 20) {
Thread.sleep(1000);
}
assertEquals(RMAppEventType.APP_REJECTED, getAppEventType(),
"app event type sent is wrong");
// Reset tokens so later tests using the shared context are unaffected.
asContext.getAMContainerSpec().setTokens(null);
}
@Test
@Timeout(value = 30)
public void testRMAppSubmitMaxAppAttempts() throws Exception {
// Table-driven check of max-app-attempts resolution. For each pair of
// (global cap, RM default) the four individual values (including 0 =
// "unset" and a negative value) must resolve to the expected numbers:
// individual wins when within the global cap, otherwise the cap or the
// RM default applies.
int[] globalMaxAppAttempts = new int[] { 10, 1 };
int[] rmAmMaxAttempts = new int[] { 8, 1 };
int[][] individualMaxAppAttempts = new int[][]{
new int[]{ 9, 10, 11, 0 },
new int[]{ 1, 10, 0, -1 }};
int[][] expectedNums = new int[][]{
new int[]{ 9, 10, 10, 8 },
new int[]{ 1, 1, 1, 1 }};
for (int i = 0; i < globalMaxAppAttempts.length; ++i) {
for (int j = 0; j < individualMaxAppAttempts.length; ++j) {
// Fresh scheduler and app manager per combination so configuration
// changes do not leak between iterations.
scheduler = mockResourceScheduler();
Configuration conf = new Configuration();
conf.setInt(YarnConfiguration.GLOBAL_RM_AM_MAX_ATTEMPTS,
globalMaxAppAttempts[i]);
conf.setInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS, rmAmMaxAttempts[i]);
ApplicationMasterService masterService =
new ApplicationMasterService(rmContext, scheduler);
TestRMAppManager appMonitor = new TestRMAppManager(rmContext,
new ClientToAMTokenSecretManagerInRM(), scheduler, masterService,
new ApplicationACLsManager(conf), conf);
ApplicationId appID = MockApps.newAppID(i * 4 + j + 1);
asContext.setApplicationId(appID);
if (individualMaxAppAttempts[i][j] != 0) {
asContext.setMaxAppAttempts(individualMaxAppAttempts[i][j]);
}
appMonitor.submitApplication(asContext, "test");
RMApp app = rmContext.getRMApps().get(appID);
assertEquals(expectedNums[i][j], app.getMaxAppAttempts(),
"max application attempts doesn't match");
// wait for event to be processed
int timeoutSecs = 0;
while ((getAppEventType() == RMAppEventType.KILL) &&
timeoutSecs++ < 20) {
Thread.sleep(1000);
}
// Re-arm the KILL sentinel for the next iteration.
setAppEventType(RMAppEventType.KILL);
}
}
}
@Test
@Timeout(value = 30)
public void testRMAppSubmitDuplicateApplicationId() throws Exception {
  ApplicationId appId = MockApps.newAppID(0);
  asContext.setApplicationId(appId);
  RMApp appOrig = rmContext.getRMApps().get(appId);
  // Compare string contents, not references: '!=' on Strings only checks
  // object identity and its outcome depends on interning.
  assertTrue(!"testApp1".equals(appOrig.getName()), "app name matches "
      + "but shouldn't");
  // our testApp1 should be rejected and original app with same id should be left in place
  try {
    appMonitor.submitApplication(asContext, "test");
    fail("Exception is expected when applicationId is duplicate.");
  } catch (YarnException e) {
    assertTrue(e.getMessage().contains("Cannot add a duplicate!"),
        "The thrown exception is not the expected one.");
  }
  // make sure original app didn't get removed
  RMApp app = rmContext.getRMApps().get(appId);
  assertNotNull(app, "app is null");
  assertEquals(appId, app.getApplicationId(), "app id doesn't match");
  assertEquals(RMAppState.FINISHED, app.getState(), "app state doesn't match");
}
@SuppressWarnings("deprecation")
@Test
@Timeout(value = 30)
public void testRMAppSubmitInvalidResourceRequest() throws Exception {
// Request one MB more than the scheduler's maximum allocation, which
// must be rejected at submission time.
asContext.setResource(Resources.createResource(
YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB + 1));
// submit an app
try {
appMonitor.submitApplication(asContext, "test");
fail("Application submission should fail because resource" +
" request is invalid.");
} catch (YarnException e) {
// Exception is expected
// TODO Change this to assert the expected exception type - post YARN-142
// sub-task related to specialized exceptions.
assertTrue(e.getMessage().contains("Invalid resource request"),
"The thrown exception is not" +
" InvalidResourceRequestException");
}
}
@Test
@Timeout(value = 30)
public void testEscapeApplicationSummary() {
// Build a fully mocked app whose name/user/queue/diagnostics contain
// newlines, carriage returns, commas and '=' characters, then verify
// the generated one-line summary escapes them all so log parsers that
// split on those characters are not broken.
RMApp app = mock(RMAppImpl.class);
ApplicationSubmissionContext asc = mock(ApplicationSubmissionContext.class);
when(asc.getNodeLabelExpression()).thenReturn("test");
when(app.getApplicationSubmissionContext()).thenReturn(asc);
when(app.getApplicationId()).thenReturn(
ApplicationId.newInstance(100L, 1));
when(app.getName()).thenReturn("Multiline\n\n\r\rAppName");
when(app.getUser()).thenReturn("Multiline\n\n\r\rUserName");
when(app.getQueue()).thenReturn("Multiline\n\n\r\rQueueName");
when(app.getState()).thenReturn(RMAppState.RUNNING);
when(app.getApplicationType()).thenReturn("MAPREDUCE");
when(app.getSubmitTime()).thenReturn(1000L);
when(app.getLaunchTime()).thenReturn(2000L);
when(app.getApplicationTags()).thenReturn(Sets.newHashSet("tag2", "tag1"));
// The AM attempt/container/node chain supplies the appMasterHost field.
RMAppAttempt mockRMAppAttempt = mock(RMAppAttempt.class);
Container mockContainer = mock(Container.class);
NodeId mockNodeId = mock(NodeId.class);
String host = "127.0.0.1";
when(mockNodeId.getHost()).thenReturn(host);
when(mockContainer.getNodeId()).thenReturn(mockNodeId);
when(mockRMAppAttempt.getMasterContainer()).thenReturn(mockContainer);
when(app.getCurrentAppAttempt()).thenReturn(mockRMAppAttempt);
Map<String, Long> resourceSecondsMap = new HashMap<>();
resourceSecondsMap.put(ResourceInformation.MEMORY_MB.getName(), 16384L);
resourceSecondsMap.put(ResourceInformation.VCORES.getName(), 64L);
RMAppMetrics metrics =
new RMAppMetrics(Resource.newInstance(1234, 56),
10, 1, resourceSecondsMap, new HashMap<>(), 1234);
when(app.getRMAppMetrics()).thenReturn(metrics);
when(app.getDiagnostics()).thenReturn(new StringBuilder(
"Multiline\n\n\r\rDiagnostics=Diagn,ostic"));
RMAppManager.ApplicationSummary.SummaryBuilder summary =
new RMAppManager.ApplicationSummary().createAppSummary(app);
String msg = summary.toString();
LOG.info("summary: " + msg);
// No literal newlines/CRs may survive in the summary line.
assertFalse(msg.contains("\n"));
assertFalse(msg.contains("\r"));
String escaped = "\\n\\n\\r\\r";
assertTrue(msg.contains("Multiline" + escaped +"AppName"));
assertTrue(msg.contains("Multiline" + escaped +"UserName"));
assertTrue(msg.contains("Multiline" + escaped +"QueueName"));
assertTrue(msg.contains("appMasterHost=" + host));
assertTrue(msg.contains("submitTime=1000"));
assertTrue(msg.contains("launchTime=2000"));
assertTrue(msg.contains("memorySeconds=16384"));
assertTrue(msg.contains("vcoreSeconds=64"));
assertTrue(msg.contains("preemptedAMContainers=1"));
assertTrue(msg.contains("preemptedNonAMContainers=10"));
assertTrue(msg.contains("preemptedResources=<memory:1234\\, vCores:56>"));
assertTrue(msg.contains("applicationType=MAPREDUCE"));
// Tags are emitted sorted, with commas escaped.
assertTrue(msg.contains("applicationTags=tag1\\,tag2"));
assertTrue(msg.contains("applicationNodeLabel=test"));
assertTrue(msg.contains("diagnostics=Multiline" + escaped
+ "Diagnostics\\=Diagn\\,ostic"));
assertTrue(msg.contains("totalAllocatedContainers=1234"));
}
@Test
public void testRMAppSubmitWithQueueChanged() throws Exception {
  // Install a PlacementManager that always routes the app to "newQueue",
  // regardless of the queue named in the submission context.
  PlacementManager placementMgr = mock(PlacementManager.class);
  doAnswer(invocation -> new ApplicationPlacementContext("newQueue"))
      .when(placementMgr).placeApplication(
          any(ApplicationSubmissionContext.class),
          any(String.class),
          any(Boolean.class));
  rmContext.setQueuePlacementManager(placementMgr);
  asContext.setQueue("oldQueue");
  appMonitor.submitApplication(asContext, "test");
  RMApp app = rmContext.getRMApps().get(appId);
  // Drive the app through START and APP_NEW_SAVED by hand.
  rmContext.getRMApps().get(appId)
      .handle(new RMAppEvent(appId, RMAppEventType.START));
  rmContext.getRMApps().get(appId)
      .handle(new RMAppEvent(appId, RMAppEventType.APP_NEW_SAVED));
  assertNotNull(app, "app is null");
  // The placement manager's decision must have rewritten the queue.
  assertEquals("newQueue", asContext.getQueue());
  // wait for event to be processed
  int waitedSecs = 0;
  while ((getAppEventType() == RMAppEventType.KILL) && waitedSecs++ < 20) {
    Thread.sleep(1000);
  }
  assertEquals(RMAppEventType.START, getAppEventType(),
      "app event type sent is wrong");
}
// Convenience overload: mocks the generic ResourceScheduler interface.
private static ResourceScheduler mockResourceScheduler() {
return mockResourceScheduler(ResourceScheduler.class);
}
/**
 * Mocks a scheduler of the given class with default min/max allocation
 * capabilities and identity resource normalization.
 */
private static <T extends ResourceScheduler> ResourceScheduler
    mockResourceScheduler(Class<T> schedulerClass) {
  ResourceScheduler scheduler = mock(schedulerClass);
  when(scheduler.getMinimumResourceCapability()).thenReturn(
      Resources.createResource(
          YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_MB));
  when(scheduler.getMaximumResourceCapability()).thenReturn(
      Resources.createResource(
          YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB));
  // One stub suffices: any(String.class) and anyString() are the same
  // Mockito matcher, so the original's second stubbing of
  // getMaximumResourceCapability(String) merely overrode the first with
  // identical behavior.
  when(scheduler.getMaximumResourceCapability(anyString())).thenReturn(
      Resources.createResource(
          YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB));
  ResourceCalculator rs = mock(ResourceCalculator.class);
  when(scheduler.getResourceCalculator()).thenReturn(rs);
  // Normalization is the identity for these tests: echo the requested
  // resource back unchanged.
  when(scheduler.getNormalizedResource(any(), any()))
      .thenAnswer(new Answer<Resource>() {
        @Override
        public Resource answer(InvocationOnMock invocationOnMock)
            throws Throwable {
          return (Resource) invocationOnMock.getArguments()[0];
        }
      });
  return scheduler;
}
// Builds a minimal ContainerLaunchContext (with empty ACLs) suitable for
// use as an AM container spec in submissions.
private static ContainerLaunchContext mockContainerLaunchContext(
RecordFactory recordFactory) {
ContainerLaunchContext amContainer = recordFactory.newRecordInstance(
ContainerLaunchContext.class);
amContainer.setApplicationACLs(new HashMap<ApplicationAccessType, String>());
return amContainer;
}
// Returns the scheduler's default minimum allocation, used as a small
// valid AM resource in submissions.
private static Resource mockResource() {
return Resources.createResource(
YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_MB);
}
// Deep-copies each ResourceRequest so tests can mutate the originals and
// the submitted copies independently.
private static List<ResourceRequest> cloneResourceRequests(
    List<ResourceRequest> reqs) {
  List<ResourceRequest> copies = new ArrayList<>(reqs.size());
  for (int i = 0; i < reqs.size(); i++) {
    copies.add(ResourceRequest.clone(reqs.get(i)));
  }
  return copies;
}
/**
 * Test for the case when application tag based placement is disabled
 * (the default configuration). The userid tag must be ignored and the
 * placement done for the submitting user 'user1'.
 */
@Test
public void testGetUserNameForPlacementTagBasedPlacementDisabled()
throws YarnException {
String user = "user1";
String expectedQueue = "user1Queue";
String userNameFromAppTag = "user2";
String userIdTag = USER_ID_PREFIX + userNameFromAppTag;
setApplicationTags("tag1", userIdTag, "tag2");
verifyPlacementUsername(expectedQueue, user, userNameFromAppTag, user);
}
/**
 * Test for the case when the application tag based placement is enabled and
 * the submitting user 'user1' is whitelisted and the user from the
 * application tag has access to queue.
 * Expected behaviour: the placement is done for user from the tag 'user2'
 */
@Test
public void testGetUserNameForPlacementTagBasedPlacementEnabled()
throws YarnException {
String user = "user1";
String usernameFromAppTag = "user2";
String expectedQueue = "user1Queue";
String expectedUser = usernameFromAppTag;
String userIdTag = USER_ID_PREFIX + usernameFromAppTag;
setApplicationTags("tag1", userIdTag, "tag2");
// Whitelist the submitting user and grant queue access to the tag user.
enableApplicationTagPlacement(true, user);
verifyPlacementUsername(expectedQueue, user, usernameFromAppTag,
expectedUser);
}
/**
 * Test for the case when the application tag based placement is enabled.
 * And submitting user 'user1' is whitelisted and there are multiple valid
 * username tags passed
 * Expected behaviour: the placement is done for the first valid username
 * from the tag 'user2'
 */
@Test
public void testGetUserNameForPlacementTagBasedPlacementMultipleUserIds()
throws YarnException {
String user = "user1";
String expectedQueue = "user1Queue";
String userNameFromAppTag = "user2";
String expectedUser = userNameFromAppTag;
String userIdTag = USER_ID_PREFIX + expectedUser;
String userIdTag2 = USER_ID_PREFIX + "user3";
// Two userid tags: only the first ("user2") may be used for placement.
setApplicationTags("tag1", userIdTag, "tag2", userIdTag2);
enableApplicationTagPlacement(true, user);
verifyPlacementUsername(expectedQueue, user, userNameFromAppTag,
expectedUser);
}
/**
 * Test for the case when the application tag based placement is enabled.
 * And no username is set in the application tag
 * Expected behaviour: the placement is done for the submitting user 'user1'
 */
@Test
public void testGetUserNameForPlacementTagBasedPlacementNoUserId()
throws YarnException {
String user = "user1";
String expectedQueue = "user1Queue";
// No userid tag at all: the placement username must fall back to the
// submitting user.
String userNameFromAppTag = null;
setApplicationTags("tag1", "tag2");
enableApplicationTagPlacement(true, user);
verifyPlacementUsername(expectedQueue, user, userNameFromAppTag, user);
}
/**
 * Test for the case when the application tag based placement is enabled but
 * the user from the application tag 'user2' does not have access to the
 * queue.
 * Expected behaviour: the placement is done for the submitting user 'user1'
 */
@Test
public void testGetUserNameForPlacementUserWithoutAccessToQueue()
throws YarnException {
String user = "user1";
String expectedQueue = "user1Queue";
String userNameFromAppTag = "user2";
String userIdTag = USER_ID_PREFIX + userNameFromAppTag;
setApplicationTags("tag1", userIdTag, "tag2");
// 'false' denies queue access to the tag user, forcing the fallback.
enableApplicationTagPlacement(false, user);
verifyPlacementUsername(expectedQueue, user, userNameFromAppTag, user);
}
/**
 * Test for the case when the application tag based placement is enabled but
 * the submitting user 'user1' is not whitelisted and there is a valid
 * username tag passed.
 * Expected behaviour: the placement is done for the submitting user 'user1'
 */
@Test
public void testGetUserNameForPlacementNotWhitelistedUser()
throws YarnException {
String user = "user1";
String expectedQueue = "user1Queue";
String userNameFromAppTag = "user2";
String userIdTag = USER_ID_PREFIX + userNameFromAppTag;
setApplicationTags("tag1", userIdTag, "tag2");
// Whitelist contains only "someUser", not the submitting "user1".
enableApplicationTagPlacement(true, "someUser");
verifyPlacementUsername(expectedQueue, user, userNameFromAppTag, user);
}
/**
 * Test for the case when the application tag based placement is enabled but
 * there is no whitelisted user.
 * Expected behaviour: the placement is done for the submitting user 'user1'
 */
@Test
public void testGetUserNameForPlacementEmptyWhiteList()
throws YarnException {
String user = "user1";
String expectedQueue = "user1Queue";
String userNameFromAppTag = "user2";
String userIdTag = USER_ID_PREFIX + userNameFromAppTag;
setApplicationTags("tag1", userIdTag, "tag2");
// No whitelisted users passed at all.
enableApplicationTagPlacement(false);
verifyPlacementUsername(expectedQueue, user, userNameFromAppTag, user);
}
/**
 * Test for the case when the application tag based placement is enabled and
 * there is one wrongly qualified user
 * 'userid=' and a valid user 'userid=user2' passed
 * with application tag.
 * Expected behaviour: the placement is done for the first valid username
 * from the tag 'user2'
 */
@Test
public void testGetUserNameForPlacementWronglyQualifiedFirstUserNameInTag()
throws YarnException {
String user = "user1";
String expectedQueue = "user1Queue";
String userNameFromAppTag = "user2";
String expectedUser = userNameFromAppTag;
String userIdTag = USER_ID_PREFIX + userNameFromAppTag;
// A bare prefix with no username after it — must be skipped in favor of
// the following well-formed tag.
String wrongUserIdTag = USER_ID_PREFIX;
setApplicationTags("tag1", wrongUserIdTag, userIdTag, "tag2");
enableApplicationTagPlacement(true, user);
verifyPlacementUsername(expectedQueue, user, userNameFromAppTag,
expectedUser);
}
/**
 * Test for the case when the application tag based placement is enabled and
 * there is only one wrongly qualified user 'userid=' passed
 * with application tag.
 * Expected behaviour: the placement is done for the submitting user 'user1'
 */
@Test
public void testGetUserNameForPlacementWronglyQualifiedUserNameInTag()
throws YarnException {
String user = "user1";
String expectedQueue = "user1Queue";
// The only userid tag is malformed (empty username), so the effective
// tag username is the empty string and the fallback applies.
String userNameFromAppTag = "";
String wrongUserIdTag = USER_ID_PREFIX;
setApplicationTags("tag1", wrongUserIdTag, "tag2");
enableApplicationTagPlacement(true, user);
verifyPlacementUsername(expectedQueue, user, userNameFromAppTag, user);
}
/**
 * Test for the case when the application tag based placement is enabled.
 * And there is no placement rule defined for the user from the application tag
 * Expected behaviour: the placement is done for the submitting user 'user1'
 */
@Test
public void testGetUserNameForPlacementNoRuleDefined()
    throws YarnException {
  String user = "user1";
  String expectedUser = user;
  String userNameFromAppTag = "user2";
  // The tag itself is well-formed; what is missing is a placement rule
  // (the PlacementManager returns null below), so the previous local name
  // 'wrongUserIdTag' was misleading.
  String userIdTag = USER_ID_PREFIX + userNameFromAppTag;
  setApplicationTags("tag1", userIdTag, "tag2");
  enableApplicationTagPlacement(true, user);
  PlacementManager placementMgr = mock(PlacementManager.class);
  when(placementMgr.placeApplication(asContext, userNameFromAppTag))
      .thenReturn(null);
  String userNameForPlacement = appMonitor
      .getUserNameForPlacement(user, asContext, placementMgr);
  assertEquals(expectedUser, userNameForPlacement);
}
@Test
@UseMockCapacityScheduler
public void testCheckAccessFullPathWithCapacityScheduler()
throws YarnException {
// make sure we only combine "parent + queue" if CS is selected
testCheckAccess("root.users", "hadoop");
}
@Test
@UseMockCapacityScheduler
public void testCheckAccessLeafQueueOnlyWithCapacityScheduler()
throws YarnException {
// make sure that NPE is avoided if there's no parent defined
testCheckAccess(null, "hadoop");
}
/**
 * Shared driver for the checkAccess tests: performs a tag-based placement
 * for user "hadoop" and asserts that the queue name passed to the
 * scheduler's ACL check is "parent.queue" when a parent is given, or just
 * the leaf queue name when parent is null.
 */
private void testCheckAccess(String parent, String queue)
throws YarnException {
enableApplicationTagPlacement(true, "hadoop");
String userIdTag = USER_ID_PREFIX + "hadoop";
setApplicationTags("tag1", userIdTag, "tag2");
PlacementManager placementMgr = mock(PlacementManager.class);
ApplicationPlacementContext appContext;
String expectedQueue;
if (parent == null) {
appContext = new ApplicationPlacementContext(queue);
expectedQueue = queue;
} else {
appContext = new ApplicationPlacementContext(queue, parent);
expectedQueue = parent + "." + queue;
}
when(placementMgr.placeApplication(asContext, "hadoop"))
.thenReturn(appContext);
appMonitor.getUserNameForPlacement("hadoop", asContext, placementMgr);
// Capture the queue name actually handed to the scheduler's ACL check.
ArgumentCaptor<String> queueNameCaptor =
ArgumentCaptor.forClass(String.class);
verify(scheduler).checkAccess(any(UserGroupInformation.class),
any(QueueACL.class), queueNameCaptor.capture());
assertEquals(expectedQueue, queueNameCaptor.getValue(),
"Expected access check for queue");
}
/**
 * Turns on application-tag-based placement with the given whitelist,
 * stubs the scheduler's SUBMIT_APPLICATIONS ACL check to the given
 * answer, and rebuilds the shared appMonitor with that configuration.
 */
private void enableApplicationTagPlacement(boolean userHasAccessToQueue,
String... whiteListedUsers) {
Configuration conf = new Configuration();
conf.setBoolean(YarnConfiguration
.APPLICATION_TAG_BASED_PLACEMENT_ENABLED, true);
conf.setStrings(YarnConfiguration
.APPLICATION_TAG_BASED_PLACEMENT_USER_WHITELIST, whiteListedUsers);
((RMContextImpl) rmContext).setYarnConfiguration(conf);
when(scheduler.checkAccess(any(UserGroupInformation.class),
eq(QueueACL.SUBMIT_APPLICATIONS), any(String.class)))
.thenReturn(userHasAccessToQueue);
ApplicationMasterService masterService =
new ApplicationMasterService(rmContext, scheduler);
// Rebuild the app manager so it picks up the new configuration.
appMonitor = new TestRMAppManager(rmContext,
new ClientToAMTokenSecretManagerInRM(),
scheduler, masterService,
new ApplicationACLsManager(conf), conf);
}
/**
 * Asserts that queue placement resolves to {@code expectedUser} when
 * {@code submittingUser} submits an app whose userid tag names
 * {@code userNameFromAppTag} and placement maps that user to
 * {@code queue}. (Parameter renamed from the original's typo
 * 'userNameFRomAppTag'; private method, callers pass positionally.)
 */
private void verifyPlacementUsername(final String queue,
    final String submittingUser, final String userNameFromAppTag,
    final String expectedUser)
    throws YarnException {
  PlacementManager placementMgr = mock(PlacementManager.class);
  ApplicationPlacementContext appContext
      = new ApplicationPlacementContext(queue);
  when(placementMgr.placeApplication(asContext, userNameFromAppTag))
      .thenReturn(appContext);
  String userNameForPlacement = appMonitor
      .getUserNameForPlacement(submittingUser, asContext, placementMgr);
  assertEquals(expectedUser, userNameForPlacement);
}
// Installs the given tags on the shared submission context; a TreeSet
// keeps them sorted and de-duplicated, exactly as the original did.
private void setApplicationTags(String... tags) {
  Set<String> sortedTags = new TreeSet<>();
  for (String tag : tags) {
    sortedTags.add(tag);
  }
  asContext.setApplicationTags(sortedTags);
}
private
|
TestDispatcher
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/property/access/internal/PropertyAccessStrategyMixedImpl.java
|
{
"start": 404,
"end": 817
}
|
class ____ implements PropertyAccessStrategy {
/**
* Singleton access
*/
public static final PropertyAccessStrategy INSTANCE = new PropertyAccessStrategyMixedImpl();
@Override
public PropertyAccess buildPropertyAccess(Class<?> containerJavaType, String propertyName, boolean setterRequired) {
return new PropertyAccessMixedImpl( this, containerJavaType, propertyName );
}
}
|
PropertyAccessStrategyMixedImpl
|
java
|
alibaba__nacos
|
api/src/main/java/com/alibaba/nacos/api/naming/remote/response/BatchInstanceResponse.java
|
{
"start": 781,
"end": 983
}
|
class ____ extends InstanceResponse {
public BatchInstanceResponse() {
super();
}
public BatchInstanceResponse(String type) {
super(type);
}
}
|
BatchInstanceResponse
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/HazelcastTopicEndpointBuilderFactory.java
|
{
"start": 6763,
"end": 12653
}
|
interface ____
extends
EndpointConsumerBuilder {
default HazelcastTopicEndpointConsumerBuilder basic() {
return (HazelcastTopicEndpointConsumerBuilder) this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer (advanced)
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default AdvancedHazelcastTopicEndpointConsumerBuilder bridgeErrorHandler(boolean bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: consumer (advanced)
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default AdvancedHazelcastTopicEndpointConsumerBuilder bridgeErrorHandler(String bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option is a: <code>org.apache.camel.spi.ExceptionHandler</code>
* type.
*
* Group: consumer (advanced)
*
* @param exceptionHandler the value to set
* @return the dsl builder
*/
default AdvancedHazelcastTopicEndpointConsumerBuilder exceptionHandler(org.apache.camel.spi.ExceptionHandler exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option will be converted to a
* <code>org.apache.camel.spi.ExceptionHandler</code> type.
*
* Group: consumer (advanced)
*
* @param exceptionHandler the value to set
* @return the dsl builder
*/
default AdvancedHazelcastTopicEndpointConsumerBuilder exceptionHandler(String exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option is a: <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*
* @param exchangePattern the value to set
* @return the dsl builder
*/
default AdvancedHazelcastTopicEndpointConsumerBuilder exchangePattern(org.apache.camel.ExchangePattern exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option will be converted to a
* <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*
* @param exchangePattern the value to set
* @return the dsl builder
*/
default AdvancedHazelcastTopicEndpointConsumerBuilder exchangePattern(String exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
}
/**
* Builder for endpoint producers for the Hazelcast Topic component.
*/
public
|
AdvancedHazelcastTopicEndpointConsumerBuilder
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.