language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/bootstrap/registry/classloading/LeakUtilitySelfTest.java | {
"start": 394,
"end": 1088
} | class ____ {
@Test
public void verifyLeakUtility() {
PhantomReferenceLeakDetector.assertActionNotLeaking( LeakUtilitySelfTest::notALeak );
}
@Test
public void verifyLeakUtilitySpotsLeak() {
assertThat( PhantomReferenceLeakDetector.verifyActionNotLeaking( LeakUtilitySelfTest::troubleSomeLeak, 2, 1 ) )
.isFalse();
}
private static SomeSpecialObject notALeak() {
return new SomeSpecialObject();
}
private static SomeSpecialObject troubleSomeLeak() {
final SomeSpecialObject specialThing = new SomeSpecialObject();
tl.set( specialThing );
return specialThing;
}
private static final ThreadLocal<SomeSpecialObject> tl = new ThreadLocal<>();
static | LeakUtilitySelfTest |
java | apache__flink | flink-table/flink-table-runtime/src/test/java/org/apache/flink/table/runtime/operators/join/deltajoin/StreamingDeltaJoinOperatorTest.java | {
"start": 4702,
"end": 65319
} | class ____ {
private static final int AEC_CAPACITY = 100;
private static final int CACHE_SIZE = 10;
// the data snapshot of the left/right table when joining
// <upsert key, data>
private static final HashMap<RowData, RowData> leftTableCurrentData = new HashMap<>();
private static final HashMap<RowData, RowData> rightTableCurrentData = new HashMap<>();
@Parameters(name = "EnableCache = {0}")
public static List<Boolean> parameters() {
return Arrays.asList(false, true);
}
@Parameter public boolean enableCache;
private KeyedTwoInputStreamOperatorTestHarness<RowData, RowData, RowData, RowData> testHarness;
private RowDataHarnessAssertor assertor;
private Optional<Throwable> latestException = Optional.empty();
@BeforeEach
public void beforeEach() throws Exception {
MyAsyncFunction.leftInvokeCount.set(0);
MyAsyncFunction.rightInvokeCount.set(0);
MyAsyncExecutionControllerDelegate.insertTableDataAfterEmit = true;
}
@AfterEach
public void afterEach() throws Exception {
if (assertor != null) {
testHarness.close();
}
leftTableCurrentData.clear();
rightTableCurrentData.clear();
latestException = Optional.empty();
MyAsyncFunction.clearExpectedThrownException();
}
@TestTemplate
void testJoinBothLogTables() throws Exception {
LogLogTableJoinTestSpec testSpec = LogLogTableJoinTestSpec.WITHOUT_FILTER_ON_TABLE;
initTestHarness(testSpec);
initAssertor(testSpec);
StreamRecord<RowData> leftRecord1 = insertRecord(100, true, "jklk1");
StreamRecord<RowData> leftRecord2 = insertRecord(100, false, "jklk2");
testHarness.processElement1(leftRecord1);
testHarness.processElement1(leftRecord2);
StreamRecord<RowData> leftRecord3 = insertRecord(200, true, "jklk1");
StreamRecord<RowData> leftRecord4 = insertRecord(200, false, "jklk2");
testHarness.processElement1(leftRecord3);
testHarness.processElement1(leftRecord4);
StreamRecord<RowData> rightRecord1 = insertRecord("jklk1", 300, true);
StreamRecord<RowData> rightRecord2 = insertRecord("jklk2", 300, false);
testHarness.processElement2(rightRecord1);
testHarness.processElement2(rightRecord2);
// mismatch
StreamRecord<RowData> rightRecord3 = insertRecord("unknown", 500, false);
testHarness.processElement2(rightRecord3);
StreamRecord<RowData> leftRecord5 = insertRecord(800, true, "jklk1");
StreamRecord<RowData> leftRecord6 = insertRecord(800, false, "jklk2");
testHarness.processElement1(leftRecord5);
testHarness.processElement1(leftRecord6);
StreamRecord<RowData> rightRecord4 = insertRecord("jklk1", 1000, true);
StreamRecord<RowData> rightRecord5 = insertRecord("jklk2", 1000, false);
testHarness.processElement2(rightRecord4);
testHarness.processElement2(rightRecord5);
waitAllDataProcessed();
final ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
expectedOutput.add(insertRecord(100, true, "jklk1", "jklk1", 300, true));
expectedOutput.add(insertRecord(200, true, "jklk1", "jklk1", 300, true));
expectedOutput.add(insertRecord(100, false, "jklk2", "jklk2", 300, false));
expectedOutput.add(insertRecord(200, false, "jklk2", "jklk2", 300, false));
expectedOutput.add(insertRecord(800, true, "jklk1", "jklk1", 300, true));
expectedOutput.add(insertRecord(800, false, "jklk2", "jklk2", 300, false));
expectedOutput.add(insertRecord(100, true, "jklk1", "jklk1", 1000, true));
expectedOutput.add(insertRecord(200, true, "jklk1", "jklk1", 1000, true));
expectedOutput.add(insertRecord(800, true, "jklk1", "jklk1", 1000, true));
expectedOutput.add(insertRecord(100, false, "jklk2", "jklk2", 1000, false));
expectedOutput.add(insertRecord(200, false, "jklk2", "jklk2", 1000, false));
expectedOutput.add(insertRecord(800, false, "jklk2", "jklk2", 1000, false));
assertor.assertOutputEqualsSorted(
"result mismatch", expectedOutput, testHarness.getOutput());
TableAsyncExecutionController<RowData, RowData, RowData> aec = unwrapAEC(testHarness);
assertThat(aec.getBlockingSize()).isEqualTo(0);
assertThat(aec.getInFlightSize()).isEqualTo(0);
assertThat(aec.getFinishSize()).isEqualTo(0);
DeltaJoinCache cache = unwrapCache(testHarness);
if (enableCache) {
RowType leftRowType = testSpec.getLeftInputRowType();
RowType rightRowType = testSpec.getRightInputRowType();
Map<RowData, Map<RowData, Object>> expectedLeftCacheData =
newHashMap(
binaryrow(true, "jklk1"),
newHashMap(
toBinary(leftRecord1.getValue(), leftRowType),
leftRecord1.getValue(),
toBinary(leftRecord3.getValue(), leftRowType),
leftRecord3.getValue(),
toBinary(leftRecord5.getValue(), leftRowType),
leftRecord5.getValue()),
binaryrow(false, "jklk2"),
newHashMap(
toBinary(leftRecord2.getValue(), leftRowType),
leftRecord2.getValue(),
toBinary(leftRecord4.getValue(), leftRowType),
leftRecord4.getValue(),
toBinary(leftRecord6.getValue(), leftRowType),
leftRecord6.getValue()),
binaryrow(false, "unknown"),
Collections.emptyMap());
Map<RowData, Map<RowData, Object>> expectedRightCacheData =
newHashMap(
binaryrow(true, "jklk1"),
newHashMap(
toBinary(rightRecord1.getValue(), rightRowType),
rightRecord1.getValue(),
toBinary(rightRecord4.getValue(), rightRowType),
rightRecord4.getValue()),
binaryrow(false, "jklk2"),
newHashMap(
toBinary(rightRecord2.getValue(), rightRowType),
rightRecord2.getValue(),
toBinary(rightRecord5.getValue(), rightRowType),
rightRecord5.getValue()));
verifyCacheData(cache, expectedLeftCacheData, expectedRightCacheData, 5, 2, 6, 4);
assertThat(MyAsyncFunction.leftInvokeCount.get()).isEqualTo(2);
assertThat(MyAsyncFunction.rightInvokeCount.get()).isEqualTo(3);
} else {
verifyCacheData(cache, Collections.emptyMap(), Collections.emptyMap(), 0, 0, 0, 0);
assertThat(MyAsyncFunction.leftInvokeCount.get()).isEqualTo(6);
assertThat(MyAsyncFunction.rightInvokeCount.get()).isEqualTo(5);
}
}
@TestTemplate
void testJoinBothLogTablesWhileFilterExistsOnBothTable() throws Exception {
LogLogTableJoinTestSpec testSpec = LogLogTableJoinTestSpec.WITH_FILTER_ON_TABLE;
initTestHarness(testSpec);
initAssertor(testSpec);
StreamRecord<RowData> leftRecord1 = insertRecord(100, true, "jklk1");
testHarness.processElement1(leftRecord1);
// will be filtered upstream
StreamRecord<RowData> leftRecord2 = insertRecord(100, false, "jklk2");
insertLeftTable(testSpec, leftRecord2);
StreamRecord<RowData> leftRecord3 = insertRecord(200, true, "jklk1");
testHarness.processElement1(leftRecord3);
// will be filtered upstream
StreamRecord<RowData> leftRecord4 = insertRecord(200, false, "jklk2");
insertLeftTable(testSpec, leftRecord4);
StreamRecord<RowData> rightRecord1 = insertRecord("jklk1", 300, true);
testHarness.processElement2(rightRecord1);
// will be filtered upstream
StreamRecord<RowData> rightRecord2 = insertRecord("jklk2", 300, false);
insertRightTable(testSpec, rightRecord2);
// mismatch
StreamRecord<RowData> leftRecord5 = insertRecord(200, true, "unknown1");
testHarness.processElement1(leftRecord5);
// mismatch and will be filtered upstream
StreamRecord<RowData> rightRecord3 = insertRecord("unknown2", 300, false);
insertRightTable(testSpec, rightRecord3);
StreamRecord<RowData> leftRecord6 = insertRecord(800, true, "jklk1");
testHarness.processElement1(leftRecord6);
// will be filtered upstream
StreamRecord<RowData> leftRecord7 = insertRecord(800, false, "jklk2");
insertLeftTable(testSpec, leftRecord7);
StreamRecord<RowData> rightRecord4 = insertRecord("jklk1", 1000, true);
testHarness.processElement2(rightRecord4);
// will be filtered upstream
StreamRecord<RowData> rightRecord5 = insertRecord("jklk2", 1000, false);
insertRightTable(testSpec, rightRecord5);
waitAllDataProcessed();
final ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
expectedOutput.add(insertRecord(100, true, "jklk1", "jklk1", 300, true));
expectedOutput.add(insertRecord(200, true, "jklk1", "jklk1", 300, true));
expectedOutput.add(insertRecord(800, true, "jklk1", "jklk1", 300, true));
expectedOutput.add(insertRecord(100, true, "jklk1", "jklk1", 1000, true));
expectedOutput.add(insertRecord(200, true, "jklk1", "jklk1", 1000, true));
expectedOutput.add(insertRecord(800, true, "jklk1", "jklk1", 1000, true));
assertor.assertOutputEqualsSorted(
"result mismatch", expectedOutput, testHarness.getOutput());
TableAsyncExecutionController<RowData, RowData, RowData> aec = unwrapAEC(testHarness);
assertThat(aec.getBlockingSize()).isEqualTo(0);
assertThat(aec.getInFlightSize()).isEqualTo(0);
assertThat(aec.getFinishSize()).isEqualTo(0);
DeltaJoinCache cache = unwrapCache(testHarness);
if (enableCache) {
RowType leftRowType = testSpec.getLeftInputRowType();
RowType rightRowType = testSpec.getRightInputRowType();
Map<RowData, Map<RowData, Object>> expectedLeftCacheData =
newHashMap(
binaryrow(true, "jklk1"),
newHashMap(
toBinary(leftRecord1.getValue(), leftRowType),
leftRecord1.getValue(),
toBinary(leftRecord3.getValue(), leftRowType),
leftRecord3.getValue(),
toBinary(leftRecord6.getValue(), leftRowType),
leftRecord6.getValue()));
Map<RowData, Map<RowData, Object>> expectedRightCacheData =
newHashMap(
binaryrow(true, "jklk1"),
newHashMap(
toBinary(rightRecord1.getValue(), rightRowType),
rightRecord1.getValue(),
toBinary(rightRecord4.getValue(), rightRowType),
rightRecord4.getValue()),
binaryrow(true, "unknown1"),
Collections.emptyMap());
verifyCacheData(cache, expectedLeftCacheData, expectedRightCacheData, 2, 1, 4, 2);
assertThat(MyAsyncFunction.leftInvokeCount.get()).isEqualTo(2);
assertThat(MyAsyncFunction.rightInvokeCount.get()).isEqualTo(1);
} else {
verifyCacheData(cache, Collections.emptyMap(), Collections.emptyMap(), 0, 0, 0, 0);
assertThat(MyAsyncFunction.leftInvokeCount.get()).isEqualTo(4);
assertThat(MyAsyncFunction.rightInvokeCount.get()).isEqualTo(2);
}
}
@TestTemplate
void testJoinBothPkTables() throws Exception {
PkPkTableJoinTestSpec testSpec = PkPkTableJoinTestSpec.WITHOUT_FILTER_ON_TABLE;
initTestHarness(testSpec);
initAssertor(testSpec);
StreamRecord<RowData> leftRecordK1V1 = insertRecord(100, true, "Tom");
StreamRecord<RowData> leftRecordK2V1 = insertRecord(101, false, "Tom");
// mismatch
StreamRecord<RowData> leftRecordK3V1 = insertRecord(1999, false, "Jim");
testHarness.processElement1(leftRecordK1V1);
testHarness.processElement1(leftRecordK2V1);
testHarness.processElement1(leftRecordK3V1);
waitAllDataProcessed();
final ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
assertor.assertOutputEqualsSorted(
"result mismatch", expectedOutput, testHarness.getOutput());
StreamRecord<RowData> rightRecordK1V1 = insertRecord("Tom", 200, true);
StreamRecord<RowData> rightRecordK2V1 = insertRecord("Tom", 201, false);
// mismatch
StreamRecord<RowData> rightRecordK3V1 = insertRecord("Sam", 2999, false);
testHarness.processElement2(rightRecordK1V1);
testHarness.processElement2(rightRecordK2V1);
testHarness.processElement2(rightRecordK3V1);
waitAllDataProcessed();
expectedOutput.add(insertRecord(100, true, "Tom", "Tom", 200, true));
expectedOutput.add(insertRecord(101, false, "Tom", "Tom", 200, true));
expectedOutput.add(insertRecord(100, true, "Tom", "Tom", 201, false));
expectedOutput.add(insertRecord(101, false, "Tom", "Tom", 201, false));
assertor.assertOutputEqualsSorted(
"result mismatch", expectedOutput, testHarness.getOutput());
StreamRecord<RowData> leftRecordK1V2 = updateAfterRecord(1000, true, "Tom");
testHarness.processElement1(leftRecordK1V2);
waitAllDataProcessed();
expectedOutput.add(updateAfterRecord(1000, true, "Tom", "Tom", 200, true));
expectedOutput.add(updateAfterRecord(1000, true, "Tom", "Tom", 201, false));
assertor.assertOutputEqualsSorted(
"result mismatch", expectedOutput, testHarness.getOutput());
StreamRecord<RowData> rightRecordK1V2 = updateAfterRecord("Tom", 2000, true);
StreamRecord<RowData> rightRecordK2V2 = updateAfterRecord("Tom", 2001, false);
testHarness.processElement2(rightRecordK1V2);
testHarness.processElement2(rightRecordK2V2);
waitAllDataProcessed();
expectedOutput.add(updateAfterRecord(1000, true, "Tom", "Tom", 2000, true));
expectedOutput.add(updateAfterRecord(101, false, "Tom", "Tom", 2000, true));
expectedOutput.add(updateAfterRecord(1000, true, "Tom", "Tom", 2001, false));
expectedOutput.add(updateAfterRecord(101, false, "Tom", "Tom", 2001, false));
assertor.assertOutputEqualsSorted(
"result mismatch", expectedOutput, testHarness.getOutput());
DeltaJoinCache cache = unwrapCache(testHarness);
if (enableCache) {
Map<RowData, Map<RowData, Object>> expectedLeftCacheData =
newHashMap(
binaryrow("Tom"),
newHashMap(
binaryrow(true, "Tom"),
leftRecordK1V2.getValue(),
binaryrow(false, "Tom"),
leftRecordK2V1.getValue()),
binaryrow("Sam"),
Collections.emptyMap());
Map<RowData, Map<RowData, Object>> expectedRightCacheData =
newHashMap(
binaryrow("Tom"),
newHashMap(
binaryrow("Tom", true),
rightRecordK1V2.getValue(),
binaryrow("Tom", false),
rightRecordK2V2.getValue()),
binaryrow("Jim"),
Collections.emptyMap());
verifyCacheData(cache, expectedLeftCacheData, expectedRightCacheData, 5, 3, 4, 2);
assertThat(MyAsyncFunction.leftInvokeCount.get()).isEqualTo(2);
assertThat(MyAsyncFunction.rightInvokeCount.get()).isEqualTo(2);
} else {
verifyCacheData(cache, Collections.emptyMap(), Collections.emptyMap(), 0, 0, 0, 0);
assertThat(MyAsyncFunction.leftInvokeCount.get()).isEqualTo(4);
assertThat(MyAsyncFunction.rightInvokeCount.get()).isEqualTo(5);
}
}
@TestTemplate
void testJoinBothPkTablesWhileFilterExistsOnBothTable() throws Exception {
PkPkTableJoinTestSpec testSpec = PkPkTableJoinTestSpec.WITH_FILTER_ON_TABLE;
initTestHarness(testSpec);
initAssertor(testSpec);
StreamRecord<RowData> leftRecordK1V1 = insertRecord(100, true, "Tom");
testHarness.processElement1(leftRecordK1V1);
// will be filtered upstream
StreamRecord<RowData> leftRecordK2V1 = insertRecord(101, false, "Tom");
insertLeftTable(testSpec, leftRecordK2V1);
// mismatch and will be filtered upstream
StreamRecord<RowData> leftRecordK3V1 = insertRecord(1999, false, "Jim");
insertLeftTable(testSpec, leftRecordK3V1);
waitAllDataProcessed();
final ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
assertor.assertOutputEqualsSorted(
"result mismatch", expectedOutput, testHarness.getOutput());
// will be filtered upstream
StreamRecord<RowData> rightRecordK1V1 = insertRecord("Tom", 200, true);
insertRightTable(testSpec, rightRecordK1V1);
StreamRecord<RowData> rightRecordK2V1 = insertRecord("Tom", 201, false);
testHarness.processElement2(rightRecordK2V1);
// mismatch
StreamRecord<RowData> rightRecordK3V1 = insertRecord("Sam", 2999, true);
testHarness.processElement2(rightRecordK3V1);
waitAllDataProcessed();
expectedOutput.add(insertRecord(100, true, "Tom", "Tom", 201, false));
assertor.assertOutputEqualsSorted(
"result mismatch", expectedOutput, testHarness.getOutput());
StreamRecord<RowData> leftRecordK1V2 = updateAfterRecord(1000, true, "Tom");
testHarness.processElement1(leftRecordK1V2);
waitAllDataProcessed();
expectedOutput.add(updateAfterRecord(1000, true, "Tom", "Tom", 201, false));
assertor.assertOutputEqualsSorted(
"result mismatch", expectedOutput, testHarness.getOutput());
// will be filtered upstream
StreamRecord<RowData> rightRecordK1V2 = updateAfterRecord("Tom", 2000, true);
insertRightTable(testSpec, rightRecordK1V2);
StreamRecord<RowData> rightRecordK2V2 = updateAfterRecord("Tom", 2001, false);
testHarness.processElement2(rightRecordK2V2);
waitAllDataProcessed();
expectedOutput.add(updateAfterRecord(1000, true, "Tom", "Tom", 2001, false));
assertor.assertOutputEqualsSorted(
"result mismatch", expectedOutput, testHarness.getOutput());
DeltaJoinCache cache = unwrapCache(testHarness);
if (enableCache) {
Map<RowData, Map<RowData, Object>> expectedLeftCacheData =
newHashMap(
binaryrow("Tom"),
newHashMap(binaryrow(true, "Tom"), leftRecordK1V2.getValue()),
binaryrow("Sam"),
Collections.emptyMap());
Map<RowData, Map<RowData, Object>> expectedRightCacheData =
newHashMap(
binaryrow("Tom"),
newHashMap(binaryrow("Tom", false), rightRecordK2V2.getValue()));
verifyCacheData(cache, expectedLeftCacheData, expectedRightCacheData, 3, 1, 2, 1);
assertThat(MyAsyncFunction.leftInvokeCount.get()).isEqualTo(1);
assertThat(MyAsyncFunction.rightInvokeCount.get()).isEqualTo(2);
} else {
verifyCacheData(cache, Collections.emptyMap(), Collections.emptyMap(), 0, 0, 0, 0);
assertThat(MyAsyncFunction.leftInvokeCount.get()).isEqualTo(2);
assertThat(MyAsyncFunction.rightInvokeCount.get()).isEqualTo(3);
}
}
@TestTemplate
void testBlockingWithSameJoinKey() throws Exception {
LogLogTableJoinTestSpec testSpec = LogLogTableJoinTestSpec.WITHOUT_FILTER_ON_TABLE;
initTestHarness(testSpec);
initAssertor(testSpec);
// block the async function
MyAsyncFunction.block();
// in flight
StreamRecord<RowData> leftRecord1 = insertRecord(100, true, "jklk1");
StreamRecord<RowData> leftRecord2 = insertRecord(100, false, "jklk2");
testHarness.processElement1(leftRecord1);
testHarness.processElement1(leftRecord2);
// blocked
StreamRecord<RowData> rightRecord1 = insertRecord("jklk1", 300, true);
StreamRecord<RowData> rightRecord2 = insertRecord("jklk2", 300, false);
testHarness.processElement2(rightRecord1);
testHarness.processElement2(rightRecord2);
// blocked
StreamRecord<RowData> leftRecord3 = insertRecord(200, true, "jklk1");
StreamRecord<RowData> leftRecord4 = insertRecord(200, false, "jklk2");
StreamRecord<RowData> leftRecord5 = insertRecord(201, false, "jklk2");
testHarness.processElement1(leftRecord3);
testHarness.processElement1(leftRecord4);
testHarness.processElement1(leftRecord5);
// in flight
StreamRecord<RowData> rightRecord3 = insertRecord("unknown", 500, false);
testHarness.processElement2(rightRecord3);
TableAsyncExecutionController<RowData, RowData, RowData> aec = unwrapAEC(testHarness);
assertThat(aec.getBlockingSize()).isEqualTo(5);
assertThat(aec.getInFlightSize()).isEqualTo(3);
assertThat(aec.getFinishSize()).isEqualTo(0);
RecordsBuffer<AecRecord<RowData, RowData>, RowData> recordsBuffer = aec.getRecordsBuffer();
assertThat(recordsBuffer.getActiveBuffer().size()).isEqualTo(3);
assertThat(recordsBuffer.getBlockingBuffer().size()).isEqualTo(2);
RowDataKeySelector leftJoinKeySelector = testSpec.getLeftJoinKeySelector();
RowDataKeySelector rightJoinKeySelector = testSpec.getRightJoinKeySelector();
RowData joinKey1 = leftJoinKeySelector.getKey(insertRecord(100, true, "jklk1").getValue());
RowData joinKey2 = leftJoinKeySelector.getKey(insertRecord(100, false, "jklk2").getValue());
RowData joinKey3 =
rightJoinKeySelector.getKey(insertRecord("unknown", 500, false).getValue());
assertThat(recordsBuffer.getActiveBuffer().get(joinKey1)).isNotNull();
assertThat(recordsBuffer.getActiveBuffer().get(joinKey2)).isNotNull();
assertThat(recordsBuffer.getActiveBuffer().get(joinKey3)).isNotNull();
assertThat(recordsBuffer.getBlockingBuffer().get(joinKey1)).isNotNull().hasSize(2);
assertThat(recordsBuffer.getBlockingBuffer().get(joinKey2)).isNotNull().hasSize(3);
assertThat(recordsBuffer.getBlockingBuffer().get(joinKey3)).isNull();
MyAsyncFunction.release();
waitAllDataProcessed();
final ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
expectedOutput.add(insertRecord(100, true, "jklk1", "jklk1", 300, true));
expectedOutput.add(insertRecord(100, false, "jklk2", "jklk2", 300, false));
expectedOutput.add(insertRecord(200, true, "jklk1", "jklk1", 300, true));
expectedOutput.add(insertRecord(200, false, "jklk2", "jklk2", 300, false));
expectedOutput.add(insertRecord(201, false, "jklk2", "jklk2", 300, false));
assertor.assertOutputEqualsSorted(
"result mismatch", expectedOutput, testHarness.getOutput());
assertThat(aec.getBlockingSize()).isEqualTo(0);
assertThat(aec.getInFlightSize()).isEqualTo(0);
assertThat(aec.getFinishSize()).isEqualTo(0);
assertThat(recordsBuffer.getActiveBuffer()).isEmpty();
assertThat(recordsBuffer.getBlockingBuffer()).isEmpty();
assertThat(recordsBuffer.getFinishedBuffer()).isEmpty();
DeltaJoinCache cache = unwrapCache(testHarness);
if (enableCache) {
RowType leftRowType = testSpec.getLeftInputRowType();
RowType rightRowType = testSpec.getRightInputRowType();
Map<RowData, Map<RowData, Object>> expectedLeftCacheData =
newHashMap(
binaryrow(true, "jklk1"),
newHashMap(
toBinary(leftRecord1.getValue(), leftRowType),
leftRecord1.getValue(),
toBinary(leftRecord3.getValue(), leftRowType),
leftRecord3.getValue()),
binaryrow(false, "jklk2"),
newHashMap(
toBinary(leftRecord2.getValue(), leftRowType),
leftRecord2.getValue(),
toBinary(leftRecord4.getValue(), leftRowType),
leftRecord4.getValue(),
toBinary(leftRecord5.getValue(), leftRowType),
leftRecord5.getValue()),
binaryrow(false, "unknown"),
Collections.emptyMap());
Map<RowData, Map<RowData, Object>> expectedRightCacheData =
newHashMap(
binaryrow(true, "jklk1"),
newHashMap(
toBinary(rightRecord1.getValue(), rightRowType),
rightRecord1.getValue()),
binaryrow(false, "jklk2"),
newHashMap(
toBinary(rightRecord2.getValue(), rightRowType),
rightRecord2.getValue()));
verifyCacheData(cache, expectedLeftCacheData, expectedRightCacheData, 3, 0, 5, 3);
assertThat(MyAsyncFunction.leftInvokeCount.get()).isEqualTo(2);
assertThat(MyAsyncFunction.rightInvokeCount.get()).isEqualTo(3);
} else {
verifyCacheData(cache, Collections.emptyMap(), Collections.emptyMap(), 0, 0, 0, 0);
assertThat(MyAsyncFunction.leftInvokeCount.get()).isEqualTo(5);
assertThat(MyAsyncFunction.rightInvokeCount.get()).isEqualTo(3);
}
}
/**
* This test is used to test the scenario where the right stream side joined out a record from
* the left table that has not been sent to the delta-join operator (maybe is in flight between
* source and delta-join).
*/
@TestTemplate
void testLogTableDataVisibleBeforeJoin() throws Exception {
LogLogTableJoinTestSpec testSpec = LogLogTableJoinTestSpec.WITHOUT_FILTER_ON_TABLE;
initTestHarness(testSpec);
initAssertor(testSpec);
MyAsyncExecutionControllerDelegate.insertTableDataAfterEmit = false;
// prepare the data first to mock all following requests were in flight between source and
// delta-join
final StreamRecord<RowData> leftRecord1 = insertRecord(100, true, "jklk1");
insertLeftTable(testSpec, leftRecord1);
final StreamRecord<RowData> leftRecord2 = insertRecord(200, true, "jklk1");
insertLeftTable(testSpec, leftRecord2);
final StreamRecord<RowData> rightRecord1 = insertRecord("jklk1", 300, true);
insertRightTable(testSpec, rightRecord1);
// mismatch
final StreamRecord<RowData> rightRecord2 = insertRecord("jklk2", 500, false);
insertRightTable(testSpec, rightRecord2);
final StreamRecord<RowData> leftRecord3 = insertRecord(800, true, "jklk1");
insertLeftTable(testSpec, leftRecord3);
final StreamRecord<RowData> rightRecord3 = insertRecord("jklk1", 1000, true);
insertRightTable(testSpec, rightRecord3);
testHarness.processElement1(leftRecord1);
testHarness.processElement1(leftRecord2);
testHarness.processElement2(rightRecord1);
testHarness.processElement2(rightRecord2);
testHarness.processElement1(leftRecord3);
testHarness.processElement2(rightRecord3);
waitAllDataProcessed();
final ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
// left record comes
// left can see 2 records in right log table
expectedOutput.add(insertRecord(100, true, "jklk1", "jklk1", 300, true));
expectedOutput.add(insertRecord(100, true, "jklk1", "jklk1", 1000, true));
// left record comes
// left can see 2 records in right log table
expectedOutput.add(insertRecord(200, true, "jklk1", "jklk1", 300, true));
expectedOutput.add(insertRecord(200, true, "jklk1", "jklk1", 1000, true));
// right record comes
// right can see 3 records in left log table
expectedOutput.add(insertRecord(100, true, "jklk1", "jklk1", 300, true));
expectedOutput.add(insertRecord(200, true, "jklk1", "jklk1", 300, true));
expectedOutput.add(insertRecord(800, true, "jklk1", "jklk1", 300, true));
// left record comes
// left can see 2 records in right log table
expectedOutput.add(insertRecord(800, true, "jklk1", "jklk1", 300, true));
expectedOutput.add(insertRecord(800, true, "jklk1", "jklk1", 1000, true));
// right record comes
// right can see 3 records in left log table
expectedOutput.add(insertRecord(100, true, "jklk1", "jklk1", 1000, true));
expectedOutput.add(insertRecord(200, true, "jklk1", "jklk1", 1000, true));
expectedOutput.add(insertRecord(800, true, "jklk1", "jklk1", 1000, true));
assertor.assertOutputEqualsSorted(
"result mismatch", expectedOutput, testHarness.getOutput());
TableAsyncExecutionController<RowData, RowData, RowData> aec = unwrapAEC(testHarness);
assertThat(aec.getBlockingSize()).isEqualTo(0);
assertThat(aec.getInFlightSize()).isEqualTo(0);
assertThat(aec.getFinishSize()).isEqualTo(0);
DeltaJoinCache cache = unwrapCache(testHarness);
if (enableCache) {
RowType leftRowType = testSpec.getLeftInputRowType();
RowType rightRowType = testSpec.getRightInputRowType();
Map<RowData, Map<RowData, Object>> expectedLeftCacheData =
newHashMap(
binaryrow(true, "jklk1"),
newHashMap(
toBinary(leftRecord1.getValue(), leftRowType),
leftRecord1.getValue(),
toBinary(leftRecord2.getValue(), leftRowType),
leftRecord2.getValue(),
toBinary(leftRecord3.getValue(), leftRowType),
leftRecord3.getValue()),
binaryrow(false, "jklk2"),
Collections.emptyMap());
Map<RowData, Map<RowData, Object>> expectedRightCacheData =
newHashMap(
binaryrow(true, "jklk1"),
newHashMap(
toBinary(rightRecord1.getValue(), rightRowType),
rightRecord1.getValue(),
toBinary(rightRecord3.getValue(), rightRowType),
rightRecord3.getValue()));
verifyCacheData(cache, expectedLeftCacheData, expectedRightCacheData, 3, 1, 3, 2);
assertThat(MyAsyncFunction.leftInvokeCount.get()).isEqualTo(1);
assertThat(MyAsyncFunction.rightInvokeCount.get()).isEqualTo(2);
} else {
verifyCacheData(cache, Collections.emptyMap(), Collections.emptyMap(), 0, 0, 0, 0);
assertThat(MyAsyncFunction.leftInvokeCount.get()).isEqualTo(3);
assertThat(MyAsyncFunction.rightInvokeCount.get()).isEqualTo(3);
}
}
/**
* This test is used to test the scenario where the right stream side joined out a record from
* the left table that has not been sent to the delta-join operator (maybe is in flight between
* source and delta-join).
*/
@TestTemplate
void testPkTableDataVisibleBeforeJoin() throws Exception {
PkPkTableJoinTestSpec testSpec = PkPkTableJoinTestSpec.WITHOUT_FILTER_ON_TABLE;
initTestHarness(testSpec);
initAssertor(testSpec);
MyAsyncExecutionControllerDelegate.insertTableDataAfterEmit = false;
// prepare the data first to mock all following requests were in flight between source and
// delta-join
final StreamRecord<RowData> leftRecordK1V1 = insertRecord(100, true, "Tom");
insertLeftTable(testSpec, leftRecordK1V1);
final StreamRecord<RowData> leftRecordK1V2 = updateAfterRecord(1000, true, "Tom");
insertLeftTable(testSpec, leftRecordK1V2);
final StreamRecord<RowData> leftRecordK2V1 = insertRecord(101, false, "Tom");
insertLeftTable(testSpec, leftRecordK2V1);
// mismatch
final StreamRecord<RowData> leftRecordK3V1 = insertRecord(101, false, "Jim");
insertLeftTable(testSpec, leftRecordK3V1);
final StreamRecord<RowData> leftRecordK3V2 = updateAfterRecord(1001, false, "Jim");
insertLeftTable(testSpec, leftRecordK3V2);
final StreamRecord<RowData> rightRecordK1V1 = insertRecord("Tom", 200, true);
insertRightTable(testSpec, rightRecordK1V1);
final StreamRecord<RowData> rightRecordK1V2 = updateAfterRecord("Tom", 2000, true);
insertRightTable(testSpec, rightRecordK1V2);
final StreamRecord<RowData> rightRecordK1V3 = updateAfterRecord("Tom", 20000, true);
insertRightTable(testSpec, rightRecordK1V3);
final StreamRecord<RowData> rightRecordK2V1 = insertRecord("Tom", 201, false);
insertRightTable(testSpec, rightRecordK2V1);
// mismatch
final StreamRecord<RowData> rightRecordK3V1 = insertRecord("Sam", 999, false);
insertRightTable(testSpec, rightRecordK3V1);
final ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
testHarness.processElement1(leftRecordK1V1);
expectedOutput.add(insertRecord(100, true, "Tom", "Tom", 20000, true));
expectedOutput.add(insertRecord(100, true, "Tom", "Tom", 201, false));
testHarness.processElement1(leftRecordK1V2);
expectedOutput.add(updateAfterRecord(1000, true, "Tom", "Tom", 20000, true));
expectedOutput.add(updateAfterRecord(1000, true, "Tom", "Tom", 201, false));
testHarness.processElement1(leftRecordK2V1);
expectedOutput.add(insertRecord(101, false, "Tom", "Tom", 20000, true));
expectedOutput.add(insertRecord(101, false, "Tom", "Tom", 201, false));
testHarness.processElement1(leftRecordK3V1);
testHarness.processElement1(leftRecordK3V2);
testHarness.processElement2(rightRecordK1V1);
expectedOutput.add(insertRecord(1000, true, "Tom", "Tom", 200, true));
expectedOutput.add(insertRecord(101, false, "Tom", "Tom", 200, true));
testHarness.processElement2(rightRecordK1V2);
expectedOutput.add(updateAfterRecord(1000, true, "Tom", "Tom", 2000, true));
expectedOutput.add(updateAfterRecord(101, false, "Tom", "Tom", 2000, true));
testHarness.processElement2(rightRecordK1V3);
expectedOutput.add(updateAfterRecord(1000, true, "Tom", "Tom", 20000, true));
expectedOutput.add(updateAfterRecord(101, false, "Tom", "Tom", 20000, true));
testHarness.processElement2(rightRecordK2V1);
expectedOutput.add(insertRecord(1000, true, "Tom", "Tom", 201, false));
expectedOutput.add(insertRecord(101, false, "Tom", "Tom", 201, false));
testHarness.processElement2(rightRecordK3V1);
waitAllDataProcessed();
assertor.assertOutputEqualsSorted(
"result mismatch", expectedOutput, testHarness.getOutput());
TableAsyncExecutionController<RowData, RowData, RowData> aec = unwrapAEC(testHarness);
assertThat(aec.getBlockingSize()).isEqualTo(0);
assertThat(aec.getInFlightSize()).isEqualTo(0);
assertThat(aec.getFinishSize()).isEqualTo(0);
DeltaJoinCache cache = unwrapCache(testHarness);
if (enableCache) {
Map<RowData, Map<RowData, Object>> expectedLeftCacheData =
newHashMap(
binaryrow("Tom"),
newHashMap(
binaryrow(true, "Tom"),
leftRecordK1V2.getValue(),
binaryrow(false, "Tom"),
leftRecordK2V1.getValue()),
binaryrow("Sam"),
Collections.emptyMap());
Map<RowData, Map<RowData, Object>> expectedRightCacheData =
newHashMap(
binaryrow("Tom"),
newHashMap(
binaryrow("Tom", true),
rightRecordK1V3.getValue(),
binaryrow("Tom", false),
rightRecordK2V1.getValue()),
binaryrow("Jim"),
Collections.emptyMap());
verifyCacheData(cache, expectedLeftCacheData, expectedRightCacheData, 5, 3, 5, 3);
assertThat(MyAsyncFunction.leftInvokeCount.get()).isEqualTo(2);
assertThat(MyAsyncFunction.rightInvokeCount.get()).isEqualTo(2);
} else {
verifyCacheData(cache, Collections.emptyMap(), Collections.emptyMap(), 0, 0, 0, 0);
assertThat(MyAsyncFunction.leftInvokeCount.get()).isEqualTo(5);
assertThat(MyAsyncFunction.rightInvokeCount.get()).isEqualTo(5);
}
}
/**
 * Verifies that records that are still in flight or blocked inside the async execution
 * controller (AEC) at checkpoint time survive a snapshot/restore cycle: after restoring, the
 * AEC must report the same buffer sizes, and replaying the pending records must produce the
 * expected join output and leave all buffers empty.
 */
@TestTemplate
void testCheckpointAndRestore() throws Exception {
LogLogTableJoinTestSpec testSpec = LogLogTableJoinTestSpec.WITHOUT_FILTER_ON_TABLE;
initTestHarness(testSpec);
initAssertor(testSpec);
// block the async function
MyAsyncFunction.block();
// in flight
StreamRecord<RowData> leftRecord1 = insertRecord(100, true, "jklk1");
testHarness.processElement1(leftRecord1);
// blocked
StreamRecord<RowData> rightRecord1 = insertRecord("jklk1", 300, true);
testHarness.processElement2(rightRecord1);
// blocked
StreamRecord<RowData> leftRecord2 = insertRecord(200, true, "jklk1");
testHarness.processElement1(leftRecord2);
// in flight
StreamRecord<RowData> rightRecord2 = insertRecord("unknown", 500, false);
testHarness.processElement2(rightRecord2);
// before the snapshot the AEC must hold 2 in-flight and 2 blocking records
// (one of the blocking records also sits in the active buffer)
TableAsyncExecutionController<RowData, RowData, RowData> aec = unwrapAEC(testHarness);
assertThat(aec.getBlockingSize()).isEqualTo(2);
assertThat(aec.getInFlightSize()).isEqualTo(2);
assertThat(aec.getFinishSize()).isEqualTo(0);
RecordsBuffer<AecRecord<RowData, RowData>, RowData> recordsBuffer = aec.getRecordsBuffer();
assertThat(recordsBuffer.getActiveBuffer().size()).isEqualTo(2);
assertThat(recordsBuffer.getBlockingBuffer().size()).isEqualTo(1);
// checkpointing
OperatorSubtaskState snapshot = testHarness.snapshot(0L, 0L);
// release async function to avoid timeout when closing
MyAsyncFunction.release();
testHarness.close();
// reset the static mock counters so the post-restore invoke counts only reflect the
// lookups done after restoring
MyAsyncFunction.leftInvokeCount.set(0);
MyAsyncFunction.rightInvokeCount.set(0);
MyAsyncFunction.block();
// restoring
testHarness = createDeltaJoinOperatorTestHarness(testSpec);
testHarness.setup();
StreamingDeltaJoinOperator operator = unwrapOperator(testHarness);
operator.setAsyncExecutionController(
new MyAsyncExecutionControllerDelegate(
testSpec, operator.getAsyncExecutionController()));
latestException = Optional.empty();
testHarness.initializeState(snapshot);
testHarness.open();
// the pending records must be fully restored from state: same sizes as before close
aec = unwrapAEC(testHarness);
assertThat(aec.getBlockingSize()).isEqualTo(2);
assertThat(aec.getInFlightSize()).isEqualTo(2);
assertThat(aec.getFinishSize()).isEqualTo(0);
recordsBuffer = aec.getRecordsBuffer();
assertThat(recordsBuffer.getActiveBuffer().size()).isEqualTo(2);
assertThat(recordsBuffer.getBlockingBuffer().size()).isEqualTo(1);
MyAsyncFunction.release();
waitAllDataProcessed();
// both left records join the single matching right record with key "jklk1";
// rightRecord2 ("unknown") finds no partner and emits nothing
final ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
expectedOutput.add(insertRecord(100, true, "jklk1", "jklk1", 300, true));
expectedOutput.add(insertRecord(200, true, "jklk1", "jklk1", 300, true));
assertor.assertOutputEqualsSorted(
"result mismatch", expectedOutput, testHarness.getOutput());
// all AEC buffers must drain completely once processing finishes
assertThat(aec.getBlockingSize()).isEqualTo(0);
assertThat(aec.getInFlightSize()).isEqualTo(0);
assertThat(aec.getFinishSize()).isEqualTo(0);
assertThat(recordsBuffer.getActiveBuffer()).isEmpty();
assertThat(recordsBuffer.getBlockingBuffer()).isEmpty();
assertThat(recordsBuffer.getFinishedBuffer()).isEmpty();
DeltaJoinCache cache = unwrapCache(testHarness);
if (enableCache) {
RowType leftRowType = testSpec.getLeftInputRowType();
RowType rightRowType = testSpec.getRightInputRowType();
Map<RowData, Map<RowData, Object>> expectedLeftCacheData =
newHashMap(
binaryrow(true, "jklk1"),
newHashMap(
toBinary(leftRecord1.getValue(), leftRowType),
toBinary(leftRecord1.getValue(), leftRowType),
toBinary(leftRecord2.getValue(), leftRowType),
toBinary(leftRecord2.getValue(), leftRowType)),
binaryrow(false, "unknown"),
Collections.emptyMap());
Map<RowData, Map<RowData, Object>> expectedRightCacheData =
newHashMap(
binaryrow(true, "jklk1"),
newHashMap(
toBinary(rightRecord1.getValue(), rightRowType),
toBinary(rightRecord1.getValue(), rightRowType)));
verifyCacheData(cache, expectedLeftCacheData, expectedRightCacheData, 2, 0, 2, 1);
// with the cache enabled some lookups are served from cache, so the async
// function is invoked fewer times than the number of requests
assertThat(MyAsyncFunction.leftInvokeCount.get()).isEqualTo(1);
assertThat(MyAsyncFunction.rightInvokeCount.get()).isEqualTo(2);
} else {
verifyCacheData(cache, Collections.emptyMap(), Collections.emptyMap(), 0, 0, 0, 0);
assertThat(MyAsyncFunction.leftInvokeCount.get()).isEqualTo(2);
assertThat(MyAsyncFunction.rightInvokeCount.get()).isEqualTo(2);
}
}
/**
 * Verifies that keyed state belonging to records that have already finished processing is
 * cleared when a checkpoint is taken: the number of keyed state entries shrinks from 2 (all
 * pending) to 1 (only the still-pending key) to 0 (everything processed) across successive
 * snapshots, while the join output stays correct.
 */
@TestTemplate
void testClearLegacyStateWhenCheckpointing() throws Exception {
LogLogTableJoinTestSpec testSpec = LogLogTableJoinTestSpec.WITHOUT_FILTER_ON_TABLE;
initTestHarness(testSpec);
initAssertor(testSpec);
// block the async function
MyAsyncFunction.block();
// in flight
StreamRecord<RowData> leftRecord1 = insertRecord(100, true, "jklk1");
testHarness.processElement1(leftRecord1);
// blocked
StreamRecord<RowData> rightRecord1 = insertRecord("jklk1", 300, true);
testHarness.processElement2(rightRecord1);
// blocked
StreamRecord<RowData> leftRecord2 = insertRecord(200, true, "jklk1");
testHarness.processElement1(leftRecord2);
// in flight
StreamRecord<RowData> rightRecord2 = insertRecord("unknown", 500, false);
testHarness.processElement2(rightRecord2);
// checkpointing
testHarness.snapshot(0L, 0L);
// both join keys ("jklk1" and "unknown") still have pending records in state
assertThat(testHarness.numKeyedStateEntries()).isEqualTo(2);
MyAsyncFunction.release();
waitAllDataProcessed();
MyAsyncFunction.block();
StreamRecord<RowData> leftRecord3 = insertRecord(700, true, "jklk1");
testHarness.processElement1(leftRecord3);
testHarness.snapshot(1L, 0L);
// the first four records finished, so only the state for the new pending record remains
assertThat(testHarness.numKeyedStateEntries()).isEqualTo(1);
MyAsyncFunction.release();
waitAllDataProcessed();
testHarness.snapshot(2L, 0L);
// nothing pending any more: the checkpoint must have cleared all legacy state
assertThat(testHarness.numKeyedStateEntries()).isEqualTo(0);
final ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
expectedOutput.add(insertRecord(100, true, "jklk1", "jklk1", 300, true));
expectedOutput.add(insertRecord(200, true, "jklk1", "jklk1", 300, true));
expectedOutput.add(insertRecord(700, true, "jklk1", "jklk1", 300, true));
assertor.assertOutputEqualsSorted(
"result mismatch", expectedOutput, testHarness.getOutput());
}
/**
 * Verifies that an exception thrown by the async lookup function is propagated to the caller
 * (via the external failure consumer installed in {@code initTestHarness}) with the expected
 * wrapping layers, rather than being swallowed.
 */
@TestTemplate
void testMeetExceptionWhenLookup() throws Exception {
LogLogTableJoinTestSpec testSpec = LogLogTableJoinTestSpec.WITHOUT_FILTER_ON_TABLE;
initTestHarness(testSpec);
initAssertor(testSpec);
// make the mock lookup function fail on its next invocation
Throwable expectedException = new IllegalStateException("Mock to fail");
MyAsyncFunction.setExpectedThrownException(expectedException);
StreamRecord<RowData> record = insertRecord(100, true, "jklk1");
testHarness.processElement1(record);
// expected wrapping chain:
// IllegalStateException(Failed to wait all data processed)
// +- Exception(Could not complete the stream element ...)
// +- RuntimeException(Failed to lookup table)
// +- Actual Exception
assertThatThrownBy(this::waitAllDataProcessed)
.cause()
.cause()
.cause()
.isEqualTo(expectedException);
}
/**
 * Creates, sets up and opens the test harness for the given spec.
 *
 * <p>Installs an external failure consumer that captures async errors into
 * {@link #latestException} instead of rethrowing (rethrowing would hang the mailbox loop), and
 * wraps the operator's async execution controller with the test delegate so the test can
 * observe/steer its behavior.
 */
private void initTestHarness(AbstractTestSpec testSpec) throws Exception {
testHarness = createDeltaJoinOperatorTestHarness(testSpec);
testHarness.setup();
testHarness.open();
StreamingDeltaJoinOperator operator = unwrapOperator(testHarness);
// set external failure cause consumer to prevent hang
testHarness
.getEnvironment()
.setExternalFailureCauseConsumer(
error -> {
latestException = Optional.of(error);
// DO NOT throw exception up again to avoid hang
});
operator.setAsyncExecutionController(
new MyAsyncExecutionControllerDelegate(
testSpec, operator.getAsyncExecutionController()));
}
/**
 * Builds the {@link #assertor} used to compare operator output.
 *
 * <p>Output records are ordered by the output upsert-key fields (each field compared via its
 * {@code toString()}), falling back to the whole row's {@code toString()} as a tie-breaker, so
 * that {@code assertOutputEqualsSorted} is deterministic regardless of async completion order.
 * NOTE(review): the comparator calls {@code Objects.requireNonNull} on key fields, i.e. it
 * assumes output upsert-key fields are never null.
 */
private void initAssertor(AbstractTestSpec testSpec) {
RowType outputRowType = testSpec.getOutputRowType();
assertor =
new RowDataHarnessAssertor(
outputRowType.getChildren().toArray(new LogicalType[0]),
// sort the result by the output upsert key
(o1, o2) -> {
for (int keyIndex : testSpec.getOutputFieldIndices()) {
LogicalType type = outputRowType.getChildren().get(keyIndex);
RowData.FieldGetter getter =
RowData.createFieldGetter(type, keyIndex);
int compareResult =
Objects.requireNonNull(getter.getFieldOrNull(o1))
.toString()
.compareTo(
Objects.requireNonNull(
getter.getFieldOrNull(o2))
.toString());
if (compareResult != 0) {
return compareResult;
}
}
// tie-break on the full row to get a total, stable order
return o1.toString().compareTo(o2.toString());
});
}
/**
 * Asserts the content and hit/request statistics of both sides of the delta-join cache by
 * delegating to the single-side overload, left side first.
 *
 * @param actualCache the cache under test
 * @param expectedLeftCacheData expected left-side entries, keyed by join key then upsert key
 * @param expectedRightCacheData expected right-side entries, keyed by join key then upsert key
 * @param expectedLeftCacheRequestCount expected number of lookups against the left cache
 * @param expectedLeftCacheHitCount expected number of left-cache hits
 * @param expectedRightCacheRequestCount expected number of lookups against the right cache
 * @param expectedRightCacheHitCount expected number of right-cache hits
 */
private void verifyCacheData(
DeltaJoinCache actualCache,
Map<RowData, Map<RowData, Object>> expectedLeftCacheData,
Map<RowData, Map<RowData, Object>> expectedRightCacheData,
long expectedLeftCacheRequestCount,
long expectedLeftCacheHitCount,
long expectedRightCacheRequestCount,
long expectedRightCacheHitCount) {
// assert left cache
verifyCacheData(
actualCache,
expectedLeftCacheData,
expectedLeftCacheRequestCount,
expectedLeftCacheHitCount,
true);
// assert right cache
verifyCacheData(
actualCache,
expectedRightCacheData,
expectedRightCacheRequestCount,
expectedRightCacheHitCount,
false);
}
/**
 * Asserts one side of the delta-join cache: entry data, key-level size, total entry count
 * across all keys, and the request/hit counters.
 *
 * @param actualCache the cache under test
 * @param expectedCacheData expected entries for this side, keyed by join key then upsert key
 * @param expectedCacheRequestCount expected number of lookups against this side
 * @param expectedCacheHitCount expected number of hits on this side
 * @param testLeftCache true to check the left cache, false for the right cache
 */
private void verifyCacheData(
DeltaJoinCache actualCache,
Map<RowData, Map<RowData, Object>> expectedCacheData,
long expectedCacheRequestCount,
long expectedCacheHitCount,
boolean testLeftCache) {
String errorPrefix = testLeftCache ? "left cache " : "right cache ";
Map<RowData, LinkedHashMap<RowData, Object>> actualCacheData =
testLeftCache
? actualCache.getLeftCache().asMap()
: actualCache.getRightCache().asMap();
assertThat(actualCacheData).as(errorPrefix + "data mismatch").isEqualTo(expectedCacheData);
// size() counts join keys ...
long actualCacheSize =
testLeftCache
? actualCache.getLeftCache().size()
: actualCache.getRightCache().size();
assertThat(actualCacheSize)
.as(errorPrefix + "size mismatch")
.isEqualTo(expectedCacheData.size());
// ... while the total size counts individual rows summed over all join keys
long actualTotalSize =
testLeftCache
? actualCache.getLeftTotalSize().get()
: actualCache.getRightTotalSize().get();
assertThat(actualTotalSize)
.as(errorPrefix + "total size mismatch")
.isEqualTo(expectedCacheData.values().stream().mapToInt(Map::size).sum());
long actualRequestCount =
testLeftCache
? actualCache.getLeftRequestCount().get()
: actualCache.getRightRequestCount().get();
assertThat(actualRequestCount)
.as(errorPrefix + "request count mismatch")
.isEqualTo(expectedCacheRequestCount);
long actualHitCount =
testLeftCache
? actualCache.getLeftHitCount().get()
: actualCache.getRightHitCount().get();
assertThat(actualHitCount)
.as(errorPrefix + "hit count mismatch")
.isEqualTo(expectedCacheHitCount);
}
/**
 * Flushes all inputs of the harness so every pending record is processed, then rethrows any
 * failure that was captured asynchronously by the external failure consumer.
 *
 * @throws IllegalStateException if the operator reported an async failure while draining
 */
private void waitAllDataProcessed() throws Exception {
    testHarness.endAllInputs();
    final Throwable asyncFailure = latestException.orElse(null);
    if (asyncFailure != null) {
        throw new IllegalStateException("Failed to wait all data processed", asyncFailure);
    }
}
/**
 * Builds a keyed two-input harness around a fully wired {@link StreamingDeltaJoinOperator}
 * for the given test spec: one {@link AsyncDeltaJoinRunner} per side (each backed by
 * {@link MyAsyncFunction}), a dedicated mailbox/mailbox-processor pair, and a test processing
 * time service. Parallelism is 1/1 with subtask index 0.
 */
private KeyedTwoInputStreamOperatorTestHarness<RowData, RowData, RowData, RowData>
createDeltaJoinOperatorTestHarness(AbstractTestSpec testSpec) throws Exception {
TaskMailbox mailbox = new TaskMailboxImpl();
MailboxProcessor mailboxProcessor =
new MailboxProcessor(controller -> {}, mailbox, StreamTaskActionExecutor.IMMEDIATE);
// runner triggered by the left input; its MyAsyncFunction is constructed with
// treatRightAsLookupTable=false (NOTE(review): flag name presumed from usage — the
// boolean passed here mirrors the one passed to the runner below; confirm against
// MyAsyncFunction's constructor)
DataStructureConverter<RowData, Object> leftFetcherConverter =
(DataStructureConverter)
DataStructureConverters.getConverter(
testSpec.getLeftTypeInfo().getDataType());
AsyncDeltaJoinRunner leftAsyncFunction =
new AsyncDeltaJoinRunner(
new GeneratedFunction<>("", "", new Object[0]) {
@Override
public MyAsyncFunction newInstance(ClassLoader classLoader) {
return new MyAsyncFunction(testSpec, false);
}
},
leftFetcherConverter,
new MockGeneratedFlatMapFunction(
testSpec.getFilterOnLeftTable().orElse(null)),
new GeneratedResultFutureWrapper<>(new TestingFetcherResultFuture()),
testSpec.getLeftTypeInfo().toRowSerializer(),
testSpec.getLeftJoinKeySelector(),
testSpec.getLeftUpsertKeySelector(),
testSpec.getRightJoinKeySelector(),
testSpec.getRightUpsertKeySelector(),
AEC_CAPACITY,
false,
enableCache);
// runner triggered by the right input; identical wiring except for the converter,
// serializer, filter and the side flag (true)
DataStructureConverter<RowData, Object> rightFetcherConverter =
(DataStructureConverter)
DataStructureConverters.getConverter(
testSpec.getRightTypeInfo().getDataType());
AsyncDeltaJoinRunner rightAsyncFunction =
new AsyncDeltaJoinRunner(
new GeneratedFunction<>("", "", new Object[0]) {
@Override
public MyAsyncFunction newInstance(ClassLoader classLoader) {
return new MyAsyncFunction(testSpec, true);
}
},
rightFetcherConverter,
new MockGeneratedFlatMapFunction(
testSpec.getFilterOnRightTable().orElse(null)),
new GeneratedResultFutureWrapper<>(new TestingFetcherResultFuture()),
testSpec.getRightTypeInfo().toRowSerializer(),
testSpec.getLeftJoinKeySelector(),
testSpec.getLeftUpsertKeySelector(),
testSpec.getRightJoinKeySelector(),
testSpec.getRightUpsertKeySelector(),
AEC_CAPACITY,
true,
enableCache);
InternalTypeInfo<RowData> joinKeyTypeInfo =
testSpec.getLeftJoinKeySelector().getProducedType();
// NOTE(review): the operator takes the right-triggered runner first, then the
// left-triggered one; -1L is presumably "no timeout" — confirm against the
// StreamingDeltaJoinOperator constructor
StreamingDeltaJoinOperator operator =
new StreamingDeltaJoinOperator(
rightAsyncFunction,
leftAsyncFunction,
testSpec.getLeftJoinKeySelector(),
testSpec.getRightJoinKeySelector(),
-1L,
AEC_CAPACITY,
new TestProcessingTimeService(),
new MailboxExecutorImpl(
mailbox, 0, StreamTaskActionExecutor.IMMEDIATE, mailboxProcessor),
CACHE_SIZE,
CACHE_SIZE,
testSpec.getLeftInputRowType(),
testSpec.getRightInputRowType());
return new KeyedTwoInputStreamOperatorTestHarness<>(
operator,
testSpec.getLeftJoinKeySelector(),
testSpec.getRightJoinKeySelector(),
joinKeyTypeInfo,
1,
1,
0,
testSpec.getLeftTypeInfo().toSerializer(),
testSpec.getRightTypeInfo().toSerializer());
}
/** Returns the async execution controller owned by the operator under test. */
private TableAsyncExecutionController<RowData, RowData, RowData> unwrapAEC(
        KeyedTwoInputStreamOperatorTestHarness<RowData, RowData, RowData, RowData>
                testHarness) {
    final StreamingDeltaJoinOperator operator = unwrapOperator(testHarness);
    return operator.getAsyncExecutionController();
}
/** Casts the harness's operator to the concrete {@link StreamingDeltaJoinOperator} type. */
private StreamingDeltaJoinOperator unwrapOperator(
KeyedTwoInputStreamOperatorTestHarness<RowData, RowData, RowData, RowData>
testHarness) {
return (StreamingDeltaJoinOperator) testHarness.getOperator();
}
/**
 * Returns the {@link DeltaJoinCache} shared by the left- and right-triggered runners,
 * asserting along the way that both runners really do hold the very same cache instance.
 */
private DeltaJoinCache unwrapCache(
        KeyedTwoInputStreamOperatorTestHarness<RowData, RowData, RowData, RowData>
                testHarness) {
    DeltaJoinCache cacheInLeftRunner =
            unwrapOperator(testHarness).getLeftTriggeredUserFunction().getCache();
    DeltaJoinCache cacheInRightRunner =
            unwrapOperator(testHarness).getRightTriggeredUserFunction().getCache();
    // the object ref must be the same; isSameAs checks reference identity and, unlike
    // assertThat(a == b).isTrue(), prints both objects when the assertion fails
    assertThat(cacheInLeftRunner).isSameAs(cacheInRightRunner);
    return cacheInLeftRunner;
}
/** Upserts the record into the mocked left-table snapshot used by the async lookups. */
private void insertLeftTable(AbstractTestSpec testSpec, StreamRecord<RowData> record) {
insertTableData(testSpec, record, true);
}
/** Upserts the record into the mocked right-table snapshot used by the async lookups. */
private void insertRightTable(AbstractTestSpec testSpec, StreamRecord<RowData> record) {
insertTableData(testSpec, record, false);
}
/**
 * Upserts the record into the mocked left or right table snapshot, keyed by that side's
 * upsert key. The snapshot maps are shared with the async lookup function, so mutation is
 * synchronized on the map itself.
 *
 * @param testSpec spec providing the per-side upsert-key selectors
 * @param record the row to upsert
 * @param insertLeftTable true to write the left-table snapshot, false for the right
 * @throws IllegalStateException if extracting the upsert key fails
 */
private static void insertTableData(
        AbstractTestSpec testSpec, StreamRecord<RowData> record, boolean insertLeftTable) {
    RowData rowData = record.getValue();
    // select the target snapshot once instead of duplicating the whole put logic per side
    Map<RowData, RowData> tableData =
            insertLeftTable ? leftTableCurrentData : rightTableCurrentData;
    try {
        // key extraction does not touch the shared map, so it can stay outside the lock
        RowData upsertKey =
                insertLeftTable
                        ? testSpec.getLeftUpsertKeySelector().getKey(rowData)
                        : testSpec.getRightUpsertKeySelector().getKey(rowData);
        synchronized (tableData) {
            tableData.put(upsertKey, rowData);
        }
    } catch (Exception e) {
        throw new IllegalStateException("Failed to insert table data", e);
    }
}
/**
 * Builds a map from alternating key/value arguments:
 * {@code newHashMap(k1, v1, k2, v2, ...)}. Keys must be {@link RowData} and unique.
 *
 * @param data alternating keys and values; length must be even
 * @param <T> value type (unchecked — the caller guarantees the values match)
 * @return a mutable {@link HashMap} containing the given entries
 */
@SuppressWarnings("unchecked")
private static <T> Map<RowData, T> newHashMap(Object... data) {
    Preconditions.checkArgument(
            data.length % 2 == 0, "Expected an even number of key/value arguments");
    Map<RowData, T> map = new HashMap<>();
    for (int i = 0; i < data.length; i = i + 2) {
        Preconditions.checkArgument(
                data[i] instanceof RowData, "The key of the map must be RowData");
        RowData key = (RowData) data[i];
        Preconditions.checkArgument(!map.containsKey(key), "Duplicate key");
        // value cast cannot be checked at runtime due to type erasure
        map.put(key, (T) data[i + 1]);
    }
    return map;
}
/**
 * Converts the given row into its binary representation by reading every field through a
 * type-aware field getter and rebuilding the row with {@code binaryrow}.
 */
private RowData toBinary(RowData row, RowType rowType) {
    final Object[] fieldValues = new Object[row.getArity()];
    for (int pos = 0; pos < fieldValues.length; pos++) {
        final RowData.FieldGetter getter =
                RowData.createFieldGetter(rowType.getTypeAt(pos), pos);
        fieldValues[pos] = getter.getFieldOrNull(row);
    }
    return binaryrow(fieldValues);
}
/** An async function used for test. */
public static | StreamingDeltaJoinOperatorTest |
java | assertj__assertj-core | assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/period/PeriodAssert_hasMonths_Test.java | {
"start": 1088,
"end": 1868
} | class ____ {
@Test
void should_pass_if_period_has_expected_Months() {
// GIVEN
Period period = Period.ofMonths(10);
// WHEN/THEN
then(period).hasMonths(10);
}
@Test
void should_fail_when_period_is_null() {
// GIVEN
Period period = null;
// WHEN
final AssertionError code = expectAssertionError(() -> assertThat(period).hasMonths(5));
// THEN
then(code).hasMessage(actualIsNull());
}
@Test
void should_fail_if_period_does_not_have_expected_Months() {
// GIVEN
Period period = Period.ofMonths(10);
// WHEN
final AssertionError code = expectAssertionError(() -> assertThat(period).hasMonths(15));
// THEN
then(code).hasMessage(shouldHaveMonths(period, 10, 15).create());
}
}
| PeriodAssert_hasMonths_Test |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/selection/methodgenerics/plain/SourceTypeIsTypeVarMapper.java | {
"start": 327,
"end": 642
} | interface ____ {
SourceTypeIsTypeVarMapper INSTANCE = Mappers.getMapper( SourceTypeIsTypeVarMapper.class );
Target sourceToTarget(Source source);
@SuppressWarnings("unchecked")
default <T> GenericWrapper<T> map( T in ) {
return new GenericWrapper<>( in );
}
| SourceTypeIsTypeVarMapper |
java | google__dagger | dagger-compiler/main/java/dagger/internal/codegen/bindinggraphvalidation/DuplicateBindingsValidator.java | {
"start": 3101,
"end": 14533
} | class ____ extends ValidationBindingGraphPlugin {
private static final Comparator<Binding> BY_LENGTH_OF_COMPONENT_PATH =
comparing(binding -> binding.componentPath().components().size());
private final DeclarationFormatter declarationFormatter;
private final CompilerOptions compilerOptions;
@Inject
DuplicateBindingsValidator(
DeclarationFormatter declarationFormatter, CompilerOptions compilerOptions) {
this.declarationFormatter = declarationFormatter;
this.compilerOptions = compilerOptions;
}
@Override
public String pluginName() {
return "Dagger/DuplicateBindings";
}
@Override
public void visitGraph(BindingGraph bindingGraph, DiagnosticReporter diagnosticReporter) {
// If two unrelated subcomponents have the same duplicate bindings only because they install the
// same two modules, then fixing the error in one subcomponent will uncover the second
// subcomponent to fix.
// TODO(ronshapiro): Explore ways to address such underreporting without overreporting.
Set<ImmutableSet<BindingWithoutComponent>> reportedDuplicateBindingSets = new HashSet<>();
duplicateBindingSets(bindingGraph)
.forEach(
duplicateBindings -> {
// Only report each set of duplicate bindings once, ignoring the installed component.
if (reportedDuplicateBindingSets.add(duplicateBindings.keySet())) {
reportErrors(duplicateBindings, bindingGraph, diagnosticReporter);
}
});
}
/**
* Returns sets of duplicate bindings. Bindings are duplicates if they bind the same key and are
* visible from the same component. Two bindings that differ only in the component that owns them
* are not considered to be duplicates, because that means the same binding was "copied" down to a
* descendant component because it depends on local multibindings or optional bindings. Hence each
* "set" is represented as a multimap from binding element (ignoring component path) to binding.
*/
private ImmutableSet<ImmutableSetMultimap<BindingWithoutComponent, Binding>> duplicateBindingSets(
BindingGraph bindingGraph) {
return groupBindingsByKey(bindingGraph).stream()
.flatMap(bindings -> mutuallyVisibleSubsets(bindings).stream())
.map(BindingWithoutComponent::index)
.filter(duplicates -> duplicates.keySet().size() > 1)
.collect(toImmutableSet());
}
private ImmutableSet<ImmutableSet<Binding>> groupBindingsByKey(BindingGraph bindingGraph) {
return valueSetsForEachKey(
bindingGraph.bindings().stream()
.filter(binding -> !binding.kind().equals(MEMBERS_INJECTION))
.collect(
toImmutableSetMultimap(
binding ->
// If the "ignoreProvisionKeyWildcards" flag is enabled then ignore the
// variance in the key types here so that Foo<Bar> and Foo<? extends Bar>
// get grouped into the same set (i.e. as duplicates).
KeyWithTypeEquivalence.forKey(
binding.key(),
compilerOptions.ignoreProvisionKeyWildcards()
? XTypes.equivalenceIgnoringVariance()
: XTypes.equivalence()),
binding -> binding)));
}
/**
* Returns the subsets of the input set that contain bindings that are all visible from the same
* component. A binding is visible from its component and all its descendants.
*/
private static ImmutableSet<ImmutableSet<Binding>> mutuallyVisibleSubsets(
Set<Binding> duplicateBindings) {
ImmutableListMultimap<ComponentPath, Binding> bindingsByComponentPath =
Multimaps.index(duplicateBindings, Binding::componentPath);
ImmutableSetMultimap.Builder<ComponentPath, Binding> mutuallyVisibleBindings =
ImmutableSetMultimap.builder();
bindingsByComponentPath
.asMap()
.forEach(
(componentPath, bindings) -> {
mutuallyVisibleBindings.putAll(componentPath, bindings);
for (ComponentPath ancestor = componentPath; !ancestor.atRoot(); ) {
ancestor = ancestor.parent();
ImmutableList<Binding> bindingsInAncestor = bindingsByComponentPath.get(ancestor);
mutuallyVisibleBindings.putAll(componentPath, bindingsInAncestor);
}
});
return valueSetsForEachKey(mutuallyVisibleBindings.build());
}
private void reportErrors(
ImmutableSetMultimap<BindingWithoutComponent, Binding> duplicateBindings,
BindingGraph bindingGraph,
DiagnosticReporter diagnosticReporter) {
if (explicitBindingConfictsWithInject(duplicateBindings.keySet())) {
compilerOptions
.explicitBindingConflictsWithInjectValidationType()
.diagnosticKind()
.ifPresent(
diagnosticKind ->
reportExplicitBindingConflictsWithInject(
duplicateBindings.values(),
diagnosticReporter,
diagnosticKind,
bindingGraph.rootComponentNode()));
return;
}
reportDuplicateBindings(duplicateBindings.values(), bindingGraph, diagnosticReporter);
}
/**
* Returns {@code true} if the bindings contain one {@code @Inject} binding and one that isn't.
*/
private static boolean explicitBindingConfictsWithInject(
ImmutableSet<BindingWithoutComponent> duplicateBindings) {
ImmutableMultiset<BindingKind> bindingKinds =
Multimaps.index(duplicateBindings, BindingWithoutComponent::bindingKind).keys();
return bindingKinds.count(INJECTION) == 1 && bindingKinds.size() == 2;
}
private void reportExplicitBindingConflictsWithInject(
ImmutableCollection<Binding> duplicateBindings,
DiagnosticReporter diagnosticReporter,
Diagnostic.Kind diagnosticKind,
ComponentNode rootComponent) {
Binding injectBinding = rootmostBindingWithKind(k -> k.equals(INJECTION), duplicateBindings);
Binding explicitBinding = rootmostBindingWithKind(k -> !k.equals(INJECTION), duplicateBindings);
StringBuilder message =
new StringBuilder()
.append(explicitBinding.key())
.append(" is bound multiple times:")
.append(formatWithComponentPath(injectBinding))
.append(formatWithComponentPath(explicitBinding))
.append(
"\nThis condition was never validated before, and will soon be an error. "
+ "See https://dagger.dev/conflicting-inject.");
if (compilerOptions.experimentalDaggerErrorMessages()) {
diagnosticReporter.reportComponent(diagnosticKind, rootComponent, message.toString());
} else {
diagnosticReporter.reportBinding(diagnosticKind, explicitBinding, message.toString());
}
}
private String formatWithComponentPath(Binding binding) {
return String.format(
"\n%s%s [%s]",
Formatter.INDENT,
declarationFormatter.format(((BindingNode) binding).delegate()),
binding.componentPath());
}
private void reportDuplicateBindings(
ImmutableCollection<Binding> duplicateBindings,
BindingGraph graph,
DiagnosticReporter diagnosticReporter) {
StringBuilder message = new StringBuilder();
Binding oneBinding = duplicateBindings.asList().get(0);
ImmutableSet<Binding> multibindings =
duplicateBindings.stream()
.filter(binding -> binding.kind().isMultibinding())
.collect(toImmutableSet());
if (multibindings.isEmpty()) {
message.append(oneBinding.key()).append(" is bound multiple times:");
formatDeclarations(message, 2, declarations(graph, duplicateBindings));
} else {
Binding oneMultibinding = multibindings.asList().get(0);
message.append(oneMultibinding.key()).append(" has incompatible bindings or declarations:\n");
message
.append(INDENT)
.append(multibindingTypeString(oneMultibinding))
.append(" bindings and declarations:");
formatDeclarations(message, 2, declarations(graph, multibindings));
ImmutableSet<Declaration> uniqueBindingDeclarations =
duplicateBindings.stream()
.filter(binding -> !binding.kind().isMultibinding())
.flatMap(binding -> declarations(graph, binding).stream())
.filter(declaration -> !(declaration instanceof MultibindingDeclaration))
.collect(toImmutableSet());
if (!uniqueBindingDeclarations.isEmpty()) {
message.append('\n').append(INDENT).append("Unique bindings and declarations:");
formatDeclarations(message, 2, uniqueBindingDeclarations);
}
}
if (compilerOptions.experimentalDaggerErrorMessages()) {
message.append(String.format("\n%sin component: [%s]", INDENT, oneBinding.componentPath()));
diagnosticReporter.reportComponent(ERROR, graph.rootComponentNode(), message.toString());
} else {
diagnosticReporter.reportBinding(ERROR, oneBinding, message.toString());
}
}
private void formatDeclarations(
StringBuilder builder,
int indentLevel,
Iterable<? extends Declaration> bindingDeclarations) {
declarationFormatter.formatIndentedList(
builder, ImmutableList.copyOf(bindingDeclarations), indentLevel);
}
private ImmutableSet<Declaration> declarations(
BindingGraph graph, ImmutableCollection<Binding> bindings) {
return bindings.stream()
.flatMap(binding -> declarations(graph, binding).stream())
.distinct()
.sorted(Declaration.COMPARATOR)
.collect(toImmutableSet());
}
private ImmutableSet<Declaration> declarations(BindingGraph graph, Binding binding) {
ImmutableSet.Builder<Declaration> declarations = ImmutableSet.builder();
BindingNode bindingNode = (BindingNode) binding;
bindingNode.associatedDeclarations().forEach(declarations::add);
if (declarationFormatter.canFormat(bindingNode.delegate())) {
declarations.add(bindingNode.delegate());
} else {
graph.requestedBindings(binding).stream()
.flatMap(requestedBinding -> declarations(graph, requestedBinding).stream())
.forEach(declarations::add);
}
return declarations.build();
}
private String multibindingTypeString(Binding multibinding) {
switch (multibinding.kind()) {
case MULTIBOUND_MAP:
return "Map";
case MULTIBOUND_SET:
return "Set";
default:
throw new AssertionError(multibinding);
}
}
private static <E> ImmutableSet<ImmutableSet<E>> valueSetsForEachKey(Multimap<?, E> multimap) {
return multimap.asMap().values().stream().map(ImmutableSet::copyOf).collect(toImmutableSet());
}
/** Returns the binding of the given kind that is closest to the root component. */
private static Binding rootmostBindingWithKind(
Predicate<BindingKind> bindingKindPredicate, ImmutableCollection<Binding> bindings) {
return bindings.stream()
.filter(b -> bindingKindPredicate.test(b.kind()))
.min(BY_LENGTH_OF_COMPONENT_PATH)
.get();
}
/** The identifying information about a binding, excluding its {@link Binding#componentPath()}. */
@AutoValue
abstract static | DuplicateBindingsValidator |
java | apache__maven | its/core-it-suite/src/test/resources/mng-7836-alternative-pom-syntax/maven-hocon-extension/src/main/java/org/apache/maven/hocon/HoconModelReader.java | {
"start": 1439,
"end": 2377
} | class ____ implements ModelParser {
@Override
public Optional<Source> locate(Path path) {
Path pom = Files.isDirectory(path) ? path.resolve("pom.hocon") : path;
return Files.isRegularFile(pom) ? Optional.of(new PathSource(pom)) : Optional.empty();
}
@Override
public Model parse(Source source, Map<String, ?> map) throws ModelParserException {
Config config;
if (source.getPath() != null) {
config = ConfigFactory.parseFile(source.getPath().toFile());
} else {
try (InputStream input = source.openStream()) {
config = ConfigFactory.parseReader(new InputStreamReader(input, StandardCharsets.UTF_8));
} catch (IOException e) {
throw new ModelParserException("Unable to parse: " + source.getLocation(), e);
}
}
return new HoconReader().parseModel(config.root());
}
}
| HoconModelReader |
java | apache__camel | core/camel-api/src/main/java/org/apache/camel/Converter.java | {
"start": 2752,
"end": 3431
} | class ____ found errors. But in those cases its okay as the component is opted-out.
* <p/>
* Important this configuration must be set on the class-level, not on the method.
*/
boolean ignoreOnLoadError() default false;
/**
* Whether to let the Camel compiler plugin to generate java source code for fast loading of the type converters.
* <p/>
* Important this configuration must be set on the class-level, not on the method.
*/
boolean generateLoader() default false;
/**
* Whether to let the Camel compiler plugin to generate java source code for fast loading of the type converters,
* bulked together into a single | not |
java | apache__camel | dsl/camel-kamelet-main/src/main/java/org/apache/camel/main/download/JavaKnownImportsDownloader.java | {
"start": 1345,
"end": 3643
} | class ____ implements CompilePreProcessor {
private static final Pattern IMPORT_PATTERN = Pattern.compile(
"^import\\s+([a-zA-Z][.\\w]*)\\s*;", Pattern.MULTILINE);
private final CamelCatalog catalog = new DefaultCamelCatalog();
private final DependencyDownloader downloader;
private final KnownDependenciesResolver knownDependenciesResolver;
public JavaKnownImportsDownloader(CamelContext camelContext, KnownDependenciesResolver knownDependenciesResolver) {
this.downloader = camelContext.hasService(DependencyDownloader.class);
this.knownDependenciesResolver = knownDependenciesResolver;
camelContext.getRegistry().bind("JavaJoorKnownImportsDownloader", this);
}
@Override
public void preCompile(CamelContext camelContext, String name, String code) throws Exception {
List<String> imports = determineImports(code);
for (String imp : imports) {
// attempt known dependency resolver first
MavenGav gav = knownDependenciesResolver.mavenGavForClass(imp);
if (gav != null) {
downloadLoader(gav.getGroupId(), gav.getArtifactId(), gav.getVersion());
} else {
// is this a known bean then we can determine the dependency
for (String n : catalog.findBeansNames()) {
PojoBeanModel m = catalog.pojoBeanModel(n);
if (m != null && imp.equals(m.getJavaType())) {
downloadLoader(m.getGroupId(), m.getArtifactId(), m.getVersion());
break;
}
}
}
}
}
private void downloadLoader(String groupId, String artifactId, String version) {
if (!downloader.alreadyOnClasspath(groupId, artifactId, version)) {
downloader.downloadDependency(groupId, artifactId, version);
}
}
private static List<String> determineImports(String content) {
List<String> answer = new ArrayList<>();
final Matcher matcher = IMPORT_PATTERN.matcher(content);
while (matcher.find()) {
String imp = matcher.group(1);
imp = imp.trim();
answer.add(imp);
}
return answer;
}
}
| JavaKnownImportsDownloader |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/double2darray/Double2DArrayAssert_hasDimensions_Test.java | {
"start": 928,
"end": 1279
} | class ____ extends Double2DArrayAssertBaseTest {
@Override
protected Double2DArrayAssert invoke_api_method() {
return assertions.hasDimensions(1, 2);
}
@Override
protected void verify_internal_effects() {
verify(arrays).assertHasDimensions(getInfo(assertions), getActual(assertions), 1, 2);
}
}
| Double2DArrayAssert_hasDimensions_Test |
java | spring-projects__spring-framework | spring-orm/src/test/java/org/springframework/orm/jpa/hibernate/beans/NoDefinitionInSpringContextTestBean.java | {
"start": 687,
"end": 945
} | class ____ extends TestBean {
@SuppressWarnings("unused")
private NoDefinitionInSpringContextTestBean() {
throw new AssertionError("Unexpected call to the default constructor. " +
"Is Spring trying to instantiate this | NoDefinitionInSpringContextTestBean |
java | elastic__elasticsearch | build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/warnings/InjectWarnings.java | {
"start": 1177,
"end": 3135
} | class ____ extends FeatureInjector implements RestTestTransformByParentObject {
private static JsonNodeFactory jsonNodeFactory = JsonNodeFactory.withExactBigDecimals(false);
private final List<String> warnings;
private final String testName;
private final boolean isRegex;
/**
* @param warnings The warnings to inject
* @param testName The testName to inject
*/
public InjectWarnings(List<String> warnings, String testName) {
this(false, warnings, testName);
}
/**
* @param isRegex true is should inject the regex variant of warning
* @param warnings The warnings to inject
* @param testName The testName to inject
*/
public InjectWarnings(boolean isRegex, List<String> warnings, String testName) {
this.isRegex = isRegex;
this.warnings = warnings;
this.testName = Objects.requireNonNull(testName, "inject warnings is only supported for named tests");
}
@Override
public void transformTest(ObjectNode doNodeParent) {
ObjectNode doNodeValue = (ObjectNode) doNodeParent.get(getKeyToFind());
ArrayNode arrayWarnings = (ArrayNode) doNodeValue.get(getSkipFeatureName());
if (arrayWarnings == null) {
arrayWarnings = new ArrayNode(jsonNodeFactory);
doNodeValue.set(getSkipFeatureName(), arrayWarnings);
}
warnings.forEach(arrayWarnings::add);
}
@Override
@Internal
public String getKeyToFind() {
return "do";
}
@Override
@Input
public String getSkipFeatureName() {
return isRegex ? "warnings_regex" : "warnings";
}
@Override
public boolean shouldApply(RestTestContext testContext) {
return testName.equals(testContext.testName());
}
@Input
public List<String> getWarnings() {
return warnings;
}
@Input
public String getTestName() {
return testName;
}
}
| InjectWarnings |
java | junit-team__junit5 | platform-tests/src/test/java/org/junit/platform/engine/TestDescriptorTests.java | {
"start": 581,
"end": 1008
} | class ____ {
@Test
void isRootWithoutParent() {
TestDescriptor root = new TestDescriptorStub(UniqueId.root("root", "id"), "id");
assertTrue(root.isRoot());
}
@Test
void isRootWithParent() {
TestDescriptor child = new TestDescriptorStub(UniqueId.root("child", "child"), "child");
child.setParent(new TestDescriptorStub(UniqueId.root("root", "root"), "root"));
assertFalse(child.isRoot());
}
}
| TestDescriptorTests |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java | {
"start": 66899,
"end": 67720
} | class ____
implements FileSystemAccess.FileSystemExecutor<Void> {
private Path path;
public FSUnSetErasureCodingPolicy(String path) {
this.path = new Path(path);
}
@Override
public Void execute(FileSystem fs) throws IOException {
if (fs instanceof DistributedFileSystem) {
DistributedFileSystem dfs = (DistributedFileSystem) fs;
dfs.unsetErasureCodingPolicy(path);
} else {
throw new UnsupportedOperationException("unsetErasureCodingPolicy is "
+ "not supported for HttpFs on " + fs.getClass()
+ ". Please check your fs.defaultFS configuration");
}
return null;
}
}
/**
* Executor that performs a satisfyStoragePolicy operation.
*/
@InterfaceAudience.Private
public static | FSUnSetErasureCodingPolicy |
java | micronaut-projects__micronaut-core | http/src/main/java/io/micronaut/http/HttpTypeConverterRegistrar.java | {
"start": 1085,
"end": 2388
} | class ____ implements TypeConverterRegistrar {
@Override
public void register(MutableConversionService conversionService) {
conversionService.addConverter(CharSequence.class, MediaType.class, (object, targetType, context) -> {
if (StringUtils.isEmpty(object)) {
return Optional.empty();
} else {
try {
return Optional.of(MediaType.of(object.toString()));
} catch (IllegalArgumentException e) {
context.reject(e);
return Optional.empty();
}
}
});
Map<CharSequence, Optional<SameSite>> conversions = new ConcurrentHashMap<>();
conversionService.addConverter(CharSequence.class, SameSite.class, (object, targetType, context) -> {
if (object == null) {
return Optional.empty();
}
return conversions.computeIfAbsent(object, charSequence -> {
try {
return Optional.of(SameSite.valueOf(StringUtils.capitalize(object.toString().toLowerCase())));
} catch (IllegalArgumentException e) {
return Optional.empty();
}
});
});
}
}
| HttpTypeConverterRegistrar |
java | bumptech__glide | library/test/src/test/java/com/bumptech/glide/load/resource/SimpleResourceTest.java | {
"start": 234,
"end": 874
} | class ____ {
private Anything object;
private SimpleResource<?> resource;
@Before
public void setUp() {
object = new Anything();
resource = new SimpleResource<>(object);
}
@Test
public void testReturnsGivenObject() {
assertEquals(object, resource.get());
}
@Test
public void testReturnsGivenObjectMultipleTimes() {
assertEquals(object, resource.get());
assertEquals(object, resource.get());
assertEquals(object, resource.get());
}
@Test(expected = NullPointerException.class)
public void testThrowsIfGivenNullData() {
new SimpleResource<>(null);
}
private static | SimpleResourceTest |
java | apache__camel | components/camel-thymeleaf/src/test/java/org/apache/camel/component/thymeleaf/ThymeleafConcurrentTest.java | {
"start": 1139,
"end": 2439
} | class ____ extends ThymeleafAbstractBaseTest {
@Test
public void testNoConcurrentProducers() throws Exception {
doSendMessages(1, 1);
}
@Test
public void testConcurrentProducers() throws Exception {
doSendMessages(10, 5);
}
private void doSendMessages(int files, int poolSize) throws Exception {
MockEndpoint mock = getMockEndpoint(MOCK_RESULT);
mock.expectedMessageCount(files);
mock.assertNoDuplicates(body());
mock.message(0).body().contains("Bye");
ExecutorService executor = Executors.newFixedThreadPool(poolSize);
for (int i = 0; i < files; i++) {
final int index = i;
executor.submit(() -> {
template.sendBody(DIRECT_START, "Hello " + index);
return null;
});
}
MockEndpoint.assertIsSatisfied(context);
executor.shutdownNow();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from(DIRECT_START)
.to("thymeleaf:org/apache/camel/component/thymeleaf/concurrent.txt")
.to(MOCK_RESULT);
}
};
}
}
| ThymeleafConcurrentTest |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/RandomUtils.java | {
"start": 10779,
"end": 17289
} | class ____ be
* used as {@code RandomUtils.nextBytes(5);}.
* <p>
* This constructor is public to permit tools that require a JavaBean instance to operate.
* </p>
*
* @deprecated TODO Make private in 4.0.
*/
@Deprecated
public RandomUtils() {
this(SECURE_STRONG_SUPPLIER);
}
private RandomUtils(final Supplier<Random> random) {
this.random = random;
}
Random random() {
return random.get();
}
/**
* Generates a random boolean value.
*
* @return the random boolean.
* @since 3.16.0
*/
public boolean randomBoolean() {
return random().nextBoolean();
}
/**
* Generates an array of random bytes.
*
* @param count the size of the returned array.
* @return the random byte array.
* @throws IllegalArgumentException if {@code count} is negative
* @since 3.16.0
*/
public byte[] randomBytes(final int count) {
Validate.isTrue(count >= 0, "Count cannot be negative.");
final byte[] result = new byte[count];
random().nextBytes(result);
return result;
}
/**
* Generates a random double between 0 (inclusive) and Double.MAX_VALUE (exclusive).
*
* @return the random double.
* @see #randomDouble(double, double)
* @since 3.16.0
*/
public double randomDouble() {
return randomDouble(0, Double.MAX_VALUE);
}
/**
* Generates a random double within the specified range.
*
* @param startInclusive the smallest value that can be returned, must be non-negative.
* @param endExclusive the upper bound (not included).
* @throws IllegalArgumentException if {@code startInclusive > endExclusive} or if {@code startInclusive} is negative.
* @return the random double
* @since 3.16.0
*/
public double randomDouble(final double startInclusive, final double endExclusive) {
Validate.isTrue(endExclusive >= startInclusive, "Start value must be smaller or equal to end value.");
Validate.isTrue(startInclusive >= 0, "Both range values must be non-negative.");
if (startInclusive == endExclusive) {
return startInclusive;
}
return startInclusive + (endExclusive - startInclusive) * random().nextDouble();
}
/**
* Generates a random float between 0 (inclusive) and Float.MAX_VALUE (exclusive).
*
* @return the random float.
* @see #randomFloat(float, float)
* @since 3.16.0
*/
public float randomFloat() {
return randomFloat(0, Float.MAX_VALUE);
}
/**
* Generates a random float within the specified range.
*
* @param startInclusive the smallest value that can be returned, must be non-negative.
* @param endExclusive the upper bound (not included).
* @throws IllegalArgumentException if {@code startInclusive > endExclusive} or if {@code startInclusive} is negative.
* @return the random float.
*/
public float randomFloat(final float startInclusive, final float endExclusive) {
Validate.isTrue(endExclusive >= startInclusive, "Start value must be smaller or equal to end value.");
Validate.isTrue(startInclusive >= 0, "Both range values must be non-negative.");
if (startInclusive == endExclusive) {
return startInclusive;
}
return startInclusive + (endExclusive - startInclusive) * random().nextFloat();
}
/**
* Generates a random int between 0 (inclusive) and Integer.MAX_VALUE (exclusive).
*
* @return the random integer.
* @see #randomInt(int, int)
* @since 3.16.0
*/
public int randomInt() {
return randomInt(0, Integer.MAX_VALUE);
}
/**
* Generates a random integer within the specified range.
*
* @param startInclusive the smallest value that can be returned, must be non-negative.
* @param endExclusive the upper bound (not included).
* @throws IllegalArgumentException if {@code startInclusive > endExclusive} or if {@code startInclusive} is negative.
* @return the random integer.
* @since 3.16.0
*/
public int randomInt(final int startInclusive, final int endExclusive) {
Validate.isTrue(endExclusive >= startInclusive, "Start value must be smaller or equal to end value.");
Validate.isTrue(startInclusive >= 0, "Both range values must be non-negative.");
if (startInclusive == endExclusive) {
return startInclusive;
}
return startInclusive + random().nextInt(endExclusive - startInclusive);
}
/**
* Generates a random long between 0 (inclusive) and Long.MAX_VALUE (exclusive).
*
* @return the random long.
* @see #randomLong(long, long)
* @since 3.16.0
*/
public long randomLong() {
return randomLong(Long.MAX_VALUE);
}
/**
* Generates a {@code long} value between 0 (inclusive) and the specified value (exclusive).
*
* @param n Bound on the random number to be returned. Must be positive.
* @return a random {@code long} value between 0 (inclusive) and {@code n} (exclusive).
*/
private long randomLong(final long n) {
// Extracted from o.a.c.rng.core.BaseProvider.nextLong(long)
long bits;
long val;
do {
bits = random().nextLong() >>> 1;
val = bits % n;
} while (bits - val + n - 1 < 0);
return val;
}
/**
* Generates a random long within the specified range.
*
* @param startInclusive the smallest value that can be returned, must be non-negative.
* @param endExclusive the upper bound (not included).
* @throws IllegalArgumentException if {@code startInclusive > endExclusive} or if {@code startInclusive} is negative.
* @return the random long.
* @since 3.16.0
*/
public long randomLong(final long startInclusive, final long endExclusive) {
Validate.isTrue(endExclusive >= startInclusive, "Start value must be smaller or equal to end value.");
Validate.isTrue(startInclusive >= 0, "Both range values must be non-negative.");
if (startInclusive == endExclusive) {
return startInclusive;
}
return startInclusive + randomLong(endExclusive - startInclusive);
}
@Override
public String toString() {
return "RandomUtils [random=" + random() + "]";
}
}
| should |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/prefetch/S3ARemoteObjectReader.java | {
"start": 1620,
"end": 6043
} | class ____ implements Closeable {
private static final Logger LOG = LoggerFactory.getLogger(
S3ARemoteObjectReader.class);
/** We read from the underlying input stream in blocks of this size. */
private static final int READ_BUFFER_SIZE = 64 * 1024;
/** The S3 file to read. */
private final S3ARemoteObject remoteObject;
/** Set to true by close(). */
private volatile boolean closed;
private final S3AInputStreamStatistics streamStatistics;
/**
* Constructs an instance of {@link S3ARemoteObjectReader}.
*
* @param remoteObject The S3 file to read.
*
* @throws IllegalArgumentException if remoteObject is null.
*/
public S3ARemoteObjectReader(S3ARemoteObject remoteObject) {
Validate.checkNotNull(remoteObject, "remoteObject");
this.remoteObject = remoteObject;
this.streamStatistics = this.remoteObject.getStatistics();
}
/**
* Stars reading at {@code offset} and reads upto {@code size} bytes into {@code buffer}.
*
* @param buffer the buffer into which data is returned
* @param offset the absolute offset into the underlying file where reading starts.
* @param size the number of bytes to be read.
*
* @return number of bytes actually read.
* @throws IOException if there is an error reading from the file.
*
* @throws IllegalArgumentException if buffer is null.
* @throws IllegalArgumentException if offset is outside of the range [0, file size].
* @throws IllegalArgumentException if size is zero or negative.
*/
public int read(ByteBuffer buffer, long offset, int size) throws IOException {
Validate.checkNotNull(buffer, "buffer");
Validate.checkWithinRange(offset, "offset", 0, this.remoteObject.size());
Validate.checkPositiveInteger(size, "size");
if (this.closed) {
return -1;
}
int reqSize = (int) Math.min(size, this.remoteObject.size() - offset);
return readOneBlockWithRetries(buffer, offset, reqSize);
}
@Override
public void close() {
this.closed = true;
}
private int readOneBlockWithRetries(ByteBuffer buffer, long offset, int size)
throws IOException {
this.streamStatistics.readOperationStarted(offset, size);
Invoker invoker = this.remoteObject.getReadInvoker();
int invokerResponse =
invoker.retry("read", this.remoteObject.getPath(), true,
trackDurationOfOperation(streamStatistics,
STREAM_READ_REMOTE_BLOCK_READ, () -> {
try {
this.readOneBlock(buffer, offset, size);
} catch (EOFException e) {
// the base implementation swallows EOFs.
return -1;
} catch (SocketTimeoutException e) {
throw e;
} catch (IOException e) {
this.remoteObject.getStatistics().readException();
throw e;
}
return 0;
}));
int numBytesRead = buffer.position();
buffer.limit(numBytesRead);
this.remoteObject.getStatistics()
.readOperationCompleted(size, numBytesRead);
if (invokerResponse < 0) {
return invokerResponse;
} else {
return numBytesRead;
}
}
private void readOneBlock(ByteBuffer buffer, long offset, int size)
throws IOException {
int readSize = Math.min(size, buffer.remaining());
if (readSize == 0) {
return;
}
ResponseInputStream<GetObjectResponse> inputStream =
remoteObject.openForRead(offset, readSize);
int numRemainingBytes = readSize;
byte[] bytes = new byte[READ_BUFFER_SIZE];
int numBytesToRead;
int numBytes;
try {
do {
numBytesToRead = Math.min(READ_BUFFER_SIZE, numRemainingBytes);
numBytes = inputStream.read(bytes, 0, numBytesToRead);
if (numBytes < 0) {
String message = String.format(
"Unexpected end of stream: buffer[%d], readSize = %d, numRemainingBytes = %d",
buffer.capacity(), readSize, numRemainingBytes);
throw new EOFException(message);
}
if (numBytes > 0) {
buffer.put(bytes, 0, numBytes);
numRemainingBytes -= numBytes;
}
}
while (!this.closed && (numRemainingBytes > 0));
} finally {
remoteObject.close(inputStream, numRemainingBytes);
}
}
}
| S3ARemoteObjectReader |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/tuple/PropertyFactory.java | {
"start": 1255,
"end": 3410
} | class ____ {
private PropertyFactory() {
}
/**
* Generates the attribute representation of the identifier for a given entity mapping.
*
* @param mappedEntity The mapping definition of the entity.
* @param generator The identifier value generator to use for this identifier.
*
* @return The appropriate IdentifierProperty definition.
*/
public static IdentifierProperty buildIdentifierAttribute(
PersistentClass mappedEntity,
Generator generator) {
Type type = mappedEntity.getIdentifier().getType();
Property property = mappedEntity.getIdentifierProperty();
if ( property == null ) {
// this is a virtual id property...
return new IdentifierProperty(
type,
mappedEntity.hasEmbeddedIdentifier(),
mappedEntity.hasIdentifierMapper(),
generator
);
}
else {
return new IdentifierProperty(
property.getName(),
type,
mappedEntity.hasEmbeddedIdentifier(),
generator
);
}
}
/**
* Generates a VersionProperty representation for an entity mapping given its
* version mapping Property.
*
* @param property The version mapping Property.
* @param lazyAvailable Is property lazy loading currently available.
*
* @return The appropriate VersionProperty definition.
*/
public static VersionProperty buildVersionProperty(
EntityPersister persister,
SessionFactoryImplementor sessionFactory,
int attributeNumber,
Property property,
boolean lazyAvailable) {
boolean lazy = lazyAvailable && property.isLazy();
return new VersionProperty(
persister,
sessionFactory,
attributeNumber,
property.getName(),
property.getValue().getType(),
new BaselineAttributeInformation.Builder()
.setLazy( lazy )
.setInsertable( property.isInsertable() )
.setUpdateable( property.isUpdatable() )
.setNullable( property.isOptional() )
.setDirtyCheckable( property.isUpdatable() && !lazy )
.setVersionable( property.isOptimisticLocked() )
.setCascadeStyle( property.getCascadeStyle() )
.setOnDeleteAction( property.getOnDeleteAction() )
.createInformation()
);
}
public | PropertyFactory |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/GuardedByCheckerTest.java | {
"start": 18502,
"end": 18587
} | class ____ {
final Object lock = new Object();
}
| A |
java | google__dagger | javatests/dagger/hilt/android/processor/internal/GeneratorsTest.java | {
"start": 2071,
"end": 3023
} | class ____ extends Hilt_MyActivity {",
" public MyActivity(",
" String supportNullable,",
" String androidxNullable,",
" String javaxNullable) {",
" super(supportNullable, androidxNullable, javaxNullable);",
" }",
"}");
HiltCompilerTests.hiltCompiler(baseActivity, myActivity)
.compile(
subject -> {
subject.hasErrorCount(0);
StringSubject stringSubject =
subject.generatedSourceFileWithPath("test/Hilt_MyActivity.java");
stringSubject.contains("package test;");
stringSubject.contains("import androidx.annotation.Nullable;");
stringSubject.contains(
JOINER.join(
"@Generated(\"dagger.hilt.android.processor.internal.androidentrypoint.ActivityGenerator\")",
"abstract | MyActivity |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/IgnoredPureGetterTest.java | {
"start": 4371,
"end": 4562
} | class ____ {
void test() {
A.of(1).foo();
}
}
""")
.addOutputLines(
"B.java",
"""
| B |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/imports/innerclasses/InnerClassMapper.java | {
"start": 573,
"end": 910
} | interface ____ {
InnerClassMapper INSTANCE = Mappers.getMapper( InnerClassMapper.class );
TargetWithInnerClass sourceToTarget(SourceWithInnerClass source);
TargetInnerClass innerSourceToInnerTarget(SourceInnerClass source);
TargetInnerInnerClass innerSourceToInnerInnerTarget(SourceInnerClass source);
}
| InnerClassMapper |
java | apache__maven | its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng8230CIFriendlyTest.java | {
"start": 1177,
"end": 7492
} | class ____ extends AbstractMavenIntegrationTestCase {
private static final String PROPERTIES = "target/expression.properties";
/**
* Verify that CI friendly version work when using project properties
*
* @throws Exception in case of failure
*/
@Test
void testitCiFriendlyWithProjectProperties() throws Exception {
File testDir = extractResources("/mng-8230-ci-friendly-and-gav");
File basedir = new File(testDir, "cif-with-project-props");
Verifier verifier = newVerifier(basedir.getAbsolutePath());
verifier.addCliArgument("-Dexpression.outputFile=" + new File(basedir, PROPERTIES).getPath());
verifier.addCliArgument("-Dexpression.expressions=project/version");
verifier.addCliArgument("org.apache.maven.its.plugins:maven-it-plugin-expression:2.1-SNAPSHOT:eval");
verifier.execute();
verifier.verifyErrorFreeLog();
verifier.verifyFilePresent(PROPERTIES);
Properties props = verifier.loadProperties(PROPERTIES);
assertEquals(props.getProperty("project.version"), "1.0-SNAPSHOT");
}
/**
* Verify that CI friendly version work when using project properties
*
* @throws Exception in case of failure
*/
@Test
void testitCiFriendlyWithProjectPropertiesOverride() throws Exception {
File testDir = extractResources("/mng-8230-ci-friendly-and-gav");
File basedir = new File(testDir, "cif-with-project-props");
Verifier verifier = newVerifier(basedir.getAbsolutePath());
verifier.addCliArgument("-Dexpression.outputFile=" + new File(basedir, PROPERTIES).getPath());
verifier.addCliArgument("-Dexpression.expressions=project/version");
verifier.addCliArgument("org.apache.maven.its.plugins:maven-it-plugin-expression:2.1-SNAPSHOT:eval");
verifier.addCliArgument("-Dci-version=1.1-SNAPSHOT");
verifier.execute();
verifier.verifyErrorFreeLog();
verifier.verifyFilePresent(PROPERTIES);
Properties props = verifier.loadProperties(PROPERTIES);
assertEquals(props.getProperty("project.version"), "1.1-SNAPSHOT");
}
/**
* Verify that CI friendly version work when using user properties
*
* @throws Exception in case of failure
*/
@Test
void testitCiFriendlyWithUserProperties() throws Exception {
File testDir = extractResources("/mng-8230-ci-friendly-and-gav");
File basedir = new File(testDir, "cif-with-user-props");
Verifier verifier = newVerifier(basedir.getAbsolutePath());
verifier.addCliArgument("-Dexpression.outputFile=" + new File(basedir, PROPERTIES).getPath());
verifier.addCliArgument("-Dexpression.expressions=project/version");
verifier.addCliArgument("org.apache.maven.its.plugins:maven-it-plugin-expression:2.1-SNAPSHOT:eval");
verifier.addCliArgument("-Dci-version=1.1-SNAPSHOT");
verifier.execute();
verifier.verifyErrorFreeLog();
verifier.verifyFilePresent(PROPERTIES);
Properties props = verifier.loadProperties(PROPERTIES);
assertEquals(props.getProperty("project.version"), "1.1-SNAPSHOT");
}
/**
* Verify that CI friendly version fails if the properties are not given
*
* @throws Exception in case of failure
*/
@Test
void testitCiFriendlyWithUserPropertiesNotGiven() throws Exception {
File testDir = extractResources("/mng-8230-ci-friendly-and-gav");
File basedir = new File(testDir, "cif-with-user-props");
Verifier verifier = newVerifier(basedir.getAbsolutePath());
verifier.addCliArgument("-Dexpression.outputFile=" + new File(basedir, PROPERTIES).getPath());
verifier.addCliArgument("-Dexpression.expressions=project/version");
verifier.addCliArgument("org.apache.maven.its.plugins:maven-it-plugin-expression:2.1-SNAPSHOT:eval");
try {
verifier.execute();
fail("Expected failure");
} catch (VerificationException e) {
assertTrue(
e.getMessage()
.contains(
"'version' contains an expression but should be a constant. @ myGroup:parent:${ci-version}"),
e.getMessage());
}
}
@Test
void testitExpressionInGroupId() throws Exception {
File testDir = extractResources("/mng-8230-ci-friendly-and-gav");
File basedir = new File(testDir, "exp-in-groupid");
Verifier verifier = newVerifier(basedir.getAbsolutePath());
verifier.addCliArgument("-Dexpression.outputFile=" + new File(basedir, PROPERTIES).getPath());
verifier.addCliArgument("-Dexpression.expressions=project/version");
verifier.addCliArgument("org.apache.maven.its.plugins:maven-it-plugin-expression:2.1-SNAPSHOT:eval");
try {
verifier.execute();
fail("Expected failure");
} catch (VerificationException e) {
assertTrue(
e.getMessage()
.contains(
"'groupId' contains an expression but should be a constant. @ ${foo}:myArtifact:1.0-SNAPSHOT"),
e.getMessage());
}
}
@Test
void testitExpressionInArtifactId() throws Exception {
File testDir = extractResources("/mng-8230-ci-friendly-and-gav");
File basedir = new File(testDir, "exp-in-artifactid");
Verifier verifier = newVerifier(basedir.getAbsolutePath());
verifier.addCliArgument("-Dexpression.outputFile=" + new File(basedir, PROPERTIES).getPath());
verifier.addCliArgument("-Dexpression.expressions=project/version");
verifier.addCliArgument("org.apache.maven.its.plugins:maven-it-plugin-expression:2.1-SNAPSHOT:eval");
try {
verifier.execute();
fail("Expected failure");
} catch (VerificationException e) {
assertTrue(
e.getMessage()
.contains(
"'artifactId' contains an expression but should be a constant. @ myGroup:${foo}:1.0-SNAPSHOT"),
e.getMessage());
}
}
}
| MavenITmng8230CIFriendlyTest |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/streaming/api/operators/StreamingRuntimeContextTest.java | {
"start": 10516,
"end": 12512
} | class ____ really registered, i.e., the execution config was applied
assertThat(valueSerializer).isInstanceOf(KryoSerializer.class);
assertThat(
((KryoSerializer<?>) valueSerializer)
.getKryo()
.getRegistration(Path.class)
.getId())
.isPositive();
}
@Test
void testMapStateReturnsEmptyMapByDefault() throws Exception {
StreamingRuntimeContext context = createMapOperatorRuntimeContext();
MapStateDescriptor<Integer, String> descr =
new MapStateDescriptor<>("name", Integer.class, String.class);
MapState<Integer, String> state = context.getMapState(descr);
Iterable<Map.Entry<Integer, String>> value = state.entries();
assertThat(value).isNotNull();
assertThat(value.iterator()).isExhausted();
}
@Test
void testV2ValueStateInstantiation() throws Exception {
final ExecutionConfig config = new ExecutionConfig();
SerializerConfigImpl serializerConfig = (SerializerConfigImpl) config.getSerializerConfig();
serializerConfig.registerKryoType(Path.class);
final AtomicReference<Object> descriptorCapture = new AtomicReference<>();
StreamingRuntimeContext context = createRuntimeContext(descriptorCapture, config, true);
org.apache.flink.api.common.state.v2.ValueStateDescriptor<TaskInfo> descr =
new org.apache.flink.api.common.state.v2.ValueStateDescriptor<>(
"name", TypeInformation.of(TaskInfo.class));
context.getValueState(descr);
org.apache.flink.api.common.state.v2.ValueStateDescriptor<?> descrIntercepted =
(org.apache.flink.api.common.state.v2.ValueStateDescriptor<?>)
descriptorCapture.get();
TypeSerializer<?> serializer = descrIntercepted.getSerializer();
// check that the Path | is |
java | google__gson | gson/src/test/java/com/google/gson/functional/JsonAdapterAnnotationOnFieldsTest.java | {
"start": 19082,
"end": 20670
} | class ____ implements TypeAdapterFactory {
@SuppressWarnings("unchecked")
@Override
public <T> TypeAdapter<T> create(Gson gson, TypeToken<T> type) {
TypeAdapter<String> delegate = (TypeAdapter<String>) gson.getDelegateAdapter(this, type);
return (TypeAdapter<T>)
new TypeAdapter<String>() {
@Override
public String read(JsonReader in) throws IOException {
// Perform custom deserialization
return delegate.read(in) + "-custom";
}
@Override
public void write(JsonWriter out, String value) throws IOException {
// Perform custom serialization
delegate.write(out, value + "-custom");
}
};
}
}
}
/**
* Similar to {@link #testDelegatingAdapterFactory}, except that the delegate is not looked up in
* {@code create} but instead in the adapter methods.
*/
@Test
public void testDelegatingAdapterFactory_Delayed() {
WithDelayedDelegatingFactory deserialized =
new Gson().fromJson("{\"f\":\"test\"}", WithDelayedDelegatingFactory.class);
assertThat(deserialized.f).isEqualTo("test-custom");
WithDelayedDelegatingFactory serialized = new WithDelayedDelegatingFactory();
serialized.f = "value";
assertThat(new Gson().toJson(serialized)).isEqualTo("{\"f\":\"value-custom\"}");
}
// suppress Error Prone warning; should be clear that `Factory` refers to nested class
@SuppressWarnings("SameNameButDifferent")
private static | Factory |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/dynamic/ReactiveTypeAdapters.java | {
"start": 14314,
"end": 14693
} | enum ____ implements Function<io.reactivex.Single<?>, Mono<?>> {
INSTANCE;
@Override
public Mono<?> apply(io.reactivex.Single<?> source) {
return Mono.from(source.toFlowable());
}
}
/**
* An adapter {@link Function} to adopt a {@link io.reactivex.Single} to {@link Publisher}.
*/
public | RxJava2SingleToMonoAdapter |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/error/ShouldNotHaveToString.java | {
"start": 657,
"end": 978
} | class ____ extends BasicErrorMessageFactory {
public static ErrorMessageFactory shouldNotHaveToString(String other) {
return new ShouldNotHaveToString(other);
}
private ShouldNotHaveToString(String other) {
super("%nExpecting actual's toString() not to be equal to:%n %s", other);
}
}
| ShouldNotHaveToString |
java | google__error-prone | check_api/src/test/java/com/google/errorprone/util/ASTHelpersTest.java | {
"start": 66095,
"end": 66914
} | class ____ {
record A(int x) {}
record B(long y) {
// BUG: Diagnostic contains:
B {}
}
record C(long y) {
// BUG: Diagnostic contains:
C {}
C(int z) {
this((long) (z + 1));
}
}
record D(List<Integer> xs) {
// BUG: Diagnostic contains:
D {}
D(Set<Integer> s) {
this(List.of(1));
}
}
record E() {
// BUG: Diagnostic contains:
E {}
E(int x) {
this();
}
}
}
""")
.doTest();
}
}
| Test |
java | netty__netty | example/src/main/java/io/netty/example/qotm/QuoteOfTheMomentClientHandler.java | {
"start": 857,
"end": 1448
} | class ____ extends SimpleChannelInboundHandler<DatagramPacket> {
@Override
public void channelRead0(ChannelHandlerContext ctx, DatagramPacket msg) throws Exception {
String response = msg.content().toString(CharsetUtil.UTF_8);
if (response.startsWith("QOTM: ")) {
System.out.println("Quote of the Moment: " + response.substring(6));
ctx.close();
}
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
cause.printStackTrace();
ctx.close();
}
}
| QuoteOfTheMomentClientHandler |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/ExchangeSourceExecSerializationTests.java | {
"start": 484,
"end": 1540
} | class ____ extends AbstractPhysicalPlanSerializationTests<ExchangeSourceExec> {
static ExchangeSourceExec randomExchangeSourceExec() {
Source source = randomSource();
List<Attribute> output = randomFieldAttributes(1, 5, false);
boolean intermediateAgg = randomBoolean();
return new ExchangeSourceExec(source, output, intermediateAgg);
}
@Override
protected ExchangeSourceExec createTestInstance() {
return randomExchangeSourceExec();
}
@Override
protected ExchangeSourceExec mutateInstance(ExchangeSourceExec instance) throws IOException {
List<Attribute> output = instance.output();
boolean intermediateAgg = instance.isIntermediateAgg();
if (randomBoolean()) {
output = randomValueOtherThan(output, () -> randomFieldAttributes(1, 5, false));
} else {
intermediateAgg = false == intermediateAgg;
}
return new ExchangeSourceExec(instance.source(), output, intermediateAgg);
}
}
| ExchangeSourceExecSerializationTests |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/web/reactive/server/WebTestClient.java | {
"start": 30552,
"end": 30718
} | interface ____ extends RequestBodySpec, RequestHeadersUriSpec<RequestBodySpec> {
}
/**
* Chained API for applying assertions to a response.
*/
| RequestBodyUriSpec |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/jdk/EmptyArrayBlockingQueueDeserTest.java | {
"start": 589,
"end": 1062
} | class ____{
ArrayBlockingQueue<Double> values;
public Collection<Double> getValues() {
return values;
}
}
private final ObjectMapper MAPPER = newJsonMapper();
@Test
public void testEmptyBlockingQueue() throws Exception
{
String json = MAPPER.writeValueAsString(new RemoteEntity());
Entity entity = MAPPER.readValue(json, Entity.class);
assertEquals(0, entity.getValues().size());
}
}
| Entity |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/ser/JsonValueSerializationTest.java | {
"start": 2126,
"end": 2266
} | class ____ extends ValueBase {
public String b = "b";
}
// Finally, let's also test static vs dynamic type
static | ValueType |
java | reactor__reactor-core | reactor-core/src/main/java/reactor/core/publisher/FluxIndex.java | {
"start": 2352,
"end": 4195
} | class ____<T, I> implements InnerOperator<T, I> {
final CoreSubscriber<? super I> actual;
final BiFunction<? super Long, ? super T, ? extends I> indexMapper;
boolean done;
long index = 0;
@SuppressWarnings("NotNullFieldNotInitialized") // s initialized in onSubscribe
Subscription s;
IndexSubscriber(CoreSubscriber<? super I> actual,
BiFunction<? super Long, ? super T, ? extends I> indexMapper) {
this.actual = actual;
this.indexMapper = indexMapper;
}
@Override
public void onSubscribe(Subscription s) {
if (Operators.validate(this.s, s)) {
this.s = s;
actual.onSubscribe(this);
}
}
@Override
public void onNext(T t) {
if (done) {
Operators.onNextDropped(t, actual.currentContext());
return;
}
long i = this.index;
try {
I typedIndex = indexMapper.apply(i, t);
this.index = i + 1L;
actual.onNext(typedIndex);
}
catch (Throwable e) {
onError(Operators.onOperatorError(s, e, t, actual.currentContext()));
}
}
@Override
public void onError(Throwable t) {
if (done) {
Operators.onErrorDropped(t, actual.currentContext());
return;
}
done = true;
actual.onError(t);
}
@Override
public void onComplete() {
if (done) {
return;
}
done = true;
actual.onComplete();
}
@Override
public CoreSubscriber<? super I> actual() {
return this.actual;
}
@Override
public void request(long n) {
s.request(n);
}
@Override
public void cancel() {
s.cancel();
}
@Override
public @Nullable Object scanUnsafe(Attr key) {
if (key == Attr.PARENT) return s;
if (key == Attr.TERMINATED) return done;
if (key == Attr.RUN_STYLE) return Attr.RunStyle.SYNC;
return InnerOperator.super.scanUnsafe(key);
}
}
static final | IndexSubscriber |
java | apache__camel | dsl/camel-kamelet-main/src/main/java/org/apache/camel/main/download/KameletMainInjector.java | {
"start": 1069,
"end": 4395
} | class ____ implements Injector {
private static final String ACCEPTED_STUB_NAMES
= "StubComponent,BeanComponent,ClassComponent,DirectComponent,KameletComponent,LogComponent,RestComponent"
+ ",RestApiComponent,PlatformHttpComponent,SedaComponent,VertxHttpComponent";
private final Injector delegate;
private final String stubPattern;
public KameletMainInjector(Injector delegate, String stubPattern, boolean silent) {
this.delegate = delegate;
this.stubPattern = stubPattern;
}
@Override
public <T> T newInstance(Class<T> type) {
boolean accept = acceptComponent(type);
if (!accept) {
return (T) delegate.newInstance(StubComponent.class);
}
return delegate.newInstance(type);
}
@Override
public <T> T newInstance(Class<T> type, String factoryMethod) {
boolean accept = acceptComponent(type);
if (!accept) {
return (T) delegate.newInstance(StubComponent.class);
}
return delegate.newInstance(type, factoryMethod);
}
@Override
public <T> T newInstance(Class<T> type, Class<?> factoryClass, String factoryMethod) {
boolean accept = acceptComponent(type);
if (!accept) {
return (T) delegate.newInstance(StubComponent.class);
}
return delegate.newInstance(type, factoryClass, factoryMethod);
}
@Override
public <T> T newInstance(Class<T> type, boolean postProcessBean) {
boolean accept = acceptComponent(type);
if (!accept) {
return (T) delegate.newInstance(StubComponent.class);
}
return delegate.newInstance(type, postProcessBean);
}
@Override
public boolean supportsAutoWiring() {
return delegate.supportsAutoWiring();
}
private boolean acceptComponent(Class<?> type) {
boolean accept = true;
if (stubPattern != null && Component.class.isAssignableFrom(type)) {
accept = accept(type);
if (!accept && !("*".equals(stubPattern) || "component:*".equals(stubPattern))) {
// grab component name via annotation trick!
org.apache.camel.spi.annotations.Component ann
= ObjectHelper.getAnnotation(this, org.apache.camel.spi.annotations.Component.class);
if (ann != null) {
boolean stubbed = false;
String name = ann.value();
for (String n : name.split(",")) {
for (String p : stubPattern.split(",")) {
if (p.startsWith("component:")) {
p = p.substring(10);
}
stubbed |= PatternHelper.matchPattern(n, p);
}
}
accept = !stubbed;
} else {
accept = true;
}
} else {
accept = true;
}
}
return accept;
}
private boolean accept(Class<?> type) {
String shortName = type.getSimpleName();
// we are stubbing but need to accept the following
return ACCEPTED_STUB_NAMES.contains(shortName);
}
}
| KameletMainInjector |
java | alibaba__fastjson | src/main/java/com/alibaba/fastjson/util/FieldInfo.java | {
"start": 619,
"end": 20538
} | class ____ implements Comparable<FieldInfo> {
public final String name;
public final Method method;
public final Field field;
private int ordinal = 0;
public final Class<?> fieldClass;
public final Type fieldType;
public final Class<?> declaringClass;
public final boolean getOnly;
public final int serialzeFeatures;
public final int parserFeatures;
public final String label;
private final JSONField fieldAnnotation;
private final JSONField methodAnnotation;
public final boolean fieldAccess;
public final boolean fieldTransient;
public final char[] name_chars;
public final boolean isEnum;
public final boolean jsonDirect;
public final boolean unwrapped;
public final String format;
public final String[] alternateNames;
public final long nameHashCode;
public FieldInfo(String name, //
Class<?> declaringClass, //
Class<?> fieldClass, //
Type fieldType, //
Field field, //
int ordinal, //
int serialzeFeatures, //
int parserFeatures){
if (ordinal < 0) {
ordinal = 0;
}
this.name = name;
this.declaringClass = declaringClass;
this.fieldClass = fieldClass;
this.fieldType = fieldType;
this.method = null;
this.field = field;
this.ordinal = ordinal;
this.serialzeFeatures = serialzeFeatures;
this.parserFeatures = parserFeatures;
isEnum = fieldClass.isEnum();
if (field != null) {
int modifiers = field.getModifiers();
fieldAccess = (modifiers & Modifier.PUBLIC) != 0 || method == null;
fieldTransient = Modifier.isTransient(modifiers);
} else {
fieldTransient = false;
fieldAccess = false;
}
name_chars = genFieldNameChars();
if (field != null) {
TypeUtils.setAccessible(field);
}
this.label = "";
fieldAnnotation = field == null ? null : TypeUtils.getAnnotation(field, JSONField.class);
methodAnnotation = null;
this.getOnly = false;
this.jsonDirect = false;
this.unwrapped = false;
this.format = null;
this.alternateNames = new String[0];
nameHashCode = nameHashCode64(name, fieldAnnotation);
}
public FieldInfo(String name, //
Method method, //
Field field, //
Class<?> clazz, //
Type type, //
int ordinal, //
int serialzeFeatures, //
int parserFeatures, //
JSONField fieldAnnotation, //
JSONField methodAnnotation, //
String label){
this(name, method, field, clazz, type, ordinal, serialzeFeatures, parserFeatures,
fieldAnnotation, methodAnnotation, label, null);
}
public FieldInfo(String name, //
Method method, //
Field field, //
Class<?> clazz, //
Type type, //
int ordinal, //
int serialzeFeatures, //
int parserFeatures, //
JSONField fieldAnnotation, //
JSONField methodAnnotation, //
String label,
Map<TypeVariable, Type> genericInfo){
if (field != null) {
String fieldName = field.getName();
if (fieldName.equals(name)) {
name = fieldName;
}
}
if (ordinal < 0) {
ordinal = 0;
}
this.name = name;
this.method = method;
this.field = field;
this.ordinal = ordinal;
this.serialzeFeatures = serialzeFeatures;
this.parserFeatures = parserFeatures;
this.fieldAnnotation = fieldAnnotation;
this.methodAnnotation = methodAnnotation;
if (field != null) {
int modifiers = field.getModifiers();
fieldAccess = ((modifiers & Modifier.PUBLIC) != 0 || method == null);
fieldTransient = Modifier.isTransient(modifiers)
|| TypeUtils.isTransient(method);
} else {
fieldAccess = false;
fieldTransient = TypeUtils.isTransient(method);
}
if (label != null && label.length() > 0) {
this.label = label;
} else {
this.label = "";
}
String format = null;
JSONField annotation = getAnnotation();
nameHashCode = nameHashCode64(name, annotation);
boolean jsonDirect = false;
if (annotation != null) {
format = annotation.format();
if (format.trim().length() == 0) {
format = null;
}
jsonDirect = annotation.jsonDirect();
unwrapped = annotation.unwrapped();
alternateNames = annotation.alternateNames();
} else {
jsonDirect = false;
unwrapped = false;
alternateNames = new String[0];
}
this.format = format;
name_chars = genFieldNameChars();
if (method != null) {
TypeUtils.setAccessible(method);
}
if (field != null) {
TypeUtils.setAccessible(field);
}
boolean getOnly = false;
Type fieldType;
Class<?> fieldClass;
if (method != null) {
Class<?>[] types;
if ((types = method.getParameterTypes()).length == 1) {
fieldClass = types[0];
fieldType = method.getGenericParameterTypes()[0];
} else if (types.length == 2 && types[0] == String.class && types[1] == Object.class) {
fieldType = fieldClass = types[0];
} else {
fieldClass = method.getReturnType();
fieldType = method.getGenericReturnType();
getOnly = true;
}
this.declaringClass = method.getDeclaringClass();
} else {
fieldClass = field.getType();
fieldType = field.getGenericType();
this.declaringClass = field.getDeclaringClass();
getOnly = Modifier.isFinal(field.getModifiers());
}
this.getOnly = getOnly;
this.jsonDirect = jsonDirect && fieldClass == String.class;
if (clazz != null && fieldClass == Object.class && fieldType instanceof TypeVariable) {
TypeVariable<?> tv = (TypeVariable<?>) fieldType;
Type genericFieldType = getInheritGenericType(clazz, type, tv);
if (genericFieldType != null) {
this.fieldClass = TypeUtils.getClass(genericFieldType);
this.fieldType = genericFieldType;
isEnum = fieldClass.isEnum();
return;
}
}
Type genericFieldType = fieldType;
if (!(fieldType instanceof Class)) {
genericFieldType = getFieldType(clazz, type != null ? type : clazz, fieldType, genericInfo);
if (genericFieldType != fieldType) {
if (genericFieldType instanceof ParameterizedType) {
fieldClass = TypeUtils.getClass(genericFieldType);
} else if (genericFieldType instanceof Class) {
fieldClass = TypeUtils.getClass(genericFieldType);
}
}
}
this.fieldType = genericFieldType;
this.fieldClass = fieldClass;
isEnum = fieldClass.isEnum();
}
private long nameHashCode64(String name, JSONField annotation)
{
if (annotation != null && annotation.name().length() != 0) {
return TypeUtils.fnv1a_64_lower(name);
}
return TypeUtils.fnv1a_64_extract(name);
}
protected char[] genFieldNameChars() {
int nameLen = this.name.length();
char[] name_chars = new char[nameLen + 3];
this.name.getChars(0, this.name.length(), name_chars, 1);
name_chars[0] = '"';
name_chars[nameLen + 1] = '"';
name_chars[nameLen + 2] = ':';
return name_chars;
}
@SuppressWarnings("unchecked")
public <T extends Annotation> T getAnnation(Class<T> annotationClass) {
if (annotationClass == JSONField.class) {
return (T) getAnnotation();
}
T annotatition = null;
if (method != null) {
annotatition = TypeUtils.getAnnotation(method, annotationClass);
}
if (annotatition == null && field != null) {
annotatition = TypeUtils.getAnnotation(field, annotationClass);
}
return annotatition;
}
public static Type getFieldType(final Class<?> clazz, final Type type, Type fieldType){
return getFieldType(clazz, type, fieldType, null);
}
public static Type getFieldType(final Class<?> clazz, final Type type, Type fieldType, Map<TypeVariable, Type> genericInfo) {
if (clazz == null || type == null) {
return fieldType;
}
if (fieldType instanceof GenericArrayType) {
GenericArrayType genericArrayType = (GenericArrayType) fieldType;
Type componentType = genericArrayType.getGenericComponentType();
Type componentTypeX = getFieldType(clazz, type, componentType, genericInfo);
if (componentType != componentTypeX) {
Type fieldTypeX = Array.newInstance(TypeUtils.getClass(componentTypeX), 0).getClass();
return fieldTypeX;
}
return fieldType;
}
if (!TypeUtils.isGenericParamType(type)) {
return fieldType;
}
if (fieldType instanceof TypeVariable) {
ParameterizedType paramType = (ParameterizedType) TypeUtils.getGenericParamType(type);
Class<?> parameterizedClass = TypeUtils.getClass(paramType);
final TypeVariable<?> typeVar = (TypeVariable<?>) fieldType;
TypeVariable<?>[] typeVariables = parameterizedClass.getTypeParameters();
for (int i = 0; i < typeVariables.length; ++i) {
if (typeVariables[i].getName().equals(typeVar.getName())) {
fieldType = paramType.getActualTypeArguments()[i];
return fieldType;
}
}
}
if (fieldType instanceof ParameterizedType) {
ParameterizedType parameterizedFieldType = (ParameterizedType) fieldType;
Type[] arguments = parameterizedFieldType.getActualTypeArguments();
TypeVariable<?>[] typeVariables;
ParameterizedType paramType;
boolean changed = getArgument(arguments, genericInfo);
//if genericInfo is not working use the old path;
if(!changed){
if (type instanceof ParameterizedType) {
paramType = (ParameterizedType) type;
typeVariables = clazz.getTypeParameters();
} else if(clazz.getGenericSuperclass() instanceof ParameterizedType) {
paramType = (ParameterizedType) clazz.getGenericSuperclass();
typeVariables = clazz.getSuperclass().getTypeParameters();
} else {
paramType = parameterizedFieldType;
typeVariables = type.getClass().getTypeParameters();
}
changed = getArgument(arguments, typeVariables, paramType.getActualTypeArguments());
}
if (changed) {
fieldType = TypeReference.intern(
new ParameterizedTypeImpl(arguments, parameterizedFieldType.getOwnerType(),
parameterizedFieldType.getRawType())
);
return fieldType;
}
}
return fieldType;
}
private static boolean getArgument(Type[] typeArgs, Map<TypeVariable, Type> genericInfo){
if(genericInfo == null || genericInfo.size() == 0){
return false;
}
boolean changed = false;
for (int i = 0; i < typeArgs.length; ++i) {
Type typeArg = typeArgs[i];
if (typeArg instanceof ParameterizedType) {
ParameterizedType p_typeArg = (ParameterizedType) typeArg;
Type[] p_typeArg_args = p_typeArg.getActualTypeArguments();
boolean p_changed = getArgument(p_typeArg_args, genericInfo);
if (p_changed) {
typeArgs[i] = TypeReference.intern(
new ParameterizedTypeImpl(p_typeArg_args, p_typeArg.getOwnerType(), p_typeArg.getRawType())
);
changed = true;
}
} else if (typeArg instanceof TypeVariable) {
if (genericInfo.containsKey(typeArg)) {
typeArgs[i] = genericInfo.get(typeArg);
changed = true;
}
}
}
return changed;
}
private static boolean getArgument(Type[] typeArgs, TypeVariable[] typeVariables, Type[] arguments) {
if (arguments == null || typeVariables.length == 0) {
return false;
}
boolean changed = false;
for (int i = 0; i < typeArgs.length; ++i) {
Type typeArg = typeArgs[i];
if (typeArg instanceof ParameterizedType) {
ParameterizedType p_typeArg = (ParameterizedType) typeArg;
Type[] p_typeArg_args = p_typeArg.getActualTypeArguments();
boolean p_changed = getArgument(p_typeArg_args, typeVariables, arguments);
if (p_changed) {
typeArgs[i] = TypeReference.intern(
new ParameterizedTypeImpl(p_typeArg_args, p_typeArg.getOwnerType(), p_typeArg.getRawType())
);
changed = true;
}
} else if (typeArg instanceof TypeVariable) {
for (int j = 0; j < typeVariables.length; ++j) {
if (typeArg.equals(typeVariables[j])) {
typeArgs[i] = arguments[j];
changed = true;
}
}
}
}
return changed;
}
private static Type getInheritGenericType(Class<?> clazz, Type type, TypeVariable<?> tv) {
GenericDeclaration gd = tv.getGenericDeclaration();
Class<?> class_gd = null;
if (gd instanceof Class) {
class_gd = (Class<?>) tv.getGenericDeclaration();
}
Type[] arguments = null;
if (class_gd == clazz) {
if (type instanceof ParameterizedType) {
ParameterizedType ptype = (ParameterizedType) type;
arguments = ptype.getActualTypeArguments();
}
} else {
for (Class<?> c = clazz; c != null && c != Object.class && c != class_gd; c = c.getSuperclass()) {
Type superType = c.getGenericSuperclass();
if (superType instanceof ParameterizedType) {
ParameterizedType p_superType = (ParameterizedType) superType;
Type[] p_superType_args = p_superType.getActualTypeArguments();
getArgument(p_superType_args, c.getTypeParameters(), arguments);
arguments = p_superType_args;
}
}
}
if (arguments == null || class_gd == null) {
return null;
}
Type actualType = null;
TypeVariable<?>[] typeVariables = class_gd.getTypeParameters();
for (int j = 0; j < typeVariables.length; ++j) {
if (tv.equals(typeVariables[j])) {
actualType = arguments[j];
break;
}
}
return actualType;
}
public String toString() {
return this.name;
}
public Member getMember() {
if (method != null) {
return method;
} else {
return field;
}
}
protected Class<?> getDeclaredClass() {
if (this.method != null) {
return this.method.getDeclaringClass();
}
if (this.field != null) {
return this.field.getDeclaringClass();
}
return null;
}
public int compareTo(FieldInfo o) {
// Deal extend bridge
if (o.method != null && this.method != null
&& o.method.isBridge() && !this.method.isBridge()
&& o.method.getName().equals(this.method.getName())) {
return 1;
}
if (this.ordinal < o.ordinal) {
return -1;
}
if (this.ordinal > o.ordinal) {
return 1;
}
int result = this.name.compareTo(o.name);
if (result != 0) {
return result;
}
Class<?> thisDeclaringClass = this.getDeclaredClass();
Class<?> otherDeclaringClass = o.getDeclaredClass();
if (thisDeclaringClass != null && otherDeclaringClass != null && thisDeclaringClass != otherDeclaringClass) {
if (thisDeclaringClass.isAssignableFrom(otherDeclaringClass)) {
return -1;
}
if (otherDeclaringClass.isAssignableFrom(thisDeclaringClass)) {
return 1;
}
}
boolean isSampeType = this.field != null && this.field.getType() == this.fieldClass;
boolean oSameType = o.field != null && o.field.getType() == o.fieldClass;
if (isSampeType && !oSameType) {
return 1;
}
if (oSameType && !isSampeType) {
return -1;
}
if (o.fieldClass.isPrimitive() && !this.fieldClass.isPrimitive()) {
return 1;
}
if (this.fieldClass.isPrimitive() && !o.fieldClass.isPrimitive()) {
return -1;
}
if (o.fieldClass.getName().startsWith("java.") && !this.fieldClass.getName().startsWith("java.")) {
return 1;
}
if (this.fieldClass.getName().startsWith("java.") && !o.fieldClass.getName().startsWith("java.")) {
return -1;
}
return this.fieldClass.getName().compareTo(o.fieldClass.getName());
}
public JSONField getAnnotation() {
if (this.fieldAnnotation != null) {
return this.fieldAnnotation;
}
return this.methodAnnotation;
}
public String getFormat() {
return format;
}
public Object get(Object javaObject) throws IllegalAccessException, InvocationTargetException {
return method != null
? method.invoke(javaObject)
: field.get(javaObject);
}
public void set(Object javaObject, Object value) throws IllegalAccessException, InvocationTargetException {
if (method != null) {
method.invoke(javaObject, new Object[] { value });
return;
}
field.set(javaObject, value);
}
public void setAccessible() throws SecurityException {
if (method != null) {
TypeUtils.setAccessible(method);
return;
}
TypeUtils.setAccessible(field);
}
}
| FieldInfo |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/server/processor/src/main/java/org/jboss/resteasy/reactive/server/processor/generation/filters/CustomFilterGenerator.java | {
"start": 30405,
"end": 32278
} | class
____ = cc.getMethodCreator("<init>", void.class, declaringClassName);
}
ctor.setModifiers(Modifier.PUBLIC);
ctor.addAnnotation(Inject.class);
ctor.invokeSpecialMethod(MethodDescriptor.ofConstructor(superClassName), ctor.getThis());
ResultHandle self = ctor.getThis();
if (checkForOptionalBean) {
ResultHandle instance = ctor.getMethodParam(0);
ResultHandle isResolvable = ctor
.invokeInterfaceMethod(MethodDescriptor.ofMethod(Instance.class, "isResolvable", boolean.class), instance);
BranchResult isResolvableBranch = ctor.ifTrue(isResolvable);
BytecodeCreator isResolvableTrue = isResolvableBranch.trueBranch();
isResolvableTrue.writeInstanceField(delegateField, self, isResolvableTrue
.invokeInterfaceMethod(MethodDescriptor.ofMethod(Instance.class, "get", Object.class), instance));
BytecodeCreator isResolvableFalse = isResolvableBranch.falseBranch();
isResolvableFalse.writeInstanceField(delegateField, self, isResolvableFalse.loadNull());
} else {
ctor.writeInstanceField(delegateField, self, ctor.getMethodParam(0));
}
ctor.returnValue(null);
if (scopeInspectionResult.needsProxy) {
// generate no-args constructor needed for creating proxies
MethodCreator noArgsCtor = cc.getMethodCreator("<init>", void.class);
noArgsCtor.setModifiers(Modifier.PUBLIC);
noArgsCtor.invokeSpecialMethod(MethodDescriptor.ofConstructor(superClassName), noArgsCtor.getThis());
noArgsCtor.writeInstanceField(delegateField, noArgsCtor.getThis(), noArgsCtor.loadNull());
noArgsCtor.returnValue(null);
}
return delegateField;
}
/**
* The generated | ctor |
java | apache__camel | components/camel-hazelcast/src/test/java/org/apache/camel/component/hazelcast/HazelcastSedaFIFOTest.java | {
"start": 1169,
"end": 2108
} | class ____ extends CamelTestSupport {
@EndpointInject("mock:result")
private MockEndpoint mock;
@Test
public void fifoTesting() throws Exception {
final int bodyCount = 5;
List<String> bodies = new ArrayList<>();
for (int i = 0; i < bodyCount; i++) {
bodies.add("test" + i);
}
mock.expectedBodiesReceived(bodies);
mock.expectedMessageCount(bodyCount);
for (int i = 0; i < bodyCount; i++) {
template.sendBody("hazelcast-seda:foo", "test" + i);
}
MockEndpoint.assertIsSatisfied(context);
mock.reset();
}
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
@Override
public void configure() throws Exception {
from("hazelcast-seda:foo").to("mock:result");
}
};
}
}
| HazelcastSedaFIFOTest |
java | quarkusio__quarkus | extensions/hibernate-validator/runtime/src/main/java/io/quarkus/hibernate/validator/runtime/clockprovider/RuntimeReinitializedDefaultClockProvider.java | {
"start": 295,
"end": 758
} | class ____ implements ClockProvider {
public static final RuntimeReinitializedDefaultClockProvider INSTANCE = new RuntimeReinitializedDefaultClockProvider();
private static final RuntimeReinitializedDefaultClock clock = new RuntimeReinitializedDefaultClock();
private RuntimeReinitializedDefaultClockProvider() {
}
@Override
public Clock getClock() {
return clock;
}
private static | RuntimeReinitializedDefaultClockProvider |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/VarCheckerTest.java | {
"start": 4472,
"end": 4821
} | class ____ {
// BUG: Diagnostic contains: native void f(int y);
native void f(final int y);
}
""")
.doTest();
}
@Test
public void nativeMethod() {
compilationHelper
.addSourceLines(
"Test.java",
// TODO(b/21633565): force line break
" | Test |
java | google__dagger | javatests/dagger/internal/codegen/MultipleRequestTest.java | {
"start": 2248,
"end": 2542
} | class ____ {",
" @Inject Dep() {}",
"}"),
CompilerTests.javaSource(
"test.FieldInjectsMultiple",
"package test;",
"",
"import javax.inject.Inject;",
"",
" | Dep |
java | spring-projects__spring-boot | loader/spring-boot-loader-tools/src/main/java/org/springframework/boot/loader/tools/Layout.java | {
"start": 733,
"end": 1042
} | interface ____ to determine the layout for a particular type of archive.
* Layouts may additionally implement {@link CustomLoaderLayout} if they wish to write
* custom loader classes.
*
* @author Phillip Webb
* @since 1.0.0
* @see Layouts
* @see RepackagingLayout
* @see CustomLoaderLayout
*/
public | used |
java | hibernate__hibernate-orm | hibernate-testing/src/main/java/org/hibernate/testing/orm/junit/DialectFeatureChecks.java | {
"start": 42628,
"end": 42821
} | class ____ implements DialectFeatureCheck {
@Override
public boolean apply(Dialect dialect) {
return dialect.supportsCaseInsensitiveLike();
}
}
public static | SupportsCaseInsensitiveLike |
java | netty__netty | transport-classes-epoll/src/main/java/io/netty/channel/epoll/NativeArrays.java | {
"start": 715,
"end": 1828
} | class ____ {
// These are initialized on first use
private IovArray iovArray;
private NativeDatagramPacketArray datagramPacketArray;
/**
* Return a cleared {@link IovArray} that can be used for writes.
*/
IovArray cleanIovArray() {
if (iovArray == null) {
iovArray = new IovArray();
} else {
iovArray.clear();
}
return iovArray;
}
/**
* Return a cleared {@link NativeDatagramPacketArray} that can be used for writes.
*/
NativeDatagramPacketArray cleanDatagramPacketArray() {
if (datagramPacketArray == null) {
datagramPacketArray = new NativeDatagramPacketArray();
} else {
datagramPacketArray.clear();
}
return datagramPacketArray;
}
void free() {
// release native memory
if (iovArray != null) {
iovArray.release();
iovArray = null;
}
if (datagramPacketArray != null) {
datagramPacketArray.release();
datagramPacketArray = null;
}
}
}
| NativeArrays |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/join/SortMergeOneSideOuterJoinIterator.java | {
"start": 1369,
"end": 3660
} | class ____ extends SortMergeJoinIterator {
public SortMergeOneSideOuterJoinIterator(
BinaryRowDataSerializer probeSerializer,
BinaryRowDataSerializer bufferedSerializer,
Projection<RowData, BinaryRowData> probeProjection,
Projection<RowData, BinaryRowData> bufferedProjection,
RecordComparator keyComparator,
MutableObjectIterator<RowData> probeIterator,
MutableObjectIterator<BinaryRowData> bufferedIterator,
ResettableExternalBuffer buffer,
boolean[] filterNullKeys)
throws IOException {
super(
probeSerializer,
bufferedSerializer,
probeProjection,
bufferedProjection,
keyComparator,
probeIterator,
bufferedIterator,
buffer,
filterNullKeys);
}
public boolean nextOuterJoin() throws IOException {
if (!nextProbe()) {
return false; // no probe row, over.
}
if (matchKey != null && keyComparator.compare(probeKey, matchKey) == 0) {
// probe has a same key, so same matches.
return true; // match join.
}
if (bufferedRow == null) {
matchKey = null;
matchBuffer.reset();
matchBuffer.complete();
return true; // outer join.
} else {
// find next equivalent key.
while (true) {
int cmp = keyComparator.compare(probeKey, bufferedKey);
if (cmp > 0) {
if (!advanceNextSuitableBufferedRow()) {
matchKey = null;
matchBuffer.reset();
matchBuffer.complete();
return true; // outer join.
}
} else if (cmp < 0) {
matchKey = null;
matchBuffer.reset();
matchBuffer.complete();
return true; // outer join.
} else {
bufferMatchingRows();
return true; // match join.
}
}
}
}
}
| SortMergeOneSideOuterJoinIterator |
java | quarkusio__quarkus | independent-projects/arc/processor/src/main/java/io/quarkus/arc/processor/bcextensions/AnnotationValueArray.java | {
"start": 54,
"end": 186
} | class ____ {
static final org.jboss.jandex.AnnotationValue[] EMPTY = new org.jboss.jandex.AnnotationValue[0];
}
| AnnotationValueArray |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/ReflectiveHierarchyBuildItem.java | {
"start": 9376,
"end": 9474
} | class ____.
*
* @param className a {@link DotName} representing the name of the | name |
java | apache__flink | flink-core/src/test/java/org/apache/flink/api/java/typeutils/runtime/NullableSerializerTest.java | {
"start": 3082,
"end": 3260
} | class ____ extends NullableSerializerTest {
@Override
boolean isPaddingNullValue() {
return false;
}
}
}
| NullableSerializerWithoutPaddingTest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-documentstore/src/main/java/org/apache/hadoop/yarn/server/timelineservice/documentstore/lib/package-info.java | {
"start": 907,
"end": 1275
} | class ____ instantiating
* different DocumentStore reader writer client based on the DocumentVendor
* configured.
*/
@InterfaceAudience.Private
@InterfaceStability.Unstable
package org.apache.hadoop.yarn.server.timelineservice.documentstore.lib;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
| for |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/protocol/RedisProtocolException.java | {
"start": 178,
"end": 310
} | class ____ extends RedisException {
public RedisProtocolException(String msg) {
super(msg);
}
}
| RedisProtocolException |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/curator/ZKCuratorManager.java | {
"start": 18420,
"end": 18946
} | class ____ configure the ZooKeeper client connection.
* @param zkPrincipal Optional.
* @param kerberosPrincipal Optional. Use along with kerberosKeytab.
* @param kerberosKeytab Optional. Use along with kerberosPrincipal.
*/
public HadoopZookeeperFactory(String zkPrincipal, String kerberosPrincipal,
String kerberosKeytab) {
this(zkPrincipal, kerberosPrincipal, kerberosKeytab, false,
new TruststoreKeystore(new Configuration()));
}
/**
* Constructor for the helper | to |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/mapper/blockloader/docvalues/DenseVectorFromBinaryBlockLoader.java | {
"start": 941,
"end": 2444
} | class ____ extends BlockDocValuesReader.DocValuesBlockLoader {
private final String fieldName;
private final int dims;
private final IndexVersion indexVersion;
private final DenseVectorFieldMapper.ElementType elementType;
public DenseVectorFromBinaryBlockLoader(
String fieldName,
int dims,
IndexVersion indexVersion,
DenseVectorFieldMapper.ElementType elementType
) {
this.fieldName = fieldName;
this.dims = dims;
this.indexVersion = indexVersion;
this.elementType = elementType;
}
@Override
public Builder builder(BlockFactory factory, int expectedCount) {
return factory.denseVectors(expectedCount, dims);
}
@Override
public AllReader reader(LeafReaderContext context) throws IOException {
BinaryDocValues docValues = context.reader().getBinaryDocValues(fieldName);
if (docValues == null) {
return new ConstantNullsReader();
}
return switch (elementType) {
case FLOAT -> new FloatDenseVectorFromBinary(docValues, dims, indexVersion);
case BFLOAT16 -> new BFloat16DenseVectorFromBinary(docValues, dims, indexVersion);
case BYTE -> new ByteDenseVectorFromBinary(docValues, dims, indexVersion);
case BIT -> new BitDenseVectorFromBinary(docValues, dims, indexVersion);
};
}
// Abstract base for dense vector readers
private abstract static | DenseVectorFromBinaryBlockLoader |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/UnnecessarilyFullyQualifiedTest.java | {
"start": 9194,
"end": 9442
} | class ____ {}
}
""")
.expectUnchanged()
.doTest();
}
@Test
public void unbatchedFindings() {
compilationHelper
.addSourceLines(
"Test.java",
"""
| AbstractType |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/issues/AdviceWithErrorHandlerRemoveTest.java | {
"start": 1193,
"end": 3782
} | class ____ extends ContextTestSupport {
@Override
public boolean isUseRouteBuilder() {
return false;
}
@Test
public void testAdviceErrorHandlerRemove() throws Exception {
context.addRoutes(createRouteBuilder());
getMockEndpoint("mock:a").expectedBodiesReceived("Hello World");
getMockEndpoint("mock:b").expectedMessageCount(0);
getMockEndpoint("mock:c").expectedMessageCount(0);
getMockEndpoint("mock:d").expectedMessageCount(0);
getMockEndpoint("mock:dead").expectedMessageCount(0);
AdviceWith.adviceWith(context.getRouteDefinition("foo"), context, new AdviceWithRouteBuilder() {
@Override
public void configure() {
getOriginalRoute().errorHandler(noErrorHandler());
}
});
context.start();
Exception e = assertThrows(Exception.class, () -> template.sendBody("direct:foo", "Hello World"),
"Should throw exception");
assertEquals("Forced", e.getCause().getMessage());
assertMockEndpointsSatisfied();
}
@Test
public void testAdviceErrorHandlerReplace() throws Exception {
context.addRoutes(createRouteBuilder());
getMockEndpoint("mock:a").expectedBodiesReceived("Hello World");
getMockEndpoint("mock:b").expectedMessageCount(0);
getMockEndpoint("mock:c").expectedMessageCount(0);
getMockEndpoint("mock:d").expectedMessageCount(0);
getMockEndpoint("mock:dead").expectedMessageCount(0);
getMockEndpoint("mock:dead2").expectedMessageCount(1);
AdviceWith.adviceWith(context.getRouteDefinition("foo"), context, new AdviceWithRouteBuilder() {
@Override
public void configure() {
// override errorHandler by using on exception
weaveAddFirst().onException(Exception.class).handled(true).to("mock:dead2");
}
});
context.start();
template.sendBody("direct:foo", "Hello World");
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:bar").routeId("bar").to("mock:c").to("mock:d");
from("direct:foo").routeId("foo").errorHandler(deadLetterChannel("mock:dead")).to("mock:a")
.throwException(new IllegalArgumentException("Forced")).to("mock:b");
}
};
}
}
| AdviceWithErrorHandlerRemoveTest |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianDoubleEvaluator.java | {
"start": 861,
"end": 4946
} | class ____ extends AbstractMultivalueFunction.AbstractEvaluator {
private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvMedianDoubleEvaluator.class);
public MvMedianDoubleEvaluator(EvalOperator.ExpressionEvaluator field,
DriverContext driverContext) {
super(driverContext, field);
}
@Override
public String name() {
return "MvMedian";
}
/**
* Evaluate blocks containing at least one multivalued field.
*/
@Override
public Block evalNullable(Block fieldVal) {
if (fieldVal.mvSortedAscending()) {
return evalAscendingNullable(fieldVal);
}
DoubleBlock v = (DoubleBlock) fieldVal;
int positionCount = v.getPositionCount();
try (DoubleBlock.Builder builder = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
MvMedian.Doubles work = new MvMedian.Doubles();
for (int p = 0; p < positionCount; p++) {
int valueCount = v.getValueCount(p);
if (valueCount == 0) {
builder.appendNull();
continue;
}
int first = v.getFirstValueIndex(p);
int end = first + valueCount;
for (int i = first; i < end; i++) {
double value = v.getDouble(i);
MvMedian.process(work, value);
}
double result = MvMedian.finish(work);
builder.appendDouble(result);
}
return builder.build();
}
}
/**
* Evaluate blocks containing at least one multivalued field.
*/
@Override
public Block evalNotNullable(Block fieldVal) {
if (fieldVal.mvSortedAscending()) {
return evalAscendingNotNullable(fieldVal);
}
DoubleBlock v = (DoubleBlock) fieldVal;
int positionCount = v.getPositionCount();
try (DoubleVector.FixedBuilder builder = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) {
MvMedian.Doubles work = new MvMedian.Doubles();
for (int p = 0; p < positionCount; p++) {
int valueCount = v.getValueCount(p);
int first = v.getFirstValueIndex(p);
int end = first + valueCount;
for (int i = first; i < end; i++) {
double value = v.getDouble(i);
MvMedian.process(work, value);
}
double result = MvMedian.finish(work);
builder.appendDouble(result);
}
return builder.build().asBlock();
}
}
/**
* Evaluate blocks containing at least one multivalued field and all multivalued fields are in ascending order.
*/
private Block evalAscendingNullable(Block fieldVal) {
DoubleBlock v = (DoubleBlock) fieldVal;
int positionCount = v.getPositionCount();
try (DoubleBlock.Builder builder = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
MvMedian.Doubles work = new MvMedian.Doubles();
for (int p = 0; p < positionCount; p++) {
int valueCount = v.getValueCount(p);
if (valueCount == 0) {
builder.appendNull();
continue;
}
int first = v.getFirstValueIndex(p);
double result = MvMedian.ascending(v, first, valueCount);
builder.appendDouble(result);
}
return builder.build();
}
}
/**
* Evaluate blocks containing at least one multivalued field and all multivalued fields are in ascending order.
*/
private Block evalAscendingNotNullable(Block fieldVal) {
DoubleBlock v = (DoubleBlock) fieldVal;
int positionCount = v.getPositionCount();
try (DoubleVector.FixedBuilder builder = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) {
MvMedian.Doubles work = new MvMedian.Doubles();
for (int p = 0; p < positionCount; p++) {
int valueCount = v.getValueCount(p);
int first = v.getFirstValueIndex(p);
double result = MvMedian.ascending(v, first, valueCount);
builder.appendDouble(result);
}
return builder.build().asBlock();
}
}
@Override
public long baseRamBytesUsed() {
return BASE_RAM_BYTES_USED + field.baseRamBytesUsed();
}
public static | MvMedianDoubleEvaluator |
java | elastic__elasticsearch | x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/search/LimitTests.java | {
"start": 423,
"end": 1675
} | class ____ extends ESTestCase {
private final List<Integer> list = asList(1, 2, 3, 4, 5, 6, 7);
public void testLimitUnderResults() throws Exception {
assertEquals(asList(1, 2, 3, 4, 5), new Limit(5, 0).view(list));
}
public void testLimitWithOffsetEqualResults() throws Exception {
assertEquals(asList(5, 6, 7), new Limit(3, 4).view(list));
}
public void testLimitWithOffsetUnderResults() throws Exception {
assertEquals(asList(5, 6), new Limit(2, 4).view(list));
}
public void testLimitOverResultsNoOffset() throws Exception {
assertEquals(list, new Limit(8, randomInt(100)).view(list));
}
public void testLimitEqualResults() throws Exception {
assertEquals(list, new Limit(7, randomInt(100)).view(list));
}
public void testLimitOverResultsWithHigherOffset() throws Exception {
assertEquals(asList(6, 7), new Limit(2, 8).view(list));
}
public void testLimitOverResultsWithEqualOffset() throws Exception {
assertEquals(asList(6, 7), new Limit(2, 7).view(list));
}
public void testLimitOverResultsWithSmallerOffset() throws Exception {
assertEquals(asList(3, 4, 5, 6, 7), new Limit(5, 6).view(list));
}
}
| LimitTests |
java | apache__flink | flink-libraries/flink-state-processing-api/src/main/java/org/apache/flink/state/api/runtime/SavepointRuntimeContext.java | {
"start": 2987,
"end": 10052
} | class ____ implements RuntimeContext {
private static final String REGISTRATION_EXCEPTION_MSG =
"State Descriptors may only be registered inside of open";
private final RuntimeContext ctx;
private final KeyedStateStore keyedStateStore;
private final List<StateDescriptor<?, ?>> registeredDescriptors;
private boolean stateRegistrationAllowed;
public SavepointRuntimeContext(RuntimeContext ctx, KeyedStateStore keyedStateStore) {
this.ctx = Preconditions.checkNotNull(ctx);
this.keyedStateStore = Preconditions.checkNotNull(keyedStateStore);
this.stateRegistrationAllowed = true;
this.registeredDescriptors = new ArrayList<>();
}
@Override
public JobInfo getJobInfo() {
return ctx.getJobInfo();
}
@Override
public TaskInfo getTaskInfo() {
return ctx.getTaskInfo();
}
@Override
public OperatorMetricGroup getMetricGroup() {
return ctx.getMetricGroup();
}
@Override
public <T> TypeSerializer<T> createSerializer(TypeInformation<T> typeInformation) {
return ctx.createSerializer(typeInformation);
}
@Override
public Map<String, String> getGlobalJobParameters() {
return ctx.getGlobalJobParameters();
}
@Override
public boolean isObjectReuseEnabled() {
return ctx.isObjectReuseEnabled();
}
@Override
public ClassLoader getUserCodeClassLoader() {
return ctx.getUserCodeClassLoader();
}
@Override
public void registerUserCodeClassLoaderReleaseHookIfAbsent(
String releaseHookName, Runnable releaseHook) {
ctx.registerUserCodeClassLoaderReleaseHookIfAbsent(releaseHookName, releaseHook);
}
@Override
public <V, A extends Serializable> void addAccumulator(
String name, Accumulator<V, A> accumulator) {
ctx.addAccumulator(name, accumulator);
}
@Override
public <V, A extends Serializable> Accumulator<V, A> getAccumulator(String name) {
return ctx.getAccumulator(name);
}
@Override
public IntCounter getIntCounter(String name) {
return ctx.getIntCounter(name);
}
@Override
public LongCounter getLongCounter(String name) {
return ctx.getLongCounter(name);
}
@Override
public DoubleCounter getDoubleCounter(String name) {
return ctx.getDoubleCounter(name);
}
@Override
public Histogram getHistogram(String name) {
return ctx.getHistogram(name);
}
@Override
public Set<ExternalResourceInfo> getExternalResourceInfos(String resourceName) {
throw new UnsupportedOperationException(
"Do not support external resource in current environment");
}
@Override
public boolean hasBroadcastVariable(String name) {
return ctx.hasBroadcastVariable(name);
}
@Override
public <RT> List<RT> getBroadcastVariable(String name) {
return ctx.getBroadcastVariable(name);
}
@Override
public <T, C> C getBroadcastVariableWithInitializer(
String name, BroadcastVariableInitializer<T, C> initializer) {
return ctx.getBroadcastVariableWithInitializer(name, initializer);
}
@Override
public DistributedCache getDistributedCache() {
return ctx.getDistributedCache();
}
@Override
public <T> ValueState<T> getState(ValueStateDescriptor<T> stateProperties) {
if (!stateRegistrationAllowed) {
throw new RuntimeException(REGISTRATION_EXCEPTION_MSG);
}
registeredDescriptors.add(stateProperties);
return keyedStateStore.getState(stateProperties);
}
@Override
public <T> ListState<T> getListState(ListStateDescriptor<T> stateProperties) {
if (!stateRegistrationAllowed) {
throw new RuntimeException(REGISTRATION_EXCEPTION_MSG);
}
registeredDescriptors.add(stateProperties);
return keyedStateStore.getListState(stateProperties);
}
@Override
public <T> ReducingState<T> getReducingState(ReducingStateDescriptor<T> stateProperties) {
if (!stateRegistrationAllowed) {
throw new RuntimeException(REGISTRATION_EXCEPTION_MSG);
}
registeredDescriptors.add(stateProperties);
return keyedStateStore.getReducingState(stateProperties);
}
@Override
public <IN, ACC, OUT> AggregatingState<IN, OUT> getAggregatingState(
AggregatingStateDescriptor<IN, ACC, OUT> stateProperties) {
if (!stateRegistrationAllowed) {
throw new RuntimeException(REGISTRATION_EXCEPTION_MSG);
}
registeredDescriptors.add(stateProperties);
return keyedStateStore.getAggregatingState(stateProperties);
}
@Override
public <UK, UV> MapState<UK, UV> getMapState(MapStateDescriptor<UK, UV> stateProperties) {
if (!stateRegistrationAllowed) {
throw new RuntimeException(REGISTRATION_EXCEPTION_MSG);
}
registeredDescriptors.add(stateProperties);
return keyedStateStore.getMapState(stateProperties);
}
@Override
public <T> org.apache.flink.api.common.state.v2.ValueState<T> getState(
org.apache.flink.api.common.state.v2.ValueStateDescriptor<T> stateProperties) {
throw new UnsupportedOperationException("State processor api does not support state v2.");
}
@Override
public <T> org.apache.flink.api.common.state.v2.ListState<T> getListState(
org.apache.flink.api.common.state.v2.ListStateDescriptor<T> stateProperties) {
throw new UnsupportedOperationException("State processor api does not support state v2.");
}
@Override
public <T> org.apache.flink.api.common.state.v2.ReducingState<T> getReducingState(
org.apache.flink.api.common.state.v2.ReducingStateDescriptor<T> stateProperties) {
throw new UnsupportedOperationException("State processor api does not support state v2.");
}
@Override
public <IN, ACC, OUT>
org.apache.flink.api.common.state.v2.AggregatingState<IN, OUT> getAggregatingState(
org.apache.flink.api.common.state.v2.AggregatingStateDescriptor<IN, ACC, OUT>
stateProperties) {
throw new UnsupportedOperationException("State processor api does not support state v2.");
}
@Override
public <UK, UV> org.apache.flink.api.common.state.v2.MapState<UK, UV> getMapState(
org.apache.flink.api.common.state.v2.MapStateDescriptor<UK, UV> stateProperties) {
throw new UnsupportedOperationException("State processor api does not support state v2.");
}
public List<StateDescriptor<?, ?>> getStateDescriptors() {
if (registeredDescriptors.isEmpty()) {
return Collections.emptyList();
}
return new ArrayList<>(registeredDescriptors);
}
public void disableStateRegistration() throws Exception {
stateRegistrationAllowed = false;
}
}
| SavepointRuntimeContext |
java | google__guice | extensions/persist/src/com/google/inject/persist/jpa/JpaFinderProxy.java | {
"start": 3429,
"end": 3660
} | class ____ Finder's returnAs could not be instantated: "
+ finderDescriptor.returnCollectionType,
e);
} catch (IllegalAccessException e) {
throw new RuntimeException(
"Specified collection | of |
java | spring-projects__spring-boot | core/spring-boot-test-autoconfigure/src/main/java/org/springframework/boot/test/autoconfigure/json/JsonMarshalTesterRuntimeHints.java | {
"start": 1399,
"end": 2126
} | class ____ implements RuntimeHintsRegistrar {
private final Class<? extends AbstractJsonMarshalTester> tester;
protected JsonMarshalTesterRuntimeHints(Class<? extends AbstractJsonMarshalTester> tester) {
this.tester = tester;
}
@Override
public void registerHints(RuntimeHints hints, @Nullable ClassLoader classLoader) {
ReflectionHints reflection = hints.reflection();
reflection.registerType(this.tester, MemberCategory.INVOKE_DECLARED_CONSTRUCTORS);
Method method = ReflectionUtils.findMethod(this.tester, "initialize", Class.class, ResolvableType.class);
Assert.state(method != null, "'method' must not be null");
reflection.registerMethod(method, ExecutableMode.INVOKE);
}
}
| JsonMarshalTesterRuntimeHints |
java | google__dagger | javatests/dagger/internal/codegen/DependencyCycleValidationTest.java | {
"start": 12403,
"end": 12517
} | class ____ {",
" @Inject A(Set<C> cSet) {}",
" }",
"",
" static | A |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/GuardedByCheckerTest.java | {
"start": 30796,
"end": 31632
} | class ____ {
void m() {
synchronized (Outer.this) {
Inner.this.x++;
}
// BUG: Diagnostic contains:
// should be guarded by 'Outer.this'
Inner.this.x++;
}
}
}
}
}
""")
.doTest();
}
// Test that the analysis doesn't crash on lock expressions it doesn't recognize.
// Note: there's currently no way to use @GuardedBy to specify that the guard is a specific array
// element.
@Test
public void complexLockExpression() {
compilationHelper
.addSourceLines(
"threadsafety/Test.java",
"""
package threadsafety;
| InnerMost |
java | apache__hadoop | hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/ITestExponentialRetryPolicy.java | {
"start": 2818,
"end": 15493
} | class ____ extends AbstractAbfsIntegrationTest {
private final int maxRetryCount = 30;
private final int noRetryCount = 0;
private final int retryCount = new Random().nextInt(maxRetryCount);
private final int retryCountBeyondMax = maxRetryCount + 1;
private static final String TEST_PATH = "/testfile";
private static final double MULTIPLYING_FACTOR = 1.5;
private static final int ANALYSIS_PERIOD = 10000;
private static final String DUMMY_ACCOUNT_NAME = "dummy.dfs.core.windows.net";
private static final String DUMMY_ACCOUNT_NAME_1 = "dummy1.dfs.core.windows.net";
private static final String DUMMY_ACCOUNT_KEY = "dummyKey";
public ITestExponentialRetryPolicy() throws Exception {
super();
}
@Test
public void testDifferentMaxIORetryCount() throws Exception {
AbfsConfiguration abfsConfig = getAbfsConfig();
abfsConfig.setMaxIoRetries(noRetryCount);
testMaxIOConfig(abfsConfig);
abfsConfig.setMaxIoRetries(retryCount);
testMaxIOConfig(abfsConfig);
abfsConfig.setMaxIoRetries(retryCountBeyondMax);
testMaxIOConfig(abfsConfig);
}
@Test
public void testDefaultMaxIORetryCount() throws Exception {
AbfsConfiguration abfsConfig = getAbfsConfig();
Assertions.assertThat(abfsConfig.getMaxIoRetries())
.describedAs("Max retry count should be %s", maxRetryCount)
.isEqualTo(maxRetryCount);
testMaxIOConfig(abfsConfig);
}
@Test
public void testClientSideThrottlingConfigs() throws Exception {
final Configuration configuration = new Configuration();
configuration.setBoolean(FS_AZURE_ENABLE_AUTOTHROTTLING, true);
AbfsConfiguration abfsConfiguration = new AbfsConfiguration(configuration,
DUMMY_ACCOUNT_NAME);
Assertions.assertThat(abfsConfiguration.isAutoThrottlingEnabled())
.describedAs("Client-side throttling enabled by configuration key")
.isTrue();
configuration.unset(FS_AZURE_ENABLE_AUTOTHROTTLING);
AbfsConfiguration abfsConfiguration2 = new AbfsConfiguration(configuration,
DUMMY_ACCOUNT_NAME);
Assertions.assertThat(abfsConfiguration2.isAutoThrottlingEnabled())
.describedAs("Client-side throttling should be disabled by default")
.isFalse();
}
@Test
public void testThrottlingIntercept() throws Exception {
AzureBlobFileSystem fs = getFileSystem();
final Configuration configuration = new Configuration();
configuration.addResource(TEST_CONFIGURATION_FILE_NAME);
configuration.setBoolean(FS_AZURE_ENABLE_AUTOTHROTTLING, false);
configuration.set(FS_AZURE_ACCOUNT_KEY + DOT + DUMMY_ACCOUNT_NAME,
DUMMY_ACCOUNT_KEY);
configuration.set(FS_AZURE_ACCOUNT_KEY + DOT + DUMMY_ACCOUNT_NAME_1,
DUMMY_ACCOUNT_KEY);
// On disabling throttling AbfsNoOpThrottlingIntercept object is returned
AbfsConfiguration abfsConfiguration = new AbfsConfiguration(configuration,
DUMMY_ACCOUNT_NAME);
AbfsThrottlingIntercept intercept;
AbfsClient abfsClient = ITestAbfsClient.createTestClientFromCurrentContext(fs.getAbfsStore().getClient(), abfsConfiguration);
intercept = abfsClient.getIntercept();
Assertions.assertThat(intercept)
.describedAs("AbfsNoOpThrottlingIntercept instance expected")
.isInstanceOf(AbfsNoOpThrottlingIntercept.class);
configuration.setBoolean(FS_AZURE_ENABLE_AUTOTHROTTLING, true);
configuration.setBoolean(FS_AZURE_ACCOUNT_LEVEL_THROTTLING_ENABLED, true);
// On enabling throttling AbfsClientThrottlingIntercept object is returned
AbfsConfiguration abfsConfiguration1 = new AbfsConfiguration(configuration,
DUMMY_ACCOUNT_NAME_1);
AbfsClient abfsClient1 = ITestAbfsClient.createTestClientFromCurrentContext(fs.getAbfsStore().getClient(), abfsConfiguration1);
intercept = abfsClient1.getIntercept();
Assertions.assertThat(intercept)
.describedAs("AbfsClientThrottlingIntercept instance expected")
.isInstanceOf(AbfsClientThrottlingIntercept.class);
}
@Test
public void testCreateMultipleAccountThrottling() throws Exception {
Configuration config = new Configuration(getRawConfiguration());
String accountName = config.get(FS_AZURE_ACCOUNT_NAME);
if (accountName == null) {
// check if accountName is set using different config key
accountName = config.get(FS_AZURE_ABFS_ACCOUNT1_NAME);
}
assumeTrue(accountName != null && !accountName.isEmpty(),
"Not set: " + FS_AZURE_ABFS_ACCOUNT1_NAME);
Configuration rawConfig1 = new Configuration();
rawConfig1.addResource(TEST_CONFIGURATION_FILE_NAME);
AbfsRestOperation successOp = mock(AbfsRestOperation.class);
AbfsHttpOperation http500Op = mock(AbfsHttpOperation.class);
when(http500Op.getStatusCode()).thenReturn(HTTP_INTERNAL_ERROR);
when(successOp.getResult()).thenReturn(http500Op);
AbfsConfiguration configuration = Mockito.mock(AbfsConfiguration.class);
when(configuration.getAnalysisPeriod()).thenReturn(ANALYSIS_PERIOD);
when(configuration.isAutoThrottlingEnabled()).thenReturn(true);
when(configuration.accountThrottlingEnabled()).thenReturn(false);
AbfsThrottlingIntercept instance1 = AbfsThrottlingInterceptFactory.getInstance(accountName, configuration);
String accountName1 = config.get(FS_AZURE_ABFS_ACCOUNT1_NAME);
assumeTrue(accountName1 != null && !accountName1.isEmpty(),
"Not set: " + FS_AZURE_ABFS_ACCOUNT1_NAME);
AbfsThrottlingIntercept instance2 = AbfsThrottlingInterceptFactory.getInstance(accountName1, configuration);
//if singleton is enabled, for different accounts both the instances should return same value
Assertions.assertThat(instance1)
.describedAs(
"if singleton is enabled, for different accounts both the instances should return same value")
.isEqualTo(instance2);
when(configuration.accountThrottlingEnabled()).thenReturn(true);
AbfsThrottlingIntercept instance3 = AbfsThrottlingInterceptFactory.getInstance(accountName, configuration);
AbfsThrottlingIntercept instance4 = AbfsThrottlingInterceptFactory.getInstance(accountName1, configuration);
AbfsThrottlingIntercept instance5 = AbfsThrottlingInterceptFactory.getInstance(accountName, configuration);
//if singleton is not enabled, for different accounts instances should return different value
Assertions.assertThat(instance3)
.describedAs(
"iff singleton is not enabled, for different accounts instances should return different value")
.isNotEqualTo(instance4);
//if singleton is not enabled, for same accounts instances should return same value
Assertions.assertThat(instance3)
.describedAs(
"if singleton is not enabled, for same accounts instances should return same value")
.isEqualTo(instance5);
}
@Test
public void testOperationOnAccountIdle() throws Exception {
//Get the filesystem.
AzureBlobFileSystem fs = getFileSystem();
AbfsClient client = fs.getAbfsStore().getClient();
AbfsConfiguration configuration1 = client.getAbfsConfiguration();
assumeTrue(configuration1.isAutoThrottlingEnabled());
assumeTrue(configuration1.accountThrottlingEnabled());
AbfsClientThrottlingIntercept accountIntercept
= (AbfsClientThrottlingIntercept) client.getIntercept();
final byte[] b = new byte[2 * MIN_BUFFER_SIZE];
new Random().nextBytes(b);
Path testPath = path(TEST_PATH);
//Do an operation on the filesystem.
try (FSDataOutputStream stream = fs.create(testPath)) {
stream.write(b);
}
//Don't perform any operation on the account.
int sleepTime = (int) ((getAbfsConfig().getAccountOperationIdleTimeout()) * MULTIPLYING_FACTOR);
Thread.sleep(sleepTime);
try (FSDataInputStream streamRead = fs.open(testPath)) {
streamRead.read(b);
}
//Perform operations on another account.
AzureBlobFileSystem fs1 = new AzureBlobFileSystem();
Configuration config = new Configuration(getRawConfiguration());
String accountName1 = config.get(FS_AZURE_ABFS_ACCOUNT1_NAME);
assumeTrue(accountName1 != null && !accountName1.isEmpty(),
"Not set: " + FS_AZURE_ABFS_ACCOUNT1_NAME);
final String abfsUrl1 = this.getFileSystemName() + "12" + "@" + accountName1;
URI defaultUri1 = null;
defaultUri1 = new URI("abfss", abfsUrl1, null, null, null);
fs1.initialize(defaultUri1, getRawConfiguration());
AbfsClient client1 = fs1.getAbfsStore().getClient();
AbfsClientThrottlingIntercept accountIntercept1
= (AbfsClientThrottlingIntercept) client1.getIntercept();
try (FSDataOutputStream stream1 = fs1.create(testPath)) {
stream1.write(b);
}
//Verify the write analyzer for first account is idle but the read analyzer is not idle.
Assertions.assertThat(accountIntercept.getWriteThrottler()
.getIsOperationOnAccountIdle()
.get())
.describedAs("Write analyzer for first account should be idle the first time")
.isTrue();
Assertions.assertThat(
accountIntercept.getReadThrottler()
.getIsOperationOnAccountIdle()
.get())
.describedAs("Read analyzer for first account should not be idle")
.isFalse();
//Verify the write analyzer for second account is not idle.
Assertions.assertThat(
accountIntercept1.getWriteThrottler()
.getIsOperationOnAccountIdle()
.get())
.describedAs("Write analyzer for second account should not be idle")
.isFalse();
//Again perform an operation on the first account.
try (FSDataOutputStream stream2 = fs.create(testPath)) {
stream2.write(b);
}
//Verify the write analyzer on first account is not idle.
Assertions.assertThat(
accountIntercept.getWriteThrottler()
.getIsOperationOnAccountIdle()
.get())
.describedAs(
"Write analyzer for first account should not be idle second time")
.isFalse();
}
@Test
public void testAbfsConfigConstructor() throws Exception {
// Ensure we choose expected values that are not defaults
ExponentialRetryPolicy template = new ExponentialRetryPolicy(
getAbfsConfig().getMaxIoRetries());
int testModifier = 1;
int expectedMaxRetries = template.getMaxRetryCount() + testModifier;
int expectedMinBackoff = template.getMinBackoff() + testModifier;
int expectedMaxBackoff = template.getMaxBackoff() + testModifier;
int expectedDeltaBackoff = template.getDeltaBackoff() + testModifier;
Configuration config = new Configuration(this.getRawConfiguration());
config.setInt(AZURE_MAX_IO_RETRIES, expectedMaxRetries);
config.setInt(AZURE_MIN_BACKOFF_INTERVAL, expectedMinBackoff);
config.setInt(AZURE_MAX_BACKOFF_INTERVAL, expectedMaxBackoff);
config.setInt(AZURE_BACKOFF_INTERVAL, expectedDeltaBackoff);
ExponentialRetryPolicy policy = new ExponentialRetryPolicy(
new AbfsConfiguration(config, "dummyAccountName"));
Assertions.assertThat(policy.getMaxRetryCount())
.describedAs("Max retry count was not set as expected.")
.isEqualTo(expectedMaxRetries);
Assertions.assertThat(policy.getMinBackoff())
.describedAs("Min backoff interval was not set as expected.")
.isEqualTo(expectedMinBackoff);
Assertions.assertThat(policy.getMaxBackoff())
.describedAs("Max backoff interval was not set as expected")
.isEqualTo(expectedMaxBackoff);
Assertions.assertThat(policy.getDeltaBackoff())
.describedAs("Delta backoff interval was not set as expected.")
.isEqualTo(expectedDeltaBackoff);
}
private AbfsConfiguration getAbfsConfig() throws Exception {
Configuration
config = new Configuration(this.getRawConfiguration());
return new AbfsConfiguration(config, "dummyAccountName");
}
private void testMaxIOConfig(AbfsConfiguration abfsConfig) {
ExponentialRetryPolicy retryPolicy = new ExponentialRetryPolicy(
abfsConfig.getMaxIoRetries());
int localRetryCount = 0;
while (localRetryCount < abfsConfig.getMaxIoRetries()) {
Assertions.assertThat(retryPolicy.shouldRetry(localRetryCount, -1))
.describedAs("Retry should be allowed when retryCount less than max count configured.")
.isTrue();
localRetryCount++;
}
Assertions.assertThat(localRetryCount)
.describedAs("When all retries are exhausted, the retryCount will be same as max configured.")
.isEqualTo(abfsConfig.getMaxIoRetries());
}
}
| ITestExponentialRetryPolicy |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/cluster/metadata/MetadataRepositoriesMetadataTests.java | {
"start": 16114,
"end": 17527
} | class ____ extends AbstractNamedDiffable<Metadata.ClusterCustom> implements Metadata.ClusterCustom {
private final List<RepositoryMetadata> repositories;
public TestBwcRepositoryMetadata(List<RepositoryMetadata> repositories) {
this.repositories = repositories;
}
public TestBwcRepositoryMetadata(StreamInput in) throws IOException {
this.repositories = in.readCollectionAsImmutableList(RepositoryMetadata::new);
}
@Override
public EnumSet<Metadata.XContentContext> context() {
return Metadata.API_AND_GATEWAY;
}
@Override
public TransportVersion getMinimalSupportedVersion() {
return TransportVersion.minimumCompatible();
}
@Override
public String getWriteableName() {
return RepositoriesMetadata.TYPE;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeCollection(repositories);
}
public static NamedDiff<Metadata.ClusterCustom> readDiffFrom(StreamInput in) throws IOException {
return readDiffFrom(Metadata.ClusterCustom.class, RepositoriesMetadata.TYPE, in);
}
@Override
public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) {
return null;
}
}
}
| TestBwcRepositoryMetadata |
java | apache__flink | flink-clients/src/main/java/org/apache/flink/client/program/rest/RestClusterClient.java | {
"start": 10177,
"end": 48523
} | class ____<T> implements ClusterClient<T> {
private static final Logger LOG = LoggerFactory.getLogger(RestClusterClient.class);
private final RestClusterClientConfiguration restClusterClientConfiguration;
private final Configuration configuration;
private final RestClient restClient;
private final ExecutorService executorService =
Executors.newFixedThreadPool(
4, new ExecutorThreadFactory("Flink-RestClusterClient-IO"));
private final WaitStrategy waitStrategy;
private final T clusterId;
private final ClientHighAvailabilityServices clientHAServices;
private final LeaderRetrievalService webMonitorRetrievalService;
private final LeaderRetriever webMonitorLeaderRetriever = new LeaderRetriever();
private final AtomicBoolean running = new AtomicBoolean(true);
/** ExecutorService to run operations that can be retried on exceptions. */
private final ScheduledExecutorService retryExecutorService;
private final Predicate<Throwable> unknownJobStateRetryable =
exception ->
ExceptionUtils.findThrowable(exception, JobStateUnknownException.class)
.isPresent();
private final URL jobmanagerUrl;
private final Collection<HttpHeader> customHttpHeaders;
public RestClusterClient(Configuration config, T clusterId) throws Exception {
this(config, clusterId, DefaultClientHighAvailabilityServicesFactory.INSTANCE);
}
public RestClusterClient(
Configuration config, T clusterId, ClientHighAvailabilityServicesFactory factory)
throws Exception {
this(config, null, clusterId, new ExponentialWaitStrategy(10L, 2000L), factory, null);
}
public RestClusterClient(
Configuration config,
T clusterId,
ClientHighAvailabilityServicesFactory factory,
EventLoopGroup group)
throws Exception {
this(config, null, clusterId, new ExponentialWaitStrategy(10L, 2000L), factory, group);
}
@VisibleForTesting
RestClusterClient(
Configuration configuration,
@Nullable RestClient restClient,
T clusterId,
WaitStrategy waitStrategy)
throws Exception {
this(
configuration,
restClient,
clusterId,
waitStrategy,
DefaultClientHighAvailabilityServicesFactory.INSTANCE,
null);
}
private RestClusterClient(
Configuration configuration,
@Nullable RestClient restClient,
T clusterId,
WaitStrategy waitStrategy,
ClientHighAvailabilityServicesFactory clientHAServicesFactory,
@Nullable EventLoopGroup group)
throws Exception {
this.configuration = checkNotNull(configuration);
this.restClusterClientConfiguration =
RestClusterClientConfiguration.fromConfiguration(configuration);
this.customHttpHeaders =
ClientUtils.readHeadersFromEnvironmentVariable(
ConfigConstants.FLINK_REST_CLIENT_HEADERS);
jobmanagerUrl =
new URL(
SecurityOptions.isRestSSLEnabled(configuration) ? "https" : "http",
configuration.get(JobManagerOptions.ADDRESS),
configuration.get(JobManagerOptions.PORT),
configuration.get(RestOptions.PATH));
if (restClient != null) {
this.restClient = restClient;
} else {
this.restClient =
RestClient.forUrl(configuration, executorService, jobmanagerUrl, group);
}
this.waitStrategy = checkNotNull(waitStrategy);
this.clusterId = checkNotNull(clusterId);
this.clientHAServices =
clientHAServicesFactory.create(
configuration,
exception ->
webMonitorLeaderRetriever.handleError(
new FlinkException(
"Fatal error happened with client HA "
+ "services.",
exception)));
this.webMonitorRetrievalService = clientHAServices.getClusterRestEndpointLeaderRetriever();
this.retryExecutorService =
Executors.newSingleThreadScheduledExecutor(
new ExecutorThreadFactory("Flink-RestClusterClient-Retry"));
startLeaderRetrievers();
}
private void startLeaderRetrievers() throws Exception {
this.webMonitorRetrievalService.start(webMonitorLeaderRetriever);
}
@Override
public Configuration getFlinkConfiguration() {
return new Configuration(configuration);
}
@Override
public void close() {
if (running.compareAndSet(true, false)) {
ExecutorUtils.gracefulShutdown(
restClusterClientConfiguration.getRetryDelay(),
TimeUnit.MILLISECONDS,
retryExecutorService);
this.restClient.shutdown(Duration.ofSeconds(5));
ExecutorUtils.gracefulShutdown(5, TimeUnit.SECONDS, this.executorService);
try {
webMonitorRetrievalService.stop();
} catch (Exception e) {
LOG.error("An error occurred during stopping the WebMonitorRetrievalService", e);
}
try {
clientHAServices.close();
} catch (Exception e) {
LOG.error(
"An error occurred during stopping the ClientHighAvailabilityServices", e);
}
}
}
/**
* Requests the job details.
*
* @param jobId The job id
* @return Job details
*/
public CompletableFuture<JobDetailsInfo> getJobDetails(JobID jobId) {
final JobDetailsHeaders detailsHeaders = JobDetailsHeaders.getInstance();
final JobMessageParameters params = new JobMessageParameters();
params.jobPathParameter.resolve(jobId);
return sendRequest(detailsHeaders, params);
}
@Override
public CompletableFuture<JobStatus> getJobStatus(JobID jobId) {
final CheckedSupplier<CompletableFuture<JobStatus>> operation =
() -> requestJobStatus(jobId);
return retry(operation, unknownJobStateRetryable);
}
/**
* Requests the {@link JobResult} for the given {@link JobID}. The method retries multiple times
* to poll the {@link JobResult} before giving up.
*
* @param jobId specifying the job for which to retrieve the {@link JobResult}
* @return Future which is completed with the {@link JobResult} once the job has completed or
* with a failure if the {@link JobResult} could not be retrieved.
*/
@Override
public CompletableFuture<JobResult> requestJobResult(@Nonnull JobID jobId) {
final CheckedSupplier<CompletableFuture<JobResult>> operation =
() -> requestJobResultInternal(jobId);
return retry(operation, unknownJobStateRetryable);
}
    /**
     * Submits the given {@link ExecutionPlan} to the cluster.
     *
     * <p>The submission happens in stages: (1) the plan is serialized to a temporary file,
     * (2) the multipart request (plan file, user jars, local artifacts) is assembled,
     * (3) the request is sent with retries, (4) the temporary file is deleted regardless of
     * the outcome, and (5) the result is mapped to the {@link JobID} or a
     * {@link JobSubmissionException}.
     */
    @Override
    public CompletableFuture<JobID> submitJob(@Nonnull ExecutionPlan executionPlan) {
        // Stage 1: serialize the ExecutionPlan into a temporary file on the I/O executor.
        CompletableFuture<java.nio.file.Path> executionPlanFileFuture =
                CompletableFuture.supplyAsync(
                        () -> {
                            try {
                                final java.nio.file.Path executionPlanFile =
                                        Files.createTempFile(
                                                "flink-executionPlan-" + executionPlan.getJobID(),
                                                ".bin");
                                try (ObjectOutputStream objectOut =
                                        new ObjectOutputStream(
                                                Files.newOutputStream(executionPlanFile))) {
                                    objectOut.writeObject(executionPlan);
                                }
                                return executionPlanFile;
                            } catch (IOException e) {
                                throw new CompletionException(
                                        new FlinkException(
                                                "Failed to serialize ExecutionPlan.", e));
                            }
                        },
                        executorService);
        // Stage 2: assemble the request body plus the collection of files to upload
        // (serialized plan, user jars, and any local user artifacts).
        CompletableFuture<Tuple2<JobSubmitRequestBody, Collection<FileUpload>>> requestFuture =
                executionPlanFileFuture.thenApply(
                        executionPlanFile -> {
                            List<String> jarFileNames = new ArrayList<>(8);
                            List<JobSubmitRequestBody.DistributedCacheFile> artifactFileNames =
                                    new ArrayList<>(8);
                            Collection<FileUpload> filesToUpload = new ArrayList<>(8);
                            filesToUpload.add(
                                    new FileUpload(
                                            executionPlanFile, RestConstants.CONTENT_TYPE_BINARY));
                            for (Path jar : executionPlan.getUserJars()) {
                                jarFileNames.add(jar.getName());
                                filesToUpload.add(
                                        new FileUpload(
                                                Paths.get(jar.toUri()),
                                                RestConstants.CONTENT_TYPE_JAR));
                            }
                            for (Map.Entry<String, DistributedCache.DistributedCacheEntry>
                                    artifacts : executionPlan.getUserArtifacts().entrySet()) {
                                final Path artifactFilePath =
                                        new Path(artifacts.getValue().filePath);
                                try {
                                    // Only local artifacts need to be uploaded.
                                    if (!artifactFilePath.getFileSystem().isDistributedFS()) {
                                        artifactFileNames.add(
                                                new JobSubmitRequestBody.DistributedCacheFile(
                                                        artifacts.getKey(),
                                                        artifactFilePath.getName()));
                                        filesToUpload.add(
                                                new FileUpload(
                                                        Paths.get(artifactFilePath.getPath()),
                                                        RestConstants.CONTENT_TYPE_BINARY));
                                    }
                                } catch (IOException e) {
                                    throw new CompletionException(
                                            new FlinkException(
                                                    "Failed to get the FileSystem of artifact "
                                                            + artifactFilePath
                                                            + ".",
                                                    e));
                                }
                            }
                            final JobSubmitRequestBody requestBody =
                                    new JobSubmitRequestBody(
                                            executionPlanFile.getFileName().toString(),
                                            jarFileNames,
                                            artifactFileNames);
                            return Tuple2.of(
                                    requestBody, Collections.unmodifiableCollection(filesToUpload));
                        });
        // Stage 3: send the submission, retrying on connection problems or a
        // service-unavailable response; log the outcome of every attempt.
        final CompletableFuture<JobSubmitResponseBody> submissionFuture =
                requestFuture.thenCompose(
                        requestAndFileUploads -> {
                            LOG.info(
                                    "Submitting job '{}' ({}).",
                                    executionPlan.getName(),
                                    executionPlan.getJobID());
                            return sendRetriableRequest(
                                    JobSubmitHeaders.getInstance(),
                                    EmptyMessageParameters.getInstance(),
                                    requestAndFileUploads.f0,
                                    requestAndFileUploads.f1,
                                    isConnectionProblemOrServiceUnavailable(),
                                    (receiver, error) -> {
                                        if (error != null) {
                                            LOG.warn(
                                                    "Attempt to submit job '{}' ({}) to '{}' has failed.",
                                                    executionPlan.getName(),
                                                    executionPlan.getJobID(),
                                                    receiver,
                                                    error);
                                        } else {
                                            LOG.info(
                                                    "Successfully submitted job '{}' ({}) to '{}'.",
                                                    executionPlan.getName(),
                                                    executionPlan.getJobID(),
                                                    receiver);
                                        }
                                    });
                        });
        // Stage 4: whether submission succeeded or failed, delete the temporary plan file.
        // (If serialization itself failed, thenCompose propagates that failure and the
        // delete step is skipped — no file was created in that case.)
        submissionFuture
                .exceptionally(ignored -> null) // ignore errors
                .thenCompose(ignored -> executionPlanFileFuture)
                .thenAccept(
                        executionPlanFile -> {
                            try {
                                Files.delete(executionPlanFile);
                            } catch (IOException e) {
                                LOG.warn(
                                        "Could not delete temporary file {}.",
                                        executionPlanFile,
                                        e);
                            }
                        });
        // Stage 5: map success to the JobID; wrap any failure in a JobSubmissionException
        // with the original cause stripped of CompletionException layers.
        return submissionFuture
                .thenApply(ignore -> executionPlan.getJobID())
                .exceptionally(
                        (Throwable throwable) -> {
                            throw new CompletionException(
                                    new JobSubmissionException(
                                            executionPlan.getJobID(),
                                            "Failed to submit ExecutionPlan.",
                                            ExceptionUtils.stripCompletionException(throwable)));
                        });
    }
@Override
public CompletableFuture<Acknowledge> cancel(JobID jobID) {
JobCancellationMessageParameters params =
new JobCancellationMessageParameters()
.resolveJobId(jobID)
.resolveTerminationMode(
TerminationModeQueryParameter.TerminationMode.CANCEL);
CompletableFuture<EmptyResponseBody> responseFuture =
sendRequest(JobCancellationHeaders.getInstance(), params);
return responseFuture.thenApply(ignore -> Acknowledge.get());
}
    /**
     * Stops the job with a savepoint in attached mode: the returned future completes with the
     * savepoint location once the savepoint has finished.
     */
    @Override
    public CompletableFuture<String> stopWithSavepoint(
            final JobID jobId,
            final boolean advanceToEndOfTime,
            @Nullable final String savepointDirectory,
            final SavepointFormatType formatType) {
        return stopWithSavepoint(jobId, advanceToEndOfTime, savepointDirectory, formatType, false);
    }
    /**
     * Stops the job with a savepoint in detached mode: the returned future completes with the
     * savepoint trigger id instead of waiting for the savepoint to finish.
     */
    @Override
    public CompletableFuture<String> stopWithDetachedSavepoint(
            final JobID jobId,
            final boolean advanceToEndOfTime,
            @Nullable final String savepointDirectory,
            final SavepointFormatType formatType) {
        return stopWithSavepoint(jobId, advanceToEndOfTime, savepointDirectory, formatType, true);
    }
    /** Triggers a savepoint with {@code cancelJob=true} and waits for its completion (attached). */
    @Override
    public CompletableFuture<String> cancelWithSavepoint(
            JobID jobId, @Nullable String savepointDirectory, SavepointFormatType formatType) {
        return triggerSavepoint(jobId, savepointDirectory, true, formatType, false);
    }
    /** Triggers a savepoint without cancelling the job and waits for its completion (attached). */
    @Override
    public CompletableFuture<String> triggerSavepoint(
            final JobID jobId,
            final @Nullable String savepointDirectory,
            final SavepointFormatType formatType) {
        return triggerSavepoint(jobId, savepointDirectory, false, formatType, false);
    }
@Override
public CompletableFuture<Long> triggerCheckpoint(JobID jobId, CheckpointType checkpointType) {
final CheckpointTriggerHeaders checkpointTriggerHeaders =
CheckpointTriggerHeaders.getInstance();
final CheckpointTriggerMessageParameters checkpointTriggerMessageParameters =
checkpointTriggerHeaders.getUnresolvedMessageParameters();
checkpointTriggerMessageParameters.jobID.resolve(jobId);
final CompletableFuture<TriggerResponse> responseFuture =
sendRequest(
checkpointTriggerHeaders,
checkpointTriggerMessageParameters,
new CheckpointTriggerRequestBody(checkpointType, null));
return responseFuture
.thenCompose(
checkpointTriggerResponseBody -> {
final TriggerId checkpointTriggerId =
checkpointTriggerResponseBody.getTriggerId();
return pollCheckpointAsync(jobId, checkpointTriggerId);
})
.thenApply(
checkpointInfo -> {
if (checkpointInfo.getFailureCause() != null) {
throw new CompletionException(checkpointInfo.getFailureCause());
}
return checkpointInfo.getCheckpointId();
});
}
    /** Triggers a savepoint in detached mode: completes with the trigger id, does not wait. */
    @Override
    public CompletableFuture<String> triggerDetachedSavepoint(
            final JobID jobId,
            final @Nullable String savepointDirectory,
            final SavepointFormatType formatType) {
        return triggerSavepoint(jobId, savepointDirectory, false, formatType, true);
    }
@Override
public CompletableFuture<CoordinationResponse> sendCoordinationRequest(
JobID jobId, String operatorUid, CoordinationRequest request) {
ClientCoordinationHeaders headers = ClientCoordinationHeaders.getInstance();
ClientCoordinationMessageParameters params = new ClientCoordinationMessageParameters();
params.jobPathParameter.resolve(jobId);
params.operatorUidPathParameter.resolve(operatorUid);
SerializedValue<CoordinationRequest> serializedRequest;
try {
serializedRequest = new SerializedValue<>(request);
} catch (IOException e) {
return FutureUtils.completedExceptionally(e);
}
ClientCoordinationRequestBody requestBody =
new ClientCoordinationRequestBody(serializedRequest);
return sendRequest(headers, params, requestBody)
.thenApply(
responseBody -> {
try {
return responseBody
.getSerializedCoordinationResponse()
.deserializeValue(getClass().getClassLoader());
} catch (IOException | ClassNotFoundException e) {
throw new CompletionException(
"Failed to deserialize coordination response", e);
}
});
}
public CompletableFuture<String> stopWithSavepoint(
final JobID jobId,
final boolean advanceToEndOfTime,
@Nullable final String savepointDirectory,
final SavepointFormatType formatType,
final boolean isDetachedMode) {
final StopWithSavepointTriggerHeaders stopWithSavepointTriggerHeaders =
StopWithSavepointTriggerHeaders.getInstance();
final SavepointTriggerMessageParameters stopWithSavepointTriggerMessageParameters =
stopWithSavepointTriggerHeaders.getUnresolvedMessageParameters();
stopWithSavepointTriggerMessageParameters.jobID.resolve(jobId);
final CompletableFuture<TriggerResponse> responseFuture =
sendRequest(
stopWithSavepointTriggerHeaders,
stopWithSavepointTriggerMessageParameters,
new StopWithSavepointRequestBody(
savepointDirectory, advanceToEndOfTime, formatType, null));
return getSavepointTriggerFuture(jobId, isDetachedMode, responseFuture);
}
private CompletableFuture<String> triggerSavepoint(
final JobID jobId,
final @Nullable String savepointDirectory,
final boolean cancelJob,
final SavepointFormatType formatType,
final boolean isDetachedMode) {
final SavepointTriggerHeaders savepointTriggerHeaders =
SavepointTriggerHeaders.getInstance();
final SavepointTriggerMessageParameters savepointTriggerMessageParameters =
savepointTriggerHeaders.getUnresolvedMessageParameters();
savepointTriggerMessageParameters.jobID.resolve(jobId);
final CompletableFuture<TriggerResponse> responseFuture =
sendRequest(
savepointTriggerHeaders,
savepointTriggerMessageParameters,
new SavepointTriggerRequestBody(
savepointDirectory, cancelJob, formatType, null));
return getSavepointTriggerFuture(jobId, isDetachedMode, responseFuture);
}
private CompletableFuture<String> getSavepointTriggerFuture(
JobID jobId,
boolean isDetachedMode,
CompletableFuture<TriggerResponse> responseFuture) {
CompletableFuture<String> futureResult;
if (isDetachedMode) {
// we just return the savepoint trigger id in detached savepoint,
// that means the client could exit immediately
futureResult =
responseFuture.thenApply((TriggerResponse tr) -> tr.getTriggerId().toString());
} else {
// otherwise we need to wait the savepoint to be succeeded
// and return the savepoint path
futureResult =
responseFuture
.thenCompose(
savepointTriggerResponseBody -> {
final TriggerId savepointTriggerId =
savepointTriggerResponseBody.getTriggerId();
return pollSavepointAsync(jobId, savepointTriggerId);
})
.thenApply(
savepointInfo -> {
if (savepointInfo.getFailureCause() != null) {
throw new CompletionException(
savepointInfo.getFailureCause());
}
return savepointInfo.getLocation();
});
}
return futureResult;
}
@Override
public CompletableFuture<Map<String, Object>> getAccumulators(JobID jobID, ClassLoader loader) {
final JobAccumulatorsHeaders accumulatorsHeaders = JobAccumulatorsHeaders.getInstance();
final JobAccumulatorsMessageParameters accMsgParams =
accumulatorsHeaders.getUnresolvedMessageParameters();
accMsgParams.jobPathParameter.resolve(jobID);
accMsgParams.includeSerializedAccumulatorsParameter.resolve(
Collections.singletonList(true));
CompletableFuture<JobAccumulatorsInfo> responseFuture =
sendRequest(accumulatorsHeaders, accMsgParams);
return responseFuture
.thenApply(JobAccumulatorsInfo::getSerializedUserAccumulators)
.thenApply(
accumulators -> {
try {
return AccumulatorHelper.deserializeAndUnwrapAccumulators(
accumulators, loader);
} catch (Exception e) {
throw new CompletionException(
"Cannot deserialize and unwrap accumulators properly.", e);
}
});
}
private CompletableFuture<SavepointInfo> pollSavepointAsync(
final JobID jobId, final TriggerId triggerID) {
return pollResourceAsync(
() -> {
final SavepointStatusHeaders savepointStatusHeaders =
SavepointStatusHeaders.getInstance();
final SavepointStatusMessageParameters savepointStatusMessageParameters =
savepointStatusHeaders.getUnresolvedMessageParameters();
savepointStatusMessageParameters.jobIdPathParameter.resolve(jobId);
savepointStatusMessageParameters.triggerIdPathParameter.resolve(triggerID);
return sendRequest(savepointStatusHeaders, savepointStatusMessageParameters);
});
}
private CompletableFuture<CheckpointInfo> pollCheckpointAsync(
final JobID jobId, final TriggerId triggerID) {
return pollResourceAsync(
() -> {
final CheckpointStatusHeaders checkpointStatusHeaders =
CheckpointStatusHeaders.getInstance();
final CheckpointStatusMessageParameters checkpointStatusMessageParameters =
checkpointStatusHeaders.getUnresolvedMessageParameters();
checkpointStatusMessageParameters.jobIdPathParameter.resolve(jobId);
checkpointStatusMessageParameters.triggerIdPathParameter.resolve(triggerID);
return sendRequest(checkpointStatusHeaders, checkpointStatusMessageParameters);
});
}
@Override
public CompletableFuture<Collection<JobStatusMessage>> listJobs() {
return sendRequest(JobsOverviewHeaders.getInstance())
.thenApply(
(multipleJobsDetails) ->
multipleJobsDetails.getJobs().stream()
.map(
detail ->
new JobStatusMessage(
detail.getJobId(),
detail.getJobName(),
detail.getStatus(),
detail.getStartTime()))
.collect(Collectors.toList()));
}
    /** Returns the identifier of the cluster this client is connected to. */
    @Override
    public T getClusterId() {
        return clusterId;
    }
    /**
     * Disposes the savepoint at the given path and waits for the asynchronous disposal
     * operation on the cluster to finish.
     *
     * @param savepointPath path of the savepoint to dispose
     * @return future completed with {@link Acknowledge} on success, or exceptionally with the
     *     failure cause reported by the cluster
     */
    @Override
    public CompletableFuture<Acknowledge> disposeSavepoint(String savepointPath) {
        final SavepointDisposalRequest savepointDisposalRequest =
                new SavepointDisposalRequest(savepointPath);
        // Trigger the disposal; the server answers with a trigger id used for status polling.
        final CompletableFuture<TriggerResponse> savepointDisposalTriggerFuture =
                sendRequest(
                        SavepointDisposalTriggerHeaders.getInstance(), savepointDisposalRequest);
        // Poll the status endpoint until the asynchronous disposal operation has completed.
        final CompletableFuture<AsynchronousOperationInfo> savepointDisposalFuture =
                savepointDisposalTriggerFuture.thenCompose(
                        (TriggerResponse triggerResponse) -> {
                            final TriggerId triggerId = triggerResponse.getTriggerId();
                            final SavepointDisposalStatusHeaders savepointDisposalStatusHeaders =
                                    SavepointDisposalStatusHeaders.getInstance();
                            final SavepointDisposalStatusMessageParameters
                                    savepointDisposalStatusMessageParameters =
                                            savepointDisposalStatusHeaders
                                                    .getUnresolvedMessageParameters();
                            savepointDisposalStatusMessageParameters.triggerIdPathParameter.resolve(
                                    triggerId);
                            return pollResourceAsync(
                                    () ->
                                            sendRequest(
                                                    savepointDisposalStatusHeaders,
                                                    savepointDisposalStatusMessageParameters));
                        });
        // Translate the operation result: success -> Acknowledge, failure -> exception.
        return savepointDisposalFuture.thenApply(
                (AsynchronousOperationInfo asynchronousOperationInfo) -> {
                    if (asynchronousOperationInfo.getFailureCause() == null) {
                        return Acknowledge.get();
                    } else {
                        throw new CompletionException(asynchronousOperationInfo.getFailureCause());
                    }
                });
    }
@Override
public CompletableFuture<Set<AbstractID>> listCompletedClusterDatasetIds() {
return sendRequest(ClusterDataSetListHeaders.INSTANCE)
.thenApply(
clusterDataSetListResponseBody ->
clusterDataSetListResponseBody.getDataSets().stream()
.filter(ClusterDataSetEntry::isComplete)
.map(ClusterDataSetEntry::getDataSetId)
.map(id -> new AbstractID(StringUtils.hexStringToByte(id)))
.collect(Collectors.toSet()));
}
    /**
     * Deletes the given cluster dataset and waits for the asynchronous delete operation on the
     * cluster to finish.
     *
     * @param clusterDatasetId id of the cluster dataset to invalidate
     * @return future completed once the deletion finished, or exceptionally with the failure
     *     cause reported by the cluster
     */
    @Override
    public CompletableFuture<Void> invalidateClusterDataset(AbstractID clusterDatasetId) {
        final ClusterDataSetDeleteTriggerHeaders triggerHeader =
                ClusterDataSetDeleteTriggerHeaders.INSTANCE;
        final ClusterDataSetDeleteTriggerMessageParameters parameters =
                triggerHeader.getUnresolvedMessageParameters();
        parameters.clusterDataSetIdPathParameter.resolve(
                new IntermediateDataSetID(clusterDatasetId));
        // Trigger the delete; the server answers with a trigger id used for status polling.
        final CompletableFuture<TriggerResponse> triggerFuture =
                sendRequest(triggerHeader, parameters);
        // Poll the status endpoint until the asynchronous delete operation has completed.
        final CompletableFuture<AsynchronousOperationInfo> clusterDatasetDeleteFuture =
                triggerFuture.thenCompose(
                        triggerResponse -> {
                            final TriggerId triggerId = triggerResponse.getTriggerId();
                            final ClusterDataSetDeleteStatusHeaders statusHeaders =
                                    ClusterDataSetDeleteStatusHeaders.INSTANCE;
                            final ClusterDataSetDeleteStatusMessageParameters
                                    statusMessageParameters =
                                            statusHeaders.getUnresolvedMessageParameters();
                            statusMessageParameters.triggerIdPathParameter.resolve(triggerId);
                            return pollResourceAsync(
                                    () -> sendRequest(statusHeaders, statusMessageParameters));
                        });
        // Success completes with null (Void); a reported failure is rethrown to the caller.
        return clusterDatasetDeleteFuture.thenApply(
                asynchronousOperationInfo -> {
                    if (asynchronousOperationInfo.getFailureCause() == null) {
                        return null;
                    } else {
                        throw new CompletionException(asynchronousOperationInfo.getFailureCause());
                    }
                });
    }
@Override
public CompletableFuture<Void> reportHeartbeat(JobID jobId, long expiredTimestamp) {
JobClientHeartbeatParameters params =
new JobClientHeartbeatParameters().resolveJobId(jobId);
CompletableFuture<EmptyResponseBody> responseFuture =
sendRequest(
JobClientHeartbeatHeaders.getInstance(),
params,
new JobClientHeartbeatRequestBody(expiredTimestamp));
return responseFuture.thenApply(ignore -> null);
}
    /**
     * Sends a shutdown request to the cluster and waits for the response.
     *
     * <p>Execution failures are only logged; on interruption the thread's interrupt flag is
     * restored so callers can observe it.
     */
    @Override
    public void shutDownCluster() {
        try {
            sendRequest(ShutdownHeaders.getInstance()).get();
        } catch (InterruptedException e) {
            // Restore the interrupt flag instead of swallowing the interruption.
            Thread.currentThread().interrupt();
        } catch (ExecutionException e) {
            LOG.error("Error while shutting down cluster", e);
        }
    }
    /**
     * Creates a {@code CompletableFuture} that polls a {@code AsynchronouslyCreatedResource} until
     * its {@link AsynchronouslyCreatedResource#queueStatus() QueueStatus} becomes {@link
     * QueueStatus.Id#COMPLETED COMPLETED}. The future completes with the result of {@link
     * AsynchronouslyCreatedResource#resource()}.
     *
     * @param resourceFutureSupplier The operation which polls for the {@code
     *     AsynchronouslyCreatedResource}.
     * @param <R> The type of the resource.
     * @param <A> The type of the {@code AsynchronouslyCreatedResource}.
     * @return A {@code CompletableFuture} delivering the resource.
     */
    private <R, A extends AsynchronouslyCreatedResource<R>> CompletableFuture<R> pollResourceAsync(
            final Supplier<CompletableFuture<A>> resourceFutureSupplier) {
        // Start the polling loop at attempt 0 with a fresh result future.
        return pollResourceAsync(resourceFutureSupplier, new CompletableFuture<>(), 0);
    }
    /**
     * One polling step: queries the resource once and, if it is not yet {@code COMPLETED},
     * re-schedules itself on the retry executor after a wait-strategy-determined delay.
     *
     * @param resourceFutureSupplier operation that polls for the asynchronously created resource
     * @param resultFuture future completed with the resource (or the failure) once available
     * @param attempt zero-based attempt counter used to compute the back-off delay
     */
    private <R, A extends AsynchronouslyCreatedResource<R>> CompletableFuture<R> pollResourceAsync(
            final Supplier<CompletableFuture<A>> resourceFutureSupplier,
            final CompletableFuture<R> resultFuture,
            final long attempt) {
        resourceFutureSupplier
                .get()
                .whenComplete(
                        (asynchronouslyCreatedResource, throwable) -> {
                            if (throwable != null) {
                                resultFuture.completeExceptionally(throwable);
                            } else {
                                if (asynchronouslyCreatedResource.queueStatus().getId()
                                        == QueueStatus.Id.COMPLETED) {
                                    resultFuture.complete(asynchronouslyCreatedResource.resource());
                                } else {
                                    // Not done yet: schedule the next poll instead of busy-waiting.
                                    retryExecutorService.schedule(
                                            () -> {
                                                pollResourceAsync(
                                                        resourceFutureSupplier,
                                                        resultFuture,
                                                        attempt + 1);
                                            },
                                            waitStrategy.sleepTime(attempt),
                                            TimeUnit.MILLISECONDS);
                                }
                            }
                        });
        return resultFuture;
    }
/**
* Update {@link JobResourceRequirements} of a given job.
*
* @param jobId jobId specifies the job for which to change the resource requirements
* @param jobResourceRequirements new resource requirements for the provided job
* @return Future which is completed upon successful operation.
*/
public CompletableFuture<Acknowledge> updateJobResourceRequirements(
JobID jobId, JobResourceRequirements jobResourceRequirements) {
final JobMessageParameters params = new JobMessageParameters();
params.jobPathParameter.resolve(jobId);
return sendRequest(
JobResourcesRequirementsUpdateHeaders.INSTANCE,
params,
new JobResourceRequirementsBody(jobResourceRequirements))
.thenApply(ignored -> Acknowledge.get());
}
    /** Exposes the JobManager URL this client talks to; intended for tests only. */
    @VisibleForTesting
    URL getJobmanagerUrl() {
        return jobmanagerUrl;
    }
    /** Exposes the configured custom HTTP headers; intended for tests only. */
    @VisibleForTesting
    Collection<HttpHeader> getCustomHttpHeaders() {
        return customHttpHeaders;
    }
    /**
     * Get an overview of the Flink cluster.
     *
     * @return Future with the {@link ClusterOverviewWithVersion cluster overview}.
     */
    public CompletableFuture<ClusterOverviewWithVersion> getClusterOverview() {
        // The overview endpoint needs neither path parameters nor a request body.
        return sendRequest(
                ClusterOverviewHeaders.getInstance(),
                EmptyMessageParameters.getInstance(),
                EmptyRequestBody.getInstance());
    }
// ======================================
// Legacy stuff we actually implement
// ======================================
@Override
public String getWebInterfaceURL() {
try {
return getWebMonitorBaseUrl().get().toString();
} catch (InterruptedException | ExecutionException e) {
ExceptionUtils.checkInterrupted(e);
LOG.warn("Could not retrieve the web | RestClusterClient |
java | dropwizard__dropwizard | dropwizard-testing/src/test/java/io/dropwizard/testing/junit5/DropwizardAppExtensionWithoutConfigTest.java | {
"start": 1593,
"end": 1762
} | class ____ {
@GET
public Response get() {
return Response.ok(Collections.singletonMap("color", "orange")).build();
}
}
}
| TestResource |
java | apache__flink | flink-streaming-java/src/main/java/org/apache/flink/streaming/api/functions/timestamps/AscendingTimestampExtractor.java | {
"start": 3687,
"end": 4242
} | interface ____ extends java.io.Serializable {
/**
* Called when the property of monotonously ascending timestamps is violated, i.e., when
* {@code elementTimestamp < lastTimestamp}.
*
* @param elementTimestamp The timestamp of the current element.
* @param lastTimestamp The last timestamp.
*/
void handleViolation(long elementTimestamp, long lastTimestamp);
}
/** Handler that does nothing when timestamp monotony is violated. */
public static final | MonotonyViolationHandler |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java | {
"start": 3615,
"end": 4622
} | class ____
// commands, and then this method can be abstract
if (this.getClass().equals(FsShell.class)) {
factory.registerCommands(FsCommand.class);
}
}
/**
* Returns the Trash object associated with this shell.
* @return Path to the trash
* @throws IOException upon error
*/
public Path getCurrentTrashDir() throws IOException {
return getTrash().getCurrentTrashDir();
}
/**
* Returns the current trash location for the path specified
* @param path to be deleted
* @return path to the trash
* @throws IOException raised on errors performing I/O.
*/
public Path getCurrentTrashDir(Path path) throws IOException {
return getTrash().getCurrentTrashDir(path);
}
protected String getUsagePrefix() {
return usagePrefix;
}
// NOTE: Usage/Help are inner classes to allow access to outer methods
// that access commandFactory
/**
* Display help for commands with their short usage and long description.
*/
protected | for |
java | hibernate__hibernate-orm | hibernate-spatial/src/test/java/org/hibernate/spatial/dialect/postgis/PostgisDollarQuoteNativeQueryTest.java | {
"start": 2209,
"end": 2545
} | class ____ {
@Id
Long id;
@JdbcTypeCode(SqlTypes.GEOMETRY)
Point<G2D> point;
public Location() {
}
public Location(Long id, Point<G2D> point) {
this.id = id;
this.point = point;
}
@Override
public String toString() {
return "Location{" +
"id=" + id +
", point=" + point +
'}';
}
}
}
| Location |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/ServiceManager.java | {
"start": 8612,
"end": 9234
} | class ____ implements
MultipleArcTransition<ServiceManager, ServiceEvent, State> {
@Override
public State transition(ServiceManager serviceManager, ServiceEvent event) {
ServiceState currState = serviceManager.serviceSpec.getState();
ServiceState targetState = checkIfStable(serviceManager.serviceSpec);
if (targetState.equals(ServiceState.STABLE)) {
if (serviceManager.finalizeUpgrade(
currState.equals(ServiceState.CANCEL_UPGRADING))) {
return State.STABLE;
}
}
return State.UPGRADING;
}
}
private static | StartFromUpgradeTransition |
java | quarkusio__quarkus | core/processor/src/main/java/io/quarkus/annotation/processor/ExtensionAnnotationProcessor.java | {
"start": 1256,
"end": 4573
} | class ____ extends AbstractProcessor {
private static final String DEBUG = "debug-extension-annotation-processor";
private Utils utils;
private List<ExtensionProcessor> extensionProcessors;
@Override
public synchronized void init(ProcessingEnvironment processingEnv) {
super.init(processingEnv);
utils = new Utils(processingEnv);
boolean useConfigMapping = !Boolean
.parseBoolean(utils.processingEnv().getOptions().getOrDefault(Options.LEGACY_CONFIG_ROOT, "false"));
if (!useConfigMapping) {
throw new IllegalArgumentException(
"Starting with Quarkus 3.25, legacy config classes (deprecated since Quarkus 3.19) are not supported anymore. "
+ "Please migrate the configuration of your extension to interfaces annotated with @ConfigMapping. See https://quarkus.io/guides/config-mappings#config-mappings for more information.");
}
boolean debug = Boolean.getBoolean(DEBUG);
ExtensionModule extensionModule = utils.extension().getExtensionModule();
Config config = new Config(extensionModule, debug);
List<ExtensionProcessor> extensionProcessors = new ArrayList<>();
extensionProcessors.add(new ExtensionBuildProcessor());
boolean generateDoc = !"false".equals(processingEnv.getOptions().get(Options.GENERATE_DOC));
// for now, we generate the old config doc by default but we will change this behavior soon
if (generateDoc) {
if (extensionModule.detected()) {
extensionProcessors.add(new ConfigDocExtensionProcessor());
} else {
processingEnv.getMessager().printMessage(Kind.WARNING,
"We could not detect the groupId and artifactId of this module (maybe you are using Gradle to build your extension?). The generation of the configuration documentation has been disabled.");
}
}
this.extensionProcessors = Collections.unmodifiableList(extensionProcessors);
for (ExtensionProcessor extensionProcessor : this.extensionProcessors) {
extensionProcessor.init(config, utils);
}
}
@Override
public Set<String> getSupportedAnnotationTypes() {
return Types.SUPPORTED_ANNOTATIONS_TYPES;
}
@Override
public SourceVersion getSupportedSourceVersion() {
return SourceVersion.latest();
}
@Override
public Iterable<? extends Completion> getCompletions(Element element, AnnotationMirror annotation, ExecutableElement member,
String userText) {
return Collections.emptySet();
}
@Override
public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) {
try {
for (ExtensionProcessor extensionProcessor : extensionProcessors) {
extensionProcessor.process(annotations, roundEnv);
}
if (roundEnv.processingOver()) {
for (ExtensionProcessor extensionProcessor : extensionProcessors) {
extensionProcessor.finalizeProcessing();
}
}
return true;
} finally {
JDeparser.dropCaches();
}
}
}
| ExtensionAnnotationProcessor |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/appender/ConsoleAppender.java | {
"start": 13265,
"end": 13871
} | class ____ implements ManagerFactory<OutputStreamManager, FactoryData> {
/**
* Create an OutputStreamManager.
*
* @param name The name of the entity to manage.
* @param data The data required to create the entity.
* @return The OutputStreamManager
*/
@Override
public OutputStreamManager createManager(final String name, final FactoryData data) {
return new OutputStreamManager(data.os, data.name, data.layout, true);
}
}
public Target getTarget() {
return target;
}
}
| ConsoleManagerFactory |
java | quarkusio__quarkus | extensions/resteasy-classic/resteasy-qute/deployment/src/test/java/io/quarkus/qute/resteasy/deployment/Templates.java | {
"start": 152,
"end": 237
} | class ____ {
public static native TemplateInstance toplevel(String name);
}
| Templates |
java | spring-cloud__spring-cloud-gateway | spring-cloud-gateway-server-webflux/src/test/java/org/springframework/cloud/gateway/handler/predicate/VersionRoutePredicateFactoryTests.java | {
"start": 1312,
"end": 2488
} | class ____ {
@Test
void versionPredicateWorks() {
VersionRoutePredicateFactory factory = new VersionRoutePredicateFactory(apiVersionStrategy());
Predicate<ServerWebExchange> predicate = factory
.apply(new VersionRoutePredicateFactory.Config().setVersion("1.1"));
assertThat(predicate.test(exchange("1.1"))).isTrue();
assertThat(predicate.test(exchange("1.5"))).isFalse();
predicate = factory.apply(new VersionRoutePredicateFactory.Config().setVersion("1.1+"));
assertThat(predicate.test(exchange("1.5"))).isTrue();
}
private static ServerWebExchange exchange(String version) {
ApiVersionStrategy versionStrategy = apiVersionStrategy();
Comparable<?> parsedVersion = versionStrategy.parseVersion(version);
MockServerWebExchange exchange = MockServerWebExchange.from(MockServerHttpRequest.get("https://localhost"));
exchange.getAttributes().put(HandlerMapping.API_VERSION_ATTRIBUTE, parsedVersion);
return exchange;
}
static DefaultApiVersionStrategy apiVersionStrategy() {
return new DefaultApiVersionStrategy(List.of(exchange -> null), new SemanticApiVersionParser(), true, null,
false, null, null);
}
}
| VersionRoutePredicateFactoryTests |
java | quarkusio__quarkus | extensions/elasticsearch-java-client/runtime/src/main/java/io/quarkus/elasticsearch/javaclient/runtime/graalvm/ElasticsearchJavaClientFeature.java | {
"start": 3678,
"end": 4019
} | class ____'t matter.
return;
}
var deserializable = builtClass.getAnnotation(JsonpDeserializable.class);
if (deserializable == null) {
logf("Could not find @JsonpDeserializable on %s for builder %s",
builtClass, builderSubClass);
// Just ignore and hope this | doesn |
java | spring-projects__spring-framework | spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/ServletAnnotationControllerHandlerMethodTests.java | {
"start": 133817,
"end": 134932
} | class ____ implements HttpMessageConverter<Object> {
private final List<MediaType> supportedMediaTypes;
public SimpleMessageConverter(MediaType... supportedMediaTypes) {
this.supportedMediaTypes = Arrays.asList(supportedMediaTypes);
}
@Override
public boolean canRead(Class<?> clazz, @Nullable MediaType mediaType) {
return supportedMediaTypes.contains(mediaType);
}
@Override
public boolean canWrite(Class<?> clazz, @Nullable MediaType mediaType) {
return supportedMediaTypes.contains(mediaType);
}
@Override
public List<MediaType> getSupportedMediaTypes() {
return supportedMediaTypes;
}
@Override
public Object read(Class<?> clazz, HttpInputMessage inputMessage)
throws IOException, HttpMessageNotReadableException {
return null;
}
@Override
public void write(Object o, @Nullable MediaType contentType, HttpOutputMessage outputMessage)
throws IOException, HttpMessageNotWritableException {
outputMessage.getHeaders().setContentType(contentType);
outputMessage.getBody(); // force a header write
}
}
@Controller
static | SimpleMessageConverter |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/exec/serde/RexNodeJsonSerializer.java | {
"start": 25264,
"end": 26010
} | class ____ not be stateful (i.e. containing only transient and static "
+ "fields) and should provide a default constructor. Depending on the "
+ "catalog implementation, it might be necessary to only serialize the "
+ "function's identifier by setting the option '%s'='%s'. One then needs "
+ "to guarantee that the same catalog object is also present during a "
+ "restore.",
objectIdentifier.asSummaryString(),
TableConfigOptions.PLAN_COMPILE_CATALOG_OBJECTS.key(),
CatalogPlanCompilation.IDENTIFIER));
}
}
| must |
java | apache__flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/JoinQueryOperation.java | {
"start": 1952,
"end": 5837
} | enum ____ {
INNER,
LEFT_OUTER,
RIGHT_OUTER,
FULL_OUTER
}
public JoinQueryOperation(
QueryOperation left,
QueryOperation right,
JoinType joinType,
ResolvedExpression condition,
boolean correlated) {
this.left = left;
this.right = right;
this.joinType = joinType;
this.condition = condition;
this.correlated = correlated;
this.resolvedSchema = calculateResultingSchema(left, right);
}
private ResolvedSchema calculateResultingSchema(QueryOperation left, QueryOperation right) {
final ResolvedSchema leftSchema = left.getResolvedSchema();
final ResolvedSchema rightSchema = right.getResolvedSchema();
return ResolvedSchema.physical(
Stream.concat(
leftSchema.getColumnNames().stream(),
rightSchema.getColumnNames().stream())
.collect(Collectors.toList()),
Stream.concat(
leftSchema.getColumnDataTypes().stream(),
rightSchema.getColumnDataTypes().stream())
.collect(Collectors.toList()));
}
public JoinType getJoinType() {
return joinType;
}
public ResolvedExpression getCondition() {
return condition;
}
public boolean isCorrelated() {
return correlated;
}
@Override
public ResolvedSchema getResolvedSchema() {
return resolvedSchema;
}
@Override
public String asSummaryString() {
Map<String, Object> args = new LinkedHashMap<>();
args.put("joinType", joinType);
args.put("condition", condition);
args.put("correlated", correlated);
return OperationUtils.formatWithChildren(
"Join", args, getChildren(), Operation::asSummaryString);
}
@Override
public String asSerializableString(SqlFactory sqlFactory) {
Map<Integer, String> inputAliases = new HashMap<>();
inputAliases.put(0, INPUT_1_ALIAS);
inputAliases.put(
1, correlated ? CorrelatedFunctionQueryOperation.INPUT_ALIAS : INPUT_2_ALIAS);
return String.format(
"SELECT %s FROM (%s\n) %s %s JOIN %s ON %s",
getSelectList(),
OperationUtils.indent(left.asSerializableString(sqlFactory)),
INPUT_1_ALIAS,
joinType.toString().replaceAll("_", " "),
rightToSerializable(sqlFactory),
OperationExpressionsUtils.scopeReferencesWithAlias(inputAliases, condition)
.asSerializableString(sqlFactory));
}
private String getSelectList() {
String leftColumns =
OperationUtils.formatSelectColumns(left.getResolvedSchema(), INPUT_1_ALIAS);
String rightColumns =
OperationUtils.formatSelectColumns(
right.getResolvedSchema(),
correlated ? CorrelatedFunctionQueryOperation.INPUT_ALIAS : INPUT_2_ALIAS);
return leftColumns + ", " + rightColumns;
}
private String rightToSerializable(SqlFactory sqlFactory) {
final StringBuilder s = new StringBuilder();
if (!correlated) {
s.append("(");
}
s.append(OperationUtils.indent(right.asSerializableString(sqlFactory)));
if (!correlated) {
s.append("\n)");
s.append(" ");
s.append(INPUT_2_ALIAS);
}
return s.toString();
}
@Override
public List<QueryOperation> getChildren() {
return Arrays.asList(left, right);
}
@Override
public <T> T accept(QueryOperationVisitor<T> visitor) {
return visitor.visit(this);
}
}
| JoinType |
java | apache__camel | components/camel-aws/camel-aws-xray/src/main/java/org/apache/camel/component/aws/xray/decorators/messaging/JmsSegmentDecorator.java | {
"start": 912,
"end": 1271
} | class ____ extends AbstractMessagingSegmentDecorator {
public static final String JMS_MESSAGE_ID = "JMSMessageID";
@Override
public String getComponent() {
return "jms";
}
@Override
protected String getMessageId(Exchange exchange) {
return (String) exchange.getIn().getHeader(JMS_MESSAGE_ID);
}
}
| JmsSegmentDecorator |
java | quarkusio__quarkus | extensions/vertx-http/deployment/src/test/java/io/quarkus/vertx/http/management/ManagementWithPemAndTlsRegistryTest.java | {
"start": 1111,
"end": 2993
} | class ____ {
private static final String configuration = """
quarkus.management.enabled=true
quarkus.management.root-path=/management
quarkus.tls.key-store.pem.0.cert=server.crt
quarkus.tls.key-store.pem.0.key=server.key
""";
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addAsResource(new StringAsset(configuration), "application.properties")
.addAsResource(new File("target/certs/ssl-management-interface-test.key"), "server.key")
.addAsResource(new File("target/certs/ssl-management-interface-test.crt"), "server.crt")
.addClasses(MyObserver.class))
.addBuildChainCustomizer(buildCustomizer());
static Consumer<BuildChainBuilder> buildCustomizer() {
return new Consumer<BuildChainBuilder>() {
@Override
public void accept(BuildChainBuilder builder) {
builder.addBuildStep(new BuildStep() {
@Override
public void execute(BuildContext context) {
NonApplicationRootPathBuildItem buildItem = context.consume(NonApplicationRootPathBuildItem.class);
context.produce(buildItem.routeBuilder()
.management()
.route("my-route")
.handler(new MyHandler())
.blockingRoute()
.build());
}
}).produces(RouteBuildItem.class)
.consumes(NonApplicationRootPathBuildItem.class)
.build();
}
};
}
public static | ManagementWithPemAndTlsRegistryTest |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/interceptors/illegal/InterceptorWithProducerMethodTest.java | {
"start": 1796,
"end": 2038
} | class ____ {
@AroundInvoke
Object intercept(InvocationContext ctx) throws Exception {
return ctx.proceed();
}
@Produces
String produce() {
return "";
}
}
}
| MyInterceptor |
java | elastic__elasticsearch | x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ClearInferenceEndpointCacheAction.java | {
"start": 9753,
"end": 10129
} | class ____ extends AckedBatchedClusterStateUpdateTask {
private final ProjectId projectId;
private RefreshCacheMetadataVersionTask(ProjectId projectId, ActionListener<AcknowledgedResponse> listener) {
super(TimeValue.THIRTY_SECONDS, listener);
this.projectId = projectId;
}
}
private static | RefreshCacheMetadataVersionTask |
java | apache__dubbo | dubbo-plugin/dubbo-qos/src/main/java/org/apache/dubbo/qos/command/impl/OnlineApp.java | {
"start": 1177,
"end": 1524
} | class ____ extends BaseOnline {
public OnlineApp(FrameworkModel frameworkModel) {
super(frameworkModel);
}
@Override
protected void doExport(ProviderModel.RegisterStatedURL statedURL) {
if (UrlUtils.isServiceDiscoveryURL(statedURL.getRegistryUrl())) {
super.doExport(statedURL);
}
}
}
| OnlineApp |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/converters/SqlAlterMaterializedTableResumeConverter.java | {
"start": 1303,
"end": 2060
} | class ____
implements SqlNodeConverter<SqlAlterMaterializedTableResume> {
@Override
public Operation convertSqlNode(
SqlAlterMaterializedTableResume sqlAlterMaterializedTableResume,
ConvertContext context) {
UnresolvedIdentifier unresolvedIdentifier =
UnresolvedIdentifier.of(sqlAlterMaterializedTableResume.getFullName());
ObjectIdentifier identifier =
context.getCatalogManager().qualifyIdentifier(unresolvedIdentifier);
// get table options
final Map<String, String> options = sqlAlterMaterializedTableResume.getProperties();
return new AlterMaterializedTableResumeOperation(identifier, options);
}
}
| SqlAlterMaterializedTableResumeConverter |
java | apache__dubbo | dubbo-rpc/dubbo-rpc-triple/src/main/java/org/apache/dubbo/rpc/protocol/tri/rest/mapping/condition/NameValueExpression.java | {
"start": 1138,
"end": 3759
} | class ____ {
private final String name;
private final String value;
private final boolean negated;
private NameValueExpression(String name, String value, boolean negated) {
this.name = name;
this.value = value;
this.negated = negated;
}
public NameValueExpression(String name, String value) {
this.name = name;
this.value = value;
negated = false;
}
public static Set<NameValueExpression> parse(String... params) {
if (ArrayUtils.isEmpty(params)) {
return Collections.emptySet();
}
int len = params.length;
Set<NameValueExpression> expressions = CollectionUtils.newHashSet(len);
for (String param : params) {
expressions.add(parse(param));
}
return expressions;
}
public static NameValueExpression parse(String expr) {
int index = expr.indexOf('=');
if (index == -1) {
boolean negated = expr.indexOf('!') == 0;
return new NameValueExpression(negated ? expr.substring(1) : expr, null, negated);
} else {
boolean negated = index > 0 && expr.charAt(index - 1) == '!';
return new NameValueExpression(
negated ? expr.substring(0, index - 1) : expr.substring(0, index),
expr.substring(index + 1),
negated);
}
}
public String getName() {
return name;
}
public String getValue() {
return value;
}
public boolean match(Predicate<String> nameFn, Function<String, String> valueFn) {
boolean matched;
if (value == null) {
matched = nameFn.test(name);
} else {
matched = Objects.equals(valueFn.apply(name), value);
}
return matched != negated;
}
public boolean match(Function<String, String> valueFn) {
return match(n -> valueFn.apply(n) != null, valueFn);
}
@Override
public int hashCode() {
return Objects.hash(name, value, negated);
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || obj.getClass() != NameValueExpression.class) {
return false;
}
NameValueExpression other = (NameValueExpression) obj;
return negated == other.negated && Objects.equals(name, other.name) && Objects.equals(value, other.value);
}
@Override
public String toString() {
return name + (negated ? "!=" : "=") + value;
}
}
| NameValueExpression |
java | elastic__elasticsearch | test/framework/src/main/java/org/elasticsearch/telemetry/RecordingInstruments.java | {
"start": 1756,
"end": 2364
} | class ____ implements Instrument {
protected final String name;
private final MetricRecorder<Instrument> recorder;
public RecordingInstrument(String name, MetricRecorder<Instrument> recorder) {
this.name = Objects.requireNonNull(name);
this.recorder = Objects.requireNonNull(recorder);
}
protected void call(Number value, Map<String, Object> attributes) {
recorder.call(this, value, attributes);
}
@Override
public String getName() {
return name;
}
}
protected | RecordingInstrument |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/serialization/entity/PK.java | {
"start": 465,
"end": 959
} | class ____ implements Serializable {
private Long id;
public PK() {
}
public PK(Long id) {
this.id = id;
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
@Override
public boolean equals(Object o) {
if ( this == o ) {
return true;
}
if ( o == null || getClass() != o.getClass() ) {
return false;
}
PK pk = (PK) o;
return Objects.equals( id, pk.id );
}
@Override
public int hashCode() {
return Objects.hash( id );
}
}
| PK |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/LocalTemporaryTableMutationStrategyNoDropTest.java | {
"start": 3770,
"end": 4039
} | class ____ implements ParameterMarkerStrategy {
@Override
public String createMarker(int position, JdbcType jdbcType) {
return MARKER;
}
}
@Entity(name = "ParentEntity")
@Inheritance(strategy = InheritanceType.JOINED)
public static | ParameterMarkerStrategyImpl |
java | apache__camel | components/camel-google/camel-google-drive/src/test/java/org/apache/camel/component/google/drive/DriveChangesIT.java | {
"start": 1667,
"end": 3431
} | class ____ extends AbstractGoogleDriveTestSupport {
private static final Logger LOG = LoggerFactory.getLogger(DriveChangesIT.class);
private static final String PATH_PREFIX
= GoogleDriveApiCollection.getCollection().getApiName(DriveChangesApiMethod.class).getName();
@Test
public void testGet() {
final com.google.api.services.drive.model.ChangeList list = requestBody("direct://LIST", null);
List<Change> items = list.getChanges();
assumeFalse(items.isEmpty());
Change change = items.get(0);
String id = change.getDriveId();
// using String message body for single parameter "changeId"
final com.google.api.services.drive.model.Change result = requestBody("direct://GET", id);
assertNotNull(result, "get result");
LOG.debug("get: {}", result);
}
@Test
public void testList() {
final com.google.api.services.drive.model.ChangeList result = requestBody("direct://LIST", null);
assertNotNull(result, "list result");
LOG.debug("list: {}", result);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
// test route for get
from("direct://GET")
.to("google-drive://" + PATH_PREFIX + "/get?inBody=changeId");
// test route for list
from("direct://LIST")
.to("google-drive://" + PATH_PREFIX + "/list");
// test route for watch
from("direct://WATCH")
.to("google-drive://" + PATH_PREFIX + "/watch?inBody=contentChannel");
}
};
}
}
| DriveChangesIT |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/DeleteAutoFollowPatternAction.java | {
"start": 841,
"end": 1208
} | class ____ extends ActionType<AcknowledgedResponse> {
public static final String NAME = "cluster:admin/xpack/ccr/auto_follow_pattern/delete";
public static final DeleteAutoFollowPatternAction INSTANCE = new DeleteAutoFollowPatternAction();
private DeleteAutoFollowPatternAction() {
super(NAME);
}
public static | DeleteAutoFollowPatternAction |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/nestedbeans/mixed/source/Interior.java | {
"start": 248,
"end": 640
} | class ____ {
private String designer;
private Ornament ornament;
public String getDesigner() {
return designer;
}
public void setDesigner(String designer) {
this.designer = designer;
}
public Ornament getOrnament() {
return ornament;
}
public void setOrnament(Ornament ornament) {
this.ornament = ornament;
}
}
| Interior |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/sorted/set/SortComparatorTest.java | {
"start": 2390,
"end": 2593
} | class ____ implements Comparator<Cat> {
@Override
public int compare(Cat cat1, Cat cat2) {
return String.CASE_INSENSITIVE_ORDER.compare( cat1.nickname, cat2.nickname );
}
}
}
| CatNicknameComparator |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/stream/JSONWriterTest_2.java | {
"start": 234,
"end": 876
} | class ____ extends TestCase {
public void test_writer() throws Exception {
StringWriter out = new StringWriter();
JSONWriter writer = new JSONWriter(out);
writer.config(SerializerFeature.UseSingleQuotes, true);
writer.startObject();
writer.writeObject("a");
writer.writeObject("1");
writer.writeObject("b");
writer.writeObject("2");
writer.writeObject("c");
writer.writeObject("3");
writer.endObject();
writer.close();
Assert.assertEquals("{'a':'1','b':'2','c':'3'}", out.toString());
}
}
| JSONWriterTest_2 |
java | spring-projects__spring-boot | module/spring-boot-data-elasticsearch/src/dockerTest/java/org/springframework/boot/data/elasticsearch/autoconfigure/DataElasticsearchAutoConfigurationIntegrationTests.java | {
"start": 1809,
"end": 3020
} | class ____ {
@Container
static final ElasticsearchContainer elasticsearch = TestImage.container(ElasticsearchContainer.class);
private final ApplicationContextRunner contextRunner = new ApplicationContextRunner()
.withConfiguration(AutoConfigurations.of(ElasticsearchRestClientAutoConfiguration.class,
ElasticsearchClientAutoConfiguration.class, DataElasticsearchAutoConfiguration.class));
@Test
void reactiveClientCanQueryElasticsearchNode() {
this.contextRunner
.withPropertyValues("spring.elasticsearch.uris=" + elasticsearch.getHttpHostAddress(),
"spring.elasticsearch.connection-timeout=120s", "spring.elasticsearch.socket-timeout=120s")
.run((context) -> {
ReactiveElasticsearchClient client = context.getBean(ReactiveElasticsearchClient.class);
Mono<IndexResponse> index = client
.index((b) -> b.index("foo").id("1").document(Map.of("a", "alpha", "b", "bravo")));
index.block();
Mono<GetResponse<Object>> get = client.get((b) -> b.index("foo").id("1"), Object.class);
GetResponse<Object> response = get.block();
assertThat(response).isNotNull();
assertThat(response.found()).isTrue();
});
}
}
| DataElasticsearchAutoConfigurationIntegrationTests |
java | redisson__redisson | redisson-quarkus/redisson-quarkus-30/cdi/integration-tests/src/test/java/org/redisson/quarkus/client/it/QuarkusRedissonClientResourceTest.java | {
"start": 518,
"end": 2134
} | class ____ {
@Container
public static final GenericContainer REDIS = new FixedHostPortGenericContainer("redis:latest")
.withFixedExposedPort(6379, 6379);
@Test
public void testRemoteService() {
given()
.when().get("/quarkus-redisson-client/remoteService")
.then()
.statusCode(200)
.body(is("executed"));
}
@Test
public void testMap() {
given()
.when().get("/quarkus-redisson-client/map")
.then()
.statusCode(200)
.body(is("2"));
}
@Test
public void testPingAll() {
given()
.when().get("/quarkus-redisson-client/pingAll")
.then()
.statusCode(200)
.body(is("OK"));
}
@Test
public void testBucket() {
given()
.when().get("/quarkus-redisson-client/bucket")
.then()
.statusCode(200)
.body(is("world"));
}
@Test
public void testDeleteBucket() {
given()
.when().get("/quarkus-redisson-client/delBucket")
.then()
.statusCode(200)
.body(is("true"));
}
// @Test
// public void testExecuteTask() {
// given()
// .when().get("/quarkus-redisson-client/executeTask")
// .then()
// .statusCode(200)
// .body(is("hello"));
// }
}
| QuarkusRedissonClientResourceTest |
java | grpc__grpc-java | xds/src/generated/thirdparty/grpc/io/envoyproxy/envoy/service/rate_limit_quota/v3/RateLimitQuotaServiceGrpc.java | {
"start": 12957,
"end": 13594
} | class ____
implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier {
RateLimitQuotaServiceBaseDescriptorSupplier() {}
@java.lang.Override
public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
return io.envoyproxy.envoy.service.rate_limit_quota.v3.RlqsProto.getDescriptor();
}
@java.lang.Override
public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
return getFileDescriptor().findServiceByName("RateLimitQuotaService");
}
}
private static final | RateLimitQuotaServiceBaseDescriptorSupplier |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/update/UpdateResponse.java | {
"start": 4146,
"end": 5411
} | class ____ extends DocWriteResponse.Builder {
private GetResult getResult = null;
public void setGetResult(GetResult getResult) {
this.getResult = getResult;
}
@Override
public UpdateResponse build() {
UpdateResponse update;
if (shardInfo != null) {
update = new UpdateResponse(shardInfo, shardId, id, seqNo, primaryTerm, version, result);
} else {
update = new UpdateResponse(shardId, id, seqNo, primaryTerm, version, result);
}
if (getResult != null) {
update.setGetResult(
new GetResult(
update.getIndex(),
update.getId(),
getResult.getSeqNo(),
getResult.getPrimaryTerm(),
update.getVersion(),
getResult.isExists(),
getResult.internalSourceRef(),
getResult.getDocumentFields(),
getResult.getMetadataFields()
)
);
}
update.setForcedRefresh(forcedRefresh);
return update;
}
}
}
| Builder |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndex.java | {
"start": 18568,
"end": 18972
} | class ____ implements Metadata {
@Override
public boolean hasMetadata() {
return false;
}
@Override
public boolean isCompatible() {
throw new UnsupportedOperationException();
}
@Override
public String getVersion() {
throw new UnsupportedOperationException();
}
}
private static | NoMetadata |
java | apache__dubbo | dubbo-config/dubbo-config-spring/src/test/java/org/apache/dubbo/config/spring/propertyconfigurer/consumer3/PropertySourcesInJavaConfigTest.java | {
"start": 5873,
"end": 5941
} | class ____ {}
@Configuration
static | ImportPropertyConfiguration |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/qualifiers/Location.java | {
"start": 684,
"end": 737
} | interface ____ {
String value();
static | Location |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.