language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | junit-team__junit5 | platform-tooling-support-tests/projects/standalone/src/standalone/SuiteIntegration.java | {
"start": 566,
"end": 633
} | class ____ {
@Test
void successful() {
}
}
}
| SingleTestContainer |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/dispatcher/runner/DefaultDispatcherRunnerTest.java | {
"start": 1785,
"end": 15012
} | class ____ extends TestLogger {
private TestingLeaderElection leaderElection;
private TestingFatalErrorHandler testingFatalErrorHandler;
private TestingDispatcherLeaderProcessFactory testingDispatcherLeaderProcessFactory;
@Before
public void setup() {
leaderElection = new TestingLeaderElection();
testingFatalErrorHandler = new TestingFatalErrorHandler();
testingDispatcherLeaderProcessFactory =
TestingDispatcherLeaderProcessFactory.defaultValue();
}
@After
public void teardown() throws Exception {
leaderElection.close();
if (testingFatalErrorHandler != null) {
testingFatalErrorHandler.rethrowError();
testingFatalErrorHandler = null;
}
}
@Test
public void testLeaderElectionLifecycle() throws Exception {
assertTrue(leaderElection.isStopped());
try (final DispatcherRunner unusedDisptacherRunner = createDispatcherRunner()) {
assertFalse(leaderElection.isStopped());
}
assertTrue(leaderElection.isStopped());
}
@Test
public void closeAsync_doesNotCompleteUncompletedShutDownFuture() throws Exception {
final DispatcherRunner dispatcherRunner = createDispatcherRunner();
final CompletableFuture<Void> terminationFuture = dispatcherRunner.closeAsync();
terminationFuture.get();
final CompletableFuture<ApplicationStatus> shutDownFuture =
dispatcherRunner.getShutDownFuture();
assertThat(shutDownFuture.isDone(), is(false));
}
@Test
public void getShutDownFuture_whileRunning_forwardsDispatcherLeaderProcessShutDownRequest()
throws Exception {
final UUID leaderSessionId = UUID.randomUUID();
final CompletableFuture<ApplicationStatus> shutDownFuture = new CompletableFuture<>();
final TestingDispatcherLeaderProcess testingDispatcherLeaderProcess =
TestingDispatcherLeaderProcess.newBuilder(leaderSessionId)
.setShutDownFuture(shutDownFuture)
.build();
testingDispatcherLeaderProcessFactory =
TestingDispatcherLeaderProcessFactory.from(testingDispatcherLeaderProcess);
try (final DispatcherRunner dispatcherRunner = createDispatcherRunner()) {
leaderElection.isLeader(leaderSessionId);
final CompletableFuture<ApplicationStatus> dispatcherShutDownFuture =
dispatcherRunner.getShutDownFuture();
assertFalse(dispatcherShutDownFuture.isDone());
final ApplicationStatus finalApplicationStatus = ApplicationStatus.UNKNOWN;
shutDownFuture.complete(finalApplicationStatus);
assertThat(dispatcherShutDownFuture.get(), is(finalApplicationStatus));
}
}
@Test
public void getShutDownFuture_afterClose_ignoresDispatcherLeaderProcessShutDownRequest()
throws Exception {
final UUID leaderSessionId = UUID.randomUUID();
final CompletableFuture<ApplicationStatus> shutDownFuture = new CompletableFuture<>();
final TestingDispatcherLeaderProcess testingDispatcherLeaderProcess =
TestingDispatcherLeaderProcess.newBuilder(leaderSessionId)
.setShutDownFuture(shutDownFuture)
.build();
testingDispatcherLeaderProcessFactory =
TestingDispatcherLeaderProcessFactory.from(testingDispatcherLeaderProcess);
try (final DispatcherRunner dispatcherRunner = createDispatcherRunner()) {
leaderElection.isLeader(leaderSessionId);
final CompletableFuture<ApplicationStatus> dispatcherShutDownFuture =
dispatcherRunner.getShutDownFuture();
assertFalse(dispatcherShutDownFuture.isDone());
dispatcherRunner.closeAsync();
final ApplicationStatus finalApplicationStatus = ApplicationStatus.UNKNOWN;
shutDownFuture.complete(finalApplicationStatus);
try {
dispatcherShutDownFuture.get(10L, TimeUnit.MILLISECONDS);
fail(
"The dispatcher runner should no longer react to the dispatcher leader process's shut down request if it has been terminated.");
} catch (TimeoutException expected) {
}
}
}
@Test
public void getShutDownFuture_newLeader_ignoresOldDispatcherLeaderProcessShutDownRequest()
throws Exception {
final UUID firstLeaderSessionId = UUID.randomUUID();
final UUID secondLeaderSessionId = UUID.randomUUID();
final CompletableFuture<ApplicationStatus> shutDownFuture = new CompletableFuture<>();
final TestingDispatcherLeaderProcess firstTestingDispatcherLeaderProcess =
TestingDispatcherLeaderProcess.newBuilder(firstLeaderSessionId)
.setShutDownFuture(shutDownFuture)
.build();
final TestingDispatcherLeaderProcess secondTestingDispatcherLeaderProcess =
TestingDispatcherLeaderProcess.newBuilder(secondLeaderSessionId).build();
testingDispatcherLeaderProcessFactory =
TestingDispatcherLeaderProcessFactory.from(
firstTestingDispatcherLeaderProcess, secondTestingDispatcherLeaderProcess);
try (final DispatcherRunner dispatcherRunner = createDispatcherRunner()) {
leaderElection.isLeader(firstLeaderSessionId);
final CompletableFuture<ApplicationStatus> dispatcherShutDownFuture =
dispatcherRunner.getShutDownFuture();
assertFalse(dispatcherShutDownFuture.isDone());
leaderElection.isLeader(secondLeaderSessionId);
final ApplicationStatus finalApplicationStatus = ApplicationStatus.UNKNOWN;
shutDownFuture.complete(finalApplicationStatus);
assertFalse(dispatcherShutDownFuture.isDone());
}
}
@Test
public void revokeLeadership_withExistingLeader_stopsLeaderProcess() throws Exception {
final UUID leaderSessionId = UUID.randomUUID();
final OneShotLatch startLatch = new OneShotLatch();
final OneShotLatch stopLatch = new OneShotLatch();
testingDispatcherLeaderProcessFactory =
TestingDispatcherLeaderProcessFactory.from(
TestingDispatcherLeaderProcess.newBuilder(leaderSessionId)
.setStartConsumer(ignored -> startLatch.trigger())
.setCloseAsyncSupplier(
() -> {
stopLatch.trigger();
return FutureUtils.completedVoidFuture();
})
.build());
try (final DispatcherRunner dispatcherRunner = createDispatcherRunner()) {
leaderElection.isLeader(leaderSessionId);
// wait until the leader process has been started
startLatch.await();
leaderElection.notLeader();
// verify that the leader gets stopped
stopLatch.await();
}
}
@Test
public void grantLeadership_withExistingLeader_waitsForTerminationOfFirstLeader()
throws Exception {
final UUID firstLeaderSessionId = UUID.randomUUID();
final UUID secondLeaderSessionId = UUID.randomUUID();
final StartStopDispatcherLeaderProcess firstTestingDispatcherLeaderProcess =
StartStopDispatcherLeaderProcess.create(firstLeaderSessionId);
final StartStopDispatcherLeaderProcess secondTestingDispatcherLeaderProcess =
StartStopDispatcherLeaderProcess.create(secondLeaderSessionId);
testingDispatcherLeaderProcessFactory =
TestingDispatcherLeaderProcessFactory.from(
firstTestingDispatcherLeaderProcess.asTestingDispatcherLeaderProcess(),
secondTestingDispatcherLeaderProcess.asTestingDispatcherLeaderProcess());
try (final DispatcherRunner dispatcherRunner = createDispatcherRunner()) {
leaderElection.isLeader(firstLeaderSessionId);
assertThat(firstTestingDispatcherLeaderProcess.isStarted(), is(true));
leaderElection.isLeader(secondLeaderSessionId);
assertThat(secondTestingDispatcherLeaderProcess.isStarted(), is(false));
firstTestingDispatcherLeaderProcess.terminateProcess();
assertThat(secondTestingDispatcherLeaderProcess.isStarted(), is(true));
secondTestingDispatcherLeaderProcess
.terminateProcess(); // make the dispatcherRunner terminate
}
}
@Test
public void grantLeadership_validLeader_confirmsLeaderSession() throws Exception {
final UUID leaderSessionId = UUID.randomUUID();
try (final DispatcherRunner dispatcherRunner = createDispatcherRunner()) {
final LeaderInformation leaderInformation =
leaderElection.isLeader(leaderSessionId).join();
assertThat(leaderInformation.getLeaderSessionID(), is(leaderSessionId));
}
}
@Test
public void grantLeadership_oldLeader_doesNotConfirmLeaderSession() throws Exception {
final UUID leaderSessionId = UUID.randomUUID();
final CompletableFuture<String> contenderConfirmationFuture = new CompletableFuture<>();
final TestingDispatcherLeaderProcess testingDispatcherLeaderProcess =
TestingDispatcherLeaderProcess.newBuilder(leaderSessionId)
.setConfirmLeaderSessionFuture(contenderConfirmationFuture)
.build();
testingDispatcherLeaderProcessFactory =
TestingDispatcherLeaderProcessFactory.from(testingDispatcherLeaderProcess);
try (final DispatcherRunner dispatcherRunner = createDispatcherRunner()) {
leaderElection.isLeader(leaderSessionId);
leaderElection.notLeader();
// complete the confirmation future after losing the leadership
contenderConfirmationFuture.complete("leader address");
assertThat(leaderElection.hasLeadershipAsync(leaderSessionId).get(), is(false));
}
}
@Test
public void
grantLeadership_multipleLeaderChanges_lastDispatcherLeaderProcessWaitsForOthersToTerminateBeforeItStarts()
throws Exception {
final UUID firstLeaderSession = UUID.randomUUID();
final UUID secondLeaderSession = UUID.randomUUID();
final UUID thirdLeaderSession = UUID.randomUUID();
final CompletableFuture<Void> firstDispatcherLeaderProcessTerminationFuture =
new CompletableFuture<>();
final TestingDispatcherLeaderProcess firstDispatcherLeaderProcess =
TestingDispatcherLeaderProcess.newBuilder(firstLeaderSession)
.setCloseAsyncSupplier(() -> firstDispatcherLeaderProcessTerminationFuture)
.build();
final CompletableFuture<Void> secondDispatcherLeaderProcessTerminationFuture =
new CompletableFuture<>();
final TestingDispatcherLeaderProcess secondDispatcherLeaderProcess =
TestingDispatcherLeaderProcess.newBuilder(secondLeaderSession)
.setCloseAsyncSupplier(() -> secondDispatcherLeaderProcessTerminationFuture)
.build();
final CompletableFuture<Void> thirdDispatcherLeaderProcessHasBeenStartedFuture =
new CompletableFuture<>();
final TestingDispatcherLeaderProcess thirdDispatcherLeaderProcess =
TestingDispatcherLeaderProcess.newBuilder(thirdLeaderSession)
.setStartConsumer(
thirdDispatcherLeaderProcessHasBeenStartedFuture::complete)
.build();
testingDispatcherLeaderProcessFactory =
TestingDispatcherLeaderProcessFactory.from(
firstDispatcherLeaderProcess,
secondDispatcherLeaderProcess,
thirdDispatcherLeaderProcess);
final DispatcherRunner dispatcherRunner = createDispatcherRunner();
try {
leaderElection.isLeader(firstLeaderSession);
leaderElection.isLeader(secondLeaderSession);
leaderElection.isLeader(thirdLeaderSession);
firstDispatcherLeaderProcessTerminationFuture.complete(null);
assertThat(thirdDispatcherLeaderProcessHasBeenStartedFuture.isDone(), is(false));
secondDispatcherLeaderProcessTerminationFuture.complete(null);
assertThat(thirdDispatcherLeaderProcessHasBeenStartedFuture.isDone(), is(true));
} finally {
firstDispatcherLeaderProcessTerminationFuture.complete(null);
secondDispatcherLeaderProcessTerminationFuture.complete(null);
dispatcherRunner.close();
}
}
private static final | DefaultDispatcherRunnerTest |
java | reactor__reactor-core | reactor-core/src/test/java/reactor/core/publisher/ParallelReduceSeedTest.java | {
"start": 1204,
"end": 8066
} | class ____ extends ParallelOperatorTest<String, String> {
@Override
protected Scenario<String, String> defaultScenarioOptions(Scenario<String, String> defaultOptions) {
return defaultOptions.receive(4, i -> item(0));
}
@Override
protected List<Scenario<String, String>> scenarios_operatorSuccess() {
return Arrays.asList(
scenario(f -> f.reduce(() -> item(0), (a, b) -> a))
);
}
@Override
protected List<Scenario<String, String>> scenarios_operatorError() {
return Arrays.asList(
scenario(f -> f.reduce(() -> "", (a, b) -> null)),
scenario(f -> f.reduce(() -> null, (a, b) -> a + b))
);
}
@Test
public void collectAsyncFused() {
AssertSubscriber<Integer> ts = AssertSubscriber.create();
Scheduler scheduler = Schedulers.newParallel("test", 3);
Flux.range(1, 100000)
.parallel(3)
.runOn(scheduler)
.collect(ArrayList::new, ArrayList::add)
.sequential()
.reduce(0, (a, b) -> a + b.size())
.subscribe(ts);
ts.await(Duration.ofSeconds(5));
ts.assertValues(100_000)
.assertNoError()
.assertComplete();
}
@Test
public void collectAsync() {
Scheduler s = Schedulers.newParallel("test", 3);
Supplier<List<Integer>> as = () -> new ArrayList<>();
AssertSubscriber<Integer> ts = AssertSubscriber.create();
Flux.range(1, 100000)
.hide()
.parallel(3)
.runOn(s)
.collect(as, (a, b) -> a.add(b))
.doOnNext(v -> System.out.println(v.size()))
.sequential()
.reduce(0, (a, b) -> a + b.size())
.subscribe(ts);
ts.await(Duration.ofSeconds(5));
ts.assertValues(100_000)
.assertNoError()
.assertComplete();
}
@Test
public void collectAsync2() {
Scheduler s = Schedulers.newParallel("test", 3);
Supplier<List<Integer>> as = () -> new ArrayList<>();
AssertSubscriber<Integer> ts = AssertSubscriber.create();
Flux.range(1, 100000)
.hide()
.publishOn(s)
.parallel(3)
.runOn(s)
.hide()
.collect(as, (a, b) -> a.add(b))
.doOnNext(v -> System.out.println(v.size()))
.sequential()
.reduce(0, (a, b) -> a + b.size())
.subscribe(ts);
ts.await(Duration.ofSeconds(5));
ts.assertValues(100_000)
.assertNoError()
.assertComplete();
}
@Test
public void collectAsync3() {
Scheduler s = Schedulers.newParallel("test", 3);
Supplier<List<Integer>> as = () -> new ArrayList<>();
AssertSubscriber<Integer> ts = AssertSubscriber.create();
Flux.range(1, 100000)
.hide()
.publishOn(s)
.parallel(3)
.runOn(s)
.filter(t -> true)
.collect(as, (a, b) -> a.add(b))
.doOnNext(v -> System.out.println(v.size()))
.groups()
.flatMap(v -> v)
.reduce(0, (a, b) -> b.size() + a)
.subscribe(ts);
ts.await(Duration.ofSeconds(5));
ts.assertValues(100_000)
.assertNoError()
.assertComplete();
}
@Test
public void collectAsync3Fused() {
Scheduler s = Schedulers.newParallel("test", 3);
Supplier<List<Integer>> as = () -> new ArrayList<>();
AssertSubscriber<Integer> ts = AssertSubscriber.create();
Flux.range(1, 100000)
.publishOn(s)
.parallel(3)
.runOn(s)
.collect(as, (a, b) -> a.add(b))
.doOnNext(v -> System.out.println(v.size()))
.groups()
.flatMap(v -> v)
.reduce(0, (a, b) -> b.size() + a)
.subscribe(ts);
ts.await(Duration.ofSeconds(5));
ts.assertValues(100_000)
.assertNoError()
.assertComplete();
}
@Test
public void collectAsync3Take() {
Scheduler s = Schedulers.newParallel("test", 4);
Supplier<List<Integer>> as = () -> new ArrayList<>();
AssertSubscriber<Integer> ts = AssertSubscriber.create();
Flux.range(1, 100000)
.publishOn(s)
.parallel(3)
.runOn(s)
.collect(as, (a, b) -> a.add(b))
.doOnNext(v -> System.out.println(v.size()))
.groups()
.flatMap(v -> v)
.reduce(0, (a, b) -> b.size() + a)
.subscribe(ts);
ts.await(Duration.ofSeconds(5));
ts.assertValues(100_000)
.assertNoError()
.assertComplete();
}
@Test
public void failInitial() {
Supplier<Integer> as = () -> {
throw new RuntimeException("test");
};
StepVerifier.create(Flux.range(1, 10)
.parallel(3)
.reduce(as, (a, b) -> b + a))
.verifyErrorMessage("test");
}
@Test
public void failCombination() {
StepVerifier.create(Flux.range(1, 10)
.parallel(3)
.reduce(() -> 0, (a, b) -> {
throw new RuntimeException("test");
}))
.verifyErrorMessage("test");
}
@Test
public void testPrefetch() {
assertThat(Flux.range(1, 10)
.parallel(3)
.reduce(() -> 0, (a, b) -> a + b)
.getPrefetch()).isEqualTo(Integer.MAX_VALUE);
}
@Test
public void parallelism() {
ParallelFlux<Integer> source = Flux.just(500, 300).parallel(10);
ParallelReduceSeed<Integer, String> test = new ParallelReduceSeed<>(source, () -> "", (s, i) -> s + i);
assertThat(test.parallelism())
.isEqualTo(source.parallelism())
.isEqualTo(10);
}
@Test
public void scanOperator() {
ParallelFlux<Integer> source = Flux.just(500, 300).parallel(10);
ParallelReduceSeed<Integer, String> test = new ParallelReduceSeed<>(source, () -> "", (s, i) -> s + i);
assertThat(test.scan(Scannable.Attr.PARENT)).isSameAs(source);
assertThat(test.scan(Scannable.Attr.PREFETCH)).isEqualTo(Integer.MAX_VALUE);
assertThat(test.scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.SYNC);
}
@Test
public void scanSubscriber() {
ParallelFlux<Integer> source = Flux.just(500, 300).parallel(2);
LambdaSubscriber<String> subscriber = new LambdaSubscriber<>(null, e -> {
}, null, null);
ParallelReduceSeed.ParallelReduceSeedSubscriber<Integer, String> test = new ParallelReduceSeed.ParallelReduceSeedSubscriber<>(
subscriber, "", (s, i) -> s + i);
@SuppressWarnings("unchecked")
final CoreSubscriber<Integer>[] testSubscribers = new CoreSubscriber[2];
testSubscribers[0] = test;
testSubscribers[1] = new ParallelReduceSeed.ParallelReduceSeedSubscriber<>(
subscriber, "", (s, i) -> s + i);;
source.subscribe(testSubscribers);
assertThat(test.scan(Scannable.Attr.TERMINATED)).isFalse();
assertThat(test.scan(Scannable.Attr.PREFETCH)).isEqualTo(0);
assertThat(test.scan(Scannable.Attr.ACTUAL)).isSameAs(subscriber);
assertThat(test.scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.SYNC);
test.done = true;
assertThat(test.scan(Scannable.Attr.TERMINATED)).isTrue();
test.done = false;
assertThat(test.scan(Scannable.Attr.CANCELLED)).isFalse();
test.cancel();
assertThat(test.scan(Scannable.Attr.CANCELLED)).isTrue();
}
}
| ParallelReduceSeedTest |
java | apache__hadoop | hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/server/dns/ApplicationServiceRecordProcessor.java | {
"start": 10330,
"end": 11207
} | class ____ extends AApplicationRecordDescriptor {
/**
* Creates an application AAAA record descriptor.
*
* @param path registry path for service record
* @param record service record
* @throws Exception
*/
public AAAAApplicationRecordDescriptor(String path,
ServiceRecord record) throws Exception {
super(path, record);
}
/**
* Initializes the descriptor parameters.
*
* @param serviceRecord the service record.
*/
@Override protected void init(ServiceRecord serviceRecord)
throws Exception {
super.init(serviceRecord);
if (getTarget() == null) {
return;
}
try {
this.setTarget(getIpv6Address(getTarget()));
} catch (UnknownHostException e) {
throw new IllegalStateException(e);
}
}
}
}
| AAAAApplicationRecordDescriptor |
java | apache__kafka | connect/test-plugins/src/main/java/org/apache/kafka/connect/tools/SchemaSourceTask.java | {
"start": 1378,
"end": 6783
} | class ____ extends SourceTask {
private static final Logger log = LoggerFactory.getLogger(SchemaSourceTask.class);
public static final String NAME_CONFIG = "name";
public static final String ID_CONFIG = "id";
public static final String TOPIC_CONFIG = "topic";
public static final String NUM_MSGS_CONFIG = "num.messages";
public static final String THROUGHPUT_CONFIG = "throughput";
public static final String MULTIPLE_SCHEMA_CONFIG = "multiple.schema";
public static final String PARTITION_COUNT_CONFIG = "partition.count";
private static final String ID_FIELD = "id";
private static final String SEQNO_FIELD = "seqno";
private ThroughputThrottler throttler;
private int id; // Task ID
private String topic;
private Map<String, Integer> partition;
private long startingSeqno;
private long seqno;
private long count;
private long maxNumMsgs;
private boolean multipleSchema;
private int partitionCount;
private static final Schema VALUE_SCHEMA = SchemaBuilder.struct().version(1).name("record")
.field("boolean", Schema.BOOLEAN_SCHEMA)
.field("int", Schema.INT32_SCHEMA)
.field("long", Schema.INT64_SCHEMA)
.field("float", Schema.FLOAT32_SCHEMA)
.field("double", Schema.FLOAT64_SCHEMA)
.field("partitioning", Schema.INT32_SCHEMA)
.field("id", Schema.INT32_SCHEMA)
.field("seqno", Schema.INT64_SCHEMA)
.build();
private static final Schema VALUE_SCHEMA_2 = SchemaBuilder.struct().version(2).name("record")
.field("boolean", Schema.BOOLEAN_SCHEMA)
.field("int", Schema.INT32_SCHEMA)
.field("long", Schema.INT64_SCHEMA)
.field("float", Schema.FLOAT32_SCHEMA)
.field("double", Schema.FLOAT64_SCHEMA)
.field("partitioning", Schema.INT32_SCHEMA)
.field("string", SchemaBuilder.string().defaultValue("abc").build())
.field("id", Schema.INT32_SCHEMA)
.field("seqno", Schema.INT64_SCHEMA)
.build();
@Override
public String version() {
return new SchemaSourceConnector().version();
}
@Override
public void start(Map<String, String> props) {
final long throughput;
String name = props.get(NAME_CONFIG);
try {
id = Integer.parseInt(props.get(ID_CONFIG));
topic = props.get(TOPIC_CONFIG);
maxNumMsgs = Long.parseLong(props.get(NUM_MSGS_CONFIG));
multipleSchema = Boolean.parseBoolean(props.get(MULTIPLE_SCHEMA_CONFIG));
partitionCount = Integer.parseInt(props.getOrDefault(PARTITION_COUNT_CONFIG, "1"));
throughput = Long.parseLong(props.get(THROUGHPUT_CONFIG));
} catch (NumberFormatException e) {
throw new ConnectException("Invalid SchemaSourceTask configuration", e);
}
throttler = new ThroughputThrottler(throughput, System.currentTimeMillis());
partition = Map.of(ID_FIELD, id);
Map<String, Object> previousOffset = this.context.offsetStorageReader().offset(partition);
if (previousOffset != null) {
seqno = (Long) previousOffset.get(SEQNO_FIELD) + 1;
} else {
seqno = 0;
}
startingSeqno = seqno;
count = 0;
log.info("Started SchemaSourceTask {}-{} producing to topic {} resuming from seqno {}", name, id, topic, startingSeqno);
}
@Override
public List<SourceRecord> poll() {
if (count < maxNumMsgs) {
long sendStartMs = System.currentTimeMillis();
if (throttler.shouldThrottle(seqno - startingSeqno, sendStartMs)) {
throttler.throttle();
}
Map<String, Long> ccOffset = Map.of(SEQNO_FIELD, seqno);
int partitionVal = (int) (seqno % partitionCount);
final Struct data;
final SourceRecord srcRecord;
if (!multipleSchema || count % 2 == 0) {
data = new Struct(VALUE_SCHEMA)
.put("boolean", true)
.put("int", 12)
.put("long", 12L)
.put("float", 12.2f)
.put("double", 12.2)
.put("partitioning", partitionVal)
.put("id", id)
.put("seqno", seqno);
srcRecord = new SourceRecord(partition, ccOffset, topic, id, Schema.STRING_SCHEMA, "key", VALUE_SCHEMA, data);
} else {
data = new Struct(VALUE_SCHEMA_2)
.put("boolean", true)
.put("int", 12)
.put("long", 12L)
.put("float", 12.2f)
.put("double", 12.2)
.put("partitioning", partitionVal)
.put("string", "def")
.put("id", id)
.put("seqno", seqno);
srcRecord = new SourceRecord(partition, ccOffset, topic, id, Schema.STRING_SCHEMA, "key", VALUE_SCHEMA_2, data);
}
System.out.println("{\"task\": " + id + ", \"seqno\": " + seqno + "}");
seqno++;
count++;
return List.of(srcRecord);
} else {
throttler.throttle();
return List.of();
}
}
@Override
public void stop() {
throttler.wakeup();
}
}
| SchemaSourceTask |
java | apache__avro | lang/java/mapred/src/test/java/org/apache/avro/mapred/TestReflectJob.java | {
"start": 2823,
"end": 5209
} | class ____ extends AvroReducer<Text, Count, WordCount> {
@Override
public void reduce(Text word, Iterable<Count> counts, AvroCollector<WordCount> collector, Reporter reporter)
throws IOException {
long sum = 0;
for (Count count : counts)
sum += count.count;
collector.collect(new WordCount(word.text, sum));
}
}
@Test
@SuppressWarnings("deprecation")
void job() throws Exception {
JobConf job = new JobConf();
String dir = "target/testReflectJob";
Path inputPath = new Path(dir + "/in");
Path outputPath = new Path(dir + "/out");
outputPath.getFileSystem(job).delete(outputPath);
inputPath.getFileSystem(job).delete(inputPath);
writeLinesFile(new File(dir + "/in"));
job.setJobName("reflect");
AvroJob.setInputSchema(job, ReflectData.get().getSchema(Text.class));
AvroJob.setMapOutputSchema(job, new Pair(new Text(""), new Count(0L)).getSchema());
AvroJob.setOutputSchema(job, ReflectData.get().getSchema(WordCount.class));
AvroJob.setMapperClass(job, MapImpl.class);
// AvroJob.setCombinerClass(job, ReduceImpl.class);
AvroJob.setReducerClass(job, ReduceImpl.class);
FileInputFormat.setInputPaths(job, inputPath);
FileOutputFormat.setOutputPath(job, outputPath);
AvroJob.setReflect(job); // use reflection
JobClient.runJob(job);
validateCountsFile(new File(new File(dir, "out"), "part-00000.avro"));
}
private void writeLinesFile(File dir) throws IOException {
DatumWriter<Text> writer = new ReflectDatumWriter<>();
DataFileWriter<Text> out = new DataFileWriter<>(writer);
File linesFile = new File(dir + "/lines.avro");
dir.mkdirs();
out.create(ReflectData.get().getSchema(Text.class), linesFile);
for (String line : WordCountUtil.LINES)
out.append(new Text(line));
out.close();
}
private void validateCountsFile(File file) throws Exception {
DatumReader<WordCount> reader = new ReflectDatumReader<>();
InputStream in = new BufferedInputStream(new FileInputStream(file));
DataFileStream<WordCount> counts = new DataFileStream<>(in, reader);
int numWords = 0;
for (WordCount wc : counts) {
assertEquals(WordCountUtil.COUNTS.get(wc.word), (Long) wc.count, wc.word);
numWords++;
}
in.close();
assertEquals(WordCountUtil.COUNTS.size(), numWords);
}
}
| ReduceImpl |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/SnmpComponentBuilderFactory.java | {
"start": 1408,
"end": 1887
} | interface ____ {
/**
* SNMP (camel-snmp)
* Receive traps and poll SNMP (Simple Network Management Protocol) capable
* devices.
*
* Category: monitoring
* Since: 2.1
* Maven coordinates: org.apache.camel:camel-snmp
*
* @return the dsl builder
*/
static SnmpComponentBuilder snmp() {
return new SnmpComponentBuilderImpl();
}
/**
* Builder for the SNMP component.
*/
| SnmpComponentBuilderFactory |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/DifferentNameButSameTest.java | {
"start": 7477,
"end": 7696
} | interface ____ {}
}
""")
.expectUnchanged()
.addInputLines(
"Test.java",
"""
package pkg;
import pkg.Foo.Builder;
| Builder |
java | quarkusio__quarkus | extensions/micrometer/deployment/src/test/java/io/quarkus/micrometer/deployment/pathparams/HttpPathParamLimitWithReactiveRoutes400Test.java | {
"start": 538,
"end": 2174
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withConfigurationResource("test-logging.properties")
.overrideConfigKey("quarkus.micrometer.binder-enabled-default", "false")
.overrideConfigKey("quarkus.micrometer.binder.http-client.enabled", "true")
.overrideConfigKey("quarkus.micrometer.binder.http-server.enabled", "true")
.overrideConfigKey("quarkus.micrometer.binder.vertx.enabled", "true")
.overrideConfigKey("quarkus.redis.devservices.enabled", "false")
.withApplicationRoot((jar) -> jar
.addClasses(Util.class,
Resource.class));
@Inject
MeterRegistry registry;
public static final int COUNT = 101;
@Test
void testWithReactiveRoute400() throws InterruptedException {
registry.clear();
for (int i = 0; i < COUNT; i++) {
RestAssured.get("/rr").then().statusCode(400);
RestAssured.get("/rr/foo-" + i).then().statusCode(400);
}
Util.waitForMeters(registry.find("http.server.requests").timers(), COUNT);
Assertions.assertEquals(COUNT, registry.find("http.server.requests")
.tag("uri", "/rr").tag("method", "GET")
.timers().iterator().next().count());
Assertions.assertEquals(COUNT, registry.find("http.server.requests")
.tag("method", "GET").tag("uri", "/rr/{message}")
.timers().iterator().next().count());
}
@Singleton
public static | HttpPathParamLimitWithReactiveRoutes400Test |
java | micronaut-projects__micronaut-core | inject-java-test/src/test/groovy/io/micronaut/inject/visitor/beans/builder/SubBuilder.java | {
"start": 628,
"end": 924
} | class ____ extends SuperBuilder.Builder {
private String bar;
public Builder bar(String bar) {
this.bar = bar;
return this;
}
@Override
public SubBuilder build() {
return new SubBuilder(foo, bar);
}
}
}
| Builder |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/bugs/_611/SomeClass.java | {
"start": 1496,
"end": 1789
} | class ____ {
private final String value;
public Target(String value) {
this.value = value;
}
public String getValue() {
return value;
}
}
}
}
}
| Target |
java | apache__kafka | clients/src/test/java/org/apache/kafka/common/metrics/KafkaMbeanTest.java | {
"start": 1621,
"end": 6162
} | class ____ {
private final MBeanServer mBeanServer = ManagementFactory.getPlatformMBeanServer();
private Sensor sensor;
private MetricName countMetricName;
private MetricName sumMetricName;
private Metrics metrics;
@BeforeEach
public void setup() throws Exception {
metrics = new Metrics();
metrics.addReporter(new JmxReporter());
sensor = metrics.sensor("kafka.requests");
countMetricName = metrics.metricName("pack.bean1.count", "grp1");
sensor.add(countMetricName, new WindowedCount());
sumMetricName = metrics.metricName("pack.bean1.sum", "grp1");
sensor.add(sumMetricName, new WindowedSum());
}
@AfterEach
public void tearDown() {
metrics.close();
}
@Test
public void testGetAttribute() throws Exception {
sensor.record(2.5);
Object counterAttribute = getAttribute(countMetricName);
assertEquals(1.0, counterAttribute);
Object sumAttribute = getAttribute(sumMetricName);
assertEquals(2.5, sumAttribute);
}
@Test
public void testGetAttributeUnknown() throws Exception {
sensor.record(2.5);
try {
getAttribute(sumMetricName, "name");
fail("Should have gotten attribute not found");
} catch (AttributeNotFoundException e) {
// Expected
}
}
@Test
public void testGetAttributes() throws Exception {
sensor.record(3.5);
sensor.record(4.0);
AttributeList attributeList = getAttributes(countMetricName, countMetricName.name(), sumMetricName.name());
List<Attribute> attributes = attributeList.asList();
assertEquals(2, attributes.size());
for (Attribute attribute : attributes) {
if (countMetricName.name().equals(attribute.getName()))
assertEquals(2.0, attribute.getValue());
else if (sumMetricName.name().equals(attribute.getName()))
assertEquals(7.5, attribute.getValue());
else
fail("Unexpected attribute returned: " + attribute.getName());
}
}
@Test
public void testGetAttributesWithUnknown() throws Exception {
sensor.record(3.5);
sensor.record(4.0);
AttributeList attributeList = getAttributes(countMetricName, countMetricName.name(),
sumMetricName.name(), "name");
List<Attribute> attributes = attributeList.asList();
assertEquals(2, attributes.size());
for (Attribute attribute : attributes) {
if (countMetricName.name().equals(attribute.getName()))
assertEquals(2.0, attribute.getValue());
else if (sumMetricName.name().equals(attribute.getName()))
assertEquals(7.5, attribute.getValue());
else
fail("Unexpected attribute returned: " + attribute.getName());
}
}
@Test
public void testInvoke() {
RuntimeMBeanException e = assertThrows(RuntimeMBeanException.class,
() -> mBeanServer.invoke(objectName(countMetricName), "something", null, null));
assertEquals(UnsupportedOperationException.class, e.getCause().getClass());
}
@Test
public void testSetAttribute() {
RuntimeMBeanException e = assertThrows(RuntimeMBeanException.class,
() -> mBeanServer.setAttribute(objectName(countMetricName), new Attribute("anything", 1)));
assertEquals(UnsupportedOperationException.class, e.getCause().getClass());
}
@Test
public void testSetAttributes() {
RuntimeMBeanException e = assertThrows(RuntimeMBeanException.class,
() -> mBeanServer.setAttributes(objectName(countMetricName), new AttributeList(1)));
assertEquals(UnsupportedOperationException.class, e.getCause().getClass());
}
private ObjectName objectName(MetricName metricName) throws Exception {
return new ObjectName(JmxReporter.getMBeanName("", metricName));
}
private Object getAttribute(MetricName metricName, String attribute) throws Exception {
return mBeanServer.getAttribute(objectName(metricName), attribute);
}
private Object getAttribute(MetricName metricName) throws Exception {
return getAttribute(metricName, metricName.name());
}
private AttributeList getAttributes(MetricName metricName, String... attributes) throws Exception {
return mBeanServer.getAttributes(objectName(metricName), attributes);
}
}
| KafkaMbeanTest |
java | elastic__elasticsearch | x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/CrossClusterApiKeySignatureManager.java | {
"start": 2056,
"end": 7416
} | class ____ {
private final Logger logger = LogManager.getLogger(getClass());
private final Environment environment;
private final AtomicReference<X509ExtendedTrustManager> trustManager = new AtomicReference<>();
private final Map<String, X509KeyPair> keyPairByClusterAlias = new ConcurrentHashMap<>();
private final Map<String, SslConfiguration> sslSigningConfigByClusterAlias = new ConcurrentHashMap<>();
private final AtomicReference<SslConfiguration> sslTrustConfig = new AtomicReference<>();
private static final Map<String, String> SIGNATURE_ALGORITHM_BY_TYPE = Map.of("RSA", "SHA256withRSA", "EC", "SHA256withECDSA");
@SuppressWarnings("this-escape")
public CrossClusterApiKeySignatureManager(Environment environment) {
this.environment = environment;
loadSigningConfigs();
loadTrustConfig();
}
public void reload(Settings settings) {
logger.trace("Loading trust config with settings [{}]", settings);
try {
var sslConfig = loadSslConfig(environment, settings);
var trustConfig = sslConfig.trustConfig();
// Only load a trust manager if trust is explicitly configured or system default, to avoid using key store as trust store
if (trustConfig.hasExplicitConfig() || trustConfig.isSystemDefault()) {
X509ExtendedTrustManager newTrustManager = settings.getAsBoolean(
SETTINGS_PART_SIGNING + "." + SETTINGS_PART_DIAGNOSE_TRUST,
true
) ? wrapInDiagnosticTrustManager(trustConfig.createTrustManager()) : trustConfig.createTrustManager();
if (newTrustManager.getAcceptedIssuers().length == 0) {
logger.warn("Cross cluster API Key trust configuration [{}] has no accepted certificate issuers", trustConfig);
trustManager.set(null);
} else {
sslTrustConfig.set(sslConfig);
trustManager.set(newTrustManager);
}
} else {
trustManager.set(null);
}
} catch (Exception e) {
throw new IllegalStateException("Failed to load trust config", e);
}
}
public void reload(String clusterAlias, Settings settings) {
logger.trace("Loading signing config for [{}] with settings [{}]", clusterAlias, settings);
if (settings.getByPrefix(SETTINGS_PART_SIGNING).isEmpty() == false) {
try {
var sslConfig = loadSslConfig(environment, settings);
sslSigningConfigByClusterAlias.put(clusterAlias, sslConfig);
var keyConfig = sslConfig.keyConfig();
if (keyConfig.hasKeyMaterial()) {
String alias = settings.get(SETTINGS_PART_SIGNING + "." + KEYSTORE_ALIAS_SUFFIX);
X509KeyManager keyManager = keyConfig.createKeyManager();
if (keyManager == null) {
throw new IllegalStateException("Cannot create key manager for key config [" + keyConfig + "]");
}
var keyPair = Strings.isNullOrEmpty(alias)
? buildKeyPair(keyManager, keyConfig)
: buildKeyPair(keyManager, keyConfig, alias);
logger.trace("Key pair [{}] found for [{}]", keyPair, clusterAlias);
keyPairByClusterAlias.put(clusterAlias, keyPair);
} else {
keyPairByClusterAlias.remove(clusterAlias);
}
} catch (Exception e) {
throw new IllegalStateException(Strings.format("Failed to load signing config for cluster [%s]", clusterAlias), e);
}
} else {
logger.trace("No valid signing config settings found for [{}] with settings [{}]", clusterAlias, settings);
keyPairByClusterAlias.remove(clusterAlias);
}
}
public Collection<Path> getDependentTrustFiles() {
var sslConfig = sslTrustConfig.get();
return sslConfig == null ? Collections.emptyList() : sslConfig.getDependentFiles();
}
public Collection<Path> getDependentSigningFiles(String clusterAlias) {
var sslConfig = sslSigningConfigByClusterAlias.get(clusterAlias);
return sslConfig == null ? Collections.emptyList() : sslConfig.getDependentFiles();
}
public void validate(Settings settings) {
if (settings.getByPrefix(SETTINGS_PART_SIGNING).isEmpty() == false) {
var sslConfig = loadSslConfig(environment, settings);
if (sslConfig != null) {
sslConfig.getDependentFiles().forEach(path -> {
if (Files.exists(path) == false) {
throw new IllegalArgumentException(Strings.format("Configured file [%s] not found", path));
}
});
}
}
}
// Visible for testing
X509TrustManager getTrustManager() {
return trustManager.get();
}
public Verifier verifier() {
return new Verifier();
}
public Signer signerForClusterAlias(String clusterAlias) {
return keyPairByClusterAlias.containsKey(clusterAlias) ? new Signer(clusterAlias) : null;
}
public | CrossClusterApiKeySignatureManager |
java | alibaba__nacos | console/src/main/java/com/alibaba/nacos/console/controller/v3/ConsoleHealthController.java | {
"start": 1549,
"end": 2714
} | class ____ {
private final HealthProxy healthProxy;
public ConsoleHealthController(HealthProxy healthProxy) {
this.healthProxy = healthProxy;
}
/**
* Whether the Nacos is in broken states or not, and cannot recover except by being restarted.
*
* @return HTTP code equal to 200 indicates that Nacos is in right states. HTTP code equal to 500 indicates that
* Nacos is in broken states.
*/
@GetMapping("/liveness")
public Result<String> liveness() {
return Result.success("ok");
}
/**
* Ready to receive the request or not.
*
* @return HTTP code equal to 200 indicates that Nacos is ready. HTTP code equal to 500 indicates that Nacos is not
* ready.
*/
@GetMapping("/readiness")
public ResponseEntity<Result<String>> readiness() throws NacosException {
Result<String> ret = healthProxy.checkReadiness();
if (ret.getCode() == 0) {
return ResponseEntity.ok().body(ret);
} else {
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body(ret);
}
}
}
| ConsoleHealthController |
java | spring-projects__spring-boot | module/spring-boot-amqp/src/main/java/org/springframework/boot/amqp/autoconfigure/RabbitProperties.java | {
"start": 10842,
"end": 15281
} | class ____ {
private static final String SUN_X509 = "SunX509";
/**
* Whether to enable SSL support. Determined automatically if an address is
* provided with the protocol (amqp:// vs. amqps://).
*/
private @Nullable Boolean enabled;
/**
* SSL bundle name.
*/
private @Nullable String bundle;
/**
* Path to the key store that holds the SSL certificate.
*/
private @Nullable String keyStore;
/**
* Key store type.
*/
private String keyStoreType = "PKCS12";
/**
* Password used to access the key store.
*/
private @Nullable String keyStorePassword;
/**
* Key store algorithm.
*/
private String keyStoreAlgorithm = SUN_X509;
/**
* Trust store that holds SSL certificates.
*/
private @Nullable String trustStore;
/**
* Trust store type.
*/
private String trustStoreType = "JKS";
/**
* Password used to access the trust store.
*/
private @Nullable String trustStorePassword;
/**
* Trust store algorithm.
*/
private String trustStoreAlgorithm = SUN_X509;
/**
* SSL algorithm to use. By default, configured by the Rabbit client library.
*/
private @Nullable String algorithm;
/**
* Whether to enable server side certificate validation.
*/
private boolean validateServerCertificate = true;
/**
* Whether to enable hostname verification.
*/
private boolean verifyHostname = true;
public @Nullable Boolean getEnabled() {
return this.enabled;
}
/**
* Returns whether SSL is enabled from the first address, or the configured ssl
* enabled flag if no addresses have been set.
* @return whether ssl is enabled
* @see #setAddresses(List)
* @see #getEnabled() ()
*/
public boolean determineEnabled() {
boolean defaultEnabled = Boolean.TRUE.equals(getEnabled()) || this.bundle != null;
if (CollectionUtils.isEmpty(RabbitProperties.this.parsedAddresses)) {
return defaultEnabled;
}
Address address = RabbitProperties.this.parsedAddresses.get(0);
return address.determineSslEnabled(defaultEnabled);
}
public void setEnabled(@Nullable Boolean enabled) {
this.enabled = enabled;
}
public @Nullable String getBundle() {
return this.bundle;
}
public void setBundle(@Nullable String bundle) {
this.bundle = bundle;
}
public @Nullable String getKeyStore() {
return this.keyStore;
}
public void setKeyStore(@Nullable String keyStore) {
this.keyStore = keyStore;
}
public String getKeyStoreType() {
return this.keyStoreType;
}
public void setKeyStoreType(String keyStoreType) {
this.keyStoreType = keyStoreType;
}
public @Nullable String getKeyStorePassword() {
return this.keyStorePassword;
}
public void setKeyStorePassword(@Nullable String keyStorePassword) {
this.keyStorePassword = keyStorePassword;
}
public String getKeyStoreAlgorithm() {
return this.keyStoreAlgorithm;
}
public void setKeyStoreAlgorithm(String keyStoreAlgorithm) {
this.keyStoreAlgorithm = keyStoreAlgorithm;
}
public @Nullable String getTrustStore() {
return this.trustStore;
}
public void setTrustStore(@Nullable String trustStore) {
this.trustStore = trustStore;
}
public String getTrustStoreType() {
return this.trustStoreType;
}
public void setTrustStoreType(String trustStoreType) {
this.trustStoreType = trustStoreType;
}
public @Nullable String getTrustStorePassword() {
return this.trustStorePassword;
}
public void setTrustStorePassword(@Nullable String trustStorePassword) {
this.trustStorePassword = trustStorePassword;
}
public String getTrustStoreAlgorithm() {
return this.trustStoreAlgorithm;
}
public void setTrustStoreAlgorithm(String trustStoreAlgorithm) {
this.trustStoreAlgorithm = trustStoreAlgorithm;
}
public @Nullable String getAlgorithm() {
return this.algorithm;
}
public void setAlgorithm(@Nullable String sslAlgorithm) {
this.algorithm = sslAlgorithm;
}
public boolean isValidateServerCertificate() {
return this.validateServerCertificate;
}
public void setValidateServerCertificate(boolean validateServerCertificate) {
this.validateServerCertificate = validateServerCertificate;
}
public boolean isVerifyHostname() {
return this.verifyHostname;
}
public void setVerifyHostname(boolean verifyHostname) {
this.verifyHostname = verifyHostname;
}
}
public static | Ssl |
java | apache__rocketmq | client/src/test/java/org/apache/rocketmq/client/consumer/rebalance/AllocateMachineRoomNearByTest.java | {
"start": 1241,
"end": 8524
} | class ____ {
private static final String CID_PREFIX = "CID-";
private final String topic = "topic_test";
private final AllocateMachineRoomNearby.MachineRoomResolver machineRoomResolver = new AllocateMachineRoomNearby.MachineRoomResolver() {
@Override
public String brokerDeployIn(MessageQueue messageQueue) {
return messageQueue.getBrokerName().split("-")[0];
}
@Override
public String consumerDeployIn(String clientID) {
return clientID.split("-")[0];
}
};
private final AllocateMessageQueueStrategy allocateMessageQueueStrategy = new AllocateMachineRoomNearby(new AllocateMessageQueueAveragely(), machineRoomResolver);
@Before
public void init() {
}
@Test
public void test1() {
testWhenIDCSizeEquals(5,20,10);
testWhenIDCSizeEquals(5,20,20);
testWhenIDCSizeEquals(5,20,30);
testWhenIDCSizeEquals(5,20,0);
}
@Test
public void test2() {
testWhenConsumerIDCIsMore(5,1,10, 10, false);
testWhenConsumerIDCIsMore(5,1,10, 5, false);
testWhenConsumerIDCIsMore(5,1,10, 20, false);
testWhenConsumerIDCIsMore(5,1,10, 0, false);
}
@Test
public void test3() {
testWhenConsumerIDCIsLess(5,2,10, 10, false);
testWhenConsumerIDCIsLess(5,2,10, 5, false);
testWhenConsumerIDCIsLess(5,2,10, 20, false);
testWhenConsumerIDCIsLess(5,2,10, 0, false);
}
@Test
public void testRun10RandomCase() {
for (int i = 0; i < 10; i++) {
int consumerSize = new Random().nextInt(200) + 1;//1-200
int queueSize = new Random().nextInt(100) + 1;//1-100
int brokerIDCSize = new Random().nextInt(10) + 1;//1-10
int consumerIDCSize = new Random().nextInt(10) + 1;//1-10
if (brokerIDCSize == consumerIDCSize) {
testWhenIDCSizeEquals(brokerIDCSize,queueSize,consumerSize);
}
else if (brokerIDCSize > consumerIDCSize) {
testWhenConsumerIDCIsLess(brokerIDCSize,brokerIDCSize - consumerIDCSize, queueSize, consumerSize, false);
} else {
testWhenConsumerIDCIsMore(brokerIDCSize, consumerIDCSize - brokerIDCSize, queueSize, consumerSize, false);
}
}
}
public void testWhenIDCSizeEquals(int idcSize, int queueSize, int consumerSize) {
List<String> cidAll = prepareConsumer(idcSize, consumerSize);
List<MessageQueue> mqAll = prepareMQ(idcSize, queueSize);
List<MessageQueue> resAll = new ArrayList<>();
for (String currentID : cidAll) {
List<MessageQueue> res = allocateMessageQueueStrategy.allocate("Test-C-G",currentID,mqAll,cidAll);
for (MessageQueue mq : res) {
Assert.assertTrue(machineRoomResolver.brokerDeployIn(mq).equals(machineRoomResolver.consumerDeployIn(currentID)));
}
resAll.addAll(res);
}
Assert.assertTrue(hasAllocateAllQ(cidAll,mqAll,resAll));
}
public void testWhenConsumerIDCIsMore(int brokerIDCSize, int consumerMore, int queueSize, int consumerSize, boolean print) {
Set<String> brokerIDCWithConsumer = new TreeSet<>();
List<String> cidAll = prepareConsumer(brokerIDCSize + consumerMore, consumerSize);
List<MessageQueue> mqAll = prepareMQ(brokerIDCSize, queueSize);
for (MessageQueue mq : mqAll) {
brokerIDCWithConsumer.add(machineRoomResolver.brokerDeployIn(mq));
}
List<MessageQueue> resAll = new ArrayList<>();
for (String currentID : cidAll) {
List<MessageQueue> res = allocateMessageQueueStrategy.allocate("Test-C-G",currentID,mqAll,cidAll);
for (MessageQueue mq : res) {
if (brokerIDCWithConsumer.contains(machineRoomResolver.brokerDeployIn(mq))) { //healthy idc, so only consumer in this idc should be allocated
Assert.assertTrue(machineRoomResolver.brokerDeployIn(mq).equals(machineRoomResolver.consumerDeployIn(currentID)));
}
}
resAll.addAll(res);
}
Assert.assertTrue(hasAllocateAllQ(cidAll,mqAll,resAll));
}
public void testWhenConsumerIDCIsLess(int brokerIDCSize, int consumerIDCLess, int queueSize, int consumerSize, boolean print) {
Set<String> healthyIDC = new TreeSet<>();
List<String> cidAll = prepareConsumer(brokerIDCSize - consumerIDCLess, consumerSize);
List<MessageQueue> mqAll = prepareMQ(brokerIDCSize, queueSize);
for (String cid : cidAll) {
healthyIDC.add(machineRoomResolver.consumerDeployIn(cid));
}
List<MessageQueue> resAll = new ArrayList<>();
Map<String, List<MessageQueue>> idc2Res = new TreeMap<>();
for (String currentID : cidAll) {
String currentIDC = machineRoomResolver.consumerDeployIn(currentID);
List<MessageQueue> res = allocateMessageQueueStrategy.allocate("Test-C-G",currentID,mqAll,cidAll);
if (!idc2Res.containsKey(currentIDC)) {
idc2Res.put(currentIDC, new ArrayList<>());
}
idc2Res.get(currentIDC).addAll(res);
resAll.addAll(res);
}
for (String consumerIDC : healthyIDC) {
List<MessageQueue> resInOneIDC = idc2Res.get(consumerIDC);
List<MessageQueue> mqInThisIDC = createMessageQueueList(consumerIDC,queueSize);
Assert.assertTrue(resInOneIDC.containsAll(mqInThisIDC));
}
Assert.assertTrue(hasAllocateAllQ(cidAll,mqAll,resAll));
}
private boolean hasAllocateAllQ(List<String> cidAll,List<MessageQueue> mqAll, List<MessageQueue> allocatedResAll) {
if (cidAll.isEmpty()) {
return allocatedResAll.isEmpty();
}
return mqAll.containsAll(allocatedResAll) && allocatedResAll.containsAll(mqAll) && mqAll.size() == allocatedResAll.size();
}
private List<String> createConsumerIdList(String machineRoom, int size) {
List<String> consumerIdList = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
consumerIdList.add(machineRoom + "-" + CID_PREFIX + String.valueOf(i));
}
return consumerIdList;
}
private List<MessageQueue> createMessageQueueList(String machineRoom, int size) {
List<MessageQueue> messageQueueList = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
MessageQueue mq = new MessageQueue(topic, machineRoom + "-brokerName", i);
messageQueueList.add(mq);
}
return messageQueueList;
}
private List<MessageQueue> prepareMQ(int brokerIDCSize, int queueSize) {
List<MessageQueue> mqAll = new ArrayList<>();
for (int i = 1; i <= brokerIDCSize; i++) {
mqAll.addAll(createMessageQueueList("IDC" + i, queueSize));
}
return mqAll;
}
private List<String> prepareConsumer(int idcSize, int consumerSize) {
List<String> cidAll = new ArrayList<>();
for (int i = 1; i <= idcSize; i++) {
cidAll.addAll(createConsumerIdList("IDC" + i, consumerSize));
}
return cidAll;
}
}
| AllocateMachineRoomNearByTest |
java | lettuce-io__lettuce-core | src/test/java/io/lettuce/core/cluster/topology/RequestsUnitTests.java | {
"start": 1256,
"end": 3712
} | class ____ {
@Test
void shouldCreateTopologyView() throws Exception {
RedisURI redisURI = RedisURI.create("localhost", 6379);
Requests clusterNodesRequests = new Requests();
String clusterNodesOutput = "1 127.0.0.1:7380 master,myself - 0 1401258245007 2 disconnected 8000-11999\n";
clusterNodesRequests.addRequest(redisURI, getCommand(clusterNodesOutput));
Requests infoClientRequests = new Requests();
String infoClientOutput = "# Clients\r\nconnected_clients:100\r\nclient_longest_output_list:0\r\nclient_biggest_input_buf:0\r\nblocked_clients:0";
infoClientRequests.addRequest(redisURI, getCommand(infoClientOutput));
NodeTopologyView nodeTopologyView = NodeTopologyView.from(redisURI, clusterNodesRequests, infoClientRequests);
assertThat(nodeTopologyView.isAvailable()).isTrue();
assertThat(nodeTopologyView.getConnectedClients()).isEqualTo(100);
assertThat(nodeTopologyView.getPartitions()).hasSize(1);
assertThat(nodeTopologyView.getClusterNodes()).isEqualTo(clusterNodesOutput);
assertThat(nodeTopologyView.getInfo()).isEqualTo(infoClientOutput);
}
@Test
void shouldCreateTopologyViewWithoutClientCount() throws Exception {
RedisURI redisURI = RedisURI.create("localhost", 6379);
Requests clusterNodesRequests = new Requests();
String clusterNodesOutput = "1 127.0.0.1:7380 master,myself - 0 1401258245007 2 disconnected 8000-11999\n";
clusterNodesRequests.addRequest(redisURI, getCommand(clusterNodesOutput));
Requests clientListRequests = new Requests();
NodeTopologyView nodeTopologyView = NodeTopologyView.from(redisURI, clusterNodesRequests, clientListRequests);
assertThat(nodeTopologyView.isAvailable()).isFalse();
assertThat(nodeTopologyView.getConnectedClients()).isEqualTo(0);
assertThat(nodeTopologyView.getPartitions()).isEmpty();
assertThat(nodeTopologyView.getClusterNodes()).isNull();
}
private TimedAsyncCommand getCommand(String response) {
Command<String, String, String> command = new Command<>(CommandType.TYPE, new StatusOutput<>(StringCodec.UTF8));
TimedAsyncCommand timedAsyncCommand = new TimedAsyncCommand(command);
command.getOutput().set(ByteBuffer.wrap(response.getBytes()));
timedAsyncCommand.complete();
return timedAsyncCommand;
}
}
| RequestsUnitTests |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableTakeUntil.java | {
"start": 3112,
"end": 4081
} | class ____ extends AtomicReference<Subscription> implements FlowableSubscriber<Object> {
private static final long serialVersionUID = -3592821756711087922L;
@Override
public void onSubscribe(Subscription s) {
SubscriptionHelper.setOnce(this, s, Long.MAX_VALUE);
}
@Override
public void onNext(Object t) {
SubscriptionHelper.cancel(this);
onComplete();
}
@Override
public void onError(Throwable t) {
SubscriptionHelper.cancel(upstream);
HalfSerializer.onError(downstream, t, TakeUntilMainSubscriber.this, error);
}
@Override
public void onComplete() {
SubscriptionHelper.cancel(upstream);
HalfSerializer.onComplete(downstream, TakeUntilMainSubscriber.this, error);
}
}
}
}
| OtherSubscriber |
java | greenrobot__greendao | greendao-api/src/main/java/org/greenrobot/greendao/annotation/Keep.java | {
"start": 375,
"end": 533
} | class ____.
* The user is responsible to write and support any code which is required for greenDAO.
* </p>
* <p>
* Don't use this annotation on a | modification |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/resilience/retry/AbstractRetryInterceptor.java | {
"start": 4120,
"end": 4339
} | class ____ the current target object
* @return the retry specification as a {@link MethodRetrySpec}
*/
protected abstract @Nullable MethodRetrySpec getRetrySpec(Method method, Class<?> targetClass);
/**
* Inner | of |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/basic/NClobCharArrayTest.java | {
"start": 603,
"end": 1269
} | class ____ {
@Test
public void test(EntityManagerFactoryScope scope) {
Integer productId = scope.fromTransaction( entityManager -> {
final Product product = new Product();
product.setId(1);
product.setName("Mobile phone");
product.setWarranty("My product warranty".toCharArray());
entityManager.persist(product);
return product.getId();
});
scope.inTransaction( entityManager -> {
Product product = entityManager.find(Product.class, productId);
assertArrayEquals("My product warranty".toCharArray(), product.getWarranty());
});
}
//tag::basic-nclob-char-array-example[]
@Entity(name = "Product")
public static | NClobCharArrayTest |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/asyncprocessing/operators/AbstractAsyncStateStreamOperatorTest.java | {
"start": 30392,
"end": 32978
} | class ____ extends AbstractAsyncStateStreamOperator<Long>
implements TwoInputStreamOperator<Long, Long, Long>,
Triggerable<Integer, VoidNamespace> {
private transient InternalTimerService<VoidNamespace> timerService;
private FunctionWithException<Watermark, Watermark, Exception> preProcessFunction;
private ThrowingConsumer<Watermark, Exception> postProcessFunction;
public void setPreProcessFunction(
FunctionWithException<Watermark, Watermark, Exception> preProcessFunction) {
this.preProcessFunction = preProcessFunction;
}
public void setPostProcessFunction(
ThrowingConsumer<Watermark, Exception> postProcessFunction) {
this.postProcessFunction = postProcessFunction;
}
public void output(Long o) {
output.collect(new StreamRecord<>(o));
}
@Override
public void open() throws Exception {
super.open();
this.timerService =
getInternalTimerService("test-timers", VoidNamespaceSerializer.INSTANCE, this);
}
@Override
public Watermark preProcessWatermark(Watermark watermark) throws Exception {
return preProcessFunction == null ? watermark : preProcessFunction.apply(watermark);
}
@Override
public Watermark postProcessWatermark(Watermark watermark) throws Exception {
if (postProcessFunction != null) {
postProcessFunction.accept(watermark);
}
return watermark;
}
@Override
public void onEventTime(InternalTimer<Integer, VoidNamespace> timer) throws Exception {
assertThat(getCurrentKey()).isEqualTo(timer.getKey());
output.collect(new StreamRecord<>(timer.getTimestamp()));
}
@Override
public void onProcessingTime(InternalTimer<Integer, VoidNamespace> timer) throws Exception {
assertThat(getCurrentKey()).isEqualTo(timer.getKey());
}
@Override
public void processElement1(StreamRecord<Long> element) throws Exception {
timerService.registerEventTimeTimer(VoidNamespace.INSTANCE, element.getValue());
}
@Override
public void processElement2(StreamRecord<Long> element) throws Exception {
timerService.registerEventTimeTimer(VoidNamespace.INSTANCE, element.getValue());
}
}
/** {@link KeySelector} for tests. */
public static | WatermarkTestingOperator |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/builditem/ShutdownListenerBuildItem.java | {
"start": 315,
"end": 643
} | class ____ extends MultiBuildItem {
final ShutdownListener shutdownListener;
public ShutdownListenerBuildItem(ShutdownListener shutdownListener) {
this.shutdownListener = shutdownListener;
}
public ShutdownListener getShutdownListener() {
return shutdownListener;
}
}
| ShutdownListenerBuildItem |
java | apache__flink | flink-core/src/test/java/org/apache/flink/api/java/typeutils/TypeExtractorTest.java | {
"start": 3631,
"end": 18233
} | class ____ {
@SuppressWarnings({"rawtypes", "unchecked"})
@Test
void testBasicType() {
// use getGroupReduceReturnTypes()
RichGroupReduceFunction<?, ?> function =
new RichGroupReduceFunction<Boolean, Boolean>() {
private static final long serialVersionUID = 1L;
@Override
public void reduce(Iterable<Boolean> values, Collector<Boolean> out)
throws Exception {
// nothing to do
}
};
TypeInformation<?> ti =
TypeExtractor.getGroupReduceReturnTypes(function, (TypeInformation) Types.BOOLEAN);
assertThat(ti.isBasicType()).isTrue();
assertThat(ti).isEqualTo(BasicTypeInfo.BOOLEAN_TYPE_INFO);
assertThat(ti.getTypeClass()).isEqualTo(Boolean.class);
// use getForClass()
assertThat(TypeExtractor.getForClass(Boolean.class).isBasicType()).isTrue();
assertThat(TypeExtractor.getForClass(Boolean.class)).isEqualTo(ti);
// use getForObject()
assertThat(TypeExtractor.getForObject(true)).isEqualTo(BasicTypeInfo.BOOLEAN_TYPE_INFO);
}
@SuppressWarnings({"unchecked", "rawtypes"})
@Test
void testTupleWithBasicTypes() throws Exception {
// use getMapReturnTypes()
RichMapFunction<?, ?> function =
new RichMapFunction<
Tuple9<
Integer,
Long,
Double,
Float,
Boolean,
String,
Character,
Short,
Byte>,
Tuple9<
Integer,
Long,
Double,
Float,
Boolean,
String,
Character,
Short,
Byte>>() {
private static final long serialVersionUID = 1L;
@Override
public Tuple9<
Integer,
Long,
Double,
Float,
Boolean,
String,
Character,
Short,
Byte>
map(
Tuple9<
Integer,
Long,
Double,
Float,
Boolean,
String,
Character,
Short,
Byte>
value)
throws Exception {
return null;
}
};
TypeInformation<?> ti =
TypeExtractor.getMapReturnTypes(
function,
(TypeInformation)
TypeInformation.of(
new TypeHint<
Tuple9<
Integer,
Long,
Double,
Float,
Boolean,
String,
Character,
Short,
Byte>>() {}));
assertThat(ti.isTupleType()).isTrue();
assertThat(ti.getArity()).isEqualTo(9);
assertThat(ti).isInstanceOf(TupleTypeInfo.class);
List<FlatFieldDescriptor> ffd = new ArrayList<FlatFieldDescriptor>();
((TupleTypeInfo) ti).getFlatFields("f3", 0, ffd);
assertThat(ffd).hasSize(1);
assertThat(ffd.get(0).getPosition()).isEqualTo(3);
TupleTypeInfo<?> tti = (TupleTypeInfo<?>) ti;
assertThat(tti.getTypeClass()).isEqualTo(Tuple9.class);
for (int i = 0; i < 9; i++) {
assertThat(tti.getTypeAt(i) instanceof BasicTypeInfo).isTrue();
}
assertThat(tti.getTypeAt(0)).isEqualTo(BasicTypeInfo.INT_TYPE_INFO);
assertThat(tti.getTypeAt(1)).isEqualTo(BasicTypeInfo.LONG_TYPE_INFO);
assertThat(tti.getTypeAt(2)).isEqualTo(BasicTypeInfo.DOUBLE_TYPE_INFO);
assertThat(tti.getTypeAt(3)).isEqualTo(BasicTypeInfo.FLOAT_TYPE_INFO);
assertThat(tti.getTypeAt(4)).isEqualTo(BasicTypeInfo.BOOLEAN_TYPE_INFO);
assertThat(tti.getTypeAt(5)).isEqualTo(BasicTypeInfo.STRING_TYPE_INFO);
assertThat(tti.getTypeAt(6)).isEqualTo(BasicTypeInfo.CHAR_TYPE_INFO);
assertThat(tti.getTypeAt(7)).isEqualTo(BasicTypeInfo.SHORT_TYPE_INFO);
assertThat(tti.getTypeAt(8)).isEqualTo(BasicTypeInfo.BYTE_TYPE_INFO);
// use getForObject()
Tuple9<Integer, Long, Double, Float, Boolean, String, Character, Short, Byte> t =
new Tuple9<Integer, Long, Double, Float, Boolean, String, Character, Short, Byte>(
1, 1L, 1.0, 1.0F, false, "Hello World", 'w', (short) 1, (byte) 1);
assertThat(TypeExtractor.getForObject(t) instanceof TupleTypeInfo).isTrue();
TupleTypeInfo<?> tti2 = (TupleTypeInfo<?>) TypeExtractor.getForObject(t);
assertThat(tti2.getTypeAt(0)).isEqualTo(BasicTypeInfo.INT_TYPE_INFO);
assertThat(tti2.getTypeAt(1)).isEqualTo(BasicTypeInfo.LONG_TYPE_INFO);
assertThat(tti2.getTypeAt(2)).isEqualTo(BasicTypeInfo.DOUBLE_TYPE_INFO);
assertThat(tti2.getTypeAt(3)).isEqualTo(BasicTypeInfo.FLOAT_TYPE_INFO);
assertThat(tti2.getTypeAt(4)).isEqualTo(BasicTypeInfo.BOOLEAN_TYPE_INFO);
assertThat(tti2.getTypeAt(5)).isEqualTo(BasicTypeInfo.STRING_TYPE_INFO);
assertThat(tti2.getTypeAt(6)).isEqualTo(BasicTypeInfo.CHAR_TYPE_INFO);
assertThat(tti2.getTypeAt(7)).isEqualTo(BasicTypeInfo.SHORT_TYPE_INFO);
assertThat(tti2.getTypeAt(8)).isEqualTo(BasicTypeInfo.BYTE_TYPE_INFO);
// test that getForClass does not work
try {
TypeExtractor.getForClass(Tuple9.class);
fail("Exception expected here");
} catch (InvalidTypesException e) {
// that is correct
}
}
@SuppressWarnings({"unchecked", "rawtypes"})
@Test
void testTupleWithTuples() {
// use getFlatMapReturnTypes()
RichFlatMapFunction<?, ?> function =
new RichFlatMapFunction<
Tuple3<Tuple1<String>, Tuple1<Integer>, Tuple2<Long, Long>>,
Tuple3<Tuple1<String>, Tuple1<Integer>, Tuple2<Long, Long>>>() {
private static final long serialVersionUID = 1L;
@Override
public void flatMap(
Tuple3<Tuple1<String>, Tuple1<Integer>, Tuple2<Long, Long>> value,
Collector<Tuple3<Tuple1<String>, Tuple1<Integer>, Tuple2<Long, Long>>>
out)
throws Exception {
// nothing to do
}
};
TypeInformation<?> ti =
TypeExtractor.getFlatMapReturnTypes(
function,
(TypeInformation)
TypeInformation.of(
new TypeHint<
Tuple3<
Tuple1<String>,
Tuple1<Integer>,
Tuple2<Long, Long>>>() {}));
assertThat(ti.isTupleType()).isTrue();
assertThat(ti.getArity()).isEqualTo(3);
assertThat(ti).isInstanceOf(TupleTypeInfo.class);
List<FlatFieldDescriptor> ffd = new ArrayList<FlatFieldDescriptor>();
((TupleTypeInfo) ti).getFlatFields("f0.f0", 0, ffd);
assertThat(ffd.get(0).getPosition()).isZero();
ffd.clear();
((TupleTypeInfo) ti).getFlatFields("f0.f0", 0, ffd);
assertThat(ffd.get(0).getType() instanceof BasicTypeInfo).isTrue();
assertThat(ffd.get(0).getType().getTypeClass()).isEqualTo(String.class);
ffd.clear();
((TupleTypeInfo) ti).getFlatFields("f1.f0", 0, ffd);
assertThat(ffd.get(0).getPosition()).isOne();
ffd.clear();
TupleTypeInfo<?> tti = (TupleTypeInfo<?>) ti;
assertThat(tti.getTypeClass()).isEqualTo(Tuple3.class);
assertThat(tti.getTypeAt(0).isTupleType()).isTrue();
assertThat(tti.getTypeAt(1).isTupleType()).isTrue();
assertThat(tti.getTypeAt(2).isTupleType()).isTrue();
assertThat(tti.getTypeAt(0).getTypeClass()).isEqualTo(Tuple1.class);
assertThat(tti.getTypeAt(1).getTypeClass()).isEqualTo(Tuple1.class);
assertThat(tti.getTypeAt(2).getTypeClass()).isEqualTo(Tuple2.class);
assertThat(tti.getTypeAt(0).getArity()).isOne();
assertThat(tti.getTypeAt(1).getArity()).isOne();
assertThat(tti.getTypeAt(2).getArity()).isEqualTo(2);
assertThat(((TupleTypeInfo<?>) tti.getTypeAt(0)).getTypeAt(0))
.isEqualTo(BasicTypeInfo.STRING_TYPE_INFO);
assertThat(((TupleTypeInfo<?>) tti.getTypeAt(1)).getTypeAt(0))
.isEqualTo(BasicTypeInfo.INT_TYPE_INFO);
assertThat(((TupleTypeInfo<?>) tti.getTypeAt(2)).getTypeAt(0))
.isEqualTo(BasicTypeInfo.LONG_TYPE_INFO);
assertThat(((TupleTypeInfo<?>) tti.getTypeAt(2)).getTypeAt(1))
.isEqualTo(BasicTypeInfo.LONG_TYPE_INFO);
// use getForObject()
Tuple3<Tuple1<String>, Tuple1<Integer>, Tuple2<Long, Long>> t =
new Tuple3<Tuple1<String>, Tuple1<Integer>, Tuple2<Long, Long>>(
new Tuple1<String>("hello"),
new Tuple1<Integer>(1),
new Tuple2<Long, Long>(2L, 3L));
assertThat(TypeExtractor.getForObject(t) instanceof TupleTypeInfo).isTrue();
TupleTypeInfo<?> tti2 = (TupleTypeInfo<?>) TypeExtractor.getForObject(t);
assertThat(tti2.getTypeAt(0).getArity()).isOne();
assertThat(tti2.getTypeAt(1).getArity()).isOne();
assertThat(tti2.getTypeAt(2).getArity()).isEqualTo(2);
assertThat(((TupleTypeInfo<?>) tti2.getTypeAt(0)).getTypeAt(0))
.isEqualTo(BasicTypeInfo.STRING_TYPE_INFO);
assertThat(((TupleTypeInfo<?>) tti2.getTypeAt(1)).getTypeAt(0))
.isEqualTo(BasicTypeInfo.INT_TYPE_INFO);
assertThat(((TupleTypeInfo<?>) tti2.getTypeAt(2)).getTypeAt(0))
.isEqualTo(BasicTypeInfo.LONG_TYPE_INFO);
assertThat(((TupleTypeInfo<?>) tti2.getTypeAt(2)).getTypeAt(1))
.isEqualTo(BasicTypeInfo.LONG_TYPE_INFO);
}
@SuppressWarnings({"unchecked", "rawtypes"})
@Test
void testTuple0() {
// use getFlatMapReturnTypes()
RichFlatMapFunction<?, ?> function =
new RichFlatMapFunction<Tuple0, Tuple0>() {
private static final long serialVersionUID = 1L;
@Override
public void flatMap(Tuple0 value, Collector<Tuple0> out) throws Exception {
// nothing to do
}
};
TypeInformation<?> ti =
TypeExtractor.getFlatMapReturnTypes(
function, (TypeInformation) TypeInformation.of(new TypeHint<Tuple0>() {}));
assertThat(ti.isTupleType()).isTrue();
assertThat(ti.getArity()).isZero();
assertThat(ti).isInstanceOf(TupleTypeInfo.class);
}
@SuppressWarnings({"unchecked", "rawtypes"})
@Test
void testSubclassOfTuple() {
    // Extraction through getFlatJoinReturnTypes(): the output type is a
    // user-defined subclass of Tuple2<String, Integer>.
    RichFlatJoinFunction<?, ?, ?> joiner =
            new RichFlatJoinFunction<CustomTuple, String, CustomTuple>() {
                private static final long serialVersionUID = 1L;

                @Override
                public void join(CustomTuple first, String second, Collector<CustomTuple> out)
                        throws Exception {
                    out.collect(null);
                }
            };

    TypeInformation<?> joinType =
            TypeExtractor.getFlatJoinReturnTypes(
                    joiner,
                    (TypeInformation)
                            TypeInformation.of(new TypeHint<Tuple2<String, Integer>>() {}),
                    (TypeInformation) Types.STRING);

    assertThat(joinType.isTupleType()).isTrue();
    assertThat(joinType.getArity()).isEqualTo(2);
    assertThat(((TupleTypeInfo<?>) joinType).getTypeAt(0)).isEqualTo(BasicTypeInfo.STRING_TYPE_INFO);
    assertThat(((TupleTypeInfo<?>) joinType).getTypeAt(1)).isEqualTo(BasicTypeInfo.INT_TYPE_INFO);
    assertThat(((TupleTypeInfo<?>) joinType).getTypeClass()).isEqualTo(CustomTuple.class);

    // Extraction from a concrete instance through getForObject().
    CustomTuple instance = new CustomTuple("hello", 1);
    TypeInformation<?> objectType = TypeExtractor.getForObject(instance);

    assertThat(objectType.isTupleType()).isTrue();
    assertThat(objectType.getArity()).isEqualTo(2);
    assertThat(((TupleTypeInfo<?>) objectType).getTypeAt(0)).isEqualTo(BasicTypeInfo.STRING_TYPE_INFO);
    assertThat(((TupleTypeInfo<?>) objectType).getTypeAt(1)).isEqualTo(BasicTypeInfo.INT_TYPE_INFO);
    assertThat(((TupleTypeInfo<?>) objectType).getTypeClass()).isEqualTo(CustomTuple.class);
}
public static | TypeExtractorTest |
java | processing__processing4 | core/src/processing/opengl/PGL.java | {
"start": 1937,
"end": 37763
} | interface ____ a primary surface PGraphics */
public boolean primaryPGL;
// ........................................................
// Parameters
public static int REQUESTED_DEPTH_BITS = 24;
public static int REQUESTED_STENCIL_BITS = 8;
public static int REQUESTED_ALPHA_BITS = 8;
/** Switches between the use of regular and direct buffers. */
protected static boolean USE_DIRECT_BUFFERS = true;
protected static int MIN_DIRECT_BUFFER_SIZE = 1;
/** Enables/disables mipmap use. */
protected static boolean MIPMAPS_ENABLED = true;
/** Initial sizes for arrays of input and tessellated data. */
protected static int DEFAULT_IN_VERTICES = 64;
protected static int DEFAULT_IN_EDGES = 128;
protected static int DEFAULT_IN_TEXTURES = 64;
protected static int DEFAULT_TESS_VERTICES = 64;
protected static int DEFAULT_TESS_INDICES = 128;
/** Maximum lights by default is 8, the minimum defined by OpenGL. */
protected static int MAX_LIGHTS = 8;
/** Maximum index value of a tessellated vertex. GLES restricts the vertex
* indices to be of type unsigned short. Since Java only supports signed
* shorts as primitive type we have 2^15 = 32768 as the maximum number of
* vertices that can be referred to within a single VBO.
*/
protected static int MAX_VERTEX_INDEX = 32767;
protected static int MAX_VERTEX_INDEX1 = MAX_VERTEX_INDEX + 1;
/** Count of tessellated fill, line or point vertices that will
* trigger a flush in the immediate mode. It doesn't necessarily
* be equal to MAX_VERTEX_INDEX1, since the number of vertices can
* be effectively much large since the renderer uses offsets to
* refer to vertices beyond the MAX_VERTEX_INDEX limit.
*/
protected static int FLUSH_VERTEX_COUNT = MAX_VERTEX_INDEX1;
/** Minimum/maximum dimensions of a texture used to hold font data. */
protected static int MIN_FONT_TEX_SIZE = 256;
protected static int MAX_FONT_TEX_SIZE = 1024;
/** Minimum stroke weight needed to apply the full path stroking
* algorithm that properly generates caps and joins.
*/
protected static float MIN_CAPS_JOINS_WEIGHT = 2f;
/** Maximum length of linear paths to be stroked with the
* full algorithm that generates accurate caps and joins.
*/
protected static int MAX_CAPS_JOINS_LENGTH = 5000;
/** Minimum array size to use arrayCopy method(). */
protected static int MIN_ARRAYCOPY_SIZE = 2;
/** Factor used to displace the stroke vertices towards the camera in
* order to make sure the lines are always on top of the fill geometry */
protected static float STROKE_DISPLACEMENT = 0.999f;
// ........................................................
// These parameters are left public so advanced users can experiment with different
// configurations of buffer object streaming, buffer usage modes and access policies.
/** Controls the use of buffer object streaming:
* https://www.khronos.org/opengl/wiki/Buffer_Object_Streaming
* In combination with use direct buffers,
* the only advantage of enabling it in immediate mode would be to reduce memory footprint
* since the direct vertex buffers would not be allocated, simply mapped from the OpenGL
* objects and thus only the vertex arrays would be created.
* In the case of the retained mode (PShape), memory footprint would be reduced (for the same
* reason) but it may enable some speed-ups when editing a geometry within a begin/end
* tessellation update block. */
static public boolean bufferStreamingImmediate = false;
static public boolean bufferStreamingRetained = true;
/** Controls the usage of the buffer data store:
* https://www.khronos.org/registry/OpenGL-Refpages/gl4/html/glBufferData.xhtml
* Supported options include STATIC_DRAW, DYNAMIC_DRAW, STREAM_DRAW, and STREAM_READ.
*/
static public int bufferUsageRetained;
static public int bufferUsageImmediate;
/** Controls the access to the mapped buffer object's data store (when using buffer streaming).
* https://www.khronos.org/registry/OpenGL-Refpages/gl4/html/glMapBuffer.xhtml
* Supported options are READ_ONLY, WRITE_ONLY, and READ_WRITE.
*/
static public int bufferMapAccess;
// ........................................................
// Variables to handle single-buffered situations (i.e.: Android)
protected IntBuffer firstFrame;
protected static boolean SINGLE_BUFFERED = false;
// ........................................................
// FBO layer
protected boolean fboLayerEnabled = false;
protected boolean fboLayerCreated = false;
protected boolean fboLayerEnabledReq = false;
protected boolean fboLayerDisableReq = false;
protected boolean fbolayerResetReq = false;
public int reqNumSamples;
protected int numSamples;
protected IntBuffer glColorFbo;
protected IntBuffer glColorTex;
protected IntBuffer glDepthStencil;
protected IntBuffer glDepth;
protected IntBuffer glStencil;
protected IntBuffer glMultiFbo;
protected IntBuffer glMultiColor;
protected IntBuffer glMultiDepthStencil;
protected IntBuffer glMultiDepth;
protected IntBuffer glMultiStencil;
protected int fboWidth, fboHeight;
protected int backTex, frontTex;
/** Flags used to handle the creation of a separate front texture */
protected boolean usingFrontTex = false;
protected boolean needSepFrontTex = false;
/**
* Defines if FBO Layer is allowed in the given environment.
* Using FBO can cause a fatal error during runtime for
* Intel HD Graphics 3000 chipsets (commonly used on older MacBooks)
* <a href="https://github.com/processing/processing/issues/4104">#4104</a>
* Changed to private because needs to be accessed via isFboAllowed().
* <a href="https://github.com/processing/processing4/pull/76">#76</a> and
* <a href="https://github.com/processing/processing4/issues/50">#50</a>
*/
private Boolean fboAllowed = true;
// ........................................................
// Texture rendering
protected boolean loadedTex2DShader = false;
protected int tex2DShaderProgram;
protected int tex2DVertShader;
protected int tex2DFragShader;
protected int tex2DShaderContext;
protected int tex2DVertLoc;
protected int tex2DTCoordLoc;
protected int tex2DSamplerLoc;
protected int tex2DGeoVBO;
protected boolean loadedTexRectShader = false;
protected int texRectShaderProgram;
protected int texRectVertShader;
protected int texRectFragShader;
protected int texRectShaderContext;
protected int texRectVertLoc;
protected int texRectTCoordLoc;
protected int texRectSamplerLoc;
protected int texRectGeoVBO;
protected float[] texCoords = {
// X, Y, U, V
-1.0f, -1.0f, 0.0f, 0.0f,
+1.0f, -1.0f, 1.0f, 0.0f,
-1.0f, +1.0f, 0.0f, 1.0f,
+1.0f, +1.0f, 1.0f, 1.0f
};
protected FloatBuffer texData;
protected static final String SHADER_PREPROCESSOR_DIRECTIVE =
"#ifdef GL_ES\n" +
"precision mediump float;\n" +
"precision mediump int;\n" +
"#endif\n";
protected static String[] texVertShaderSource = {
"attribute vec2 position;",
"attribute vec2 texCoord;",
"varying vec2 vertTexCoord;",
"void main() {",
" gl_Position = vec4(position, 0, 1);",
" vertTexCoord = texCoord;",
"}"
};
protected static String[] tex2DFragShaderSource = {
SHADER_PREPROCESSOR_DIRECTIVE,
"uniform sampler2D texMap;",
"varying vec2 vertTexCoord;",
"void main() {",
" gl_FragColor = texture2D(texMap, vertTexCoord.st);",
"}"
};
protected static String[] texRectFragShaderSource = {
SHADER_PREPROCESSOR_DIRECTIVE,
"uniform sampler2DRect texMap;",
"varying vec2 vertTexCoord;",
"void main() {",
" gl_FragColor = texture2DRect(texMap, vertTexCoord.st);",
"}"
};
/** Which texturing targets are enabled */
protected boolean[] texturingTargets = { false, false };
/** Used to keep track of which textures are bound to each target */
protected int maxTexUnits;
protected int activeTexUnit = 0;
protected int[][] boundTextures;
// ........................................................
// Framerate handling
protected float targetFps = 60;
protected float currentFps = 60;
protected boolean setFps = false;
// ........................................................
// Utility buffers
protected ByteBuffer byteBuffer;
protected IntBuffer intBuffer;
protected IntBuffer viewBuffer;
protected IntBuffer colorBuffer;
protected FloatBuffer depthBuffer;
protected ByteBuffer stencilBuffer;
//........................................................
// Rendering information
/** Used to register amount of geometry rendered in each frame. */
protected int geomCount = 0;
protected int pgeomCount;
/** Used to register calls to background. */
protected boolean clearColor = false;
protected boolean pclearColor;
protected boolean clearDepth = false;
protected boolean pclearDepth;
protected boolean clearStencil = false;
protected boolean pclearStencil;
// ........................................................
// Error messages
public static final String WIKI =
" Read http://wiki.processing.org/w/OpenGL_Issues for help.";
public static final String FRAMEBUFFER_ERROR =
"Framebuffer error (%1$s), rendering will probably not work as expected" + WIKI;
public static final String MISSING_FBO_ERROR =
"Framebuffer objects are not supported by this hardware (or driver)" + WIKI;
public static final String MISSING_GLSL_ERROR =
"GLSL shaders are not supported by this hardware (or driver)" + WIKI;
public static final String MISSING_GLFUNC_ERROR =
"GL function %1$s is not available on this hardware (or driver)" + WIKI;
public static final String UNSUPPORTED_GLPROF_ERROR =
"Unsupported OpenGL profile.";
public static final String TEXUNIT_ERROR =
"Number of texture units not supported by this hardware (or driver)" + WIKI;
public static final String NONPRIMARY_ERROR =
"The renderer is trying to call a PGL function that can only be called on a primary PGL. " +
"This is most likely due to a bug in the renderer's code, please report it with an " +
"issue on Processing's github page https://github.com/processing/processing/issues?state=open " +
"if using any of the built-in OpenGL renderers. If you are using a contributed " +
"library, contact the library's developers.";
protected static final String DEPTH_READING_NOT_ENABLED_ERROR =
"Reading depth and stencil values from this multisampled buffer is not enabled. " +
"You can enable it by calling hint(ENABLE_DEPTH_READING) once. " +
"If your sketch becomes too slow, disable multisampling with noSmooth() instead.";
// ........................................................
// Constants
/** Size of different types in bytes */
protected static int SIZEOF_SHORT = Short.SIZE / 8;
protected static int SIZEOF_INT = Integer.SIZE / 8;
protected static int SIZEOF_FLOAT = Float.SIZE / 8;
protected static int SIZEOF_BYTE = Byte.SIZE / 8;
protected static int SIZEOF_INDEX = SIZEOF_SHORT;
protected static int INDEX_TYPE = 0x1403; // GL_UNSIGNED_SHORT
/** Machine Epsilon for float precision. */
protected static float FLOAT_EPS;
// Calculation of the Machine Epsilon for float precision. From:
// http://en.wikipedia.org/wiki/Machine_epsilon#Approximation_using_Java
static {
float eps = 1.0f;
do {
eps /= 2.0f;
} while ((float)(1.0 + (eps / 2.0)) != 1.0);
FLOAT_EPS = eps;
}
/**
* Set to true if the host system is big endian (PowerPC, MIPS, SPARC), false
* if little endian (x86 Intel for Mac or PC).
*/
protected static boolean BIG_ENDIAN =
ByteOrder.nativeOrder() == ByteOrder.BIG_ENDIAN;
// ........................................................
// Present mode
protected boolean presentMode = false;
protected boolean showStopButton = true;
public float presentX;
public float presentY;
protected IntBuffer closeButtonTex;
protected int stopButtonColor;
protected int stopButtonWidth = 28;
protected int stopButtonHeight = 12;
protected int stopButtonX = 21; // The position of the close button is relative to the
protected int closeButtonY = 21; // lower left corner
protected static int[] closeButtonPix = {
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, -1, -1, -1, 0, 0, 0, -1, -1, -1, -1, -1, 0, 0, 0, -1,
-1, -1, 0, 0, 0, -1, -1, -1, -1, 0, 0, 0, -1, 0, 0, 0, -1, 0, 0, 0, 0, -1, 0,
0, 0, 0, -1, -1, 0, -1, -1, 0, 0, -1, -1, 0, -1, -1, 0, 0, -1, 0, 0, 0, 0, 0,
0, 0, 0, -1, 0, 0, 0, 0, -1, 0, 0, 0, -1, 0, 0, -1, 0, 0, 0, -1, 0, 0, 0, -1,
-1, -1, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, -1, 0, 0, 0, -1, 0, 0, -1, 0, 0, 0, -1,
0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, -1, 0, 0, 0, 0, -1, 0, 0, 0, -1, 0, 0, -1,
0, 0, 0, -1, 0, 0, -1, 0, 0, 0, -1, 0, 0, 0, 0, -1, 0, 0, 0, 0, -1, -1, 0, -1,
-1, 0, 0, -1, -1, 0, -1, -1, 0, 0, 0, -1, -1, -1, 0, 0, 0, 0, 0, -1, -1, -1,
0, 0, 0, -1, -1, -1, 0, 0, 0, -1, -1, -1, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0};
///////////////////////////////////////////////////////////////
// Initialization, finalization
// No-arg constructor; leaves the renderer unset (used by subclasses that
// perform their own initialization).
public PGL() { }

// Creates a PGL tied to the given renderer. Allocates the IntBuffers that
// will hold the GL names of the FBO-layer objects (textures, framebuffers,
// renderbuffers) if not already allocated, plus small scratch buffers used
// for state queries.
public PGL(PGraphicsOpenGL pg) {
this.graphics = pg;
if (glColorTex == null) {
glColorFbo = allocateIntBuffer(1);
glColorTex = allocateIntBuffer(2);   // two color textures: back and front
glDepthStencil = allocateIntBuffer(1);
glDepth = allocateIntBuffer(1);
glStencil = allocateIntBuffer(1);
glMultiFbo = allocateIntBuffer(1);
glMultiColor = allocateIntBuffer(1);
glMultiDepthStencil = allocateIntBuffer(1);
glMultiDepth = allocateIntBuffer(1);
glMultiStencil = allocateIntBuffer(1);
}
// Scratch buffers for single-value GL queries and viewport reads.
byteBuffer = allocateByteBuffer(1);
intBuffer = allocateIntBuffer(1);
viewBuffer = allocateIntBuffer(4);
}
// Releases the GL resources owned by this PGL (currently the FBO layer).
public void dispose() {
destroyFBOLayer();
}

// Marks whether this PGL backs the sketch's primary (on-screen) surface.
public void setPrimary(boolean primary) {
primaryPGL = primary;
}
// Maps the sketch-level smooth(N) argument to a multisampling sample count:
// smooth(0) is noSmooth() (1x), smooth(1) is "default smoothing" which is
// 2x for OpenGL, and any other value (4, 8, ...) is used as-is.
static public int smoothToSamples(int smooth) {
  switch (smooth) {
    case 0:
      return 1;
    case 1:
      return 2;
    default:
      return smooth;
  }
}
abstract public Object getNative();
abstract protected void setFrameRate(float fps);
abstract protected void initSurface(int antialias);
abstract protected void reinitSurface();
abstract protected void registerListeners();
// GL name of the framebuffer to read from: the FBO layer's color FBO when
// the layer is enabled, otherwise the default framebuffer (0).
protected int getReadFramebuffer() {
return fboLayerEnabled ? glColorFbo.get(0) : 0;
}

// GL name of the framebuffer to draw into: the multisampled FBO when
// multisampling is on, the single-sample color FBO otherwise, or 0 when the
// FBO layer is disabled.
protected int getDrawFramebuffer() {
if (fboLayerEnabled) return 1 < numSamples ? glMultiFbo.get(0) :
glColorFbo.get(0);
else return 0;
}

// Default draw buffer attachment for the current configuration.
protected int getDefaultDrawBuffer() {
return fboLayerEnabled ? COLOR_ATTACHMENT0 : BACK;
}

// Default read buffer attachment for the current configuration.
protected int getDefaultReadBuffer() {
return fboLayerEnabled ? COLOR_ATTACHMENT0 : FRONT;
}

// True when drawing goes through the offscreen FBO layer.
protected boolean isFBOBacked() {
return fboLayerEnabled;
}

// Kept for backwards compatibility; use enableFBOLayer() instead.
@Deprecated
public void requestFBOLayer() {
enableFBOLayer();
}

// Requests that the FBO layer be enabled at the start of the next frame
// (applied in beginRender()).
public void enableFBOLayer() {
fboLayerEnabledReq = true;
}

// Requests that the FBO layer be disabled at the end of the current frame
// (applied in endRender()).
public void disableFBOLayer() {
fboLayerDisableReq = true;
}

// Requests that the FBO layer be destroyed and re-created on the next frame.
public void resetFBOLayer() {
fbolayerResetReq = true;
}

// True when more than one sample per pixel is in use.
protected boolean isMultisampled() {
return 1 < numSamples;
}
abstract protected int getDepthBits();
abstract protected int getStencilBits();
// Queries GL for whether depth testing is currently enabled.
protected boolean getDepthTest() {
intBuffer.rewind();
getBooleanv(DEPTH_TEST, intBuffer);
return intBuffer.get(0) != 0;
}

// Queries GL for the current depth-buffer write mask.
protected boolean getDepthWriteMask() {
intBuffer.rewind();
getBooleanv(DEPTH_WRITEMASK, intBuffer);
return intBuffer.get(0) != 0;
}
// Wraps the FBO layer's back color texture in a Texture object. On first
// call the Texture is created and registered in the renderer's cache; on
// later calls only the GL name is refreshed (back/front swap every frame).
protected Texture wrapBackTexture(Texture texture) {
if (texture == null) {
texture = new Texture(graphics);
texture.init(graphics.width, graphics.height,
glColorTex.get(backTex), TEXTURE_2D, RGBA,
fboWidth, fboHeight, NEAREST, NEAREST,
CLAMP_TO_EDGE, CLAMP_TO_EDGE);
texture.invertedY(true);
texture.colorBuffer(true);
graphics.setCache(graphics, texture);
} else {
texture.glName = glColorTex.get(backTex);
}
return texture;
}

// Same as wrapBackTexture() but for the front color texture. Note that,
// unlike the back texture, it is not stored in the renderer's cache.
protected Texture wrapFrontTexture(Texture texture) {
if (texture == null) {
texture = new Texture(graphics);
texture.init(graphics.width, graphics.height,
glColorTex.get(frontTex), TEXTURE_2D, RGBA,
fboWidth, fboHeight, NEAREST, NEAREST,
CLAMP_TO_EDGE, CLAMP_TO_EDGE);
texture.invertedY(true);
texture.colorBuffer(true);
} else {
texture.glName = glColorTex.get(frontTex);
}
return texture;
}
// Binds the front color texture, enabling 2D texturing if needed, and
// records that the front texture was used this frame (see syncBackTexture()).
protected void bindFrontTexture() {
usingFrontTex = true;
if (!texturingIsEnabled(TEXTURE_2D)) {
enableTexturing(TEXTURE_2D);
}
bindTexture(TEXTURE_2D, glColorTex.get(frontTex));
}

// Unbinds the front texture only if it is the one currently bound, taking
// care to restore the texturing enabled/disabled state as it was found.
protected void unbindFrontTexture() {
if (textureIsBound(TEXTURE_2D, glColorTex.get(frontTex))) {
// We don't want to unbind another texture
// that might be bound instead of this one.
if (!texturingIsEnabled(TEXTURE_2D)) {
enableTexturing(TEXTURE_2D);
bindTexture(TEXTURE_2D, 0);
disableTexturing(TEXTURE_2D);
} else {
bindTexture(TEXTURE_2D, 0);
}
}
}
// Resolves the multisampled FBO into the single-sample back texture by
// blitting. When the front texture was used this frame, flags that a
// separate front texture is needed. Depth/stencil are only blitted when
// buffer reading is enabled via the ENABLE_BUFFER_READING hint.
protected void syncBackTexture() {
if (usingFrontTex) needSepFrontTex = true;
if (1 < numSamples) {
bindFramebufferImpl(READ_FRAMEBUFFER, glMultiFbo.get(0));
bindFramebufferImpl(DRAW_FRAMEBUFFER, glColorFbo.get(0));
int mask = COLOR_BUFFER_BIT;
if (graphics.getHint(PConstants.ENABLE_BUFFER_READING)) {
mask |= DEPTH_BUFFER_BIT | STENCIL_BUFFER_BIT;
}
blitFramebuffer(0, 0, fboWidth, fboHeight,
0, 0, fboWidth, fboHeight,
mask, NEAREST);
}
}
abstract protected float getPixelScale();
///////////////////////////////////////////////////////////
// Present mode
// Enables "present" mode: the sketch is drawn at (x, y) inside the window,
// with a stop button shown unless stopColor is 0. Forces the FBO layer on.
public void initPresentMode(float x, float y, int stopColor) {
presentMode = true;
showStopButton = stopColor != 0;
stopButtonColor = stopColor;
presentX = x;
presentY = y;
enableFBOLayer();
}

// Whether present mode is active.
public boolean presentMode() {
return presentMode;
}

// Horizontal offset of the sketch inside the window in present mode.
public float presentX() {
return presentX;
}

// Vertical offset of the sketch inside the window in present mode.
public float presentY() {
return presentY;
}

// True if (x, y) falls inside the stop button. Per the comparisons below, y
// is expected to be negative, measured from the lower-left corner upwards.
public boolean insideStopButton(float x, float y) {
if (!showStopButton) return false;
return stopButtonX < x && x < stopButtonX + stopButtonWidth &&
-(closeButtonY + stopButtonHeight) < y && y < -closeButtonY;
}
///////////////////////////////////////////////////////////
// Frame rendering
// Clears whichever of the depth/stencil buffers was NOT cleared during the
// previous frame (tracked by the pclearDepth/pclearStencil flags), so stale
// values do not carry over into the new frame.
protected void clearDepthStencil() {
if (!pclearDepth && !pclearStencil) {
depthMask(true);   // depth writes must be enabled for the clear to take effect
clearDepth(1);
clearStencil(0);
clear(DEPTH_BUFFER_BIT | STENCIL_BUFFER_BIT);
} else if (!pclearDepth) {
depthMask(true);
clearDepth(1);
clear(DEPTH_BUFFER_BIT);
} else if (!pclearStencil) {
clearStencil(0);
clear(STENCIL_BUFFER_BIT);
}
}
// Clears the color buffer to (r, g, b, a), optionally together with the
// depth and/or stencil buffers, and records which buffers were cleared this
// frame (the clear* flags) once past the very first frame.
protected void clearBackground(float r, float g, float b, float a,
boolean depth, boolean stencil) {
clearColor(r, g, b, a);
if (depth && stencil) {
clearDepth(1);
clearStencil(0);
clear(DEPTH_BUFFER_BIT | STENCIL_BUFFER_BIT | COLOR_BUFFER_BIT);
if (0 < sketch.frameCount) {
clearDepth = true;
clearStencil = true;
}
} else if (depth) {
clearDepth(1);
clear(DEPTH_BUFFER_BIT | COLOR_BUFFER_BIT);
if (0 < sketch.frameCount) {
clearDepth = true;
}
} else if (stencil) {
clearStencil(0);
clear(STENCIL_BUFFER_BIT | COLOR_BUFFER_BIT);
if (0 < sketch.frameCount) {
clearStencil = true;
}
} else {
// Color only.
clear(PGL.COLOR_BUFFER_BIT);
}
if (0 < sketch.frameCount) {
clearColor = true;
}
}
// Per-frame setup. Rolls over the clear/geometry bookkeeping flags, applies
// any pending FBO-layer enable/reset requests, and when the layer is active
// binds the back texture as the render target, restoring the previous
// frame's contents when the sketch is drawing incrementally.
protected void beginRender() {
if (sketch == null) {
sketch = graphics.parent;
}
// Save last frame's counters/flags and reset for the new frame.
pgeomCount = geomCount;
geomCount = 0;
pclearColor = clearColor;
clearColor = false;
pclearDepth = clearDepth;
clearDepth = false;
pclearStencil = clearStencil;
clearStencil = false;
// Single-buffered surfaces need the saved first frame re-drawn once.
if (SINGLE_BUFFERED && sketch.frameCount == 1) {
restoreFirstFrame();
}
// Apply a pending FBO-layer enable request at a frame boundary.
if (fboLayerEnabledReq) {
fboLayerEnabled = true;
fboLayerEnabledReq = false;
}
if (fboLayerEnabled) {
if (fbolayerResetReq) {
destroyFBOLayer();
fbolayerResetReq = false;
}
if (!fboLayerCreated) {
createFBOLayer();
}
// Draw to the back texture
bindFramebufferImpl(FRAMEBUFFER, glColorFbo.get(0));
framebufferTexture2D(FRAMEBUFFER, COLOR_ATTACHMENT0,
TEXTURE_2D, glColorTex.get(backTex), 0);
if (1 < numSamples) {
// Multisampled rendering goes through the multisample FBO instead.
bindFramebufferImpl(FRAMEBUFFER, glMultiFbo.get(0));
}
if (sketch.frameCount == 0) {
// No need to draw back color buffer because we are in the first frame.
int argb = graphics.backgroundColor;
float ba = ((argb >> 24) & 0xff) / 255.0f;
float br = ((argb >> 16) & 0xff) / 255.0f;
float bg = ((argb >> 8) & 0xff) / 255.0f;
float bb = ((argb) & 0xff) / 255.0f;
clearColor(br, bg, bb, ba);
clear(COLOR_BUFFER_BIT);
} else if (!pclearColor || !sketch.isLooping()) {
// Render previous back texture (now is the front) as background,
// because no background() is being used ("incremental drawing")
int x = 0;
int y = 0;
if (presentMode) {
x = (int)presentX;
y = (int)presentY;
}
float scale = getPixelScale();
drawTexture(TEXTURE_2D, glColorTex.get(frontTex), fboWidth, fboHeight,
x, y, graphics.width, graphics.height,
0, 0, (int)(scale * graphics.width), (int)(scale * graphics.height),
0, 0, graphics.width, graphics.height);
}
}
}
// Per-frame teardown. With the FBO layer active: resolves multisampling,
// draws the back texture to the screen framebuffer (handling present mode
// and its stop button), swaps front/back textures, and applies a pending
// disable request. Without the layer: saves the first frame when
// single-buffered, and enables the layer when incremental drawing or a
// non-looping sketch requires preserving the frame contents.
protected void endRender(int windowColor) {
if (fboLayerEnabled) {
syncBackTexture();
// Draw the contents of the back texture to the screen framebuffer.
bindFramebufferImpl(FRAMEBUFFER, 0);
if (presentMode) {
// Fill the window with the given background color.
float wa = ((windowColor >> 24) & 0xff) / 255.0f;
float wr = ((windowColor >> 16) & 0xff) / 255.0f;
float wg = ((windowColor >> 8) & 0xff) / 255.0f;
float wb = (windowColor & 0xff) / 255.0f;
clearDepth(1);
clearColor(wr, wg, wb, wa);
clear(COLOR_BUFFER_BIT | DEPTH_BUFFER_BIT);
if (showStopButton) {
if (closeButtonTex == null) {
// Lazily build the stop-button texture from the 1-bit pixel mask,
// tinted with stopButtonColor.
closeButtonTex = allocateIntBuffer(1);
genTextures(1, closeButtonTex);
bindTexture(TEXTURE_2D, closeButtonTex.get(0));
texParameteri(TEXTURE_2D, TEXTURE_MIN_FILTER, NEAREST);
texParameteri(TEXTURE_2D, TEXTURE_MAG_FILTER, NEAREST);
texParameteri(TEXTURE_2D, TEXTURE_WRAP_S, CLAMP_TO_EDGE);
texParameteri(TEXTURE_2D, TEXTURE_WRAP_T, CLAMP_TO_EDGE);
texImage2D(TEXTURE_2D, 0, RGBA, stopButtonWidth, stopButtonHeight, 0, RGBA, UNSIGNED_BYTE, null);
int[] color = new int[closeButtonPix.length];
PApplet.arrayCopy(closeButtonPix, color);
// Multiply the texture by the button color
float ba = ((stopButtonColor >> 24) & 0xFF) / 255f;
float br = ((stopButtonColor >> 16) & 0xFF) / 255f;
float bg = ((stopButtonColor >> 8) & 0xFF) / 255f;
float bb = (stopButtonColor & 0xFF) / 255f;
for (int i = 0; i < color.length; i++) {
int c = closeButtonPix[i];
int a = (int)(ba * ((c >> 24) & 0xFF));
int r = (int)(br * ((c >> 16) & 0xFF));
int g = (int)(bg * ((c >> 8) & 0xFF));
int b = (int)(bb * (c & 0xFF));
color[i] = javaToNativeARGB((a << 24) | (r << 16) | (g << 8) | b);
}
IntBuffer buf = allocateIntBuffer(color);
copyToTexture(TEXTURE_2D, RGBA, closeButtonTex.get(0), 0, 0, stopButtonWidth, stopButtonHeight, buf);
bindTexture(TEXTURE_2D, 0);
}
drawTexture(TEXTURE_2D, closeButtonTex.get(0), stopButtonWidth, stopButtonHeight,
0, 0, stopButtonX + stopButtonWidth, closeButtonY + stopButtonHeight,
0, stopButtonHeight, stopButtonWidth, 0,
stopButtonX, closeButtonY, stopButtonX + stopButtonWidth, closeButtonY + stopButtonHeight);
}
} else {
clearDepth(1);
clearColor(0, 0, 0, 0);
clear(COLOR_BUFFER_BIT | DEPTH_BUFFER_BIT);
}
// Render current back texture to screen, without blending.
disable(BLEND);
int x = 0;
int y = 0;
if (presentMode) {
x = (int)presentX;
y = (int)presentY;
}
float scale = getPixelScale();
drawTexture(TEXTURE_2D, glColorTex.get(backTex),
fboWidth, fboHeight,
x, y, graphics.width, graphics.height,
0, 0, (int)(scale * graphics.width), (int)(scale * graphics.height),
0, 0, graphics.width, graphics.height);
// Swapping front and back textures.
int temp = frontTex;
frontTex = backTex;
backTex = temp;
// Apply a pending disable request at the frame boundary.
if (fboLayerDisableReq) {
fboLayerEnabled = false;
fboLayerDisableReq = false;
}
} else {
if (SINGLE_BUFFERED && sketch.frameCount == 0) {
saveFirstFrame();
}
if (isFboAllowed()) {
// Incremental drawing (no background() call) or a paused/non-looping
// sketch needs the FBO layer to preserve the frame contents.
if (!clearColor && 0 < sketch.frameCount || !sketch.isLooping()) {
enableFBOLayer();
if (SINGLE_BUFFERED) {
createFBOLayer();
}
}
}
}
}
protected abstract void getGL(PGL pgl);
protected abstract boolean canDraw();
protected abstract void requestFocus();
protected abstract void requestDraw();
protected abstract void swapBuffers();
// True when the calling thread is the one registered as the GL thread.
public boolean threadIsCurrent() {
return Thread.currentThread() == glThread;
}

// Registers the thread to treat as the GL thread.
public void setThread(Thread thread) {
glThread = thread;
}

// No-op hooks; subclasses may override.
protected void beginGL() { }
protected void endGL() { }
// Builds the offscreen FBO layer: two color textures (back/front), a color
// FBO, optional depth/stencil attachments, and — when multisampling is on —
// a separate multisampled FBO with renderbuffer attachments. Falls back to
// single-sample rendering if the multisampled framebuffer is incomplete.
private void createFBOLayer() {
float scale = getPixelScale();
// Texture dimensions: exact size if NPOT textures are supported,
// otherwise rounded up to the next power of two.
if (hasNpotTexSupport()) {
fboWidth = (int)(scale * graphics.width);
fboHeight = (int)(scale * graphics.height);
} else {
fboWidth = nextPowerOfTwo((int)(scale * graphics.width));
fboHeight = nextPowerOfTwo((int)(scale * graphics.height));
}
// Clamp the requested sample count to what the hardware supports.
if (hasFboMultisampleSupport()) {
int maxs = maxSamples();
numSamples = PApplet.min(reqNumSamples, maxs);
} else {
numSamples = 1;
}
boolean multisample = 1 < numSamples;
boolean packed = hasPackedDepthStencilSupport();
int depthBits = PApplet.min(REQUESTED_DEPTH_BITS, getDepthBits());
int stencilBits = PApplet.min(REQUESTED_STENCIL_BITS, getStencilBits());
// Create and initialize the two color textures (back and front).
genTextures(2, glColorTex);
for (int i = 0; i < 2; i++) {
bindTexture(TEXTURE_2D, glColorTex.get(i));
texParameteri(TEXTURE_2D, TEXTURE_MIN_FILTER, NEAREST);
texParameteri(TEXTURE_2D, TEXTURE_MAG_FILTER, NEAREST);
texParameteri(TEXTURE_2D, TEXTURE_WRAP_S, CLAMP_TO_EDGE);
texParameteri(TEXTURE_2D, TEXTURE_WRAP_T, CLAMP_TO_EDGE);
texImage2D(TEXTURE_2D, 0, RGBA, fboWidth, fboHeight, 0,
RGBA, UNSIGNED_BYTE, null);
initTexture(TEXTURE_2D, RGBA, fboWidth, fboHeight, graphics.backgroundColor);
}
bindTexture(TEXTURE_2D, 0);
backTex = 0;
frontTex = 1;
// Single-sample color FBO with the back texture attached.
genFramebuffers(1, glColorFbo);
bindFramebufferImpl(FRAMEBUFFER, glColorFbo.get(0));
framebufferTexture2D(FRAMEBUFFER, COLOR_ATTACHMENT0, TEXTURE_2D,
glColorTex.get(backTex), 0);
if (!multisample || graphics.getHint(PConstants.ENABLE_BUFFER_READING)) {
// If not multisampled, this is the only depth and stencil buffer.
// If multisampled and depth reading enabled, these are going to
// hold downsampled depth and stencil buffers.
createDepthAndStencilBuffer(false, depthBits, stencilBits, packed);
}
if (multisample) {
// Creating multisampled FBO
genFramebuffers(1, glMultiFbo);
bindFramebufferImpl(FRAMEBUFFER, glMultiFbo.get(0));
// color render buffer...
genRenderbuffers(1, glMultiColor);
bindRenderbuffer(RENDERBUFFER, glMultiColor.get(0));
renderbufferStorageMultisample(RENDERBUFFER, numSamples,
RGBA8, fboWidth, fboHeight);
framebufferRenderbuffer(FRAMEBUFFER, COLOR_ATTACHMENT0,
RENDERBUFFER, glMultiColor.get(0));
// Creating multisampled depth and stencil buffers
createDepthAndStencilBuffer(true, depthBits, stencilBits, packed);
}
int status = validateFramebuffer();
if (status == FRAMEBUFFER_INCOMPLETE_MULTISAMPLE && 1 < numSamples) {
// Multisampled FBO not usable on this hardware: rebuild single-sampled.
System.err.println("Continuing with multisampling disabled");
reqNumSamples = 1;
destroyFBOLayer();
// try again
createFBOLayer();
return;
}
// Clear all buffers.
clearDepth(1);
clearStencil(0);
int argb = graphics.backgroundColor;
float ba = ((argb >> 24) & 0xff) / 255.0f;
float br = ((argb >> 16) & 0xff) / 255.0f;
float bg = ((argb >> 8) & 0xff) / 255.0f;
float bb = ((argb) & 0xff) / 255.0f;
clearColor(br, bg, bb, ba);
clear(DEPTH_BUFFER_BIT | STENCIL_BUFFER_BIT | COLOR_BUFFER_BIT);
bindFramebufferImpl(FRAMEBUFFER, 0);
initFBOLayer();
fboLayerCreated = true;
}
protected abstract void initFBOLayer();
// Reads back the pixels of the just-rendered frame into firstFrame, so a
// single-buffered surface can re-draw them at the start of the next frame
// (see restoreFirstFrame()).
protected void saveFirstFrame() {
firstFrame = allocateDirectIntBuffer(graphics.width * graphics.height);
if (hasReadBuffer()) readBuffer(BACK);
readPixelsImpl(0, 0, graphics.width, graphics.height, RGBA, UNSIGNED_BYTE, firstFrame);
}
// Uploads the pixels saved by saveFirstFrame() into a temporary texture,
// draws that texture over the full surface, then releases the texture and
// the saved pixel buffer. No-op if there is nothing saved.
protected void restoreFirstFrame() {
if (firstFrame == null) return;
IntBuffer tex = allocateIntBuffer(1);
genTextures(1, tex);
int w, h;
float scale = getPixelScale();
// Match the sizing rule used by createFBOLayer(): exact size with NPOT
// support, next power of two otherwise.
if (hasNpotTexSupport()) {
w = (int)(scale * graphics.width);
h = (int)(scale * graphics.height);
} else {
w = nextPowerOfTwo((int)(scale * graphics.width));
h = nextPowerOfTwo((int)(scale * graphics.height));
}
bindTexture(TEXTURE_2D, tex.get(0));
texParameteri(TEXTURE_2D, TEXTURE_MIN_FILTER, NEAREST);
texParameteri(TEXTURE_2D, TEXTURE_MAG_FILTER, NEAREST);
texParameteri(TEXTURE_2D, TEXTURE_WRAP_S, CLAMP_TO_EDGE);
texParameteri(TEXTURE_2D, TEXTURE_WRAP_T, CLAMP_TO_EDGE);
texImage2D(TEXTURE_2D, 0, RGBA, w, h, 0, RGBA, UNSIGNED_BYTE, null);
texSubImage2D(TEXTURE_2D, 0, 0, 0, graphics.width, graphics.height, RGBA, UNSIGNED_BYTE, firstFrame);
drawTexture(TEXTURE_2D, tex.get(0), w, h,
0, 0, graphics.width, graphics.height,
0, 0, (int)(scale * graphics.width), (int)(scale * graphics.height),
0, 0, graphics.width, graphics.height);
deleteTextures(1, tex);
firstFrame.clear();
firstFrame = null;
}
// Deletes all GL objects of the FBO layer and clears the created flag.
// NOTE(review): when called off the GL thread (or before creation) the GL
// objects are not deleted, but fboLayerCreated is still reset — presumably
// the objects are reclaimed with the context; verify against callers.
protected void destroyFBOLayer() {
if (threadIsCurrent() && fboLayerCreated) {
deleteFramebuffers(1, glColorFbo);
deleteTextures(2, glColorTex);
deleteRenderbuffers(1, glDepthStencil);
deleteRenderbuffers(1, glDepth);
deleteRenderbuffers(1, glStencil);
deleteFramebuffers(1, glMultiFbo);
deleteRenderbuffers(1, glMultiColor);
deleteRenderbuffers(1, glMultiDepthStencil);
deleteRenderbuffers(1, glMultiDepth);
deleteRenderbuffers(1, glMultiStencil);
}
fboLayerCreated = false;
}
// Attaches depth and stencil storage to the currently bound framebuffer.
// Uses a single packed DEPTH24_STENCIL8 renderbuffer when supported and the
// requested bit depths match (24/8); otherwise creates separate depth and
// stencil renderbuffers sized from depthBits/stencilBits. The multisample
// flag selects between the single-sample and multisampled renderbuffer sets.
private void createDepthAndStencilBuffer(boolean multisample, int depthBits,
int stencilBits, boolean packed) {
// Creating depth and stencil buffers
if (packed && depthBits == 24 && stencilBits == 8) {
// packed depth+stencil buffer
IntBuffer depthStencilBuf =
multisample ? glMultiDepthStencil : glDepthStencil;
genRenderbuffers(1, depthStencilBuf);
bindRenderbuffer(RENDERBUFFER, depthStencilBuf.get(0));
if (multisample) {
renderbufferStorageMultisample(RENDERBUFFER, numSamples,
DEPTH24_STENCIL8, fboWidth, fboHeight);
} else {
renderbufferStorage(RENDERBUFFER, DEPTH24_STENCIL8,
fboWidth, fboHeight);
}
// The same renderbuffer backs both attachment points.
framebufferRenderbuffer(FRAMEBUFFER, DEPTH_ATTACHMENT, RENDERBUFFER,
depthStencilBuf.get(0));
framebufferRenderbuffer(FRAMEBUFFER, STENCIL_ATTACHMENT, RENDERBUFFER,
depthStencilBuf.get(0));
} else {
// separate depth and stencil buffers
if (0 < depthBits) {
int depthComponent = DEPTH_COMPONENT16;
if (depthBits == 32) {
depthComponent = DEPTH_COMPONENT32;
} else if (depthBits == 24) {
depthComponent = DEPTH_COMPONENT24;
//} else if (depthBits == 16) {
//depthComponent = DEPTH_COMPONENT16;
}
IntBuffer depthBuf = multisample ? glMultiDepth : glDepth;
genRenderbuffers(1, depthBuf);
bindRenderbuffer(RENDERBUFFER, depthBuf.get(0));
if (multisample) {
renderbufferStorageMultisample(RENDERBUFFER, numSamples,
depthComponent, fboWidth, fboHeight);
} else {
renderbufferStorage(RENDERBUFFER, depthComponent,
fboWidth, fboHeight);
}
framebufferRenderbuffer(FRAMEBUFFER, DEPTH_ATTACHMENT,
RENDERBUFFER, depthBuf.get(0));
}
if (0 < stencilBits) {
int stencilIndex = STENCIL_INDEX1;
if (stencilBits == 8) {
stencilIndex = STENCIL_INDEX8;
} else if (stencilBits == 4) {
stencilIndex = STENCIL_INDEX4;
//} else if (stencilBits == 1) {
//stencilIndex = STENCIL_INDEX1;
}
IntBuffer stencilBuf = multisample ? glMultiStencil : glStencil;
genRenderbuffers(1, stencilBuf);
bindRenderbuffer(RENDERBUFFER, stencilBuf.get(0));
if (multisample) {
renderbufferStorageMultisample(RENDERBUFFER, numSamples,
stencilIndex, fboWidth, fboHeight);
} else {
renderbufferStorage(RENDERBUFFER, stencilIndex,
fboWidth, fboHeight);
}
framebufferRenderbuffer(FRAMEBUFFER, STENCIL_ATTACHMENT,
RENDERBUFFER, stencilBuf.get(0));
}
}
}
///////////////////////////////////////////////////////////
// Context | for |
java | dropwizard__dropwizard | dropwizard-core/src/test/java/io/dropwizard/core/ApplicationTest.java | {
"start": 277,
"end": 351
} | class ____ extends Configuration {
}
private static | FakeConfiguration |
java | apache__kafka | server-common/src/main/java/org/apache/kafka/server/util/FileLock.java | {
"start": 1198,
"end": 3328
} | class ____ {
private static final Logger LOGGER = LoggerFactory.getLogger(FileLock.class);
private final File file;
private final FileChannel channel;
private java.nio.channels.FileLock flock;
public FileLock(File file) throws IOException {
this.file = file;
this.channel = FileChannel.open(file.toPath(), StandardOpenOption.CREATE, StandardOpenOption.READ,
StandardOpenOption.WRITE);
}
public File file() {
return file;
}
/**
* Lock the file or throw an exception if the lock is already held
*/
public synchronized void lock() throws IOException {
LOGGER.trace("Acquiring lock on {}", file.getAbsolutePath());
flock = channel.lock();
}
/**
* Try to lock the file and return true if the locking succeeds
*/
public synchronized boolean tryLock() throws IOException {
LOGGER.trace("Acquiring lock on {}", file.getAbsolutePath());
try {
// weirdly this method will return null if the lock is held by another
// process, but will throw an exception if the lock is held by this process
// so we have to handle both cases
flock = channel.tryLock();
return flock != null;
} catch (OverlappingFileLockException e) {
return false;
}
}
/**
* Unlock the lock if it is held
*/
public synchronized void unlock() throws IOException {
LOGGER.trace("Releasing lock on {}", file.getAbsolutePath());
if (flock != null) {
flock.release();
}
}
/**
* Destroy this lock, closing the associated FileChannel
*/
public synchronized void destroy() throws IOException {
unlock();
if (file.exists() && file.delete()) {
LOGGER.trace("Deleted {}", file.getAbsolutePath());
}
channel.close();
}
/**
* Unlock the file and close the associated FileChannel
*/
public synchronized void unlockAndClose() throws IOException {
unlock();
channel.close();
}
}
| FileLock |
java | reactor__reactor-core | reactor-core/src/main/java/reactor/core/publisher/BaseSubscriber.java | {
"start": 1049,
"end": 1964
} | class ____ a {@link Subscriber} implementation that lets the user
* perform a {@link #request(long)} and {@link #cancel()} on it directly. As the targeted
* use case is to manually handle requests, the {@link #hookOnSubscribe(Subscription)} and
* {@link #hookOnNext(Object)} hooks are expected to be implemented, but they nonetheless
* default to an unbounded request at subscription time. If you need to define a {@link Context}
* for this {@link BaseSubscriber}, simply override its {@link #currentContext()} method.
* <p>
* Override the other optional hooks {@link #hookOnComplete()},
* {@link #hookOnError(Throwable)} and {@link #hookOnCancel()}
* to customize the base behavior. You also have a termination hook,
* {@link #hookFinally(SignalType)}.
* <p>
* Most of the time, exceptions triggered inside hooks are propagated to
* {@link #onError(Throwable)} (unless there is a fatal exception). The | for |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/sql/ast/tree/expression/LiteralAsParameter.java | {
"start": 634,
"end": 1499
} | class ____<T> implements SelfRenderingExpression {
private final Literal literal;
private final String parameterMarker;
public LiteralAsParameter(Literal literal, String parameterMarker) {
this.literal = literal;
this.parameterMarker = parameterMarker;
}
@Override
public void renderToSql(SqlAppender sqlAppender, SqlAstTranslator<?> walker, SessionFactoryImplementor sessionFactory) {
final Size size = literal.getExpressionType() instanceof SqlTypedMapping sqlTypedMapping
? sqlTypedMapping.toSize()
: null;
literal.getJdbcMapping().getJdbcType().appendWriteExpression(
parameterMarker,
sqlAppender,
sessionFactory.getJdbcServices().getDialect()
);
}
@Override
public JdbcMappingContainer getExpressionType() {
return literal.getExpressionType();
}
public Literal getLiteral() {
return literal;
}
}
| LiteralAsParameter |
java | elastic__elasticsearch | test/framework/src/main/java/org/elasticsearch/index/KnownIndexVersions.java | {
"start": 653,
"end": 1135
} | class ____ {
/**
* A sorted list of all known index versions
*/
public static final NavigableSet<IndexVersion> ALL_VERSIONS = Collections.unmodifiableNavigableSet(
new TreeSet<>(IndexVersions.getAllVersions())
);
/**
* A sorted list of all known index versions that can be written to
*/
public static final NavigableSet<IndexVersion> ALL_WRITE_VERSIONS = ALL_VERSIONS.tailSet(IndexVersions.MINIMUM_COMPATIBLE, true);
}
| KnownIndexVersions |
java | apache__kafka | storage/src/main/java/org/apache/kafka/storage/internals/checkpoint/LazyOffsetCheckpoints.java | {
"start": 1074,
"end": 1914
} | class ____ implements OffsetCheckpoints {
private final Map<String, LazyOffsetCheckpointMap> lazyCheckpointsByLogDir;
public LazyOffsetCheckpoints(Map<String, OffsetCheckpointFile> checkpointsByLogDir) {
lazyCheckpointsByLogDir = checkpointsByLogDir.entrySet().stream()
.collect(Collectors.toMap(Map.Entry::getKey, entry -> new LazyOffsetCheckpointMap(entry.getValue())));
}
@Override
public Optional<Long> fetch(String logDir, TopicPartition topicPartition) {
LazyOffsetCheckpointMap offsetCheckpointFile = lazyCheckpointsByLogDir.get(logDir);
if (offsetCheckpointFile == null) {
throw new IllegalArgumentException("No checkpoint file for log dir " + logDir);
}
return offsetCheckpointFile.fetch(topicPartition);
}
static | LazyOffsetCheckpoints |
java | apache__camel | core/camel-cloud/src/main/java/org/apache/camel/impl/cloud/HealthyServiceFilter.java | {
"start": 1061,
"end": 1315
} | class ____ implements ServiceFilter {
@Override
public List<ServiceDefinition> apply(Exchange exchange, List<ServiceDefinition> services) {
return services.stream().filter(s -> s.getHealth().isHealthy()).toList();
}
}
| HealthyServiceFilter |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/BridgeMethodResolverTests.java | {
"start": 30425,
"end": 30605
} | interface ____<D extends DomainObjectSuper> {
<T> void doSomething(final D domainObject, final T value);
}
@SuppressWarnings("unused")
private abstract static | IGenericInterface |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleErrorTests.java | {
"start": 800,
"end": 1573
} | class ____ extends ErrorsForCasesWithoutExamplesTestCase {
@Override
protected List<TestCaseSupplier> cases() {
return paramsToSuppliers(ToDoubleTests.parameters());
}
@Override
protected Expression build(Source source, List<Expression> args) {
return new ToDouble(source, args.get(0));
}
@Override
protected Matcher<String> expectedTypeErrorMatcher(List<Set<DataType>> validPerPosition, List<DataType> signature) {
return equalTo(
typeErrorMessage(
false,
validPerPosition,
signature,
(v, p) -> "boolean or counter_double or counter_integer or counter_long or datetime or numeric or string"
)
);
}
}
| ToDoubleErrorTests |
java | apache__camel | components/camel-thymeleaf/src/test/java/org/apache/camel/component/thymeleaf/ThymeleafUrlResolverAllParamsTest.java | {
"start": 1852,
"end": 5563
} | class ____ extends ThymeleafAbstractBaseTest {
@Test
public void testThymeleaf() throws Exception {
stubFor(get("/dontcare.html").willReturn(ok(fragment())));
MockEndpoint mock = getMockEndpoint(MOCK_RESULT);
mock.expectedMessageCount(1);
mock.message(0).body().contains(YOU_WILL_NOTIFIED);
mock.message(0).header(ThymeleafConstants.THYMELEAF_TEMPLATE).isNull();
mock.message(0).header(FIRST_NAME).isEqualTo(JANE);
template.request(DIRECT_START, urlTemplateHeaderProcessor);
mock.assertIsSatisfied();
ThymeleafEndpoint thymeleafEndpoint = context.getEndpoint(
"thymeleaf:dontcare?allowTemplateFromHeader=true&allowContextMapAll=true&cacheTimeToLive=500&cacheable=false&encoding=UTF-8&order=1&prefix=&suffix=.html&resolver=URL&templateMode=HTML",
ThymeleafEndpoint.class);
assertAll("properties",
() -> assertNotNull(thymeleafEndpoint),
() -> assertTrue(thymeleafEndpoint.isAllowContextMapAll()),
() -> assertFalse(thymeleafEndpoint.getCacheable()),
() -> assertEquals(CACHE_TIME_TO_LIVE, thymeleafEndpoint.getCacheTimeToLive()),
() -> assertNull(thymeleafEndpoint.getCheckExistence()),
() -> assertEquals(UTF_8_ENCODING, thymeleafEndpoint.getEncoding()),
() -> assertEquals(ExchangePattern.InOut, thymeleafEndpoint.getExchangePattern()),
() -> assertEquals(ORDER, thymeleafEndpoint.getOrder()),
() -> assertEquals("", thymeleafEndpoint.getPrefix()),
() -> assertEquals(ThymeleafResolverType.URL, thymeleafEndpoint.getResolver()),
() -> assertEquals(HTML_SUFFIX, thymeleafEndpoint.getSuffix()),
() -> assertNotNull(thymeleafEndpoint.getTemplateEngine()),
() -> assertEquals(HTML, thymeleafEndpoint.getTemplateMode()));
assertEquals(1, thymeleafEndpoint.getTemplateEngine().getTemplateResolvers().size());
ITemplateResolver resolver = thymeleafEndpoint.getTemplateEngine().getTemplateResolvers().stream().findFirst().get();
assertTrue(resolver instanceof UrlTemplateResolver);
UrlTemplateResolver templateResolver = (UrlTemplateResolver) resolver;
assertAll("templateResolver",
() -> assertFalse(templateResolver.isCacheable()),
() -> assertEquals(CACHE_TIME_TO_LIVE, templateResolver.getCacheTTLMs()),
() -> assertEquals(UTF_8_ENCODING, templateResolver.getCharacterEncoding()),
() -> assertFalse(templateResolver.getCheckExistence()),
() -> assertEquals(ORDER, templateResolver.getOrder()),
() -> assertEquals("", templateResolver.getPrefix()),
() -> assertEquals(HTML_SUFFIX, templateResolver.getSuffix()),
() -> assertEquals(TemplateMode.HTML, templateResolver.getTemplateMode()));
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from(DIRECT_START)
.to("thymeleaf:dontcare?allowTemplateFromHeader=true&allowContextMapAll=true&cacheTimeToLive=500&cacheable=false&encoding=UTF-8&order=1&prefix=&suffix=.html&resolver=URL&templateMode=HTML")
.to(MOCK_RESULT);
}
};
}
protected String fragment() {
return """
<span th:fragment="test" th:remove="tag">
You will be notified when your order ships.
</span>
""";
}
}
| ThymeleafUrlResolverAllParamsTest |
java | quarkusio__quarkus | extensions/websockets-next/deployment/src/test/java/io/quarkus/websockets/next/test/telemetry/OpenTelemetryAnnotationsWebSocketsTest.java | {
"start": 2309,
"end": 18387
} | class ____ {
private static final String CUSTOM_SPAN_BOUNCE_ECHO = "custom.bounce.echo";
private static final String CUSTOM_SPAN_ON_CLOSE = "custom.bounce.close";
private static final String CUSTOM_SPAN_ON_ERROR = "custom.end.error";
@RegisterExtension
public static final QuarkusUnitTest test = new QuarkusUnitTest()
.withApplicationRoot(root -> root
.addClasses(OtelBounceEndpoint.class, WSClient.class, InMemorySpanExporterProducer.class,
OtelBounceClient.class, Endpoint.class)
.addAsResource(new StringAsset("""
quarkus.otel.bsp.export.timeout=1s
quarkus.otel.bsp.schedule.delay=50
"""), "application.properties"))
.setForcedDependencies(
List.of(Dependency.of("io.quarkus", "quarkus-opentelemetry-deployment", Version.getVersion())));
@TestHTTPResource("/bounce/vm")
URI bounceUri;
@TestHTTPResource("/")
URI baseUri;
@TestHTTPResource("end")
URI endUri;
@Inject
Vertx vertx;
@Inject
InMemorySpanExporter spanExporter;
@Inject
WebSocketConnector<OtelBounceClient> connector;
@BeforeEach
public void resetSpans() {
spanExporter.reset();
OtelBounceEndpoint.connectionId = null;
OtelBounceEndpoint.endpointId = null;
OtelBounceEndpoint.MESSAGES.clear();
OtelBounceClient.MESSAGES.clear();
OtelBounceClient.CLOSED_LATCH = new CountDownLatch(1);
OtelBounceEndpoint.CLOSED_LATCH = new CountDownLatch(1);
}
@Test
public void testServerEndpointTracesOnly() {
assertEquals(0, spanExporter.getFinishedSpanItems().size());
try (WSClient client = new WSClient(vertx)) {
client.connect(new WebSocketConnectOptions(), bounceUri);
var response = client.sendAndAwaitReply("How U Livin'").toString();
assertEquals("How U Livin'", response);
}
waitForTracesToArrive(5);
// out-of-the-box instrumentation - server traces
var initialRequestSpan = getSpanByName("GET /bounce/:grail", SpanKind.SERVER);
var connectionOpenedSpan = getSpanByName("OPEN /bounce/:grail", SpanKind.SERVER);
assertEquals("/bounce/:grail", getUriAttrVal(connectionOpenedSpan));
assertEquals(initialRequestSpan.getSpanId(), connectionOpenedSpan.getLinks().get(0).getSpanContext().getSpanId());
var connectionClosedSpan = getSpanByName("CLOSE /bounce/:grail", SpanKind.SERVER);
assertEquals("/bounce/:grail", getUriAttrVal(connectionClosedSpan));
assertEquals(OtelBounceEndpoint.connectionId, getConnectionIdAttrVal(connectionClosedSpan));
assertEquals(OtelBounceEndpoint.endpointId, getEndpointIdAttrVal(connectionClosedSpan));
assertEquals(1, connectionClosedSpan.getLinks().size());
assertEquals(connectionOpenedSpan.getSpanId(), connectionClosedSpan.getLinks().get(0).getSpanContext().getSpanId());
// custom span added as the server endpoint `@OnTextMessage` method is annotated with the @WithSpan
var customBounceServerSpan = getSpanByName(CUSTOM_SPAN_BOUNCE_ECHO, SpanKind.SERVER);
assertThat(customBounceServerSpan.getAttributes().get(AttributeKey.stringKey("code.function.name")))
.endsWith("OtelBounceEndpoint.serverEcho");
// test that it is possible to add SpanAttribute to the onMessage method allowed arguments
assertEquals("How U Livin'",
customBounceServerSpan.getAttributes().get(AttributeKey.stringKey("server-message-callback-param")));
assertThat(customBounceServerSpan.getAttributes().get(AttributeKey.stringKey("connection")))
// WebConnection toString contains endpoint id, path, ...
.contains("bounce-server-endpoint-id")
.contains("/bounce/vm");
assertThat(customBounceServerSpan.getAttributes().get(AttributeKey.stringKey("handshake-request")))
.contains("HandshakeRequest");
assertThat(customBounceServerSpan.getAttributes().get(AttributeKey.stringKey("path-param"))).isEqualTo("vm");
var onCloseSpan = getSpanByName(CUSTOM_SPAN_ON_CLOSE, SpanKind.SERVER);
assertThat(onCloseSpan.getAttributes().get(AttributeKey.stringKey("close-reason"))).contains("code=1000");
}
@Test
public void testClientAndServerEndpointTraces() throws InterruptedException {
var clientConn = connector.baseUri(baseUri).pathParam("grail", "vm").connectAndAwait();
clientConn.sendTextAndAwait("Make It Bun Dem");
// assert client and server called
Awaitility.await().untilAsserted(() -> {
assertEquals(1, OtelBounceEndpoint.MESSAGES.size());
assertEquals("Make It Bun Dem", OtelBounceEndpoint.MESSAGES.get(0));
assertEquals(1, OtelBounceClient.MESSAGES.size());
assertEquals("Make It Bun Dem", OtelBounceClient.MESSAGES.get(0));
});
clientConn.closeAndAwait();
// assert connection closed and client/server were notified
assertTrue(OtelBounceClient.CLOSED_LATCH.await(5, TimeUnit.SECONDS));
assertTrue(OtelBounceEndpoint.CLOSED_LATCH.await(5, TimeUnit.SECONDS));
waitForTracesToArrive(8);
// out-of-the-box instrumentation - server traces
var initialRequestSpan = getSpanByName("GET /bounce/:grail", SpanKind.SERVER);
var connectionOpenedSpan = getSpanByName("OPEN /bounce/:grail", SpanKind.SERVER);
assertEquals("/bounce/:grail", getUriAttrVal(connectionOpenedSpan));
assertEquals(initialRequestSpan.getSpanId(), connectionOpenedSpan.getLinks().get(0).getSpanContext().getSpanId());
var connectionClosedSpan = getSpanByName("CLOSE /bounce/:grail", SpanKind.SERVER);
assertEquals("/bounce/:grail", getUriAttrVal(connectionClosedSpan));
assertEquals(OtelBounceEndpoint.connectionId, getConnectionIdAttrVal(connectionClosedSpan));
assertEquals(OtelBounceEndpoint.endpointId, getEndpointIdAttrVal(connectionClosedSpan));
assertEquals(1, connectionClosedSpan.getLinks().size());
assertEquals(connectionOpenedSpan.getSpanId(), connectionClosedSpan.getLinks().get(0).getSpanContext().getSpanId());
// custom span added as the server endpoint `@OnTextMessage` method is annotated with the @WithSpan
var customBounceServerSpan = getSpanByName(CUSTOM_SPAN_BOUNCE_ECHO, SpanKind.SERVER);
assertThat(customBounceServerSpan.getAttributes().get(AttributeKey.stringKey("code.function.name")))
.endsWith("OtelBounceEndpoint.serverEcho");
// test that it is possible to add SpanAttribute to the onMessage method allowed arguments
assertEquals("Make It Bun Dem",
customBounceServerSpan.getAttributes().get(AttributeKey.stringKey("server-message-callback-param")));
assertThat(customBounceServerSpan.getAttributes().get(AttributeKey.stringKey("connection")))
// WebConnection toString contains endpoint id, path, ...
.contains("bounce-server-endpoint-id")
.contains("/bounce/vm");
assertThat(customBounceServerSpan.getAttributes().get(AttributeKey.stringKey("handshake-request")))
.contains("HandshakeRequest");
assertThat(customBounceServerSpan.getAttributes().get(AttributeKey.stringKey("path-param"))).isEqualTo("vm");
var onCloseSpan = getSpanByName(CUSTOM_SPAN_ON_CLOSE, SpanKind.SERVER);
assertThat(onCloseSpan.getAttributes().get(AttributeKey.stringKey("close-reason"))).contains("code=1000");
// out-of-the-box instrumentation - client traces
connectionOpenedSpan = getSpanByName("OPEN /bounce/{grail}", SpanKind.CLIENT);
assertEquals("/bounce/{grail}", getUriAttrVal(connectionOpenedSpan));
assertTrue(connectionOpenedSpan.getLinks().isEmpty());
connectionClosedSpan = getSpanByName("CLOSE /bounce/{grail}", SpanKind.CLIENT);
assertEquals("/bounce/{grail}", getUriAttrVal(connectionClosedSpan));
assertNotNull(getConnectionIdAttrVal(connectionClosedSpan));
assertNotNull(getClientIdAttrVal(connectionClosedSpan));
assertEquals(1, connectionClosedSpan.getLinks().size());
assertEquals(connectionOpenedSpan.getSpanId(), connectionClosedSpan.getLinks().get(0).getSpanContext().getSpanId());
// custom span added as the client endpoint `@OnTextMessage` method is annotated with the @WithSpan
var customBounceClientSpan = getSpanByName(CUSTOM_SPAN_BOUNCE_ECHO, SpanKind.CLIENT);
assertThat(customBounceClientSpan.getAttributes().get(AttributeKey.stringKey("code.function.name")))
.endsWith("OtelBounceClient.clientEcho");
// test that it is possible to add SpanAttribute to the onMessage method allowed arguments
assertEquals("Make It Bun Dem",
customBounceClientSpan.getAttributes().get(AttributeKey.stringKey("client-message-callback-param")));
assertThat(customBounceClientSpan.getAttributes().get(AttributeKey.stringKey("connection")))
.contains("bounce-client-id");
assertThat(customBounceClientSpan.getAttributes().get(AttributeKey.stringKey("handshake-request")))
.contains("HandshakeRequest");
assertThat(customBounceClientSpan.getAttributes().get(AttributeKey.stringKey("path-param"))).contains("vm");
}
@Test
public void testServerTracesWhenErrorOnMessage() {
assertEquals(0, spanExporter.getFinishedSpanItems().size());
try (WSClient client = new WSClient(vertx)) {
client.connect(new WebSocketConnectOptions(), bounceUri);
var response = client.sendAndAwaitReply("It's Alright, Ma").toString();
assertEquals("It's Alright, Ma", response);
response = client.sendAndAwaitReply("I'm Only Bleeding").toString();
assertEquals("I'm Only Bleeding", response);
client.sendAndAwait("throw-exception");
Awaitility.await().atMost(Duration.ofSeconds(5)).until(client::isClosed);
assertEquals(WebSocketCloseStatus.INTERNAL_SERVER_ERROR.code(), client.closeStatusCode());
}
waitForTracesToArrive(7);
// out-of-the-box instrumentation - server traces
var initialRequestSpan = getSpanByName("GET /bounce/:grail", SpanKind.SERVER);
var connectionOpenedSpan = getSpanByName("OPEN /bounce/:grail", SpanKind.SERVER);
assertEquals("/bounce/:grail", getUriAttrVal(connectionOpenedSpan));
assertEquals(initialRequestSpan.getSpanId(), connectionOpenedSpan.getLinks().get(0).getSpanContext().getSpanId());
var connectionClosedSpan = getSpanByName("CLOSE /bounce/:grail", SpanKind.SERVER);
assertEquals("/bounce/:grail", getUriAttrVal(connectionClosedSpan));
assertEquals(OtelBounceEndpoint.connectionId, getConnectionIdAttrVal(connectionClosedSpan));
assertEquals(OtelBounceEndpoint.endpointId, getEndpointIdAttrVal(connectionClosedSpan));
assertEquals(1, connectionClosedSpan.getLinks().size());
assertEquals(connectionOpenedSpan.getSpanId(), connectionClosedSpan.getLinks().get(0).getSpanContext().getSpanId());
// custom span added as the server endpoint `@OnTextMessage` method is annotated with the @WithSpan
assertThat(getSpansByName(CUSTOM_SPAN_BOUNCE_ECHO, SpanKind.SERVER).toList())
.hasSize(3)
.allMatch(span -> {
var callbackParam = span.getAttributes().get(AttributeKey.stringKey("server-message-callback-param"));
return List.of("throw-exception", "I'm Only Bleeding", "It's Alright, Ma").contains(callbackParam);
})
.allMatch(span -> {
String codeFunctionName = span.getAttributes().get(AttributeKey.stringKey("code.function.name"));
return codeFunctionName != null && codeFunctionName.endsWith("OtelBounceEndpoint.serverEcho");
});
var onCloseSpan = getSpanByName(CUSTOM_SPAN_ON_CLOSE, SpanKind.SERVER);
assertThat(onCloseSpan.getAttributes().get(AttributeKey.stringKey("close-reason")))
.contains("code=1011")
.contains("Failing 'serverEcho' to test behavior when an exception was thrown");
}
@Test
public void testSpanAttributeOnError() {
try (WSClient client = new WSClient(vertx)) {
client.connect(endUri);
client.waitForMessages(1);
assertEquals("ready", client.getMessages().get(0).toString());
client.sendAndAwait("hello");
client.waitForMessages(2);
assertEquals("unauthorized", client.getMessages().get(1).toString());
}
waitForTracesToArrive(4);
// out-of-the-box instrumentation - server traces
var connectionOpenedSpan = getSpanByName("OPEN /end", SpanKind.SERVER);
assertEquals("/end", getUriAttrVal(connectionOpenedSpan));
var getSpan = getSpanByName("GET /end", SpanKind.SERVER);
assertEquals("/end", getUriAttrVal(getSpan));
var connectionClosedSpan = getSpanByName("CLOSE /end", SpanKind.SERVER);
assertEquals("/end", getUriAttrVal(connectionClosedSpan));
// custom span added as the server endpoint `@OnError` method is annotated with the @WithSpan
var onErrorSpan = getSpanByName(CUSTOM_SPAN_ON_ERROR, SpanKind.SERVER);
assertThat(onErrorSpan.getAttributes().get(AttributeKey.stringKey("code.function.name")))
.endsWith("Endpoint.error");
// custom attribute for the error callback argument
assertEquals(UnauthorizedException.class.getName(),
onErrorSpan.getAttributes().get(AttributeKey.stringKey("custom.error")));
}
private String getConnectionIdAttrVal(SpanData connectionOpenedSpan) {
return connectionOpenedSpan
.getAttributes()
.get(AttributeKey.stringKey("connection.id"));
}
private String getClientIdAttrVal(SpanData connectionOpenedSpan) {
return connectionOpenedSpan
.getAttributes()
.get(AttributeKey.stringKey("connection.client.id"));
}
private String getUriAttrVal(SpanData connectionOpenedSpan) {
return connectionOpenedSpan.getAttributes().get(URL_PATH);
}
private String getEndpointIdAttrVal(SpanData connectionOpenedSpan) {
return connectionOpenedSpan
.getAttributes()
.get(AttributeKey.stringKey("connection.endpoint.id"));
}
private void waitForTracesToArrive(int expectedTracesCount) {
Awaitility.await()
.atMost(Duration.ofSeconds(5))
.untilAsserted(() -> assertEquals(expectedTracesCount, spanExporter.getFinishedSpanItems().size(),
() -> "expected " + expectedTracesCount + " spans to arrive, got "
+ spanExporter.getFinishedSpanItems().size() + " spans: "
+ spanExporter.getFinishedSpanItems().toString()));
}
private SpanData getSpanByName(String name, SpanKind kind) {
return getSpansByName(name, kind)
.findFirst()
.orElseThrow(() -> new AssertionError(
"Expected span name '" + name + "' and kind '" + kind + "' not found: "
+ spanExporter.getFinishedSpanItems()));
}
private Stream<SpanData> getSpansByName(String name, SpanKind kind) {
return spanExporter.getFinishedSpanItems()
.stream()
.filter(sd -> name.equals(sd.getName()))
.filter(sd -> sd.getKind() == kind);
}
@WebSocket(path = "/bounce/{grail}", endpointId = "bounce-server-endpoint-id")
public static | OpenTelemetryAnnotationsWebSocketsTest |
java | quarkusio__quarkus | integration-tests/infinispan-client/src/main/java/io/quarkus/it/infinispan/client/CacheSetup.java | {
"start": 1525,
"end": 7282
} | class ____ {
private static final Log log = LogFactory.getLog(CacheSetup.class);
public static final String DEFAULT_CACHE = "default";
public static final String MAGAZINE_CACHE = "magazine";
public static final String BOOKS_CACHE = "books";
public static final String AUTHORS_CACHE = "authors";
public static final String BOOKS_TX_CACHE = "bookstx";
@Inject
RemoteCacheManager cacheManager;
@Inject
@InfinispanClientName("another")
RemoteCacheManager anotherCacheManager;
@Inject
@Remote(AUTHORS_CACHE)
RemoteCache<String, Author> authors;
@Inject
@InfinispanClientName("another")
@Remote(AUTHORS_CACHE)
RemoteCache<String, Author> authorsSite2;
private final Map<String, Book> matches = new ConcurrentHashMap<>();
void onStart(@Observes StartupEvent ev) {
RemoteCache<String, Book> defaultCache = cacheManager.getCache(DEFAULT_CACHE);
RemoteCache<String, Magazine> magazineCache = cacheManager.getCache(MAGAZINE_CACHE);
RemoteCache<String, Book> booksCache = cacheManager.getCache(BOOKS_CACHE);
RemoteCache<String, Book> anotherBooksCache = anotherCacheManager.getCache(BOOKS_CACHE);
RemoteCache<String, Book> txBooksCache = cacheManager.getCache(BOOKS_TX_CACHE);
defaultCache.addClientListener(new EventPrintListener());
ContinuousQuery<String, Book> continuousQuery = Search.getContinuousQuery(defaultCache);
QueryFactory queryFactory = Search.getQueryFactory(defaultCache);
Query query = queryFactory.create("from book_sample.Book where publicationYear > 2011");
ContinuousQueryListener<String, Book> listener = new ContinuousQueryListener<String, Book>() {
@Override
public void resultJoining(String key, Book value) {
log.warn("Adding key: " + key + " for book: " + value);
matches.put(key, value);
}
@Override
public void resultLeaving(String key) {
log.warn("Removing key: " + key);
matches.remove(key);
}
@Override
public void resultUpdated(String key, Book value) {
log.warn("Entry updated: " + key);
}
};
continuousQuery.addContinuousQueryListener(query, listener);
log.info("Added continuous query listener");
Author gMartin = new Author("George", "Martin");
Author sonM = new Author("Son", "Martin");
Author rowling = new Author("J. K. Rowling", "Rowling");
Book hp1Book = new Book("Philosopher's Stone", "Harry Potter and the Philosopher's Stone", 1997,
Collections.singleton(rowling), Type.FANTASY, new BigDecimal("50.99"));
Book hp2Book = new Book("Chamber of Secrets", "Harry Potter and the Chamber of Secrets", 1998,
Collections.singleton(rowling), Type.FANTASY, new BigDecimal("50.99"));
Book hp3Book = new Book("Prisoner of Azkaban", "Harry Potter and the Prisoner of Azkaban", 1999,
Collections.singleton(rowling), Type.FANTASY, new BigDecimal("50.99"));
Book got1Book = new Book("Game of Thrones", "Lots of people perish", 2010, Collections.singleton(gMartin),
Type.FANTASY, new BigDecimal("23.99"));
Book got2Book = new Book("Game of Thrones Path 2", "They win?", 2023,
Collections.singleton(sonM), Type.FANTASY, new BigDecimal("54.99"));
defaultCache.put("book1", got1Book);
defaultCache.put("book2", got2Book);
Magazine mag1 = new Magazine("MAD", YearMonth.of(1952, 10), Collections.singletonList("Blob named Melvin"));
Magazine mag2 = new Magazine("TIME", YearMonth.of(1923, 3),
Arrays.asList("First helicopter", "Change in divorce law", "Adam's Rib movie released",
"German Reparation Payments"));
Magazine map3 = new Magazine("TIME", YearMonth.of(1997, 4),
Arrays.asList("Yep, I'm gay", "Backlash against HMOS", "False Hope on Breast Cancer?"));
magazineCache.put("first-mad", mag1);
magazineCache.put("first-time", mag2);
magazineCache.put("popular-time", map3);
authors.put("aut-1", gMartin);
authors.put("aut-2", sonM);
authorsSite2.put("aut-3", rowling);
booksCache.put("hp-1", hp1Book);
booksCache.put("hp-2", hp2Book);
booksCache.put("hp-3", hp3Book);
anotherBooksCache.put("hp-1", hp1Book);
TransactionManager transactionManager = txBooksCache.getTransactionManager();
if (transactionManager == null) {
throw new IllegalStateException("TransactionManager should not be null");
}
try {
transactionManager.begin();
txBooksCache.put("hp-1", hp1Book);
txBooksCache.put("hp-2", hp2Book);
txBooksCache.put("hp-3", hp3Book);
transactionManager.commit();
} catch (Exception e) {
log.error("Error committing transaction", e);
throw new RuntimeException(e);
}
try {
transactionManager.begin();
txBooksCache.put("got-1", got1Book);
txBooksCache.put("got-2", got2Book);
transactionManager.rollback();
} catch (Exception e) {
log.error("Error rollback transaction", e);
throw new RuntimeException(e);
}
// tx book cache size is 3
if (txBooksCache.size() > 3) {
throw new RuntimeException("Transactions did not work.");
}
}
public Map<String, Book> getMatches() {
return matches;
}
@ClientListener
static | CacheSetup |
java | apache__flink | flink-table/flink-table-common/src/test/java/org/apache/flink/table/functions/UserDefinedFunctionHelperTest.java | {
"start": 15819,
"end": 16029
} | class ____ extends AsyncScalarFunction {
private void eval(CompletableFuture<Integer> future, int i) {}
}
/** Implementation method is private. */
public static | PrivateMethodAsyncScalarFunction |
java | spring-projects__spring-boot | core/spring-boot-docker-compose/src/main/java/org/springframework/boot/docker/compose/core/DockerException.java | {
"start": 693,
"end": 840
} | class ____ docker exceptions.
*
* @author Moritz Halbritter
* @author Andy Wilkinson
* @author Phillip Webb
* @since 3.1.0
*/
public abstract | for |
java | spring-projects__spring-framework | spring-expression/src/main/java/org/springframework/expression/spel/standard/TokenKind.java | {
"start": 746,
"end": 2137
} | enum ____ {
// ordered by priority - operands first
LITERAL_INT,
LITERAL_LONG,
LITERAL_HEXINT,
LITERAL_HEXLONG,
LITERAL_STRING,
LITERAL_REAL,
LITERAL_REAL_FLOAT,
LPAREN("("),
RPAREN(")"),
COMMA(","),
IDENTIFIER,
COLON(":"),
HASH("#"),
RSQUARE("]"),
LSQUARE("["),
LCURLY("{"),
RCURLY("}"),
DOT("."),
PLUS("+"),
STAR("*"),
MINUS("-"),
SELECT_FIRST("^["),
SELECT_LAST("$["),
QMARK("?"),
PROJECT("!["),
DIV("/"),
GE(">="),
GT(">"),
LE("<="),
LT("<"),
EQ("=="),
NE("!="),
MOD("%"),
NOT("!"),
ASSIGN("="),
INSTANCEOF("instanceof"),
MATCHES("matches"),
BETWEEN("between"),
SELECT("?["),
POWER("^"),
ELVIS("?:"),
SAFE_NAVI("?."),
BEAN_REF("@"),
FACTORY_BEAN_REF("&"),
SYMBOLIC_OR("||"),
SYMBOLIC_AND("&&"),
INC("++"),
DEC("--");
final char[] tokenChars;
private final boolean hasPayload; // is there more to this token than simply the kind
private TokenKind(String tokenString) {
this.tokenChars = tokenString.toCharArray();
this.hasPayload = (this.tokenChars.length == 0);
}
private TokenKind() {
this("");
}
@Override
public String toString() {
return (name() + (this.tokenChars.length !=0 ? "(" + new String(this.tokenChars) +")" : ""));
}
public boolean hasPayload() {
return this.hasPayload;
}
public int getLength() {
return this.tokenChars.length;
}
}
| TokenKind |
java | quarkusio__quarkus | independent-projects/tools/analytics-common/src/main/java/io/quarkus/analytics/dto/config/NoopRemoteConfig.java | {
"start": 173,
"end": 842
} | class ____ implements AnalyticsRemoteConfig {
private static final Duration DONT_CHECK_ANYMORE = Duration.ofDays(365);
public static final NoopRemoteConfig INSTANCE = new NoopRemoteConfig();
private NoopRemoteConfig() {
// singleton
}
@Override
public boolean isActive() {
return false;
}
@Override
public List<String> getDenyAnonymousIds() {
return Collections.emptyList();
}
@Override
public List<String> getDenyQuarkusVersions() {
return Collections.emptyList();
}
@Override
public Duration getRefreshInterval() {
return DONT_CHECK_ANYMORE;
}
}
| NoopRemoteConfig |
java | apache__flink | flink-streaming-java/src/main/java/org/apache/flink/streaming/api/datastream/MultipleConnectedStreams.java | {
"start": 1381,
"end": 1909
} | class ____ {
protected final StreamExecutionEnvironment environment;
public MultipleConnectedStreams(StreamExecutionEnvironment env) {
this.environment = requireNonNull(env);
}
public StreamExecutionEnvironment getExecutionEnvironment() {
return environment;
}
public <OUT> SingleOutputStreamOperator<OUT> transform(
AbstractMultipleInputTransformation<OUT> transform) {
return new SingleOutputStreamOperator<>(environment, transform);
}
}
| MultipleConnectedStreams |
java | elastic__elasticsearch | x-pack/plugin/identity-provider/src/test/java/org/elasticsearch/xpack/idp/saml/authn/FailedAuthenticationResponseBuilderTests.java | {
"start": 845,
"end": 2715
} | class ____ extends IdpSamlTestCase {
private SamlIdentityProvider idp;
private XmlValidator validator;
private SamlFactory samlFactory;
@Before
public void setupSaml() throws Exception {
SamlInit.initialize();
samlFactory = new SamlFactory();
validator = new XmlValidator("saml-schema-protocol-2.0.xsd");
idp = mock(SamlIdentityProvider.class);
when(idp.getEntityId()).thenReturn("https://cloud.elastic.co/saml/idp");
}
public void testSimpleErrorResponseIsValid() throws Exception {
final Clock clock = Clock.systemUTC();
final FailedAuthenticationResponseMessageBuilder builder = new FailedAuthenticationResponseMessageBuilder(samlFactory, clock, idp);
final Response response = builder.setAcsUrl(
"https://" + randomAlphaOfLengthBetween(4, 8) + "." + randomAlphaOfLengthBetween(4, 8) + "/saml/acs"
).setPrimaryStatusCode(StatusCode.REQUESTER).setInResponseTo(randomAlphaOfLength(12)).build();
final String xml = super.toString(response);
validator.validate(xml);
}
public void testErrorResponseWithCodeIsValid() throws Exception {
final Clock clock = Clock.systemUTC();
final FailedAuthenticationResponseMessageBuilder builder = new FailedAuthenticationResponseMessageBuilder(samlFactory, clock, idp);
final Response response = builder.setAcsUrl(
"https://" + randomAlphaOfLengthBetween(4, 8) + "." + randomAlphaOfLengthBetween(4, 8) + "/saml/acs"
)
.setPrimaryStatusCode(StatusCode.REQUESTER)
.setInResponseTo(randomAlphaOfLength(12))
.setSecondaryStatusCode(StatusCode.INVALID_NAMEID_POLICY)
.build();
final String xml = super.toString(response);
validator.validate(xml);
}
}
| FailedAuthenticationResponseBuilderTests |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/ValueAnnotationsDeserTest.java | {
"start": 1003,
"end": 1334
} | class ____ implements RootString
{
final String _contents;
public RootStringImpl(String x) { _contents = x; }
@Override
public String contents() { return _contents; }
public String contents2() { return _contents; }
}
@JsonDeserialize(as=RootInterfaceImpl.class)
| RootStringImpl |
java | spring-projects__spring-framework | spring-messaging/src/main/java/org/springframework/messaging/rsocket/service/DestinationVariableArgumentResolver.java | {
"start": 1196,
"end": 1975
} | class ____ implements RSocketServiceArgumentResolver {
@Override
public boolean resolve(
@Nullable Object argument, MethodParameter parameter, RSocketRequestValues.Builder requestValues) {
DestinationVariable annot = parameter.getParameterAnnotation(DestinationVariable.class);
if (annot == null) {
return false;
}
if (argument != null) {
if (argument instanceof Collection<?> collection) {
collection.forEach(requestValues::addRouteVariable);
return true;
}
else if (argument instanceof Object[] arguments) {
for (Object variable : arguments) {
requestValues.addRouteVariable(variable);
}
return true;
}
else {
requestValues.addRouteVariable(argument);
}
}
return true;
}
}
| DestinationVariableArgumentResolver |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/Spr11202Tests.java | {
"start": 1840,
"end": 1879
} | class ____ {
}
protected static | Wrapper |
java | dropwizard__dropwizard | dropwizard-configuration/src/main/java/io/dropwizard/configuration/EnvironmentVariableSubstitutor.java | {
"start": 238,
"end": 1992
} | class ____ extends StringSubstitutor {
/**
* Constructs a new environment variable substitutor with strict checking and no substitution done in variables.
*/
public EnvironmentVariableSubstitutor() {
this(true, false);
}
/**
* Constructs a new environment variable substitutor with no substitution done in variables.
*
* @param strict whether to use strict variable checking
*/
public EnvironmentVariableSubstitutor(boolean strict) {
this(strict, false);
}
/**
* Constructs a new environment variable substitutor.
*
* @param strict {@code true} if looking up undefined environment variables should throw a
* {@link UndefinedEnvironmentVariableException}, {@code false} otherwise.
* @param substitutionInVariables a flag whether substitution is done in variable names.
* @see org.apache.commons.text.StringSubstitutor#setEnableSubstitutionInVariables(boolean)
*/
public EnvironmentVariableSubstitutor(boolean strict, boolean substitutionInVariables) {
super(System::getenv);
this.setEnableUndefinedVariableException(strict);
this.setEnableSubstitutionInVariables(substitutionInVariables);
}
@Override
protected boolean substitute(TextStringBuilder buf, int offset, int length) {
try {
return super.substitute(buf, offset, length);
} catch (IllegalArgumentException e) {
if (e.getMessage() != null && e.getMessage().contains("Cannot resolve variable")) {
throw new UndefinedEnvironmentVariableException(e.getMessage());
}
throw e;
}
}
}
| EnvironmentVariableSubstitutor |
java | junit-team__junit5 | junit-jupiter-params/src/main/java/org/junit/jupiter/params/converter/DefaultArgumentConverter.java | {
"start": 1934,
"end": 3755
} | class ____ implements ArgumentConverter {
public static final DefaultArgumentConverter INSTANCE = new DefaultArgumentConverter();
private DefaultArgumentConverter() {
}
@Override
public final @Nullable Object convert(@Nullable Object source, ParameterContext context) {
Class<?> targetType = context.getParameter().getType();
ClassLoader classLoader = getClassLoader(context.getDeclaringExecutable().getDeclaringClass());
return convert(source, targetType, classLoader);
}
@Override
public final @Nullable Object convert(@Nullable Object source, FieldContext context)
throws ArgumentConversionException {
Class<?> targetType = context.getField().getType();
ClassLoader classLoader = getClassLoader(context.getField().getDeclaringClass());
return convert(source, targetType, classLoader);
}
public final @Nullable Object convert(@Nullable Object source, Class<?> targetType, ClassLoader classLoader) {
if (source == null) {
if (targetType.isPrimitive()) {
throw new ArgumentConversionException(
"Cannot convert null to primitive value of type " + targetType.getTypeName());
}
return null;
}
if (ReflectionUtils.isAssignableTo(source, targetType)) {
return source;
}
if (source instanceof String string) {
try {
return convert(string, targetType, classLoader);
}
catch (ConversionException ex) {
throw new ArgumentConversionException(ex.getMessage(), ex);
}
}
throw new ArgumentConversionException("No built-in converter for source type %s and target type %s".formatted(
source.getClass().getTypeName(), targetType.getTypeName()));
}
@Nullable
Object convert(@Nullable String source, Class<?> targetType, ClassLoader classLoader) {
return ConversionSupport.convert(source, targetType, classLoader);
}
}
| DefaultArgumentConverter |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/VertxHttpEndpointBuilderFactory.java | {
"start": 31933,
"end": 35592
} | class ____ {
/**
* The internal instance of the builder used to access to all the
* methods representing the name of headers.
*/
private static final VertxHttpHeaderNameBuilder INSTANCE = new VertxHttpHeaderNameBuilder();
/**
* The http method.
*
* The option is a: {@code io.vertx.core.http.HttpMethod} type.
*
* Group: producer
*
* @return the name of the header {@code HttpMethod}.
*/
public String httpMethod() {
return "CamelHttpMethod";
}
/**
* The HTTP response code from the external server.
*
* The option is a: {@code Integer} type.
*
* Group: producer
*
* @return the name of the header {@code HttpResponseCode}.
*/
public String httpResponseCode() {
return "CamelHttpResponseCode";
}
/**
* The HTTP response text from the external server.
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code HttpResponseText}.
*/
public String httpResponseText() {
return "CamelHttpResponseText";
}
/**
* The HTTP content type. Is set on both the IN and OUT message to
* provide a content type, such as text/html.
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code Content-Type}.
*/
public String contentType() {
return "Content-Type";
}
/**
* URI parameters. Will override existing URI parameters set directly on
* the endpoint.
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code HttpQuery}.
*/
public String httpQuery() {
return "CamelHttpQuery";
}
/**
* URI to call. Will override the existing URI set directly on the
* endpoint. This URI is the URI of the http server to call. Its not the
* same as the Camel endpoint URI, where you can configure endpoint
* options such as security etc. This header does not support that, its
* only the URI of the http server.
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code HttpUri}.
*/
public String httpUri() {
return "CamelHttpUri";
}
/**
* Request URI's path, the header will be used to build the request URI
* with the HTTP_URI.
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code HttpPath}.
*/
public String httpPath() {
return "CamelHttpPath";
}
/**
* The HTTP content encoding. Is set to provide a content encoding, such
* as gzip.
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code Content-Encoding}.
*/
public String contentEncoding() {
return "Content-Encoding";
}
}
static VertxHttpEndpointBuilder endpointBuilder(String componentName, String path) {
| VertxHttpHeaderNameBuilder |
java | apache__camel | core/camel-main/src/generated/java/org/apache/camel/main/SpringCloudConfigConfigurationConfigurer.java | {
"start": 709,
"end": 8419
} | class ____ extends org.apache.camel.support.component.PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
org.apache.camel.vault.SpringCloudConfigConfiguration target = (org.apache.camel.vault.SpringCloudConfigConfiguration) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "awsvaultconfiguration":
case "awsVaultConfiguration": target.setAwsVaultConfiguration(property(camelContext, org.apache.camel.vault.AwsVaultConfiguration.class, value)); return true;
case "azurevaultconfiguration":
case "azureVaultConfiguration": target.setAzureVaultConfiguration(property(camelContext, org.apache.camel.vault.AzureVaultConfiguration.class, value)); return true;
case "cyberarkvaultconfiguration":
case "cyberArkVaultConfiguration": target.setCyberArkVaultConfiguration(property(camelContext, org.apache.camel.vault.CyberArkVaultConfiguration.class, value)); return true;
case "gcpvaultconfiguration":
case "gcpVaultConfiguration": target.setGcpVaultConfiguration(property(camelContext, org.apache.camel.vault.GcpVaultConfiguration.class, value)); return true;
case "hashicorpvaultconfiguration":
case "hashicorpVaultConfiguration": target.setHashicorpVaultConfiguration(property(camelContext, org.apache.camel.vault.HashicorpVaultConfiguration.class, value)); return true;
case "ibmsecretsmanagervaultconfiguration":
case "iBMSecretsManagerVaultConfiguration": target.setIBMSecretsManagerVaultConfiguration(property(camelContext, org.apache.camel.vault.IBMSecretsManagerVaultConfiguration.class, value)); return true;
case "kubernetesconfigmapvaultconfiguration":
case "kubernetesConfigMapVaultConfiguration": target.setKubernetesConfigMapVaultConfiguration(property(camelContext, org.apache.camel.vault.KubernetesConfigMapVaultConfiguration.class, value)); return true;
case "kubernetesvaultconfiguration":
case "kubernetesVaultConfiguration": target.setKubernetesVaultConfiguration(property(camelContext, org.apache.camel.vault.KubernetesVaultConfiguration.class, value)); return true;
case "label": target.setLabel(property(camelContext, java.lang.String.class, value)); return true;
case "password": target.setPassword(property(camelContext, java.lang.String.class, value)); return true;
case "profile": target.setProfile(property(camelContext, java.lang.String.class, value)); return true;
case "refreshenabled":
case "refreshEnabled": target.setRefreshEnabled(property(camelContext, boolean.class, value)); return true;
case "refreshperiod":
case "refreshPeriod": target.setRefreshPeriod(property(camelContext, long.class, value)); return true;
case "springcloudconfigconfiguration":
case "springCloudConfigConfiguration": target.setSpringCloudConfigConfiguration(property(camelContext, org.apache.camel.vault.SpringCloudConfigConfiguration.class, value)); return true;
case "token": target.setToken(property(camelContext, java.lang.String.class, value)); return true;
case "uris": target.setUris(property(camelContext, java.lang.String.class, value)); return true;
case "username": target.setUsername(property(camelContext, java.lang.String.class, value)); return true;
default: return false;
}
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "awsvaultconfiguration":
case "awsVaultConfiguration": return org.apache.camel.vault.AwsVaultConfiguration.class;
case "azurevaultconfiguration":
case "azureVaultConfiguration": return org.apache.camel.vault.AzureVaultConfiguration.class;
case "cyberarkvaultconfiguration":
case "cyberArkVaultConfiguration": return org.apache.camel.vault.CyberArkVaultConfiguration.class;
case "gcpvaultconfiguration":
case "gcpVaultConfiguration": return org.apache.camel.vault.GcpVaultConfiguration.class;
case "hashicorpvaultconfiguration":
case "hashicorpVaultConfiguration": return org.apache.camel.vault.HashicorpVaultConfiguration.class;
case "ibmsecretsmanagervaultconfiguration":
case "iBMSecretsManagerVaultConfiguration": return org.apache.camel.vault.IBMSecretsManagerVaultConfiguration.class;
case "kubernetesconfigmapvaultconfiguration":
case "kubernetesConfigMapVaultConfiguration": return org.apache.camel.vault.KubernetesConfigMapVaultConfiguration.class;
case "kubernetesvaultconfiguration":
case "kubernetesVaultConfiguration": return org.apache.camel.vault.KubernetesVaultConfiguration.class;
case "label": return java.lang.String.class;
case "password": return java.lang.String.class;
case "profile": return java.lang.String.class;
case "refreshenabled":
case "refreshEnabled": return boolean.class;
case "refreshperiod":
case "refreshPeriod": return long.class;
case "springcloudconfigconfiguration":
case "springCloudConfigConfiguration": return org.apache.camel.vault.SpringCloudConfigConfiguration.class;
case "token": return java.lang.String.class;
case "uris": return java.lang.String.class;
case "username": return java.lang.String.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
org.apache.camel.vault.SpringCloudConfigConfiguration target = (org.apache.camel.vault.SpringCloudConfigConfiguration) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "awsvaultconfiguration":
case "awsVaultConfiguration": return target.getAwsVaultConfiguration();
case "azurevaultconfiguration":
case "azureVaultConfiguration": return target.getAzureVaultConfiguration();
case "cyberarkvaultconfiguration":
case "cyberArkVaultConfiguration": return target.getCyberArkVaultConfiguration();
case "gcpvaultconfiguration":
case "gcpVaultConfiguration": return target.getGcpVaultConfiguration();
case "hashicorpvaultconfiguration":
case "hashicorpVaultConfiguration": return target.getHashicorpVaultConfiguration();
case "ibmsecretsmanagervaultconfiguration":
case "iBMSecretsManagerVaultConfiguration": return target.getIBMSecretsManagerVaultConfiguration();
case "kubernetesconfigmapvaultconfiguration":
case "kubernetesConfigMapVaultConfiguration": return target.getKubernetesConfigMapVaultConfiguration();
case "kubernetesvaultconfiguration":
case "kubernetesVaultConfiguration": return target.getKubernetesVaultConfiguration();
case "label": return target.getLabel();
case "password": return target.getPassword();
case "profile": return target.getProfile();
case "refreshenabled":
case "refreshEnabled": return target.isRefreshEnabled();
case "refreshperiod":
case "refreshPeriod": return target.getRefreshPeriod();
case "springcloudconfigconfiguration":
case "springCloudConfigConfiguration": return target.getSpringCloudConfigConfiguration();
case "token": return target.getToken();
case "uris": return target.getUris();
case "username": return target.getUsername();
default: return null;
}
}
}
| SpringCloudConfigConfigurationConfigurer |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/aggregator/AggregateExpressionSizeFallbackTest.java | {
"start": 1094,
"end": 2130
} | class ____ extends ContextTestSupport {
@Test
public void testAggregateExpressionSizeFallback() throws Exception {
getMockEndpoint("mock:aggregated").expectedBodiesReceived("A+B+C");
Map<String, Object> headers = new HashMap<>();
headers.put("id", 123);
template.sendBodyAndHeaders("direct:start", "A", headers);
template.sendBodyAndHeaders("direct:start", "B", headers);
template.sendBodyAndHeaders("direct:start", "C", headers);
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").aggregate(header("id"), new BodyInAggregatingStrategy())
// if no mySize header it will fallback to the 3 in size
.completionSize(header("mySize")).completionSize(3).to("mock:aggregated");
}
};
}
}
| AggregateExpressionSizeFallbackTest |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/jsontype/SealedTypesWithTypedDeserializationTest.java | {
"start": 1130,
"end": 1415
} | class ____ extends Animal
{
public int boneCount;
@JsonCreator
public Dog(@JsonProperty("name") String name) {
super(name);
}
public void setBoneCount(int i) { boneCount = i; }
}
@JsonTypeName("kitty")
static final | Dog |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/web/servlet/StubMvcResult.java | {
"start": 1106,
"end": 3221
} | class ____ implements MvcResult {
private MockHttpServletRequest request;
private Object handler;
private HandlerInterceptor[] interceptors;
private Exception resolvedException;
private ModelAndView mav;
private FlashMap flashMap;
private MockHttpServletResponse response;
public StubMvcResult(MockHttpServletRequest request,
Object handler,
HandlerInterceptor[] interceptors,
Exception resolvedException,
ModelAndView mav,
FlashMap flashMap,
MockHttpServletResponse response) {
this.request = request;
this.handler = handler;
this.interceptors = interceptors;
this.resolvedException = resolvedException;
this.mav = mav;
this.flashMap = flashMap;
this.response = response;
}
@Override
public MockHttpServletRequest getRequest() {
return request;
}
@Override
public Object getHandler() {
return handler;
}
@Override
public HandlerInterceptor[] getInterceptors() {
return interceptors;
}
@Override
public Exception getResolvedException() {
return resolvedException;
}
@Override
public ModelAndView getModelAndView() {
return mav;
}
@Override
public FlashMap getFlashMap() {
return flashMap;
}
@Override
public MockHttpServletResponse getResponse() {
return response;
}
public ModelAndView getMav() {
return mav;
}
public void setMav(ModelAndView mav) {
this.mav = mav;
}
public void setRequest(MockHttpServletRequest request) {
this.request = request;
}
public void setHandler(Object handler) {
this.handler = handler;
}
public void setInterceptors(HandlerInterceptor[] interceptors) {
this.interceptors = interceptors;
}
public void setResolvedException(Exception resolvedException) {
this.resolvedException = resolvedException;
}
public void setFlashMap(FlashMap flashMap) {
this.flashMap = flashMap;
}
public void setResponse(MockHttpServletResponse response) {
this.response = response;
}
@Override
public @Nullable Object getAsyncResult() {
return null;
}
@Override
public @Nullable Object getAsyncResult(long timeToWait) {
return null;
}
}
| StubMvcResult |
java | quarkusio__quarkus | extensions/vertx-http/runtime/src/main/java/io/quarkus/vertx/http/runtime/devmode/RemoteSyncHandler.java | {
"start": 795,
"end": 12155
} | class ____ implements Handler<HttpServerRequest> {
public static final String QUARKUS_PASSWORD = "X-Quarkus-Password";
private static final Logger log = Logger.getLogger(RemoteSyncHandler.class);
public static final String APPLICATION_QUARKUS = "application/quarkus-live-reload";
public static final String QUARKUS_SESSION = "X-Quarkus-Session";
public static final String QUARKUS_ERROR = "X-Quarkus-Error";
public static final String QUARKUS_SESSION_COUNT = "X-Quarkus-Count";
public static final String CONNECT = "/connect";
public static final String DEV = "/dev";
public static final String PROBE = "/probe"; //used to check that the server is back up after restart
final String password;
final Handler<HttpServerRequest> next;
final HotReplacementContext hotReplacementContext;
final String rootPath;
//all these are static to allow the handler to be recreated on hot reload
//which makes lifecycle management a lot easier
static volatile String currentSession;
//incrementing counter to prevent replay attacks
static volatile int currentSessionCounter;
static volatile long currentSessionTimeout;
static volatile Throwable remoteProblem;
static volatile boolean checkForChanges;
public RemoteSyncHandler(String password, Handler<HttpServerRequest> next, HotReplacementContext hotReplacementContext,
String rootPath) {
this.password = password;
this.next = next;
this.hotReplacementContext = hotReplacementContext;
this.rootPath = rootPath;
}
public static void doPreScan() {
if (currentSession == null) {
return;
}
synchronized (RemoteSyncHandler.class) {
checkForChanges = true;
//if there is a current dev request this will unblock it
RemoteSyncHandler.class.notifyAll();
try {
RemoteSyncHandler.class.wait(30000);
} catch (InterruptedException e) {
log.error("interrupted", e);
}
}
}
@Override
public void handle(HttpServerRequest event) {
long time = System.currentTimeMillis();
if (time > currentSessionTimeout) {
currentSession = null;
currentSessionCounter = 0;
}
final String type = event.headers().get(HttpHeaderNames.CONTENT_TYPE);
if (APPLICATION_QUARKUS.equals(type)) {
currentSessionTimeout = time + 60000;
VertxCoreRecorder.getVertx().get().executeBlocking(new Callable<Void>() {
@Override
public Void call() {
handleRequest(event);
return null;
}
});
return;
}
next.handle(event);
}
private void handleRequest(HttpServerRequest event) {
if (event.method().equals(HttpMethod.PUT)) {
handlePut(event);
} else if (event.method().equals(HttpMethod.DELETE)) {
handleDelete(event);
} else if (event.method().equals(HttpMethod.POST)) {
if (event.path().endsWith(DEV)) {
handleDev(event);
} else if (event.path().endsWith(CONNECT)) {
handleConnect(event);
} else if (event.path().endsWith(PROBE)) {
event.response().end();
} else {
event.response().putHeader(QUARKUS_ERROR, "Unknown path " + event.path()
+ " make sure your remote dev URL is pointing to the context root for your Quarkus instance, and not to a sub path.")
.setStatusCode(404).end();
}
} else {
event.response()
.putHeader(QUARKUS_ERROR, "Unknown method " + event.method() + " this is not a valid remote dev request")
.setStatusCode(405).end();
}
}
private void handleDev(HttpServerRequest event) {
event.bodyHandler(new Handler<Buffer>() {
@Override
public void handle(Buffer b) {
if (checkSession(event, b.getBytes())) {
return;
}
VertxCoreRecorder.getVertx().get().executeBlocking(new Callable<Void>() {
@Override
public Void call() {
try {
Throwable problem = (Throwable) new ObjectInputStream(new ByteArrayInputStream(b.getBytes()))
.readObject();
//update the problem if it has changed
if (problem != null || remoteProblem != null) {
remoteProblem = problem;
hotReplacementContext.setRemoteProblem(problem);
}
synchronized (RemoteSyncHandler.class) {
RemoteSyncHandler.class.notifyAll();
RemoteSyncHandler.class.wait(10000);
if (checkForChanges) {
checkForChanges = false;
event.response().setStatusCode(200);
} else {
event.response().setStatusCode(204);
}
event.response().end();
}
} catch (RejectedExecutionException e) {
//everything is shut down
//likely in the middle of a restart
event.connection().close();
} catch (Exception e) {
log.error("Connect failed", e);
event.response().setStatusCode(500).end();
}
return null;
}
});
}
}).exceptionHandler(new Handler<Throwable>() {
@Override
public void handle(Throwable t) {
log.error("dev request failed", t);
event.response().setStatusCode(500).end();
}
}).resume();
}
private void handleConnect(HttpServerRequest event) {
event.bodyHandler(new Handler<Buffer>() {
@Override
public void handle(Buffer b) {
try {
String rp = event.headers().get(QUARKUS_PASSWORD);
String bodyHash = HashUtil.sha256(b.getBytes());
String compare = HashUtil.sha256(bodyHash + password);
if (!compare.equals(rp)) {
log.error("Incorrect password");
event.response().putHeader(QUARKUS_ERROR, "Incorrect password").setStatusCode(401).end();
return;
}
SecureRandom r = new SecureRandom();
byte[] sessionId = new byte[40];
r.nextBytes(sessionId);
currentSession = Base64.getEncoder().encodeToString(sessionId);
currentSessionCounter = 0;
RemoteDevState state = (RemoteDevState) new ObjectInputStream(new ByteArrayInputStream(b.getBytes()))
.readObject();
remoteProblem = state.getAugmentProblem();
if (state.getAugmentProblem() != null) {
hotReplacementContext.setRemoteProblem(state.getAugmentProblem());
}
Set<String> files = hotReplacementContext.syncState(state.getFileHashes());
event.response().headers().set(QUARKUS_SESSION, currentSession);
event.response().end(String.join(";", files));
} catch (Exception e) {
log.error("Connect failed", e);
event.response().setStatusCode(500).end();
}
}
}).exceptionHandler(new Handler<Throwable>() {
@Override
public void handle(Throwable t) {
log.error("Connect failed", t);
event.response().setStatusCode(500).end();
}
}).resume();
}
private void handlePut(HttpServerRequest event) {
event.bodyHandler(new Handler<Buffer>() {
@Override
public void handle(Buffer buffer) {
if (checkSession(event, buffer.getBytes())) {
return;
}
try {
String path = stripRootPath(event.path());
hotReplacementContext.updateFile(path, buffer.getBytes());
} catch (Exception e) {
log.error("Failed to update file", e);
}
event.response().end();
}
}).exceptionHandler(new Handler<Throwable>() {
@Override
public void handle(Throwable error) {
log.error("Failed writing live reload data", error);
event.response().setStatusCode(500);
event.response().end();
}
}).resume();
}
private String stripRootPath(String path) {
return path.startsWith(rootPath)
? path.substring(rootPath.length())
: path;
}
private void handleDelete(HttpServerRequest event) {
if (checkSession(event, event.path().getBytes(StandardCharsets.UTF_8)))
return;
hotReplacementContext.updateFile(event.path(), null);
event.response().end();
}
private boolean checkSession(HttpServerRequest event, byte[] data) {
String ses = event.headers().get(QUARKUS_SESSION);
String sessionCount = event.headers().get(QUARKUS_SESSION_COUNT);
if (sessionCount == null) {
log.error("No session count provided");
//not really sure what status code makes sense here
//Non-Authoritative Information seems as good as any
event.response().setStatusCode(203).end();
return true;
}
int sc = Integer.parseInt(sessionCount);
if (!Objects.equals(ses, currentSession) ||
sc <= currentSessionCounter) {
log.error("Invalid session");
//not really sure what status code makes sense here
//Non-Authoritative Information seems as good as any
event.response().setStatusCode(203).end();
return true;
}
currentSessionCounter = sc;
String dataHash = "";
if (data != null) {
dataHash = HashUtil.sha256(data);
}
String rp = event.headers().get(QUARKUS_PASSWORD);
String compare = HashUtil.sha256(dataHash + ses + sc + password);
if (!compare.equals(rp)) {
log.error("Incorrect password");
event.response().setStatusCode(401).end();
return true;
}
return false;
}
public void close() {
synchronized (RemoteSyncHandler.class) {
RemoteSyncHandler.class.notifyAll();
}
}
}
| RemoteSyncHandler |
java | alibaba__nacos | core/src/main/java/com/alibaba/nacos/core/distributed/distro/entity/DistroData.java | {
"start": 784,
"end": 1572
} | class ____ {
private DistroKey distroKey;
private DataOperation type;
private byte[] content;
public DistroData() {
}
public DistroData(DistroKey distroKey, byte[] content) {
this.distroKey = distroKey;
this.content = content;
}
public DistroKey getDistroKey() {
return distroKey;
}
public void setDistroKey(DistroKey distroKey) {
this.distroKey = distroKey;
}
public DataOperation getType() {
return type;
}
public void setType(DataOperation type) {
this.type = type;
}
public byte[] getContent() {
return content;
}
public void setContent(byte[] content) {
this.content = content;
}
}
| DistroData |
java | spring-projects__spring-data-jpa | spring-data-envers/src/test/java/org/springframework/data/envers/sample/QCountry.java | {
"start": 1007,
"end": 1078
} | class ____ Country domain.
*
* @author Dmytro Iaroslavskyi
*/
public | for |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/support/logging/Resources.java | {
"start": 669,
"end": 749
} | class ____ simplify access to resources through the classloader.
*/
public final | to |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/cdi/events/standard/StandardCdiSupportTest.java | {
"start": 1155,
"end": 2011
} | class ____ {
@AfterEach
void tearDown(SessionFactoryScope factoryScope) {
factoryScope.dropData();
}
@Test
@ExtendWith( Monitor.Resetter.class )
@CdiContainer(beanClasses = {Monitor.class, TheListener.class})
@ServiceRegistry(resolvableSettings = @ServiceRegistry.ResolvableSetting(
settingName = CDI_BEAN_MANAGER,
resolver = CdiContainerLinker.StandardResolver.class
))
@DomainModel(annotatedClasses = TheEntity.class)
@SessionFactory
public void testIt(SessionFactoryScope factoryScope) {
factoryScope.getSessionFactory();
// The CDI bean should have been built immediately...
assertTrue( Monitor.wasInstantiated() );
assertEquals( 0, Monitor.currentCount() );
factoryScope.inTransaction( (session) -> {
session.persist( new TheEntity( 1 ) );
} );
assertEquals( 1, Monitor.currentCount() );
}
}
| StandardCdiSupportTest |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/router/TestRouterRpcSingleNS.java | {
"start": 1908,
"end": 2077
} | interface ____ the {@link Router} implemented by
* {@link RouterRpcServer}.
* Tests covering the functionality of RouterRPCServer with
* single nameService.
*/
public | of |
java | apache__camel | components/camel-gson/src/test/java/org/apache/camel/component/gson/GsonDataFormatTest.java | {
"start": 1282,
"end": 2255
} | class ____ {
@Mock
private Exchange exchange;
@Mock
private Message message;
@BeforeEach
public void setup() {
when(exchange.getIn()).thenReturn(message);
}
@Test
public void testString() throws Exception {
testJson("\"A string\"", "A string");
}
@Test
public void testMap() throws Exception {
testJson("{value=123}", Collections.singletonMap("value", 123.0));
}
@Test
public void testList() throws Exception {
testJson("[{value=123}]", Collections.singletonList(Collections.singletonMap("value", 123.0)));
}
private void testJson(String json, Object expected) throws Exception {
Object unmarshalled;
try (GsonDataFormat gsonDataFormat = new GsonDataFormat()) {
gsonDataFormat.doStart();
unmarshalled = gsonDataFormat.unmarshal(exchange, json);
assertEquals(expected, unmarshalled);
}
}
}
| GsonDataFormatTest |
java | apache__logging-log4j2 | log4j-core-test/src/test/java/org/apache/logging/log4j/core/parser/XmlLogEventParserTest.java | {
"start": 1078,
"end": 5305
} | class ____ extends LogEventParserTest {
private XmlLogEventParser parser;
private static final String XML =
"<Event xmlns=\"http://logging.apache.org/log4j/2.0/events\"\n" + " timeMillis=\"1493121664118\"\n"
+ " level=\"INFO\"\n"
+ " loggerName=\"HelloWorld\"\n"
+ " endOfBatch=\"false\"\n"
+ " thread=\"main\"\n"
+ " loggerFqcn=\"org.apache.logging.log4j.spi.AbstractLogger\"\n"
+ " threadId=\"1\"\n"
+ " threadPriority=\"5\">\n"
+ " <Instant epochSecond=\"1493121664\" nanoOfSecond=\"118000000\"/>\n"
+ " <Marker name=\"child\">\n"
+ " <Parents>\n"
+ " <Marker name=\"parent\">\n"
+ " <Parents>\n"
+ " <Marker name=\"grandparent\"/>\n"
+ " </Parents>\n"
+ " </Marker>\n"
+ " </Parents>\n"
+ " </Marker>\n"
+ " <Message>Hello, world!</Message>\n"
+ " <ContextMap>\n"
+ " <item key=\"bar\" value=\"BAR\"/>\n"
+ " <item key=\"foo\" value=\"FOO\"/>\n"
+ " </ContextMap>\n"
+ " <ContextStack>\n"
+ " <ContextStackItem>one</ContextStackItem>\n"
+ " <ContextStackItem>two</ContextStackItem>\n"
+ " </ContextStack>\n"
+ " <Source\n"
+ " class=\"logtest.Main\"\n"
+ " method=\"main\"\n"
+ " file=\"Main.java\"\n"
+ " line=\"29\"/>\n"
+ " <Thrown commonElementCount=\"0\" message=\"error message\" name=\"java.lang.RuntimeException\">\n"
+ " <ExtendedStackTrace>\n"
+ " <ExtendedStackTraceItem\n"
+ " class=\"logtest.Main\"\n"
+ " method=\"main\"\n"
+ " file=\"Main.java\"\n"
+ " line=\"29\"\n"
+ " exact=\"true\"\n"
+ " location=\"classes/\"\n"
+ " version=\"?\"/>\n"
+ " </ExtendedStackTrace>\n"
+ " </Thrown>\n"
+ "</Event>";
@BeforeEach
void setup() {
parser = new XmlLogEventParser();
}
@Test
void testString() throws ParseException {
final LogEvent logEvent = parser.parseFrom(XML);
assertLogEvent(logEvent);
}
@Test
void testStringEmpty() {
assertThrows(ParseException.class, () -> parser.parseFrom(""));
}
@Test
void testStringInvalidXml() {
assertThrows(ParseException.class, () -> parser.parseFrom("foobar"));
}
@Test
void testEmptyObject() throws ParseException {
parser.parseFrom("<Event></Event>");
}
@Test
void testStringWrongPropertyType() {
assertThrows(
ParseException.class,
() -> parser.parseFrom("<Event><Instant epochSecond=\"bar\">foobar</Instant></Event>"));
}
@Test
void testTimeMillisIgnored() throws ParseException {
parser.parseFrom("<Event><timeMillis>foobar</timeMillis></Event>");
}
@Test
void testStringIgnoreInvalidProperty() throws ParseException {
parser.parseFrom("<Event><foo>bar</foo></Event>");
}
@Test
void testByteArray() throws ParseException {
final LogEvent logEvent = parser.parseFrom(XML.getBytes(StandardCharsets.UTF_8));
assertLogEvent(logEvent);
}
@Test
void testByteArrayOffsetLength() throws ParseException {
final byte[] bytes = ("abc" + XML + "def").getBytes(StandardCharsets.UTF_8);
final LogEvent logEvent = parser.parseFrom(bytes, 3, bytes.length - 6);
assertLogEvent(logEvent);
}
}
| XmlLogEventParserTest |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/injection/finalfield/FinalFieldInjectionTest.java | {
"start": 842,
"end": 1031
} | class ____ {
static final AtomicInteger COUNTER = new AtomicInteger(0);
public Head() {
COUNTER.incrementAndGet();
}
}
@Dependent
static | Head |
java | apache__camel | components/camel-ignite/src/generated/java/org/apache/camel/component/ignite/cache/IgniteCacheEndpointConfigurer.java | {
"start": 739,
"end": 7880
} | class ____ extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
IgniteCacheEndpoint target = (IgniteCacheEndpoint) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "autounsubscribe":
case "autoUnsubscribe": target.setAutoUnsubscribe(property(camelContext, boolean.class, value)); return true;
case "bridgeerrorhandler":
case "bridgeErrorHandler": target.setBridgeErrorHandler(property(camelContext, boolean.class, value)); return true;
case "cachepeekmode":
case "cachePeekMode": target.setCachePeekMode(property(camelContext, org.apache.ignite.cache.CachePeekMode.class, value)); return true;
case "exceptionhandler":
case "exceptionHandler": target.setExceptionHandler(property(camelContext, org.apache.camel.spi.ExceptionHandler.class, value)); return true;
case "exchangepattern":
case "exchangePattern": target.setExchangePattern(property(camelContext, org.apache.camel.ExchangePattern.class, value)); return true;
case "failifinexistentcache":
case "failIfInexistentCache": target.setFailIfInexistentCache(property(camelContext, boolean.class, value)); return true;
case "fireexistingqueryresults":
case "fireExistingQueryResults": target.setFireExistingQueryResults(property(camelContext, boolean.class, value)); return true;
case "lazystartproducer":
case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
case "oneexchangeperupdate":
case "oneExchangePerUpdate": target.setOneExchangePerUpdate(property(camelContext, boolean.class, value)); return true;
case "operation": target.setOperation(property(camelContext, org.apache.camel.component.ignite.cache.IgniteCacheOperation.class, value)); return true;
case "pagesize":
case "pageSize": target.setPageSize(property(camelContext, int.class, value)); return true;
case "propagateincomingbodyifnoreturnvalue":
case "propagateIncomingBodyIfNoReturnValue": target.setPropagateIncomingBodyIfNoReturnValue(property(camelContext, boolean.class, value)); return true;
case "query": target.setQuery(property(camelContext, org.apache.ignite.cache.query.Query.class, value)); return true;
case "remotefilter":
case "remoteFilter": target.setRemoteFilter(property(camelContext, org.apache.ignite.cache.CacheEntryEventSerializableFilter.class, value)); return true;
case "timeinterval":
case "timeInterval": target.setTimeInterval(property(camelContext, long.class, value)); return true;
case "treatcollectionsascacheobjects":
case "treatCollectionsAsCacheObjects": target.setTreatCollectionsAsCacheObjects(property(camelContext, boolean.class, value)); return true;
default: return false;
}
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "autounsubscribe":
case "autoUnsubscribe": return boolean.class;
case "bridgeerrorhandler":
case "bridgeErrorHandler": return boolean.class;
case "cachepeekmode":
case "cachePeekMode": return org.apache.ignite.cache.CachePeekMode.class;
case "exceptionhandler":
case "exceptionHandler": return org.apache.camel.spi.ExceptionHandler.class;
case "exchangepattern":
case "exchangePattern": return org.apache.camel.ExchangePattern.class;
case "failifinexistentcache":
case "failIfInexistentCache": return boolean.class;
case "fireexistingqueryresults":
case "fireExistingQueryResults": return boolean.class;
case "lazystartproducer":
case "lazyStartProducer": return boolean.class;
case "oneexchangeperupdate":
case "oneExchangePerUpdate": return boolean.class;
case "operation": return org.apache.camel.component.ignite.cache.IgniteCacheOperation.class;
case "pagesize":
case "pageSize": return int.class;
case "propagateincomingbodyifnoreturnvalue":
case "propagateIncomingBodyIfNoReturnValue": return boolean.class;
case "query": return org.apache.ignite.cache.query.Query.class;
case "remotefilter":
case "remoteFilter": return org.apache.ignite.cache.CacheEntryEventSerializableFilter.class;
case "timeinterval":
case "timeInterval": return long.class;
case "treatcollectionsascacheobjects":
case "treatCollectionsAsCacheObjects": return boolean.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
IgniteCacheEndpoint target = (IgniteCacheEndpoint) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "autounsubscribe":
case "autoUnsubscribe": return target.isAutoUnsubscribe();
case "bridgeerrorhandler":
case "bridgeErrorHandler": return target.isBridgeErrorHandler();
case "cachepeekmode":
case "cachePeekMode": return target.getCachePeekMode();
case "exceptionhandler":
case "exceptionHandler": return target.getExceptionHandler();
case "exchangepattern":
case "exchangePattern": return target.getExchangePattern();
case "failifinexistentcache":
case "failIfInexistentCache": return target.isFailIfInexistentCache();
case "fireexistingqueryresults":
case "fireExistingQueryResults": return target.isFireExistingQueryResults();
case "lazystartproducer":
case "lazyStartProducer": return target.isLazyStartProducer();
case "oneexchangeperupdate":
case "oneExchangePerUpdate": return target.isOneExchangePerUpdate();
case "operation": return target.getOperation();
case "pagesize":
case "pageSize": return target.getPageSize();
case "propagateincomingbodyifnoreturnvalue":
case "propagateIncomingBodyIfNoReturnValue": return target.isPropagateIncomingBodyIfNoReturnValue();
case "query": return target.getQuery();
case "remotefilter":
case "remoteFilter": return target.getRemoteFilter();
case "timeinterval":
case "timeInterval": return target.getTimeInterval();
case "treatcollectionsascacheobjects":
case "treatCollectionsAsCacheObjects": return target.isTreatCollectionsAsCacheObjects();
default: return null;
}
}
@Override
public Object getCollectionValueType(Object target, String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "query": return javax.cache.Cache.Entry.class;
case "remotefilter":
case "remoteFilter": return java.lang.Object.class;
default: return null;
}
}
}
| IgniteCacheEndpointConfigurer |
java | netty__netty | transport/src/main/java/io/netty/channel/ChannelHandlerMask.java | {
"start": 1287,
"end": 9239
} | class ____ {
private static final InternalLogger logger = InternalLoggerFactory.getInstance(ChannelHandlerMask.class);
// Using to mask which methods must be called for a ChannelHandler.
static final int MASK_EXCEPTION_CAUGHT = 1;
static final int MASK_CHANNEL_REGISTERED = 1 << 1;
static final int MASK_CHANNEL_UNREGISTERED = 1 << 2;
static final int MASK_CHANNEL_ACTIVE = 1 << 3;
static final int MASK_CHANNEL_INACTIVE = 1 << 4;
static final int MASK_CHANNEL_READ = 1 << 5;
static final int MASK_CHANNEL_READ_COMPLETE = 1 << 6;
static final int MASK_USER_EVENT_TRIGGERED = 1 << 7;
static final int MASK_CHANNEL_WRITABILITY_CHANGED = 1 << 8;
static final int MASK_BIND = 1 << 9;
static final int MASK_CONNECT = 1 << 10;
static final int MASK_DISCONNECT = 1 << 11;
static final int MASK_CLOSE = 1 << 12;
static final int MASK_DEREGISTER = 1 << 13;
static final int MASK_READ = 1 << 14;
static final int MASK_WRITE = 1 << 15;
static final int MASK_FLUSH = 1 << 16;
static final int MASK_ONLY_INBOUND = MASK_CHANNEL_REGISTERED |
MASK_CHANNEL_UNREGISTERED | MASK_CHANNEL_ACTIVE | MASK_CHANNEL_INACTIVE | MASK_CHANNEL_READ |
MASK_CHANNEL_READ_COMPLETE | MASK_USER_EVENT_TRIGGERED | MASK_CHANNEL_WRITABILITY_CHANGED;
private static final int MASK_ALL_INBOUND = MASK_EXCEPTION_CAUGHT | MASK_ONLY_INBOUND;
static final int MASK_ONLY_OUTBOUND = MASK_BIND | MASK_CONNECT | MASK_DISCONNECT |
MASK_CLOSE | MASK_DEREGISTER | MASK_READ | MASK_WRITE | MASK_FLUSH;
private static final int MASK_ALL_OUTBOUND = MASK_EXCEPTION_CAUGHT | MASK_ONLY_OUTBOUND;
private static final FastThreadLocal<Map<Class<? extends ChannelHandler>, Integer>> MASKS =
new FastThreadLocal<Map<Class<? extends ChannelHandler>, Integer>>() {
@Override
protected Map<Class<? extends ChannelHandler>, Integer> initialValue() {
return new WeakHashMap<Class<? extends ChannelHandler>, Integer>(32);
}
};
/**
* Return the {@code executionMask}.
*/
static int mask(Class<? extends ChannelHandler> clazz) {
// Try to obtain the mask from the cache first. If this fails calculate it and put it in the cache for fast
// lookup in the future.
Map<Class<? extends ChannelHandler>, Integer> cache = MASKS.get();
Integer mask = cache.get(clazz);
if (mask == null) {
mask = mask0(clazz);
cache.put(clazz, mask);
}
return mask;
}
/**
* Calculate the {@code executionMask}.
*/
private static int mask0(Class<? extends ChannelHandler> handlerType) {
int mask = MASK_EXCEPTION_CAUGHT;
try {
if (ChannelInboundHandler.class.isAssignableFrom(handlerType)) {
mask |= MASK_ALL_INBOUND;
if (isSkippable(handlerType, "channelRegistered", ChannelHandlerContext.class)) {
mask &= ~MASK_CHANNEL_REGISTERED;
}
if (isSkippable(handlerType, "channelUnregistered", ChannelHandlerContext.class)) {
mask &= ~MASK_CHANNEL_UNREGISTERED;
}
if (isSkippable(handlerType, "channelActive", ChannelHandlerContext.class)) {
mask &= ~MASK_CHANNEL_ACTIVE;
}
if (isSkippable(handlerType, "channelInactive", ChannelHandlerContext.class)) {
mask &= ~MASK_CHANNEL_INACTIVE;
}
if (isSkippable(handlerType, "channelRead", ChannelHandlerContext.class, Object.class)) {
mask &= ~MASK_CHANNEL_READ;
}
if (isSkippable(handlerType, "channelReadComplete", ChannelHandlerContext.class)) {
mask &= ~MASK_CHANNEL_READ_COMPLETE;
}
if (isSkippable(handlerType, "channelWritabilityChanged", ChannelHandlerContext.class)) {
mask &= ~MASK_CHANNEL_WRITABILITY_CHANGED;
}
if (isSkippable(handlerType, "userEventTriggered", ChannelHandlerContext.class, Object.class)) {
mask &= ~MASK_USER_EVENT_TRIGGERED;
}
}
if (ChannelOutboundHandler.class.isAssignableFrom(handlerType)) {
mask |= MASK_ALL_OUTBOUND;
if (isSkippable(handlerType, "bind", ChannelHandlerContext.class,
SocketAddress.class, ChannelPromise.class)) {
mask &= ~MASK_BIND;
}
if (isSkippable(handlerType, "connect", ChannelHandlerContext.class, SocketAddress.class,
SocketAddress.class, ChannelPromise.class)) {
mask &= ~MASK_CONNECT;
}
if (isSkippable(handlerType, "disconnect", ChannelHandlerContext.class, ChannelPromise.class)) {
mask &= ~MASK_DISCONNECT;
}
if (isSkippable(handlerType, "close", ChannelHandlerContext.class, ChannelPromise.class)) {
mask &= ~MASK_CLOSE;
}
if (isSkippable(handlerType, "deregister", ChannelHandlerContext.class, ChannelPromise.class)) {
mask &= ~MASK_DEREGISTER;
}
if (isSkippable(handlerType, "read", ChannelHandlerContext.class)) {
mask &= ~MASK_READ;
}
if (isSkippable(handlerType, "write", ChannelHandlerContext.class,
Object.class, ChannelPromise.class)) {
mask &= ~MASK_WRITE;
}
if (isSkippable(handlerType, "flush", ChannelHandlerContext.class)) {
mask &= ~MASK_FLUSH;
}
}
if (isSkippable(handlerType, "exceptionCaught", ChannelHandlerContext.class, Throwable.class)) {
mask &= ~MASK_EXCEPTION_CAUGHT;
}
} catch (Exception e) {
// Should never reach here.
PlatformDependent.throwException(e);
}
return mask;
}
private static boolean isSkippable(
final Class<?> handlerType, final String methodName, final Class<?>... paramTypes) throws Exception {
return AccessController.doPrivileged(new PrivilegedExceptionAction<Boolean>() {
@Override
public Boolean run() throws Exception {
Method m;
try {
m = handlerType.getMethod(methodName, paramTypes);
} catch (NoSuchMethodException e) {
if (logger.isDebugEnabled()) {
logger.debug(
"Class {} missing method {}, assume we can not skip execution", handlerType, methodName, e);
}
return false;
}
return m.isAnnotationPresent(Skip.class);
}
});
}
private ChannelHandlerMask() { }
/**
* Indicates that the annotated event handler method in {@link ChannelHandler} will not be invoked by
* {@link ChannelPipeline} and so <strong>MUST</strong> only be used when the {@link ChannelHandler}
* method does nothing except forward to the next {@link ChannelHandler} in the pipeline.
* <p>
* Note that this annotation is not {@linkplain Inherited inherited}. If a user overrides a method annotated with
* {@link Skip}, it will not be skipped anymore. Similarly, the user can override a method not annotated with
* {@link Skip} and simply pass the event through to the next handler, which reverses the behavior of the
* supertype.
* </p>
*/
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
@ | ChannelHandlerMask |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/StaticMockMemberTest.java | {
"start": 3515,
"end": 3817
} | class ____ {
// BUG: Diagnostic contains: StaticMockMember
@Mock private static String mockedPrivateString;
static String someStaticMethod() {
return mockedPrivateString;
}
}
""")
.doTest();
}
}
| Test |
java | spring-projects__spring-framework | spring-web/src/test/java/org/springframework/web/bind/support/WebRequestDataBinderTests.java | {
"start": 13816,
"end": 13997
} | class ____ {
private MyEnum myEnum;
public MyEnum getMyEnum() {
return myEnum;
}
public void setMyEnum(MyEnum myEnum) {
this.myEnum = myEnum;
}
}
public | EnumHolder |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/Mockito.java | {
"start": 62030,
"end": 62506
} | interface ____ reduce boilerplate.
* In particular, this approach will make it easier to test functions which use callbacks.
*
* The methods {@link AdditionalAnswers#answer(Answer1)}} and {@link AdditionalAnswers#answerVoid(VoidAnswer1)}
* can be used to create the answer. They rely on the related answer interfaces in org.mockito.stubbing that
* support answers up to 5 parameters.
*
* <p>
* Examples:
* <p>
* <pre class="code"><code class="java">
*
* // Example | can |
java | alibaba__nacos | common/src/main/java/com/alibaba/nacos/common/packagescan/resource/PathMatchingResourcePatternResolver.java | {
"start": 7528,
"end": 8193
} | class ____ locations. This is because a resource such as
* <pre class="code">
* com/mycompany/package1/service-context.xml
* </pre>
* may be in only one location, but when a path such as
* <pre class="code">
* classpath:com/mycompany/**/service-context.xml
* </pre>
* is used to try to resolve it, the resolver will work off the (first) URL
* returned by {@code getResource("com/mycompany");}. If this base package node
* exists in multiple classloader locations, the actual end resource may not be
* underneath. Therefore, preferably, use "{@code classpath*:}" with the same
* Ant-style pattern in such a case, which will search <i>all</i> | path |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/action/support/local/TransportLocalClusterStateActionTests.java | {
"start": 7521,
"end": 8385
} | class ____ extends TransportLocalClusterStateAction<Request, Response> {
static final String ACTION_NAME = "internal:testAction";
Action(TaskManager taskManager, ClusterService clusterService) {
super(ACTION_NAME, new ActionFilters(Set.of()), taskManager, clusterService, EsExecutors.DIRECT_EXECUTOR_SERVICE);
}
@Override
protected void localClusterStateOperation(Task task, Request request, ClusterState state, ActionListener<Response> listener)
throws Exception {
listener.onResponse(new Response());
}
@Override
protected ClusterBlockException checkBlock(Request request, ClusterState state) {
Set<ClusterBlock> blocks = state.blocks().global();
return blocks.isEmpty() ? null : new ClusterBlockException(blocks);
}
}
}
| Action |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/script/DoubleFieldScript.java | {
"start": 4757,
"end": 5044
} | class ____ {
private final DoubleFieldScript script;
public Emit(DoubleFieldScript script) {
this.script = script;
}
public void emit(double v) {
script.checkMaxSize(script.count());
script.emit(v);
}
}
}
| Emit |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/references/Target.java | {
"start": 235,
"end": 939
} | class ____ {
private long prop1;
private Bar prop2;
private SomeType prop3;
private GenericWrapper<String> prop4;
public long getProp1() {
return prop1;
}
public void setProp1(long prop1) {
this.prop1 = prop1;
}
public Bar getProp2() {
return prop2;
}
public void setProp2(Bar prop2) {
this.prop2 = prop2;
}
public SomeType getProp3() {
return prop3;
}
public void setProp3(SomeType prop3) {
this.prop3 = prop3;
}
public GenericWrapper<String> getProp4() {
return prop4;
}
public void setProp4(GenericWrapper<String> prop4) {
this.prop4 = prop4;
}
}
| Target |
java | apache__camel | components/camel-cassandraql/src/test/java/org/apache/camel/component/cassandra/integration/CassandraComponentResumeStrategyIT.java | {
"start": 1452,
"end": 1537
} | class ____ extends BaseCassandra {
private static | CassandraComponentResumeStrategyIT |
java | square__retrofit | retrofit/java-test/src/test/java/retrofit2/RequestFactoryTest.java | {
"start": 28120,
"end": 28637
} | class ____ {
@GET("/foo/bar/{ping}/") //
Call<ResponseBody> method(@Path("ping") String ping) {
return null;
}
}
Request request = buildRequest(Example.class, "po ng");
assertThat(request.method()).isEqualTo("GET");
assertThat(request.headers().size()).isEqualTo(0);
assertThat(request.url().toString()).isEqualTo("http://example.com/foo/bar/po%20ng/");
assertThat(request.body()).isNull();
}
@Test
public void getWithUnusedAndInvalidNamedPathParam() {
| Example |
java | apache__flink | flink-test-utils-parent/flink-connector-test-utils/src/main/java/org/apache/flink/connector/testutils/formats/DummyInitializationContext.java | {
"start": 1273,
"end": 1721
} | class ____
implements SerializationSchema.InitializationContext,
DeserializationSchema.InitializationContext {
@Override
public MetricGroup getMetricGroup() {
return new UnregisteredMetricsGroup();
}
@Override
public UserCodeClassLoader getUserCodeClassLoader() {
return SimpleUserCodeClassLoader.create(DummyInitializationContext.class.getClassLoader());
}
}
| DummyInitializationContext |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/cuk/Person.java | {
"start": 240,
"end": 1825
} | class ____ implements Serializable {
private Long id;
private String name;
private Address address;
private String userId;
private boolean deleted;
private Set accounts = new HashSet();
/**
* @return Returns the userId.
*/
public String getUserId() {
return userId;
}
/**
* @param userId The userId to set.
*/
public void setUserId(String userId) {
this.userId = userId;
}
/**
* @return Returns the address.
*/
public Address getAddress() {
return address;
}
/**
* @param address The address to set.
*/
public void setAddress(Address address) {
this.address = address;
}
/**
* @return Returns the id.
*/
public Long getId() {
return id;
}
/**
* @param id The id to set.
*/
public void setId(Long id) {
this.id = id;
}
/**
* @return Returns the name.
*/
public String getName() {
return name;
}
/**
* @param name The name to set.
*/
public void setName(String name) {
this.name = name;
}
/**
* @return Returns the accounts.
*/
public Set getAccounts() {
return accounts;
}
/**
* @param accounts The accounts to set.
*/
public void setAccounts(Set accounts) {
this.accounts = accounts;
}
public boolean isDeleted() {
return deleted;
}
public void setDeleted(boolean deleted) {
this.deleted = deleted;
}
public boolean equals(Object other) {
if (other instanceof Person) {
Person that = (Person) other;
return that.isDeleted() == deleted && that.getUserId().equals(userId);
}
else {
return false;
}
}
public int hashCode() {
return userId.hashCode();
}
}
| Person |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/ImmutableCheckerTest.java | {
"start": 72408,
"end": 72646
} | class ____<@ImmutableTypeParameter T> {
<@ImmutableTypeParameter T> T f(T t) {
return t;
}
<@ImmutableTypeParameter T> void g(T a, T b) {}
@Immutable
| Test |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/V2Migration.java | {
"start": 1143,
"end": 1352
} | class ____ utility methods required for migrating S3A to AWS Java SDK V2.
* For more information on the upgrade, see HADOOP-18073.
*
* <p>in HADOOP-18382. Upgrade AWS SDK to V2 - Prerequisites,
* this | provides |
java | apache__kafka | connect/runtime/src/test/java/org/apache/kafka/connect/runtime/WorkerSinkTaskTest.java | {
"start": 5382,
"end": 94179
} | class ____ {
// These are fixed to keep this code simpler. In this example we assume byte[] raw values
// with mix of integer/string in Connect
private static final String TOPIC = "test";
private static final int PARTITION = 12;
private static final int PARTITION2 = 13;
private static final int PARTITION3 = 14;
private static final long FIRST_OFFSET = 45;
private static final Schema KEY_SCHEMA = Schema.INT32_SCHEMA;
private static final int KEY = 12;
private static final Schema VALUE_SCHEMA = Schema.STRING_SCHEMA;
private static final String VALUE = "VALUE";
private static final byte[] RAW_KEY = "key".getBytes();
private static final byte[] RAW_VALUE = "value".getBytes();
private static final TopicPartition TOPIC_PARTITION = new TopicPartition(TOPIC, PARTITION);
private static final TopicPartition TOPIC_PARTITION2 = new TopicPartition(TOPIC, PARTITION2);
private static final TopicPartition TOPIC_PARTITION3 = new TopicPartition(TOPIC, PARTITION3);
private static final Set<TopicPartition> INITIAL_ASSIGNMENT =
Set.of(TOPIC_PARTITION, TOPIC_PARTITION2);
private static final Map<String, String> TASK_PROPS = new HashMap<>();
static {
TASK_PROPS.put(SinkConnector.TOPICS_CONFIG, TOPIC);
TASK_PROPS.put(TaskConfig.TASK_CLASS_CONFIG, SinkTask.class.getName());
}
private static final TaskConfig TASK_CONFIG = new TaskConfig(TASK_PROPS);
private final ConnectorTaskId taskId = new ConnectorTaskId("job", 0);
private final ConnectorTaskId taskId1 = new ConnectorTaskId("job", 1);
private final TargetState initialState = TargetState.STARTED;
private MockTime time;
private WorkerSinkTask workerTask;
@Mock
private SinkTask sinkTask;
private final ArgumentCaptor<WorkerSinkTaskContext> sinkTaskContext = ArgumentCaptor.forClass(WorkerSinkTaskContext.class);
private WorkerConfig workerConfig;
private MockConnectMetrics metrics;
@Mock
private PluginClassLoader pluginLoader;
@Mock
private Converter keyConverter;
@Mock
private Converter valueConverter;
@Mock
private HeaderConverter headerConverter;
@Mock
private TransformationChain<ConsumerRecord<byte[], byte[]>, SinkRecord> transformationChain;
@Mock
private TaskStatus.Listener statusListener;
@Mock
private StatusBackingStore statusBackingStore;
@Mock
private KafkaConsumer<byte[], byte[]> consumer;
@Mock
private ErrorHandlingMetrics errorHandlingMetrics;
private final ArgumentCaptor<ConsumerRebalanceListener> rebalanceListener = ArgumentCaptor.forClass(ConsumerRebalanceListener.class);
private long recordsReturnedTp1;
private long recordsReturnedTp3;
@BeforeEach
public void setUp() {
time = new MockTime();
Map<String, String> workerProps = new HashMap<>();
workerProps.put(WorkerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
workerProps.put("key.converter", "org.apache.kafka.connect.json.JsonConverter");
workerProps.put("value.converter", "org.apache.kafka.connect.json.JsonConverter");
workerProps.put("offset.storage.file.filename", "/tmp/connect.offsets");
workerConfig = new StandaloneConfig(workerProps);
metrics = new MockConnectMetrics(time);
recordsReturnedTp1 = 0;
recordsReturnedTp3 = 0;
}
private void createTask(TargetState initialState) {
createTask(initialState, keyConverter, valueConverter, headerConverter);
}
private void createTask(TargetState initialState, TransformationChain transformationChain, RetryWithToleranceOperator toleranceOperator) {
createTask(initialState, keyConverter, valueConverter, headerConverter, toleranceOperator, List::of, transformationChain);
}
private void createTask(TargetState initialState, Converter keyConverter, Converter valueConverter, HeaderConverter headerConverter) {
createTask(initialState, keyConverter, valueConverter, headerConverter, RetryWithToleranceOperatorTest.noneOperator(), List::of, transformationChain);
}
private void createTask(TargetState initialState, Converter keyConverter, Converter valueConverter, HeaderConverter headerConverter,
RetryWithToleranceOperator<ConsumerRecord<byte[], byte[]>> retryWithToleranceOperator,
Supplier<List<ErrorReporter<ConsumerRecord<byte[], byte[]>>>> errorReportersSupplier,
TransformationChain<ConsumerRecord<byte[], byte[]>, SinkRecord> transformationChain) {
createTask(taskId, sinkTask, statusListener, initialState, workerConfig, metrics,
keyConverter, valueConverter, errorHandlingMetrics, headerConverter,
transformationChain, consumer, pluginLoader, time,
retryWithToleranceOperator, statusBackingStore, errorReportersSupplier);
}
private void createTask(ConnectorTaskId taskId, SinkTask task, TaskStatus.Listener statusListener, TargetState initialState,
WorkerConfig workerConfig, ConnectMetrics connectMetrics, Converter keyConverter, Converter valueConverter,
ErrorHandlingMetrics errorMetrics, HeaderConverter headerConverter,
TransformationChain<ConsumerRecord<byte[], byte[]>, SinkRecord> transformationChain,
Consumer<byte[], byte[]> consumer, ClassLoader loader, Time time,
RetryWithToleranceOperator<ConsumerRecord<byte[], byte[]>> retryWithToleranceOperator,
StatusBackingStore statusBackingStore,
Supplier<List<ErrorReporter<ConsumerRecord<byte[], byte[]>>>> errorReportersSupplier) {
Plugin<Converter> keyConverterPlugin = connectMetrics.wrap(keyConverter, taskId, true);
Plugin<Converter> valueConverterPlugin = connectMetrics.wrap(valueConverter, taskId, false);
Plugin<HeaderConverter> headerConverterPlugin = connectMetrics.wrap(headerConverter, taskId);
workerTask = new WorkerSinkTask(
taskId, task, statusListener, initialState, workerConfig, ClusterConfigState.EMPTY, connectMetrics,
keyConverterPlugin, valueConverterPlugin, errorMetrics, headerConverterPlugin,
transformationChain, consumer, loader, time,
retryWithToleranceOperator, null, statusBackingStore, errorReportersSupplier, null, TestPlugins.noOpLoaderSwap());
}
@AfterEach
public void tearDown() {
if (metrics != null) metrics.stop();
}
@Test
public void testStartPaused() {
createTask(TargetState.PAUSED);
expectPollInitialAssignment();
workerTask.initialize(TASK_CONFIG);
workerTask.initializeAndStart();
verifyInitializeTask();
workerTask.iteration();
verifyPollInitialAssignment();
time.sleep(10000L);
verify(consumer).pause(INITIAL_ASSIGNMENT);
assertSinkMetricValue("partition-count", 2);
assertTaskMetricValue("status", "paused");
assertTaskMetricValue("running-ratio", 0.0);
assertTaskMetricValue("pause-ratio", 1.0);
assertTaskMetricValue("offset-commit-max-time-ms", Double.NaN);
}
@Test
public void testPause() {
    // Verifies the PAUSED/STARTED lifecycle: transitioning to PAUSED wakes up the
    // consumer, commits outstanding offsets, pauses all assigned partitions, and
    // updates the status/metrics; transitioning back to STARTED resumes them.
    createTask(initialState);
    workerTask.initialize(TASK_CONFIG);
    workerTask.initializeAndStart();
    verifyInitializeTask();
    expectTaskGetTopic();
    expectPollInitialAssignment()
            .thenAnswer(expectConsumerPoll(1))
            // Pause
            .thenThrow(new WakeupException())
            // Offset commit as requested when pausing; No records returned by consumer.poll()
            .thenAnswer(expectConsumerPoll(0))
            // And unpause
            .thenThrow(new WakeupException())
            .thenAnswer(expectConsumerPoll(1));
    expectConversionAndTransformation(null, new RecordHeaders());
    workerTask.iteration(); // initial assignment
    verifyPollInitialAssignment();
    workerTask.iteration(); // fetch some data
    // put should've been called twice now (initial assignment & poll)
    verify(sinkTask, times(2)).put(anyList());
    workerTask.transitionTo(TargetState.PAUSED);
    time.sleep(10_000L);
    // Metrics before the pause takes effect: one record read/sent, still active
    // (not yet committed), no commits performed, task still reported as running.
    assertSinkMetricValue("partition-count", 2);
    assertSinkMetricValue("sink-record-read-total", 1.0);
    assertSinkMetricValue("sink-record-send-total", 1.0);
    assertSinkMetricValue("sink-record-active-count", 1.0);
    assertSinkMetricValue("sink-record-active-count-max", 1.0);
    assertSinkMetricValue("sink-record-active-count-avg", 0.333333);
    assertSinkMetricValue("offset-commit-seq-no", 0.0);
    assertSinkMetricValue("offset-commit-completion-rate", 0.0);
    assertSinkMetricValue("offset-commit-completion-total", 0.0);
    assertSinkMetricValue("offset-commit-skip-rate", 0.0);
    assertSinkMetricValue("offset-commit-skip-total", 0.0);
    assertTaskMetricValue("status", "running");
    assertTaskMetricValue("running-ratio", 1.0);
    assertTaskMetricValue("pause-ratio", 0.0);
    assertTaskMetricValue("batch-size-max", 1.0);
    assertTaskMetricValue("batch-size-avg", 0.5);
    assertTaskMetricValue("offset-commit-max-time-ms", Double.NaN);
    assertTaskMetricValue("offset-commit-failure-percentage", 0.0);
    assertTaskMetricValue("offset-commit-success-percentage", 0.0);
    workerTask.iteration(); // wakeup
    // Pause
    verify(statusListener).onPause(taskId);
    verify(consumer).pause(INITIAL_ASSIGNMENT);
    verify(consumer).wakeup();
    // Offset commit as requested when pausing; No records returned by consumer.poll()
    when(sinkTask.preCommit(anyMap())).thenReturn(Map.of());
    workerTask.iteration(); // now paused
    time.sleep(30000L);
    // One commit has now completed and the task reports "paused"; the
    // running/pause ratios reflect 10s running vs. 30s paused.
    assertSinkMetricValue("offset-commit-seq-no", 1.0);
    assertSinkMetricValue("offset-commit-completion-rate", 0.0333);
    assertSinkMetricValue("offset-commit-completion-total", 1.0);
    assertSinkMetricValue("offset-commit-skip-rate", 0.0);
    assertSinkMetricValue("offset-commit-skip-total", 0.0);
    assertTaskMetricValue("status", "paused");
    assertTaskMetricValue("running-ratio", 0.25);
    assertTaskMetricValue("pause-ratio", 0.75);
    verify(sinkTask, times(3)).put(anyList());
    workerTask.transitionTo(TargetState.STARTED);
    workerTask.iteration(); // wakeup
    workerTask.iteration(); // now unpaused
    // And unpause
    verify(statusListener).onResume(taskId);
    verify(consumer, times(2)).wakeup();
    INITIAL_ASSIGNMENT.forEach(tp -> verify(consumer).resume(Set.of(tp)));
    verify(sinkTask, times(4)).put(anyList());
}
@Test
public void testShutdown() throws Exception {
    // Verifies the stop/close sequence: stop() wakes up the consumer so poll()
    // returns promptly, and close() stops the sink task, closes the consumer
    // (which fires a final partition revocation via the rebalance listener),
    // and closes the header converter.
    createTask(initialState);
    workerTask.initialize(TASK_CONFIG);
    workerTask.initializeAndStart();
    verifyInitializeTask();
    expectTaskGetTopic();
    expectPollInitialAssignment()
            .thenAnswer(expectConsumerPoll(1));
    expectConversionAndTransformation(null, new RecordHeaders());
    workerTask.iteration();
    verifyPollInitialAssignment();
    sinkTaskContext.getValue().requestCommit(); // Force an offset commit
    // second iteration
    when(sinkTask.preCommit(anyMap())).thenReturn(Map.of());
    workerTask.iteration();
    verify(sinkTask, times(2)).put(anyList());
    // Closing the consumer triggers revocation of the current assignment,
    // mirroring the real consumer's shutdown behavior.
    doAnswer((Answer<ConsumerRecords<byte[], byte[]>>) invocation -> {
        rebalanceListener.getValue().onPartitionsRevoked(INITIAL_ASSIGNMENT);
        return null;
    }).when(consumer).close();
    workerTask.stop();
    verify(consumer).wakeup();
    workerTask.close();
    verify(sinkTask).stop();
    verify(consumer).close();
    verify(headerConverter).close();
}
@Test
public void testPollRedelivery() {
    // Verifies that a RetriableException from SinkTask::put causes the same batch
    // to be redelivered: the consumer is paused while retrying, resumed once the
    // retry succeeds, and a subsequent requested offset commit proceeds normally.
    createTask(initialState);
    expectTaskGetTopic();
    workerTask.initialize(TASK_CONFIG);
    workerTask.initializeAndStart();
    verifyInitializeTask();
    expectPollInitialAssignment()
            // If a retriable exception is thrown, we should redeliver the same batch, pausing the consumer in the meantime
            .thenAnswer(expectConsumerPoll(1))
            // Retry delivery should succeed
            .thenAnswer(expectConsumerPoll(0))
            .thenAnswer(expectConsumerPoll(0));
    expectConversionAndTransformation(null, new RecordHeaders());
    // First put (initial assignment) succeeds, second throws, third succeeds
    doNothing()
            .doThrow(new RetriableException("retry"))
            .doNothing()
            .when(sinkTask).put(anyList());
    workerTask.iteration();
    time.sleep(10000L);
    verifyPollInitialAssignment();
    verify(sinkTask).put(anyList());
    // Metrics after initial assignment only: nothing read/sent yet, no commits.
    assertSinkMetricValue("partition-count", 2);
    assertSinkMetricValue("sink-record-read-total", 0.0);
    assertSinkMetricValue("sink-record-send-total", 0.0);
    assertSinkMetricValue("sink-record-active-count", 0.0);
    assertSinkMetricValue("sink-record-active-count-max", 0.0);
    assertSinkMetricValue("sink-record-active-count-avg", 0.0);
    assertSinkMetricValue("offset-commit-seq-no", 0.0);
    assertSinkMetricValue("offset-commit-completion-rate", 0.0);
    assertSinkMetricValue("offset-commit-completion-total", 0.0);
    assertSinkMetricValue("offset-commit-skip-rate", 0.0);
    assertSinkMetricValue("offset-commit-skip-total", 0.0);
    assertTaskMetricValue("status", "running");
    assertTaskMetricValue("running-ratio", 1.0);
    assertTaskMetricValue("pause-ratio", 0.0);
    assertTaskMetricValue("batch-size-max", 0.0);
    assertTaskMetricValue("batch-size-avg", 0.0);
    assertTaskMetricValue("offset-commit-max-time-ms", Double.NaN);
    assertTaskMetricValue("offset-commit-failure-percentage", 0.0);
    assertTaskMetricValue("offset-commit-success-percentage", 0.0);
    // Pause
    workerTask.iteration();
    verify(consumer, times(3)).assignment();
    verify(consumer).pause(INITIAL_ASSIGNMENT);
    // Retry delivery should succeed
    workerTask.iteration();
    time.sleep(30000L);
    verify(sinkTask, times(3)).put(anyList());
    INITIAL_ASSIGNMENT.forEach(tp -> verify(consumer).resume(Set.of(tp)));
    assertSinkMetricValue("sink-record-read-total", 1.0);
    assertSinkMetricValue("sink-record-send-total", 1.0);
    assertSinkMetricValue("sink-record-active-count", 1.0);
    assertSinkMetricValue("sink-record-active-count-max", 1.0);
    assertSinkMetricValue("sink-record-active-count-avg", 0.5);
    assertTaskMetricValue("status", "running");
    assertTaskMetricValue("running-ratio", 1.0);
    assertTaskMetricValue("batch-size-max", 1.0);
    assertTaskMetricValue("batch-size-avg", 0.5);
    // Expect commit
    final Map<TopicPartition, OffsetAndMetadata> workerCurrentOffsets = new HashMap<>();
    // Commit advance by one
    workerCurrentOffsets.put(TOPIC_PARTITION, new OffsetAndMetadata(FIRST_OFFSET + 1));
    // Nothing polled for this partition
    workerCurrentOffsets.put(TOPIC_PARTITION2, new OffsetAndMetadata(FIRST_OFFSET));
    when(sinkTask.preCommit(workerCurrentOffsets)).thenReturn(workerCurrentOffsets);
    sinkTaskContext.getValue().requestCommit();
    time.sleep(10000L);
    workerTask.iteration();
    final ArgumentCaptor<OffsetCommitCallback> callback = ArgumentCaptor.forClass(OffsetCommitCallback.class);
    verify(consumer).commitAsync(eq(workerCurrentOffsets), callback.capture());
    callback.getValue().onComplete(workerCurrentOffsets, null);
    verify(sinkTask, times(4)).put(anyList());
    assertSinkMetricValue("offset-commit-completion-total", 1.0);
}
@Test
@SuppressWarnings("unchecked")
public void testPollRedeliveryWithConsumerRebalance() {
    // Verifies redelivery behavior across rebalances: while a batch is being
    // retried, newly-assigned partitions are also paused so redelivery can
    // continue; once every paused-for-redelivery partition has been revoked,
    // the remaining assignment is resumed.
    createTask(initialState);
    expectTaskGetTopic();
    workerTask.initialize(TASK_CONFIG);
    workerTask.initializeAndStart();
    verifyInitializeTask();
    Set<TopicPartition> newAssignment = Set.of(TOPIC_PARTITION, TOPIC_PARTITION2, TOPIC_PARTITION3);
    // Assignment evolves: [TP1, TP2] -> [TP1, TP2, TP3] -> [TP3]
    when(consumer.assignment())
            .thenReturn(INITIAL_ASSIGNMENT, INITIAL_ASSIGNMENT, INITIAL_ASSIGNMENT)
            .thenReturn(newAssignment, newAssignment, newAssignment)
            .thenReturn(Set.of(TOPIC_PARTITION3),
                    Set.of(TOPIC_PARTITION3),
                    Set.of(TOPIC_PARTITION3));
    INITIAL_ASSIGNMENT.forEach(tp -> when(consumer.position(tp)).thenReturn(FIRST_OFFSET));
    when(consumer.position(TOPIC_PARTITION3)).thenReturn(FIRST_OFFSET);
    when(consumer.poll(any(Duration.class)))
            .thenAnswer((Answer<ConsumerRecords<byte[], byte[]>>) invocation -> {
                rebalanceListener.getValue().onPartitionsAssigned(INITIAL_ASSIGNMENT);
                return ConsumerRecords.empty();
            })
            .thenAnswer(expectConsumerPoll(1))
            // Empty consumer poll (all partitions are paused) with rebalance; one new partition is assigned
            .thenAnswer(invocation -> {
                rebalanceListener.getValue().onPartitionsRevoked(Set.of());
                rebalanceListener.getValue().onPartitionsAssigned(Set.of(TOPIC_PARTITION3));
                return ConsumerRecords.empty();
            })
            .thenAnswer(expectConsumerPoll(0))
            // Non-empty consumer poll; all initially-assigned partitions are revoked in rebalance, and new partitions are allowed to resume
            .thenAnswer(invocation -> {
                ConsumerRecord<byte[], byte[]> newRecord = new ConsumerRecord<>(TOPIC, PARTITION3, FIRST_OFFSET, RAW_KEY, RAW_VALUE);
                rebalanceListener.getValue().onPartitionsRevoked(INITIAL_ASSIGNMENT);
                rebalanceListener.getValue().onPartitionsAssigned(List.of());
                return new ConsumerRecords<>(Map.of(TOPIC_PARTITION3, List.of(newRecord)),
                        Map.of(TOPIC_PARTITION3, new OffsetAndMetadata(FIRST_OFFSET + 1, Optional.empty(), "")));
            });
    expectConversionAndTransformation(null, new RecordHeaders());
    // put fails three times in a row (triggering pause-and-retry), then succeeds
    doNothing()
            // If a retriable exception is thrown, we should redeliver the same batch, pausing the consumer in the meantime
            .doThrow(new RetriableException("retry"))
            .doThrow(new RetriableException("retry"))
            .doThrow(new RetriableException("retry"))
            .doNothing()
            .when(sinkTask).put(any(Collection.class));
    workerTask.iteration();
    // Pause
    workerTask.iteration();
    verify(consumer).pause(INITIAL_ASSIGNMENT);
    workerTask.iteration();
    verify(sinkTask).open(Set.of(TOPIC_PARTITION3));
    // All partitions are re-paused in order to pause any newly-assigned partitions so that redelivery efforts can continue
    verify(consumer).pause(newAssignment);
    workerTask.iteration();
    final Map<TopicPartition, OffsetAndMetadata> offsets = INITIAL_ASSIGNMENT.stream()
            .collect(Collectors.toMap(Function.identity(), tp -> new OffsetAndMetadata(FIRST_OFFSET)));
    when(sinkTask.preCommit(offsets)).thenReturn(offsets);
    newAssignment = Set.of(TOPIC_PARTITION3);
    workerTask.iteration();
    verify(sinkTask).close(INITIAL_ASSIGNMENT);
    // All partitions are resumed, as all previously paused-for-redelivery partitions were revoked
    newAssignment.forEach(tp -> verify(consumer).resume(Set.of(tp)));
}
@Test
public void testErrorInRebalancePartitionLoss() {
    // An exception thrown from SinkTask::close while handling lost partitions
    // must propagate out of WorkerSinkTask::iteration unchanged.
    final RuntimeException expected = new RuntimeException("Revocation error");
    createTask(initialState);
    workerTask.initialize(TASK_CONFIG);
    workerTask.initializeAndStart();
    verifyInitializeTask();
    expectPollInitialAssignment()
            .thenAnswer((Answer<ConsumerRecords<byte[], byte[]>>) invocation -> {
                rebalanceListener.getValue().onPartitionsLost(INITIAL_ASSIGNMENT);
                return ConsumerRecords.empty();
            });
    doThrow(expected).when(sinkTask).close(INITIAL_ASSIGNMENT);
    workerTask.iteration();
    verifyPollInitialAssignment();
    final RuntimeException actual = assertThrows(RuntimeException.class, workerTask::iteration);
    assertEquals(expected, actual);
}
@Test
public void testErrorInRebalancePartitionRevocation() {
    // An exception raised during partition revocation must propagate out of
    // WorkerSinkTask::iteration unchanged.
    final RuntimeException expected = new RuntimeException("Revocation error");
    createTask(initialState);
    workerTask.initialize(TASK_CONFIG);
    workerTask.initializeAndStart();
    verifyInitializeTask();
    expectPollInitialAssignment()
            .thenAnswer((Answer<ConsumerRecords<byte[], byte[]>>) invocation -> {
                rebalanceListener.getValue().onPartitionsRevoked(INITIAL_ASSIGNMENT);
                return ConsumerRecords.empty();
            });
    expectRebalanceRevocationError(expected);
    workerTask.iteration();
    verifyPollInitialAssignment();
    final RuntimeException actual = assertThrows(RuntimeException.class, workerTask::iteration);
    assertEquals(expected, actual);
}
@Test
public void testErrorInRebalancePartitionAssignment() {
    // An exception raised during partition assignment must propagate out of
    // WorkerSinkTask::iteration; the previously-open partitions are still closed.
    final RuntimeException expected = new RuntimeException("Assignment error");
    createTask(initialState);
    workerTask.initialize(TASK_CONFIG);
    workerTask.initializeAndStart();
    verifyInitializeTask();
    expectPollInitialAssignment()
            .thenAnswer((Answer<ConsumerRecords<byte[], byte[]>>) invocation -> {
                rebalanceListener.getValue().onPartitionsRevoked(INITIAL_ASSIGNMENT);
                rebalanceListener.getValue().onPartitionsAssigned(INITIAL_ASSIGNMENT);
                return ConsumerRecords.empty();
            });
    workerTask.iteration();
    verifyPollInitialAssignment();
    expectRebalanceAssignmentError(expected);
    try {
        final RuntimeException actual = assertThrows(RuntimeException.class, workerTask::iteration);
        assertEquals(expected, actual);
    } finally {
        // Even on failure, the task must have been asked to close its partitions
        verify(sinkTask).close(INITIAL_ASSIGNMENT);
    }
}
@Test
public void testPartialRevocationAndAssignment() {
    // Verifies incremental (cooperative) rebalancing: partial revocations,
    // partial assignments, and partition loss each only open/close the affected
    // partitions on the sink task, never the whole assignment.
    createTask(initialState);
    // Assignment evolves: [TP1, TP2] -> [TP2] -> [TP2, TP3] -> [TP1, TP2]
    when(consumer.assignment())
            .thenReturn(INITIAL_ASSIGNMENT)
            .thenReturn(INITIAL_ASSIGNMENT)
            .thenReturn(Set.of(TOPIC_PARTITION2))
            .thenReturn(Set.of(TOPIC_PARTITION2))
            .thenReturn(Set.of(TOPIC_PARTITION2, TOPIC_PARTITION3))
            .thenReturn(Set.of(TOPIC_PARTITION2, TOPIC_PARTITION3))
            .thenReturn(INITIAL_ASSIGNMENT)
            .thenReturn(INITIAL_ASSIGNMENT)
            .thenReturn(INITIAL_ASSIGNMENT);
    INITIAL_ASSIGNMENT.forEach(tp -> when(consumer.position(tp)).thenReturn(FIRST_OFFSET));
    workerTask.initialize(TASK_CONFIG);
    workerTask.initializeAndStart();
    verifyInitializeTask();
    when(consumer.poll(any(Duration.class)))
            .thenAnswer((Answer<ConsumerRecords<byte[], byte[]>>) invocation -> {
                rebalanceListener.getValue().onPartitionsAssigned(INITIAL_ASSIGNMENT);
                return ConsumerRecords.empty();
            })
            .thenAnswer((Answer<ConsumerRecords<byte[], byte[]>>) invocation -> {
                rebalanceListener.getValue().onPartitionsRevoked(Set.of(TOPIC_PARTITION));
                rebalanceListener.getValue().onPartitionsAssigned(Set.of());
                return ConsumerRecords.empty();
            })
            .thenAnswer((Answer<ConsumerRecords<byte[], byte[]>>) invocation -> {
                rebalanceListener.getValue().onPartitionsRevoked(Set.of());
                rebalanceListener.getValue().onPartitionsAssigned(Set.of(TOPIC_PARTITION3));
                return ConsumerRecords.empty();
            })
            .thenAnswer((Answer<ConsumerRecords<byte[], byte[]>>) invocation -> {
                rebalanceListener.getValue().onPartitionsLost(Set.of(TOPIC_PARTITION3));
                rebalanceListener.getValue().onPartitionsAssigned(Set.of(TOPIC_PARTITION));
                return ConsumerRecords.empty();
            });
    final Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>();
    offsets.put(TOPIC_PARTITION, new OffsetAndMetadata(FIRST_OFFSET));
    when(sinkTask.preCommit(offsets)).thenReturn(offsets);
    when(consumer.position(TOPIC_PARTITION3)).thenReturn(FIRST_OFFSET);
    // First iteration--first call to poll, first consumer assignment
    workerTask.iteration();
    verifyPollInitialAssignment();
    // Second iteration--second call to poll, partial consumer revocation
    workerTask.iteration();
    verify(sinkTask).close(Set.of(TOPIC_PARTITION));
    verify(sinkTask, times(2)).put(List.of());
    // Third iteration--third call to poll, partial consumer assignment
    workerTask.iteration();
    verify(sinkTask).open(Set.of(TOPIC_PARTITION3));
    verify(sinkTask, times(3)).put(List.of());
    // Fourth iteration--fourth call to poll, one partition lost; can't commit offsets for it, one new partition assigned
    workerTask.iteration();
    verify(sinkTask).close(Set.of(TOPIC_PARTITION3));
    verify(sinkTask).open(Set.of(TOPIC_PARTITION));
    verify(sinkTask, times(4)).put(List.of());
}
@Test
@SuppressWarnings("unchecked")
public void testPreCommitFailureAfterPartialRevocationAndAssignment() {
    // Verifies that when SinkTask::preCommit fails after a series of partial
    // revocations/assignments, the task rewinds the consumer (seek) to the last
    // committed offsets for all currently-assigned partitions.
    createTask(initialState);
    expectTaskGetTopic();
    workerTask.initialize(TASK_CONFIG);
    workerTask.initializeAndStart();
    verifyInitializeTask();
    // Assignment evolves: [TP1, TP2] -> [TP2] -> [TP2, TP3]
    when(consumer.assignment())
            .thenReturn(INITIAL_ASSIGNMENT, INITIAL_ASSIGNMENT)
            .thenReturn(Set.of(TOPIC_PARTITION2))
            .thenReturn(Set.of(TOPIC_PARTITION2))
            .thenReturn(Set.of(TOPIC_PARTITION2))
            .thenReturn(Set.of(TOPIC_PARTITION2, TOPIC_PARTITION3))
            .thenReturn(Set.of(TOPIC_PARTITION2, TOPIC_PARTITION3))
            .thenReturn(Set.of(TOPIC_PARTITION2, TOPIC_PARTITION3));
    INITIAL_ASSIGNMENT.forEach(tp -> when(consumer.position(tp)).thenReturn(FIRST_OFFSET));
    when(consumer.position(TOPIC_PARTITION3)).thenReturn(FIRST_OFFSET);
    // First poll; assignment is [TP1, TP2]
    when(consumer.poll(any(Duration.class)))
            .thenAnswer((Answer<ConsumerRecords<byte[], byte[]>>) invocation -> {
                rebalanceListener.getValue().onPartitionsAssigned(INITIAL_ASSIGNMENT);
                return ConsumerRecords.empty();
            })
            // Second poll; a single record is delivered from TP1
            .thenAnswer(expectConsumerPoll(1))
            // Third poll; assignment changes to [TP2]
            .thenAnswer(invocation -> {
                rebalanceListener.getValue().onPartitionsRevoked(Set.of(TOPIC_PARTITION));
                rebalanceListener.getValue().onPartitionsAssigned(Set.of());
                return ConsumerRecords.empty();
            })
            // Fourth poll; assignment changes to [TP2, TP3]
            .thenAnswer(invocation -> {
                rebalanceListener.getValue().onPartitionsRevoked(Set.of());
                rebalanceListener.getValue().onPartitionsAssigned(Set.of(TOPIC_PARTITION3));
                return ConsumerRecords.empty();
            })
            // Fifth poll; an offset commit takes place
            .thenAnswer(expectConsumerPoll(0));
    expectConversionAndTransformation(null, new RecordHeaders());
    // First iteration--first call to poll, first consumer assignment
    workerTask.iteration();
    // Second iteration--second call to poll, delivery of one record
    workerTask.iteration();
    // Third iteration--third call to poll, partial consumer revocation
    final Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>();
    offsets.put(TOPIC_PARTITION, new OffsetAndMetadata(FIRST_OFFSET + 1));
    when(sinkTask.preCommit(offsets)).thenReturn(offsets);
    doNothing().when(consumer).commitSync(offsets);
    workerTask.iteration();
    verify(sinkTask).close(Set.of(TOPIC_PARTITION));
    verify(sinkTask, times(2)).put(List.of());
    // Fourth iteration--fourth call to poll, partial consumer assignment
    workerTask.iteration();
    verify(sinkTask).open(Set.of(TOPIC_PARTITION3));
    final Map<TopicPartition, OffsetAndMetadata> workerCurrentOffsets = new HashMap<>();
    workerCurrentOffsets.put(TOPIC_PARTITION2, new OffsetAndMetadata(FIRST_OFFSET));
    workerCurrentOffsets.put(TOPIC_PARTITION3, new OffsetAndMetadata(FIRST_OFFSET));
    when(sinkTask.preCommit(workerCurrentOffsets)).thenThrow(new ConnectException("Failed to flush"));
    // Fifth iteration--task-requested offset commit with failure in SinkTask::preCommit
    sinkTaskContext.getValue().requestCommit();
    workerTask.iteration();
    // The failed commit must rewind both currently-assigned partitions
    verify(consumer).seek(TOPIC_PARTITION2, FIRST_OFFSET);
    verify(consumer).seek(TOPIC_PARTITION3, FIRST_OFFSET);
}
@Test
public void testWakeupInCommitSyncCausesRetry() {
    // Verifies that a WakeupException raised inside consumer.commitSync during a
    // rebalance-triggered commit causes the commit to be retried (and succeed),
    // with partitions re-opened/resumed and commit metrics reflecting success.
    createTask(initialState);
    workerTask.initialize(TASK_CONFIG);
    time.sleep(30000L);
    workerTask.initializeAndStart();
    time.sleep(30000L);
    verifyInitializeTask();
    expectTaskGetTopic();
    expectPollInitialAssignment()
            .thenAnswer(expectConsumerPoll(1))
            .thenAnswer(invocation -> {
                rebalanceListener.getValue().onPartitionsRevoked(INITIAL_ASSIGNMENT);
                rebalanceListener.getValue().onPartitionsAssigned(INITIAL_ASSIGNMENT);
                return ConsumerRecords.empty();
            });
    expectConversionAndTransformation(null, new RecordHeaders());
    workerTask.iteration(); // poll for initial assignment
    time.sleep(30000L);
    final Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>();
    offsets.put(TOPIC_PARTITION, new OffsetAndMetadata(FIRST_OFFSET + 1));
    offsets.put(TOPIC_PARTITION2, new OffsetAndMetadata(FIRST_OFFSET));
    when(sinkTask.preCommit(offsets)).thenReturn(offsets);
    // first one raises wakeup
    doThrow(new WakeupException())
            // and succeed the second time
            .doNothing()
            .when(consumer).commitSync(offsets);
    workerTask.iteration(); // first record delivered
    workerTask.iteration(); // now rebalance with the wakeup triggered
    time.sleep(30000L);
    verify(sinkTask).close(INITIAL_ASSIGNMENT);
    // open is called once for the initial assignment and once after the rebalance
    verify(sinkTask, times(2)).open(INITIAL_ASSIGNMENT);
    INITIAL_ASSIGNMENT.forEach(tp -> verify(consumer).resume(Set.of(tp)));
    verify(statusListener).onResume(taskId);
    // Metrics: one record processed and committed, commit reported successful
    assertSinkMetricValue("partition-count", 2);
    assertSinkMetricValue("sink-record-read-total", 1.0);
    assertSinkMetricValue("sink-record-send-total", 1.0);
    assertSinkMetricValue("sink-record-active-count", 0.0);
    assertSinkMetricValue("sink-record-active-count-max", 1.0);
    assertSinkMetricValue("sink-record-active-count-avg", 0.33333);
    assertSinkMetricValue("offset-commit-seq-no", 1.0);
    assertSinkMetricValue("offset-commit-completion-total", 1.0);
    assertSinkMetricValue("offset-commit-skip-total", 0.0);
    assertTaskMetricValue("status", "running");
    assertTaskMetricValue("running-ratio", 1.0);
    assertTaskMetricValue("pause-ratio", 0.0);
    assertTaskMetricValue("batch-size-max", 1.0);
    assertTaskMetricValue("batch-size-avg", 1.0);
    assertTaskMetricValue("offset-commit-max-time-ms", 0.0);
    assertTaskMetricValue("offset-commit-avg-time-ms", 0.0);
    assertTaskMetricValue("offset-commit-failure-percentage", 0.0);
    assertTaskMetricValue("offset-commit-success-percentage", 1.0);
}
@Test
@SuppressWarnings("unchecked")
public void testWakeupNotThrownDuringShutdown() {
    // Verifies that a WakeupException during the final offset commit at shutdown
    // is swallowed (retried) rather than surfacing as a commit failure.
    createTask(initialState);
    workerTask.initialize(TASK_CONFIG);
    workerTask.initializeAndStart();
    verifyInitializeTask();
    expectTaskGetTopic();
    expectPollInitialAssignment()
            .thenAnswer(expectConsumerPoll(1))
            .thenAnswer(invocation -> {
                // stop the task during its second iteration
                workerTask.stop();
                return new ConsumerRecords<>(Map.of(), Map.of());
            });
    expectConversionAndTransformation(null, new RecordHeaders());
    final Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>();
    offsets.put(TOPIC_PARTITION, new OffsetAndMetadata(FIRST_OFFSET + 1));
    offsets.put(TOPIC_PARTITION2, new OffsetAndMetadata(FIRST_OFFSET));
    when(sinkTask.preCommit(offsets)).thenReturn(offsets);
    // fail the first time
    doThrow(new WakeupException())
            // and succeed the second time
            .doNothing()
            .when(consumer).commitSync(offsets);
    workerTask.execute();
    // The wakeup during shutdown must not be counted as a commit failure
    assertEquals(0, workerTask.commitFailures());
    verify(consumer).wakeup();
    verify(sinkTask).close(any(Collection.class));
}
@Test
public void testRaisesFailedRetriableExceptionFromConvert() {
    // With the default (fail-fast) error tolerance, a conversion failure must
    // surface from WorkerSinkTask::execute as a ConnectException.
    createTask(initialState);
    workerTask.initialize(TASK_CONFIG);
    workerTask.initializeAndStart();
    verifyInitializeTask();
    expectPollInitialAssignment()
            .thenAnswer(expectConsumerPoll(1))
            .thenAnswer(invocation -> {
                // stop the task during its second iteration
                workerTask.stop();
                return new ConsumerRecords<>(Map.of(), Map.of());
            });
    throwExceptionOnConversion(null, new RecordHeaders());
    workerTask.iteration();
    assertThrows(ConnectException.class, workerTask::execute);
}
@Test
public void testSkipsFailedRetriableExceptionFromConvert() {
    // With an "all" error tolerance, a conversion failure is skipped: the task
    // keeps running and delivers empty batches to the sink instead of dying.
    createTask(initialState, keyConverter, valueConverter, headerConverter,
            RetryWithToleranceOperatorTest.allOperator(), List::of, transformationChain);
    workerTask.initialize(TASK_CONFIG);
    workerTask.initializeAndStart();
    verifyInitializeTask();
    expectPollInitialAssignment()
            .thenAnswer(expectConsumerPoll(1))
            .thenAnswer(invocation -> {
                // stop the task during its second iteration
                workerTask.stop();
                return new ConsumerRecords<>(Map.of(), Map.of());
            });
    throwExceptionOnConversion(null, new RecordHeaders());
    workerTask.iteration();
    workerTask.execute();
    // The failed record is dropped, so every put receives an empty batch
    verify(sinkTask, times(3)).put(List.of());
}
@Test
public void testRaisesFailedRetriableExceptionFromTransform() {
    // With a "none" error tolerance, a RetriableException from the
    // transformation chain must surface from execute() as a ConnectException.
    RetryWithToleranceOperator<RetriableException> retryWithToleranceOperator = RetryWithToleranceOperatorTest.noneOperator();
    TransformationChain<RetriableException, SinkRecord> transformationChainRetriableException = getTransformationChain(
            retryWithToleranceOperator, List.of(new RetriableException("Test")));
    createTask(initialState, transformationChainRetriableException, retryWithToleranceOperator);
    workerTask.initialize(TASK_CONFIG);
    workerTask.initializeAndStart();
    verifyInitializeTask();
    expectPollInitialAssignment()
            .thenAnswer(expectConsumerPoll(1))
            .thenAnswer(invocation -> {
                // stop the task during its second iteration
                workerTask.stop();
                return new ConsumerRecords<>(Map.of(), Map.of());
            });
    expectConversion(null, new RecordHeaders());
    workerTask.iteration();
    assertThrows(ConnectException.class, workerTask::execute);
}
@Test
public void testSkipsFailedRetriableExceptionFromTransform() {
    // With an "all" error tolerance, a RetriableException from the
    // transformation chain is skipped and the task keeps running.
    RetryWithToleranceOperator<RetriableException> retryWithToleranceOperator = RetryWithToleranceOperatorTest.allOperator();
    TransformationChain<RetriableException, SinkRecord> transformationChainRetriableException = getTransformationChain(
            retryWithToleranceOperator, List.of(new RetriableException("Test")));
    createTask(initialState, transformationChainRetriableException, retryWithToleranceOperator);
    workerTask.initialize(TASK_CONFIG);
    workerTask.initializeAndStart();
    verifyInitializeTask();
    expectPollInitialAssignment()
            .thenAnswer(expectConsumerPoll(1))
            .thenAnswer(invocation -> {
                // stop the task during its second iteration
                workerTask.stop();
                return new ConsumerRecords<>(Map.of(), Map.of());
            });
    expectConversion(null, new RecordHeaders());
    workerTask.iteration();
    workerTask.execute();
    // The failed record is dropped, so every put receives an empty batch
    verify(sinkTask, times(3)).put(List.of());
}
@Test
public void testRequestCommit() {
    // Verifies a SinkTaskContext-requested commit: the commit is performed
    // asynchronously, the request flag is cleared, and the next scheduled
    // commit time advances correctly (regression test for KAFKA-8229).
    createTask(initialState);
    workerTask.initialize(TASK_CONFIG);
    workerTask.initializeAndStart();
    verifyInitializeTask();
    expectTaskGetTopic();
    expectPollInitialAssignment()
            .thenAnswer(expectConsumerPoll(1))
            .thenAnswer(expectConsumerPoll(0));
    expectConversionAndTransformation(null, new RecordHeaders());
    // Initial assignment
    time.sleep(30000L);
    workerTask.iteration();
    assertSinkMetricValue("partition-count", 2);
    final Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>();
    offsets.put(TOPIC_PARTITION, new OffsetAndMetadata(FIRST_OFFSET + 1));
    offsets.put(TOPIC_PARTITION2, new OffsetAndMetadata(FIRST_OFFSET));
    when(sinkTask.preCommit(offsets)).thenReturn(offsets);
    // First record delivered
    workerTask.iteration();
    // Metrics before the commit: one active (uncommitted) record, no commits yet
    assertSinkMetricValue("partition-count", 2);
    assertSinkMetricValue("sink-record-read-total", 1.0);
    assertSinkMetricValue("sink-record-send-total", 1.0);
    assertSinkMetricValue("sink-record-active-count", 1.0);
    assertSinkMetricValue("sink-record-active-count-max", 1.0);
    assertSinkMetricValue("sink-record-active-count-avg", 0.333333);
    assertSinkMetricValue("offset-commit-seq-no", 0.0);
    assertSinkMetricValue("offset-commit-completion-total", 0.0);
    assertSinkMetricValue("offset-commit-skip-total", 0.0);
    assertTaskMetricValue("status", "running");
    assertTaskMetricValue("running-ratio", 1.0);
    assertTaskMetricValue("pause-ratio", 0.0);
    assertTaskMetricValue("batch-size-max", 1.0);
    assertTaskMetricValue("batch-size-avg", 0.5);
    assertTaskMetricValue("offset-commit-failure-percentage", 0.0);
    assertTaskMetricValue("offset-commit-success-percentage", 0.0);
    // Grab the commit time prior to requesting a commit.
    // This time should advance slightly after committing.
    // KAFKA-8229
    final long previousCommitValue = workerTask.getNextCommit();
    sinkTaskContext.getValue().requestCommit();
    assertTrue(sinkTaskContext.getValue().isCommitRequested());
    assertNotEquals(offsets, workerTask.lastCommittedOffsets());
    ArgumentCaptor<OffsetCommitCallback> callback = ArgumentCaptor.forClass(OffsetCommitCallback.class);
    time.sleep(10000L);
    workerTask.iteration(); // triggers the commit
    verify(consumer).commitAsync(eq(offsets), callback.capture());
    callback.getValue().onComplete(offsets, null);
    time.sleep(10000L);
    assertFalse(sinkTaskContext.getValue().isCommitRequested()); // should have been cleared
    assertEquals(offsets, workerTask.lastCommittedOffsets());
    assertEquals(0, workerTask.commitFailures());
    // Assert the next commit time advances slightly, the amount it advances
    // is the normal commit time less the two sleeps since it started each
    // of those sleeps were 10 seconds.
    // KAFKA-8229
    assertEquals(previousCommitValue +
                    (WorkerConfig.OFFSET_COMMIT_INTERVAL_MS_DEFAULT - 10000L * 2),
            workerTask.getNextCommit(),
            "Should have only advanced by 40 seconds");
    // Metrics after the commit: no active records, one completed commit
    assertSinkMetricValue("partition-count", 2);
    assertSinkMetricValue("sink-record-read-total", 1.0);
    assertSinkMetricValue("sink-record-send-total", 1.0);
    assertSinkMetricValue("sink-record-active-count", 0.0);
    assertSinkMetricValue("sink-record-active-count-max", 1.0);
    assertSinkMetricValue("sink-record-active-count-avg", 0.2);
    assertSinkMetricValue("offset-commit-seq-no", 1.0);
    assertSinkMetricValue("offset-commit-completion-total", 1.0);
    assertSinkMetricValue("offset-commit-skip-total", 0.0);
    assertTaskMetricValue("status", "running");
    assertTaskMetricValue("running-ratio", 1.0);
    assertTaskMetricValue("pause-ratio", 0.0);
    assertTaskMetricValue("batch-size-max", 1.0);
    assertTaskMetricValue("batch-size-avg", 0.33333);
    assertTaskMetricValue("offset-commit-max-time-ms", 0.0);
    assertTaskMetricValue("offset-commit-avg-time-ms", 0.0);
    assertTaskMetricValue("offset-commit-failure-percentage", 0.0);
    assertTaskMetricValue("offset-commit-success-percentage", 1.0);
}
@Test
public void testPreCommit() {
    // Verifies that offsets returned from SinkTask::preCommit are sanitized
    // before committing: offsets ahead of the consumer's position and offsets
    // for unassigned partitions are ignored.
    createTask(initialState);
    workerTask.initialize(TASK_CONFIG);
    workerTask.initializeAndStart();
    verifyInitializeTask();
    expectTaskGetTopic();
    expectPollInitialAssignment()
            .thenAnswer(expectConsumerPoll(2))
            .thenAnswer(expectConsumerPoll(0));
    expectConversionAndTransformation(null, new RecordHeaders());
    workerTask.iteration(); // iter 1 -- initial assignment
    final Map<TopicPartition, OffsetAndMetadata> workerStartingOffsets = new HashMap<>();
    workerStartingOffsets.put(TOPIC_PARTITION, new OffsetAndMetadata(FIRST_OFFSET));
    workerStartingOffsets.put(TOPIC_PARTITION2, new OffsetAndMetadata(FIRST_OFFSET));
    assertEquals(workerStartingOffsets, workerTask.currentOffsets());
    final Map<TopicPartition, OffsetAndMetadata> workerCurrentOffsets = new HashMap<>();
    workerCurrentOffsets.put(TOPIC_PARTITION, new OffsetAndMetadata(FIRST_OFFSET + 2));
    workerCurrentOffsets.put(TOPIC_PARTITION2, new OffsetAndMetadata(FIRST_OFFSET));
    final Map<TopicPartition, OffsetAndMetadata> taskOffsets = new HashMap<>();
    taskOffsets.put(TOPIC_PARTITION, new OffsetAndMetadata(FIRST_OFFSET + 1)); // act like FIRST_OFFSET+2 has not yet been flushed by the task
    taskOffsets.put(TOPIC_PARTITION2, new OffsetAndMetadata(FIRST_OFFSET + 1)); // should be ignored because > current offset
    taskOffsets.put(new TopicPartition(TOPIC, 3), new OffsetAndMetadata(FIRST_OFFSET)); // should be ignored because this partition is not assigned
    when(sinkTask.preCommit(workerCurrentOffsets)).thenReturn(taskOffsets);
    workerTask.iteration(); // iter 2 -- deliver 2 records
    final Map<TopicPartition, OffsetAndMetadata> committableOffsets = new HashMap<>();
    committableOffsets.put(TOPIC_PARTITION, new OffsetAndMetadata(FIRST_OFFSET + 1));
    committableOffsets.put(TOPIC_PARTITION2, new OffsetAndMetadata(FIRST_OFFSET));
    assertEquals(workerCurrentOffsets, workerTask.currentOffsets());
    assertEquals(workerStartingOffsets, workerTask.lastCommittedOffsets());
    sinkTaskContext.getValue().requestCommit();
    workerTask.iteration(); // iter 3 -- commit
    // Expect extra invalid topic partition to be filtered, which causes the consumer assignment to be logged
    ArgumentCaptor<OffsetCommitCallback> callback = ArgumentCaptor.forClass(OffsetCommitCallback.class);
    verify(consumer).commitAsync(eq(committableOffsets), callback.capture());
    callback.getValue().onComplete(committableOffsets, null);
    assertEquals(committableOffsets, workerTask.lastCommittedOffsets());
}
@Test
public void testPreCommitFailure() {
    // Verifies that when SinkTask::preCommit throws, the task rewinds the
    // consumer (seek) to the last committed offsets for all assigned partitions.
    createTask(initialState);
    workerTask.initialize(TASK_CONFIG);
    workerTask.initializeAndStart();
    verifyInitializeTask();
    expectTaskGetTopic();
    expectPollInitialAssignment()
            // Put one message through the task to get some offsets to commit
            .thenAnswer(expectConsumerPoll(2))
            .thenAnswer(expectConsumerPoll(0));
    expectConversionAndTransformation(null, new RecordHeaders());
    workerTask.iteration(); // iter 1 -- initial assignment
    workerTask.iteration(); // iter 2 -- deliver 2 records
    // iter 3
    final Map<TopicPartition, OffsetAndMetadata> workerCurrentOffsets = new HashMap<>();
    workerCurrentOffsets.put(TOPIC_PARTITION, new OffsetAndMetadata(FIRST_OFFSET + 2));
    workerCurrentOffsets.put(TOPIC_PARTITION2, new OffsetAndMetadata(FIRST_OFFSET));
    when(sinkTask.preCommit(workerCurrentOffsets)).thenThrow(new ConnectException("Failed to flush"));
    sinkTaskContext.getValue().requestCommit();
    workerTask.iteration(); // iter 3 -- commit
    verify(consumer).seek(TOPIC_PARTITION, FIRST_OFFSET);
    verify(consumer).seek(TOPIC_PARTITION2, FIRST_OFFSET);
}
@Test
public void testIgnoredCommit() {
    // Verifies that when SinkTask::preCommit returns offsets equal to the
    // last-committed offsets, the commit is skipped entirely (no
    // consumer.commitSync/commitAsync call is made).
    createTask(initialState);
    workerTask.initialize(TASK_CONFIG);
    workerTask.initializeAndStart();
    verifyInitializeTask();
    expectTaskGetTopic();
    // iter 1
    expectPollInitialAssignment()
            // iter 2
            .thenAnswer(expectConsumerPoll(1))
            .thenAnswer(expectConsumerPoll(0));
    expectConversionAndTransformation(null, new RecordHeaders());
    workerTask.iteration(); // iter 1 -- initial assignment
    final Map<TopicPartition, OffsetAndMetadata> workerStartingOffsets = new HashMap<>();
    workerStartingOffsets.put(TOPIC_PARTITION, new OffsetAndMetadata(FIRST_OFFSET));
    workerStartingOffsets.put(TOPIC_PARTITION2, new OffsetAndMetadata(FIRST_OFFSET));
    assertEquals(workerStartingOffsets, workerTask.currentOffsets());
    assertEquals(workerStartingOffsets, workerTask.lastCommittedOffsets());
    workerTask.iteration(); // iter 2 -- deliver 1 record
    // iter 3
    final Map<TopicPartition, OffsetAndMetadata> workerCurrentOffsets = new HashMap<>();
    workerCurrentOffsets.put(TOPIC_PARTITION, new OffsetAndMetadata(FIRST_OFFSET + 1));
    workerCurrentOffsets.put(TOPIC_PARTITION2, new OffsetAndMetadata(FIRST_OFFSET));
    // preCommit returns the starting offsets, which match lastCommittedOffsets
    when(sinkTask.preCommit(workerCurrentOffsets)).thenReturn(workerStartingOffsets);
    sinkTaskContext.getValue().requestCommit();
    // no actual consumer.commit() triggered
    workerTask.iteration(); // iter 3 -- commit
}
// Test that the commitTimeoutMs timestamp is correctly computed and checked in WorkerSinkTask.iteration()
// when there is a long running commit in process. See KAFKA-4942 for more information.
@Test
public void testLongRunningCommitWithoutTimeout() {
createTask(initialState);
workerTask.initialize(TASK_CONFIG);
workerTask.initializeAndStart();
verifyInitializeTask();
expectTaskGetTopic();
expectPollInitialAssignment()
.thenAnswer(expectConsumerPoll(1))
// no actual consumer.commit() triggered
.thenAnswer(expectConsumerPoll(0));
expectConversionAndTransformation(null, new RecordHeaders());
final Map<TopicPartition, OffsetAndMetadata> workerStartingOffsets = new HashMap<>();
workerStartingOffsets.put(TOPIC_PARTITION, new OffsetAndMetadata(FIRST_OFFSET));
workerStartingOffsets.put(TOPIC_PARTITION2, new OffsetAndMetadata(FIRST_OFFSET));
workerTask.iteration(); // iter 1 -- initial assignment
assertEquals(workerStartingOffsets, workerTask.currentOffsets());
assertEquals(workerStartingOffsets, workerTask.lastCommittedOffsets());
// Advance the mock clock by the full commit timeout before the commit even starts, to prove
// the timeout is measured from when the commit begins, not from task start.
time.sleep(WorkerConfig.OFFSET_COMMIT_TIMEOUT_MS_DEFAULT);
workerTask.iteration(); // iter 2 -- deliver 2 records
final Map<TopicPartition, OffsetAndMetadata> workerCurrentOffsets = new HashMap<>();
workerCurrentOffsets.put(TOPIC_PARTITION, new OffsetAndMetadata(FIRST_OFFSET + 1));
workerCurrentOffsets.put(TOPIC_PARTITION2, new OffsetAndMetadata(FIRST_OFFSET));
// iter 3 - note that we return the current offset to indicate they should be committed
when(sinkTask.preCommit(workerCurrentOffsets)).thenReturn(workerCurrentOffsets);
sinkTaskContext.getValue().requestCommit();
workerTask.iteration(); // iter 3 -- commit in progress
// Make sure the "committing" flag didn't immediately get flipped back to false due to an incorrect timeout
assertTrue(workerTask.isCommitting(), "Expected worker to be in the process of committing offsets");
// Delay the result of trying to commit offsets to Kafka via the consumer.commitAsync method.
ArgumentCaptor<OffsetCommitCallback> offsetCommitCallbackArgumentCaptor =
ArgumentCaptor.forClass(OffsetCommitCallback.class);
verify(consumer).commitAsync(eq(workerCurrentOffsets), offsetCommitCallbackArgumentCaptor.capture());
final OffsetCommitCallback callback = offsetCommitCallbackArgumentCaptor.getValue();
// Completing the async callback (even this "late") must finish the commit cleanly.
callback.onComplete(workerCurrentOffsets, null);
assertEquals(workerCurrentOffsets, workerTask.currentOffsets());
assertEquals(workerCurrentOffsets, workerTask.lastCommittedOffsets());
assertFalse(workerTask.isCommitting());
}
// Verifies that an exception thrown from SinkTask.close() during shutdown propagates out of
// execute() as-is, and that the consumer is woken up so the poll loop can exit.
@SuppressWarnings("unchecked")
@Test
public void testSinkTasksHandleCloseErrors() {
createTask(initialState);
workerTask.initialize(TASK_CONFIG);
workerTask.initializeAndStart();
verifyInitializeTask();
expectTaskGetTopic();
expectPollInitialAssignment()
// Put one message through the task to get some offsets to commit
.thenAnswer(expectConsumerPoll(1))
.thenAnswer(expectConsumerPoll(1));
expectConversionAndTransformation(null, new RecordHeaders());
// Second put() stops the task from inside the sink, triggering the shutdown path.
doNothing()
.doAnswer(invocation -> {
workerTask.stop();
return null;
})
.when(sinkTask).put(anyList());
Throwable closeException = new RuntimeException();
when(sinkTask.preCommit(anyMap())).thenReturn(Map.of());
// Throw another exception while closing the task's assignment
doThrow(closeException).when(sinkTask).close(any(Collection.class));
RuntimeException thrownException = assertThrows(RuntimeException.class, () -> workerTask.execute());
// With no prior failure, the close exception itself is what execute() rethrows.
assertEquals(closeException, thrownException);
verify(consumer).wakeup();
}
// Verifies that when both SinkTask.put() and SinkTask.close() throw, the put() exception is
// reported as the cause of the ConnectException thrown from execute(), while the close()
// exception is attached as a suppressed exception rather than masking the original failure.
@SuppressWarnings("unchecked")
@Test
public void testSuppressCloseErrors() {
createTask(initialState);
workerTask.initialize(TASK_CONFIG);
workerTask.initializeAndStart();
verifyInitializeTask();
expectTaskGetTopic();
expectPollInitialAssignment()
// Put one message through the task to get some offsets to commit
.thenAnswer(expectConsumerPoll(1))
.thenAnswer(expectConsumerPoll(1));
expectConversionAndTransformation(null, new RecordHeaders());
Throwable putException = new RuntimeException();
Throwable closeException = new RuntimeException();
doNothing()
// Throw an exception on the next put to trigger shutdown behavior
// This exception is the true "cause" of the failure
.doThrow(putException)
.when(sinkTask).put(anyList());
when(sinkTask.preCommit(anyMap())).thenReturn(Map.of());
// Throw another exception while closing the task's assignment
doThrow(closeException).when(sinkTask).close(any(Collection.class));
// Note: the task was already initialized and started above; a second
// initialize()/initializeAndStart() here would re-subscribe the consumer and restart the
// sink task, so execute() is invoked directly.
ConnectException thrownException = assertThrows(ConnectException.class, () -> workerTask.execute());
assertEquals(putException, thrownException.getCause(), "Exception from put should be the cause");
assertTrue(thrownException.getSuppressed().length > 0, "Exception from close should be suppressed");
assertEquals(closeException, thrownException.getSuppressed()[0]);
}
// Verifies that cancelling the task (after stop) prevents the final offset commit during
// shutdown: execute() completes, the consumer is woken, and the task is closed, but no
// commit of the prepared offsets is performed.
@Test
public void testTaskCancelPreventsFinalOffsetCommit() {
createTask(initialState);
workerTask.initialize(TASK_CONFIG);
workerTask.initializeAndStart();
verifyInitializeTask();
expectTaskGetTopic();
expectPollInitialAssignment()
// Put one message through the task to get some offsets to commit
.thenAnswer(expectConsumerPoll(1))
// the second put will return after the task is stopped and cancelled (asynchronously)
.thenAnswer(expectConsumerPoll(1));
expectConversionAndTransformation(null, new RecordHeaders());
// Third put() both stops and cancels the task from inside the sink callback.
doNothing()
.doNothing()
.doAnswer(invocation -> {
workerTask.stop();
workerTask.cancel();
return null;
})
.when(sinkTask).put(anyList());
// task performs normal steps in advance of committing offsets
final Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>();
offsets.put(TOPIC_PARTITION, new OffsetAndMetadata(FIRST_OFFSET + 2));
offsets.put(TOPIC_PARTITION2, new OffsetAndMetadata(FIRST_OFFSET));
when(sinkTask.preCommit(offsets)).thenReturn(offsets);
workerTask.execute();
// stop wakes up the consumer
verify(consumer).wakeup();
verify(sinkTask).close(any());
}
// Verify that when commitAsync is called but the supplied callback is not called by the consumer before a
// rebalance occurs, the async callback does not reset the last committed offset from the rebalance.
// See KAFKA-5731 for more information.
@Test
public void testCommitWithOutOfOrderCallback() {
createTask(initialState);
workerTask.initialize(TASK_CONFIG);
workerTask.initializeAndStart();
verifyInitializeTask();
// iter 1
Answer<ConsumerRecords<byte[], byte[]>> consumerPollRebalance = invocation -> {
rebalanceListener.getValue().onPartitionsAssigned(INITIAL_ASSIGNMENT);
return ConsumerRecords.empty();
};
// iter 2
expectTaskGetTopic();
expectConversionAndTransformation(null, new RecordHeaders());
final Map<TopicPartition, OffsetAndMetadata> workerCurrentOffsets = new HashMap<>();
workerCurrentOffsets.put(TOPIC_PARTITION, new OffsetAndMetadata(FIRST_OFFSET + 1));
workerCurrentOffsets.put(TOPIC_PARTITION2, new OffsetAndMetadata(FIRST_OFFSET));
final List<TopicPartition> originalPartitions = new ArrayList<>(INITIAL_ASSIGNMENT);
final List<TopicPartition> rebalancedPartitions = List.of(TOPIC_PARTITION, TOPIC_PARTITION2, TOPIC_PARTITION3);
// Offsets installed during the rebalance (via SinkTaskContext.offset); these must survive
// the late async-commit callback.
final Map<TopicPartition, OffsetAndMetadata> rebalanceOffsets = new HashMap<>();
rebalanceOffsets.put(TOPIC_PARTITION, workerCurrentOffsets.get(TOPIC_PARTITION));
rebalanceOffsets.put(TOPIC_PARTITION2, workerCurrentOffsets.get(TOPIC_PARTITION2));
rebalanceOffsets.put(TOPIC_PARTITION3, new OffsetAndMetadata(FIRST_OFFSET));
final Map<TopicPartition, OffsetAndMetadata> postRebalanceCurrentOffsets = new HashMap<>();
postRebalanceCurrentOffsets.put(TOPIC_PARTITION, new OffsetAndMetadata(FIRST_OFFSET + 3));
postRebalanceCurrentOffsets.put(TOPIC_PARTITION2, new OffsetAndMetadata(FIRST_OFFSET));
postRebalanceCurrentOffsets.put(TOPIC_PARTITION3, new OffsetAndMetadata(FIRST_OFFSET + 2));
// iter 3 - note that we return the current offset to indicate they should be committed
when(sinkTask.preCommit(workerCurrentOffsets)).thenReturn(workerCurrentOffsets);
// We need to delay the result of trying to commit offsets to Kafka via the consumer.commitAsync
// method. We do this so that we can test that the callback is not called until after the rebalance
// changes the lastCommittedOffsets. To fake this for tests we have the commitAsync build a function
// that will call the callback with the appropriate parameters, and we'll run that function later.
final AtomicReference<Runnable> asyncCallbackRunner = new AtomicReference<>();
final AtomicBoolean asyncCallbackRan = new AtomicBoolean();
doAnswer(invocation -> {
final Map<TopicPartition, OffsetAndMetadata> offsets = invocation.getArgument(0);
final OffsetCommitCallback callback = invocation.getArgument(1);
asyncCallbackRunner.set(() -> {
callback.onComplete(offsets, null);
asyncCallbackRan.set(true);
});
return null;
}).when(consumer).commitAsync(eq(workerCurrentOffsets), any(OffsetCommitCallback.class));
// Expect the next poll to discover and perform the rebalance, THEN complete the previous callback handler,
// and then return one record for TP1 and one for TP3.
final AtomicBoolean rebalanced = new AtomicBoolean();
Answer<ConsumerRecords<byte[], byte[]>> consumerPollRebalanced = invocation -> {
// Rebalance always begins with revoking current partitions ...
rebalanceListener.getValue().onPartitionsRevoked(originalPartitions);
// Respond to the rebalance
Map<TopicPartition, Long> offsets = new HashMap<>();
offsets.put(TOPIC_PARTITION, rebalanceOffsets.get(TOPIC_PARTITION).offset());
offsets.put(TOPIC_PARTITION2, rebalanceOffsets.get(TOPIC_PARTITION2).offset());
offsets.put(TOPIC_PARTITION3, rebalanceOffsets.get(TOPIC_PARTITION3).offset());
sinkTaskContext.getValue().offset(offsets);
rebalanceListener.getValue().onPartitionsAssigned(rebalancedPartitions);
rebalanced.set(true);
// Run the previous async commit handler
asyncCallbackRunner.get().run();
// And prep the two records to return
long timestamp = RecordBatch.NO_TIMESTAMP;
TimestampType timestampType = TimestampType.NO_TIMESTAMP_TYPE;
List<ConsumerRecord<byte[], byte[]>> records = new ArrayList<>();
records.add(new ConsumerRecord<>(TOPIC, PARTITION, FIRST_OFFSET + recordsReturnedTp1 + 1, timestamp, timestampType,
0, 0, RAW_KEY, RAW_VALUE, new RecordHeaders(), Optional.empty()));
records.add(new ConsumerRecord<>(TOPIC, PARTITION3, FIRST_OFFSET + recordsReturnedTp3 + 1, timestamp, timestampType,
0, 0, RAW_KEY, RAW_VALUE, new RecordHeaders(), Optional.empty()));
recordsReturnedTp1 += 1;
recordsReturnedTp3 += 1;
final TopicPartition tp = new TopicPartition(TOPIC, PARTITION);
final OffsetAndMetadata nextOffsetAndMetadata = new OffsetAndMetadata(FIRST_OFFSET + recordsReturnedTp1 + 2, Optional.empty(), "");
return new ConsumerRecords<>(Map.of(tp, records), Map.of(tp, nextOffsetAndMetadata));
};
// onPartitionsRevoked
// NOTE(review): this repeats the preCommit stubbing from above with identical arguments and
// return value; it appears redundant -- confirm whether it can be removed.
when(sinkTask.preCommit(workerCurrentOffsets)).thenReturn(workerCurrentOffsets);
// onPartitionsAssigned - step 1
final long offsetTp1 = rebalanceOffsets.get(TOPIC_PARTITION).offset();
final long offsetTp2 = rebalanceOffsets.get(TOPIC_PARTITION2).offset();
final long offsetTp3 = rebalanceOffsets.get(TOPIC_PARTITION3).offset();
// iter 4 - note that we return the current offset to indicate they should be committed
when(sinkTask.preCommit(postRebalanceCurrentOffsets)).thenReturn(postRebalanceCurrentOffsets);
// Setup mocks
when(consumer.assignment())
.thenReturn(INITIAL_ASSIGNMENT)
.thenReturn(INITIAL_ASSIGNMENT)
.thenReturn(INITIAL_ASSIGNMENT)
.thenReturn(INITIAL_ASSIGNMENT)
.thenReturn(INITIAL_ASSIGNMENT)
.thenReturn(new HashSet<>(rebalancedPartitions))
.thenReturn(new HashSet<>(rebalancedPartitions))
.thenReturn(new HashSet<>(rebalancedPartitions))
.thenReturn(new HashSet<>(rebalancedPartitions))
.thenReturn(new HashSet<>(rebalancedPartitions));
when(consumer.position(TOPIC_PARTITION))
.thenReturn(FIRST_OFFSET)
.thenReturn(offsetTp1);
when(consumer.position(TOPIC_PARTITION2))
.thenReturn(FIRST_OFFSET)
.thenReturn(offsetTp2);
when(consumer.position(TOPIC_PARTITION3))
.thenReturn(offsetTp3);
when(consumer.poll(any(Duration.class)))
.thenAnswer(consumerPollRebalance)
.thenAnswer(expectConsumerPoll(1))
.thenAnswer(consumerPollRebalanced)
.thenAnswer(expectConsumerPoll(1));
// Run the iterations
workerTask.iteration(); // iter 1 -- initial assignment
time.sleep(WorkerConfig.OFFSET_COMMIT_TIMEOUT_MS_DEFAULT);
workerTask.iteration(); // iter 2 -- deliver records
sinkTaskContext.getValue().requestCommit();
workerTask.iteration(); // iter 3 -- commit in progress
assertSinkMetricValue("partition-count", 3);
assertSinkMetricValue("sink-record-read-total", 3.0);
assertSinkMetricValue("sink-record-send-total", 3.0);
assertSinkMetricValue("sink-record-active-count", 4.0);
assertSinkMetricValue("sink-record-active-count-max", 4.0);
assertSinkMetricValue("sink-record-active-count-avg", 0.71429);
assertSinkMetricValue("offset-commit-seq-no", 2.0);
assertSinkMetricValue("offset-commit-completion-total", 1.0);
assertSinkMetricValue("offset-commit-skip-total", 1.0);
assertTaskMetricValue("status", "running");
assertTaskMetricValue("running-ratio", 1.0);
assertTaskMetricValue("pause-ratio", 0.0);
assertTaskMetricValue("batch-size-max", 2.0);
assertTaskMetricValue("batch-size-avg", 1.0);
assertTaskMetricValue("offset-commit-max-time-ms", 0.0);
assertTaskMetricValue("offset-commit-avg-time-ms", 0.0);
assertTaskMetricValue("offset-commit-failure-percentage", 0.0);
assertTaskMetricValue("offset-commit-success-percentage", 1.0);
assertTrue(asyncCallbackRan.get());
assertTrue(rebalanced.get());
// Check that the offsets were not reset by the out-of-order async commit callback
assertEquals(postRebalanceCurrentOffsets, workerTask.currentOffsets());
assertEquals(rebalanceOffsets, workerTask.lastCommittedOffsets());
// onPartitionsRevoked
ArgumentCaptor<Collection<TopicPartition>> closeCaptor = ArgumentCaptor.forClass(Collection.class);
verify(sinkTask).close(closeCaptor.capture());
Collection<TopicPartition> actualClosePartitions = closeCaptor.getValue();
assertEquals(workerCurrentOffsets.keySet(), new HashSet<>(actualClosePartitions));
verify(consumer).commitSync(anyMap());
// onPartitionsAssigned - step 2
verify(sinkTask).open(rebalancedPartitions);
// onPartitionsAssigned - step 3 rewind
verify(consumer).seek(TOPIC_PARTITION, offsetTp1);
verify(consumer).seek(TOPIC_PARTITION2, offsetTp2);
verify(consumer).seek(TOPIC_PARTITION3, offsetTp3);
time.sleep(WorkerConfig.OFFSET_COMMIT_TIMEOUT_MS_DEFAULT);
sinkTaskContext.getValue().requestCommit();
workerTask.iteration(); // iter 4 -- commit in progress
final ArgumentCaptor<OffsetCommitCallback> callback = ArgumentCaptor.forClass(OffsetCommitCallback.class);
verify(consumer).commitAsync(eq(postRebalanceCurrentOffsets), callback.capture());
callback.getValue().onComplete(postRebalanceCurrentOffsets, null);
// Check that the offsets were not reset by the out-of-order async commit callback
assertEquals(postRebalanceCurrentOffsets, workerTask.currentOffsets());
assertEquals(postRebalanceCurrentOffsets, workerTask.lastCommittedOffsets());
assertSinkMetricValue("partition-count", 3);
assertSinkMetricValue("sink-record-read-total", 4.0);
assertSinkMetricValue("sink-record-send-total", 4.0);
assertSinkMetricValue("sink-record-active-count", 0.0);
assertSinkMetricValue("sink-record-active-count-max", 4.0);
assertSinkMetricValue("sink-record-active-count-avg", 0.5555555);
assertSinkMetricValue("offset-commit-seq-no", 3.0);
assertSinkMetricValue("offset-commit-completion-total", 2.0);
assertSinkMetricValue("offset-commit-skip-total", 1.0);
assertTaskMetricValue("status", "running");
assertTaskMetricValue("running-ratio", 1.0);
assertTaskMetricValue("pause-ratio", 0.0);
assertTaskMetricValue("batch-size-max", 2.0);
assertTaskMetricValue("batch-size-avg", 1.0);
assertTaskMetricValue("offset-commit-max-time-ms", 0.0);
assertTaskMetricValue("offset-commit-avg-time-ms", 0.0);
assertTaskMetricValue("offset-commit-failure-percentage", 0.0);
assertTaskMetricValue("offset-commit-success-percentage", 1.0);
}
// Verifies end-to-end delivery and commit when the transformation chain rewrites the topic
// name ("newtopic_" prefix): the commit must still use the ORIGINAL topic partitions, the
// commit-requested flag must be cleared after the async commit completes, and no commit
// failures are recorded.
@Test
public void testDeliveryWithMutatingTransform() {
createTask(initialState);
workerTask.initialize(TASK_CONFIG);
workerTask.initializeAndStart();
verifyInitializeTask();
expectTaskGetTopic();
expectPollInitialAssignment()
.thenAnswer(expectConsumerPoll(1))
.thenAnswer(expectConsumerPoll(0));
expectConversionAndTransformation("newtopic_", new RecordHeaders());
workerTask.iteration(); // initial assignment
workerTask.iteration(); // first record delivered
final Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>();
offsets.put(TOPIC_PARTITION, new OffsetAndMetadata(FIRST_OFFSET + 1));
offsets.put(TOPIC_PARTITION2, new OffsetAndMetadata(FIRST_OFFSET));
when(sinkTask.preCommit(offsets)).thenReturn(offsets);
sinkTaskContext.getValue().requestCommit();
assertTrue(sinkTaskContext.getValue().isCommitRequested());
assertNotEquals(offsets, workerTask.lastCommittedOffsets());
workerTask.iteration(); // triggers the commit
ArgumentCaptor<OffsetCommitCallback> callback = ArgumentCaptor.forClass(OffsetCommitCallback.class);
verify(consumer).commitAsync(eq(offsets), callback.capture());
// Simulate the broker acknowledging the async commit.
callback.getValue().onComplete(offsets, null);
assertFalse(sinkTaskContext.getValue().isCommitRequested()); // should have been cleared
assertEquals(offsets, workerTask.lastCommittedOffsets());
assertEquals(0, workerTask.commitFailures());
assertEquals(1.0, metrics.currentMetricValueAsDouble(workerTask.taskMetricsGroup().metricGroup(), "batch-size-max"), 0.0001);
}
// Verifies that a consumer record carrying Kafka's RecordBatch.NO_TIMESTAMP sentinel is
// surfaced to the SinkRecord as a null timestamp (the Connect API convention), while the
// timestamp type is propagated unchanged.
@Test
public void testMissingTimestampPropagation() {
createTask(initialState);
expectTaskGetTopic();
workerTask.initialize(TASK_CONFIG);
workerTask.initializeAndStart();
verifyInitializeTask();
expectPollInitialAssignment()
.thenAnswer(expectConsumerPoll(1, RecordBatch.NO_TIMESTAMP, TimestampType.CREATE_TIME, new RecordHeaders()));
expectConversionAndTransformation(null, new RecordHeaders());
workerTask.iteration(); // iter 1 -- initial assignment
workerTask.iteration(); // iter 2 -- deliver 1 record
@SuppressWarnings("unchecked")
ArgumentCaptor<Collection<SinkRecord>> records = ArgumentCaptor.forClass(Collection.class);
// put() is called twice: once with the empty initial batch, once with the delivered record.
verify(sinkTask, times(2)).put(records.capture());
SinkRecord record = records.getValue().iterator().next();
// we expect null for missing timestamp, the sentinel value of Record.NO_TIMESTAMP is Kafka's API
assertNull(record.timestamp());
assertEquals(TimestampType.CREATE_TIME, record.timestampType());
}
// Verifies that a real record timestamp and its timestamp type are propagated unchanged
// from the consumer record to the SinkRecord handed to the task.
@Test
public void testTimestampPropagation() {
final Long timestamp = System.currentTimeMillis();
final TimestampType timestampType = TimestampType.CREATE_TIME;
createTask(initialState);
expectTaskGetTopic();
workerTask.initialize(TASK_CONFIG);
workerTask.initializeAndStart();
verifyInitializeTask();
expectPollInitialAssignment()
.thenAnswer(expectConsumerPoll(1, timestamp, timestampType, new RecordHeaders()));
expectConversionAndTransformation(null, new RecordHeaders());
workerTask.iteration(); // iter 1 -- initial assignment
workerTask.iteration(); // iter 2 -- deliver 1 record
@SuppressWarnings("unchecked")
ArgumentCaptor<Collection<SinkRecord>> records = ArgumentCaptor.forClass(Collection.class);
// put() is called twice: once with the empty initial batch, once with the delivered record.
verify(sinkTask, times(2)).put(records.capture());
SinkRecord record = records.getValue().iterator().next();
assertEquals(timestamp, record.timestamp());
assertEquals(timestampType, record.timestampType());
}
// Verifies that configuring "topics.regex" (instead of "topics") subscribes the consumer
// with a Pattern, and that a task created in the PAUSED target state pauses its assigned
// partitions after the initial assignment.
@Test
public void testTopicsRegex() {
Map<String, String> props = new HashMap<>(TASK_PROPS);
props.remove("topics");
props.put("topics.regex", "te.*");
TaskConfig taskConfig = new TaskConfig(props);
createTask(TargetState.PAUSED);
workerTask.initialize(taskConfig);
workerTask.initializeAndStart();
ArgumentCaptor<Pattern> topicsRegex = ArgumentCaptor.forClass(Pattern.class);
// Regex subscription must be used instead of the explicit topic list.
verify(consumer).subscribe(topicsRegex.capture(), rebalanceListener.capture());
assertEquals("te.*", topicsRegex.getValue().pattern());
verify(sinkTask).initialize(sinkTaskContext.capture());
verify(sinkTask).start(props);
expectPollInitialAssignment();
workerTask.iteration();
time.sleep(10000L);
// PAUSED target state: the task must pause all newly-assigned partitions.
verify(consumer).pause(INITIAL_ASSIGNMENT);
}
// Exercises SinkTaskMetricsGroup directly (no worker task): records reads/sends/puts and
// offset bookkeeping for two independent task groups, checks the derived metric values,
// then closes one group and verifies its metrics and sensors are removed while the other
// group's metrics are unaffected.
@Test
public void testMetricsGroup() {
SinkTaskMetricsGroup group = new SinkTaskMetricsGroup(taskId, metrics);
SinkTaskMetricsGroup group1 = new SinkTaskMetricsGroup(taskId1, metrics);
for (int i = 0; i != 10; ++i) {
group.recordRead(1);
group.recordSend(2);
group.recordPut(3);
group.recordPartitionCount(4);
group.recordOffsetSequenceNumber(5);
}
Map<TopicPartition, OffsetAndMetadata> committedOffsets = new HashMap<>();
committedOffsets.put(TOPIC_PARTITION, new OffsetAndMetadata(FIRST_OFFSET + 1));
group.recordCommittedOffsets(committedOffsets);
Map<TopicPartition, OffsetAndMetadata> consumedOffsets = new HashMap<>();
consumedOffsets.put(TOPIC_PARTITION, new OffsetAndMetadata(FIRST_OFFSET + 10));
group.recordConsumedOffsets(consumedOffsets);
for (int i = 0; i != 20; ++i) {
group1.recordRead(1);
group1.recordSend(2);
group1.recordPut(30);
group1.recordPartitionCount(40);
group1.recordOffsetSequenceNumber(50);
}
committedOffsets = new HashMap<>();
committedOffsets.put(TOPIC_PARTITION2, new OffsetAndMetadata(FIRST_OFFSET + 2));
committedOffsets.put(TOPIC_PARTITION3, new OffsetAndMetadata(FIRST_OFFSET + 3));
group1.recordCommittedOffsets(committedOffsets);
consumedOffsets = new HashMap<>();
consumedOffsets.put(TOPIC_PARTITION2, new OffsetAndMetadata(FIRST_OFFSET + 20));
consumedOffsets.put(TOPIC_PARTITION3, new OffsetAndMetadata(FIRST_OFFSET + 30));
group1.recordConsumedOffsets(consumedOffsets);
// active-count = consumed - committed offsets: (10 - 1) = 9 for group.
assertEquals(0.333, metrics.currentMetricValueAsDouble(group.metricGroup(), "sink-record-read-rate"), 0.001d);
assertEquals(0.667, metrics.currentMetricValueAsDouble(group.metricGroup(), "sink-record-send-rate"), 0.001d);
assertEquals(9, metrics.currentMetricValueAsDouble(group.metricGroup(), "sink-record-active-count"), 0.001d);
assertEquals(4, metrics.currentMetricValueAsDouble(group.metricGroup(), "partition-count"), 0.001d);
assertEquals(5, metrics.currentMetricValueAsDouble(group.metricGroup(), "offset-commit-seq-no"), 0.001d);
assertEquals(3, metrics.currentMetricValueAsDouble(group.metricGroup(), "put-batch-max-time-ms"), 0.001d);
// Close the group
group.close();
for (MetricName metricName : group.metricGroup().metrics().metrics().keySet()) {
// Metrics for this group should no longer exist
assertFalse(group.metricGroup().groupId().includes(metricName));
}
// Sensors for this group should no longer exist
assertNull(group.metricGroup().metrics().getSensor("source-record-poll"));
assertNull(group.metricGroup().metrics().getSensor("source-record-write"));
assertNull(group.metricGroup().metrics().getSensor("poll-batch-time"));
// group1 must be untouched by closing group: (20+30) - (2+3) = 45 active records.
assertEquals(0.667, metrics.currentMetricValueAsDouble(group1.metricGroup(), "sink-record-read-rate"), 0.001d);
assertEquals(1.333, metrics.currentMetricValueAsDouble(group1.metricGroup(), "sink-record-send-rate"), 0.001d);
assertEquals(45, metrics.currentMetricValueAsDouble(group1.metricGroup(), "sink-record-active-count"), 0.001d);
assertEquals(40, metrics.currentMetricValueAsDouble(group1.metricGroup(), "partition-count"), 0.001d);
assertEquals(50, metrics.currentMetricValueAsDouble(group1.metricGroup(), "offset-commit-seq-no"), 0.001d);
assertEquals(30, metrics.currentMetricValueAsDouble(group1.metricGroup(), "put-batch-max-time-ms"), 0.001d);
}
// Verifies that Kafka record headers are converted and attached to the SinkRecord handed
// to the task, preserving both key and value of each header.
@Test
public void testHeaders() {
createTask(initialState);
workerTask.initialize(TASK_CONFIG);
workerTask.initializeAndStart();
verifyInitializeTask();
Headers headers = new RecordHeaders();
headers.add("header_key", "header_value".getBytes());
expectPollInitialAssignment()
.thenAnswer(expectConsumerPoll(1, headers));
expectConversionAndTransformation(null, headers);
workerTask.iteration(); // iter 1 -- initial assignment
workerTask.iteration(); // iter 2 -- deliver 1 record
@SuppressWarnings("unchecked")
ArgumentCaptor<Collection<SinkRecord>> recordCapture = ArgumentCaptor.forClass(Collection.class);
// put() is called twice: once with the empty initial batch, once with the delivered record.
verify(sinkTask, times(2)).put(recordCapture.capture());
assertEquals(1, recordCapture.getValue().size());
SinkRecord record = recordCapture.getValue().iterator().next();
assertEquals("header_value", record.headers().lastWithName("header_key").value());
}
// Verifies that a custom header-aware value converter can use per-record headers (here a
// charset name under the "encoding" header) to decode each record's value correctly, even
// when different records in the same batch use different encodings.
@Test
public void testHeadersWithCustomConverter() {
StringConverter stringConverter = new StringConverter();
SampleConverterWithHeaders testConverter = new SampleConverterWithHeaders();
createTask(initialState, stringConverter, testConverter, stringConverter);
workerTask.initialize(TASK_CONFIG);
workerTask.initializeAndStart();
verifyInitializeTask();
// Record A: Hungarian text encoded as latin2.
String keyA = "a";
String valueA = "Árvíztűrő tükörfúrógép";
Headers headersA = new RecordHeaders();
String encodingA = "latin2";
headersA.add("encoding", encodingA.getBytes());
// Record B: Russian text encoded as koi8_r.
String keyB = "b";
String valueB = "Тестовое сообщение";
Headers headersB = new RecordHeaders();
String encodingB = "koi8_r";
headersB.add("encoding", encodingB.getBytes());
expectPollInitialAssignment()
.thenAnswer((Answer<ConsumerRecords<byte[], byte[]>>) invocation -> {
List<ConsumerRecord<byte[], byte[]>> records = List.of(
new ConsumerRecord<>(TOPIC, PARTITION, FIRST_OFFSET + recordsReturnedTp1 + 1, RecordBatch.NO_TIMESTAMP, TimestampType.NO_TIMESTAMP_TYPE,
0, 0, keyA.getBytes(), valueA.getBytes(encodingA), headersA, Optional.empty()),
new ConsumerRecord<>(TOPIC, PARTITION, FIRST_OFFSET + recordsReturnedTp1 + 2, RecordBatch.NO_TIMESTAMP, TimestampType.NO_TIMESTAMP_TYPE,
0, 0, keyB.getBytes(), valueB.getBytes(encodingB), headersB, Optional.empty())
);
final OffsetAndMetadata nextOffsetAndMetadata = new OffsetAndMetadata(FIRST_OFFSET + recordsReturnedTp1 + 3, Optional.empty(), "");
final TopicPartition tp = new TopicPartition(TOPIC, PARTITION);
return new ConsumerRecords<>(Map.of(tp, records), Map.of(tp, nextOffsetAndMetadata));
});
expectTransformation(null);
workerTask.iteration(); // iter 1 -- initial assignment
workerTask.iteration(); // iter 2 -- deliver records
@SuppressWarnings("unchecked")
ArgumentCaptor<Collection<SinkRecord>> records = ArgumentCaptor.forClass(Collection.class);
verify(sinkTask, times(2)).put(records.capture());
// Both records must round-trip through the header-driven decoding intact.
Iterator<SinkRecord> iterator = records.getValue().iterator();
SinkRecord recordA = iterator.next();
assertEquals(keyA, recordA.key());
assertEquals(valueA, recordA.value());
SinkRecord recordB = iterator.next();
assertEquals(keyB, recordB.key());
assertEquals(valueB, recordB.value());
}
// Verifies that when a transformation rewrites the topic name, the SinkRecord still exposes
// the ORIGINAL Kafka topic via originalTopic() while topic() reflects the transformed name.
@Test
public void testOriginalTopicWithTopicMutatingTransformations() {
createTask(initialState);
workerTask.initialize(TASK_CONFIG);
workerTask.initializeAndStart();
verifyInitializeTask();
expectPollInitialAssignment()
.thenAnswer(expectConsumerPoll(1));
expectConversionAndTransformation("newtopic_", new RecordHeaders());
workerTask.iteration(); // initial assignment
workerTask.iteration(); // first record delivered
@SuppressWarnings("unchecked")
ArgumentCaptor<Collection<SinkRecord>> recordCapture = ArgumentCaptor.forClass(Collection.class);
verify(sinkTask, times(2)).put(recordCapture.capture());
assertEquals(1, recordCapture.getValue().size());
SinkRecord record = recordCapture.getValue().iterator().next();
assertEquals(TOPIC, record.originalTopic());
assertEquals("newtopic_" + TOPIC, record.topic());
}
// Verifies that the "partition-count" sink metric tracks the consumer's assignment through
// rebalances (2 -> 1 after revoking one partition) and drops to 0 when the task is closed.
// Uses a real MockConsumer (not a Mockito mock) so rebalance() drives the listener directly.
@Test
public void testPartitionCountInCaseOfPartitionRevocation() {
MockConsumer<byte[], byte[]> mockConsumer = new MockConsumer<>(AutoOffsetResetStrategy.EARLIEST.name());
// Setting up Worker Sink Task to check metrics
createTask(taskId, sinkTask, statusListener, TargetState.PAUSED, workerConfig, metrics,
keyConverter, valueConverter, errorHandlingMetrics, headerConverter,
transformationChain, mockConsumer, pluginLoader, time,
RetryWithToleranceOperatorTest.noneOperator(), statusBackingStore, List::of);
mockConsumer.updateBeginningOffsets(
new HashMap<>() {{
put(TOPIC_PARTITION, 0L);
put(TOPIC_PARTITION2, 0L);
}}
);
workerTask.initialize(TASK_CONFIG);
workerTask.initializeAndStart();
// Initial Re-balance to assign INITIAL_ASSIGNMENT which is "TOPIC_PARTITION" and "TOPIC_PARTITION2"
mockConsumer.rebalance(INITIAL_ASSIGNMENT);
assertSinkMetricValue("partition-count", 2);
// Revoked "TOPIC_PARTITION" and second re-balance with "TOPIC_PARTITION2"
mockConsumer.rebalance(Set.of(TOPIC_PARTITION2));
assertSinkMetricValue("partition-count", 1);
// Closing the Worker Sink Task which will update the partition count as 0.
workerTask.close();
assertSinkMetricValue("partition-count", 0);
}
/**
 * Stubs the mocks so the next rebalance revocation fails: closing the initially-assigned
 * partitions throws {@code e}, while preCommit reports nothing to flush.
 */
private void expectRebalanceRevocationError(RuntimeException e) {
    doThrow(e).when(sinkTask).close(INITIAL_ASSIGNMENT);
    when(sinkTask.preCommit(anyMap())).thenReturn(Map.of());
}
/**
 * Stubs the mocks so the next rebalance assignment fails: opening the initially-assigned
 * partitions throws {@code e}. Positions and preCommit are stubbed so the preceding
 * revocation phase completes normally.
 */
private void expectRebalanceAssignmentError(RuntimeException e) {
    doThrow(e).when(sinkTask).open(INITIAL_ASSIGNMENT);
    when(sinkTask.preCommit(anyMap())).thenReturn(Map.of());
    when(consumer.position(TOPIC_PARTITION)).thenReturn(FIRST_OFFSET);
    when(consumer.position(TOPIC_PARTITION2)).thenReturn(FIRST_OFFSET);
}
/**
 * Verifies the standard initialization handshake and captures the rebalance listener and
 * sink task context for later use by the test.
 */
private void verifyInitializeTask() {
    // The task must be handed its context and started with the task properties ...
    verify(sinkTask).initialize(sinkTaskContext.capture());
    verify(sinkTask).start(TASK_PROPS);
    // ... and the consumer subscribed to the configured topic with our rebalance listener.
    verify(consumer).subscribe(eq(List.of(TOPIC)), rebalanceListener.capture());
}
/**
 * Stubs the consumer so the first poll performs the initial partition assignment (invoking
 * the captured rebalance listener) and returns no records. The returned stubbing lets
 * callers chain answers for subsequent polls.
 */
private OngoingStubbing<ConsumerRecords<byte[], byte[]>> expectPollInitialAssignment() {
    when(consumer.assignment()).thenReturn(INITIAL_ASSIGNMENT);
    for (TopicPartition partition : INITIAL_ASSIGNMENT) {
        when(consumer.position(partition)).thenReturn(FIRST_OFFSET);
    }
    return when(consumer.poll(any(Duration.class))).thenAnswer(invocation -> {
        rebalanceListener.getValue().onPartitionsAssigned(INITIAL_ASSIGNMENT);
        return ConsumerRecords.empty();
    });
}
/**
 * Verifies the side effects of the initial-assignment poll: the assignment was queried,
 * the task was opened with its partitions, and an empty batch was delivered.
 */
private void verifyPollInitialAssignment() {
    verify(consumer, atLeastOnce()).assignment();
    verify(sinkTask).open(INITIAL_ASSIGNMENT);
    verify(sinkTask).put(List.of());
}
/**
 * Poll answer producing {@code numMessages} records with no timestamp and empty headers.
 * Delegates to the headers-aware overload, which applies the no-timestamp defaults.
 */
private Answer<ConsumerRecords<byte[], byte[]>> expectConsumerPoll(final int numMessages) {
    return expectConsumerPoll(numMessages, new RecordHeaders());
}
/**
 * Poll answer producing {@code numMessages} records carrying the supplied headers, with
 * Kafka's no-timestamp sentinel and no timestamp type.
 */
private Answer<ConsumerRecords<byte[], byte[]>> expectConsumerPoll(final int numMessages, Headers recordHeaders) {
    return expectConsumerPoll(numMessages, RecordBatch.NO_TIMESTAMP, TimestampType.NO_TIMESTAMP_TYPE, recordHeaders);
}
/**
 * Builds a poll answer that fabricates {@code numMessages} consecutive records for
 * TOPIC/PARTITION, starting at FIRST_OFFSET plus the number of records already returned
 * for that partition. Advances the {@code recordsReturnedTp1} counter as a side effect so
 * successive polls yield monotonically increasing offsets.
 */
private Answer<ConsumerRecords<byte[], byte[]>> expectConsumerPoll(final int numMessages, final long timestamp, final TimestampType timestampType, Headers headers) {
    return invocation -> {
        // First offset of this batch, computed before bumping the counter.
        final long baseOffset = FIRST_OFFSET + recordsReturnedTp1;
        List<ConsumerRecord<byte[], byte[]>> batch = new ArrayList<>(numMessages);
        for (int i = 0; i < numMessages; i++) {
            batch.add(new ConsumerRecord<>(TOPIC, PARTITION, baseOffset + i, timestamp, timestampType,
                0, 0, RAW_KEY, RAW_VALUE, headers, Optional.empty()));
        }
        recordsReturnedTp1 += numMessages;
        if (batch.isEmpty()) {
            return new ConsumerRecords<>(Map.of(), Map.of());
        }
        TopicPartition tp = new TopicPartition(TOPIC, PARTITION);
        // Next-offset metadata points just past the last fabricated record.
        OffsetAndMetadata nextOffset = new OffsetAndMetadata(baseOffset + numMessages, Optional.empty(), "");
        return new ConsumerRecords<>(Map.of(tp, batch), Map.of(tp, nextOffset));
    };
}
/**
 * Stubs key/value/header conversion to the canonical schema-and-value fixtures and wires
 * the transformation chain; a non-empty {@code topicPrefix} makes the transformation
 * rewrite each record's topic.
 */
private void expectConversionAndTransformation(final String topicPrefix, final Headers headers) {
    when(keyConverter.toConnectData(TOPIC, headers, RAW_KEY)).thenReturn(new SchemaAndValue(KEY_SCHEMA, KEY));
    when(valueConverter.toConnectData(TOPIC, headers, RAW_VALUE)).thenReturn(new SchemaAndValue(VALUE_SCHEMA, VALUE));
    // Each header is converted to its byte value rendered as a String.
    headers.forEach(header ->
        when(headerConverter.toConnectHeader(TOPIC, header.key(), header.value()))
            .thenReturn(new SchemaAndValue(VALUE_SCHEMA, new String(header.value()))));
    expectTransformation(topicPrefix);
}
/**
 * Stubs only the key/value/header conversion (no transformation chain). The
 * {@code topicPrefix} parameter mirrors {@link #expectConversionAndTransformation} for
 * symmetry and is not used here.
 */
private void expectConversion(final String topicPrefix, final Headers headers) {
    when(keyConverter.toConnectData(TOPIC, headers, RAW_KEY)).thenReturn(new SchemaAndValue(KEY_SCHEMA, KEY));
    when(valueConverter.toConnectData(TOPIC, headers, RAW_VALUE)).thenReturn(new SchemaAndValue(VALUE_SCHEMA, VALUE));
    // Each header is converted to its byte value rendered as a String.
    headers.forEach(header ->
        when(headerConverter.toConnectHeader(TOPIC, header.key(), header.value()))
            .thenReturn(new SchemaAndValue(VALUE_SCHEMA, new String(header.value()))));
}
// Stubs the key converter to fail with a RetriableException, simulating a conversion error
// for every matching record. NOTE(review): topicPrefix is unused here; both parameters
// mirror the other expect* helpers' signatures for symmetry.
private void throwExceptionOnConversion(final String topicPrefix, final Headers headers) {
when(keyConverter.toConnectData(TOPIC, headers, RAW_KEY)).thenThrow(new RetriableException("Failed to convert"));
}
/**
 * Stubs the transformation chain: records pass through unchanged unless a non-empty
 * {@code topicPrefix} is supplied, in which case each record is re-created with the
 * prefixed topic name (all other fields preserved).
 */
@SuppressWarnings("unchecked")
private void expectTransformation(final String topicPrefix) {
    when(transformationChain.apply(any(ProcessingContext.class), any(SinkRecord.class)))
        .thenAnswer((Answer<SinkRecord>) invocation -> {
            SinkRecord original = invocation.getArgument(1);
            if (topicPrefix == null || topicPrefix.isEmpty()) {
                return original;
            }
            return original.newRecord(
                topicPrefix + original.topic(),
                original.kafkaPartition(),
                original.keySchema(),
                original.key(),
                original.valueSchema(),
                original.value(),
                original.timestamp(),
                original.headers());
        });
}
private void expectTaskGetTopic() {
when(statusBackingStore.getTopic(anyString(), anyString())).thenAnswer((Answer<TopicStatus>) invocation -> {
String connector = invocation.getArgument(0, String.class);
String topic = invocation.getArgument(1, String.class);
return new TopicStatus(topic, new ConnectorTaskId(connector, 0), Time.SYSTEM.milliseconds());
});
}
private void assertSinkMetricValue(String name, double expected) {
MetricGroup sinkTaskGroup = workerTask.sinkTaskMetricsGroup().metricGroup();
double measured = metrics.currentMetricValueAsDouble(sinkTaskGroup, name);
assertEquals(expected, measured, 0.001d);
}
private void assertTaskMetricValue(String name, double expected) {
MetricGroup taskGroup = workerTask.taskMetricsGroup().metricGroup();
double measured = metrics.currentMetricValueAsDouble(taskGroup, name);
assertEquals(expected, measured, 0.001d);
}
private void assertTaskMetricValue(String name, String expected) {
MetricGroup taskGroup = workerTask.taskMetricsGroup().metricGroup();
String measured = metrics.currentMetricValueAsString(taskGroup, name);
assertEquals(expected, measured);
}
}
| WorkerSinkTaskTest |
java | apache__camel | components/camel-aws/camel-aws2-kinesis/src/test/java/org/apache/camel/component/aws2/kinesis/KinesisClientFactoryTest.java | {
"start": 1246,
"end": 3577
} | class ____ {
@Test
void getStandardKinesisClientDefault() {
Kinesis2Configuration kinesis2Configuration = new Kinesis2Configuration();
KinesisInternalClient kinesisClient = KinesisClientFactory.getKinesisClient(kinesis2Configuration);
assertTrue(kinesisClient instanceof KinesisClientStandardImpl);
}
@Test
void getStandardKinesisClient() {
Kinesis2Configuration kinesis2Configuration = new Kinesis2Configuration();
kinesis2Configuration.setUseDefaultCredentialsProvider(false);
KinesisInternalClient kinesisClient = KinesisClientFactory.getKinesisClient(kinesis2Configuration);
assertTrue(kinesisClient instanceof KinesisClientStandardImpl);
}
@Test
void getIAMOptimizedKinesisClient() {
Kinesis2Configuration kinesis2Configuration = new Kinesis2Configuration();
kinesis2Configuration.setUseDefaultCredentialsProvider(true);
KinesisInternalClient kinesisClient = KinesisClientFactory.getKinesisClient(kinesis2Configuration);
assertTrue(kinesisClient instanceof KinesisClientIAMOptimizedImpl);
}
@Test
void getSessionTokenKinesisClient() {
Kinesis2Configuration kinesis2Configuration = new Kinesis2Configuration();
kinesis2Configuration.setUseSessionCredentials(true);
KinesisInternalClient kinesisClient = KinesisClientFactory.getKinesisClient(kinesis2Configuration);
assertTrue(kinesisClient instanceof KinesisClientSessionTokenImpl);
}
@Test
void getSessionTokenAsyncKinesisClient() {
Kinesis2Configuration kinesis2Configuration = new Kinesis2Configuration();
kinesis2Configuration.setUseSessionCredentials(true);
KinesisAsyncInternalClient kinesisClient = KinesisClientFactory.getKinesisAsyncClient(kinesis2Configuration);
assertTrue(kinesisClient instanceof KinesisAsyncClientSessionTokenImpl);
}
@Test
void getStandardKinesisAsyncClient() {
Kinesis2Configuration kinesis2Configuration = new Kinesis2Configuration();
kinesis2Configuration.setAsyncClient(true);
KinesisAsyncInternalClient kinesisClient = KinesisClientFactory.getKinesisAsyncClient(kinesis2Configuration);
assertTrue(kinesisClient instanceof KinesisAsyncClientStandardImpl);
}
}
| KinesisClientFactoryTest |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/diagnostics/analyzer/AotInitializerNotFoundFailureAnalyzerTests.java | {
"start": 1315,
"end": 1634
} | class ____.springframework.boot.diagnostics.analyzer.AotInitializerNotFoundFailureAnalyzerTests__ApplicationContextInitializer could not be found");
assertThat(analysis.getAction()).isEqualTo(
"""
Consider the following:
\tDid you build the application with enabled AOT processing?
\tIs the main | org |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/entityname/DuplicateEntityNameTest.java | {
"start": 1594,
"end": 1715
} | class ____ {
@Id
public String uid;
}
@Entity(name = "Purchase")
@Table(name="purchase_new")
public static | Purchase1 |
java | spring-projects__spring-security | core/src/test/java/org/springframework/security/core/SpringSecurityCoreVersionTests.java | {
"start": 1714,
"end": 5750
} | class ____ {
@Mock
private Log logger;
@Mock(answer = Answers.CALLS_REAL_METHODS)
private MockedStatic<SpringVersion> springVersion;
@Mock(answer = Answers.CALLS_REAL_METHODS)
private MockedStatic<SpringSecurityCoreVersion> springSecurityCoreVersion;
@BeforeEach
public void setup() throws Exception {
Field logger = ReflectionUtils.findField(SpringSecurityCoreVersion.class, "logger");
StaticFinalReflectionUtils.setField(logger, this.logger);
}
@AfterEach
public void cleanup() throws Exception {
System.clearProperty(getDisableChecksProperty());
Field logger = ReflectionUtils.findField(SpringSecurityCoreVersion.class, "logger");
StaticFinalReflectionUtils.setField(logger, LogFactory.getLog(SpringSecurityCoreVersion.class));
}
@Test
public void springVersionIsUpToDate() {
// Property is set by the build script
String springVersion = System.getProperty("springVersion");
assertThat(SpringSecurityCoreVersion.MIN_SPRING_VERSION).isEqualTo(springVersion);
}
@Test
@Disabled("Since 6.3. See gh-3737")
public void serialVersionMajorAndMinorVersionMatchBuildVersion() {
String version = System.getProperty("springSecurityVersion");
// Strip patch version
String serialVersion = String.valueOf(SpringSecurityCoreVersion.SERIAL_VERSION_UID).substring(0, 2);
assertThat(serialVersion.charAt(0)).isEqualTo(version.charAt(0));
assertThat(serialVersion.charAt(1)).isEqualTo(version.charAt(2));
}
// SEC-2295
@Test
public void noLoggingIfVersionsAreEqual() throws Exception {
String version = "1";
expectSpringSecurityVersionThenReturn(version);
expectSpringVersionThenReturn(version);
performChecks();
verifyNoMoreInteractions(this.logger);
}
@Test
public void noLoggingIfSpringVersionNull() throws Exception {
String version = "1";
expectSpringSecurityVersionThenReturn(version);
expectSpringVersionThenReturn(null);
performChecks();
verifyNoMoreInteractions(this.logger);
}
@Test
public void warnIfSpringVersionTooSmall() throws Exception {
expectSpringSecurityVersionThenReturn("3");
expectSpringVersionThenReturn("2");
performChecks();
verify(this.logger, times(1)).warn(any());
}
@Test
public void noWarnIfSpringVersionLarger() throws Exception {
String version = "4.0.0.RELEASE";
expectSpringSecurityVersionThenReturn(version);
expectSpringVersionThenReturn(version);
performChecks();
verify(this.logger, never()).warn(any());
}
// SEC-2697
@Test
public void noWarnIfSpringPatchVersionDoubleDigits() throws Exception {
String minSpringVersion = "3.2.8.RELEASE";
expectSpringSecurityVersionThenReturn("3.2.0.RELEASE");
expectSpringVersionThenReturn("3.2.10.RELEASE");
performChecks(minSpringVersion);
verify(this.logger, never()).warn(any());
}
@Test
public void noLoggingIfPropertySet() throws Exception {
expectSpringSecurityVersionThenReturn("3");
expectSpringVersionThenReturn("2");
System.setProperty(getDisableChecksProperty(), Boolean.TRUE.toString());
performChecks();
verifyNoMoreInteractions(this.logger);
}
private String getDisableChecksProperty() {
return SpringSecurityCoreVersion.class.getName().concat(".DISABLE_CHECKS");
}
private void performChecks() {
Method method = ReflectionUtils.findMethod(SpringSecurityCoreVersion.class, "performVersionChecks");
ReflectionUtils.makeAccessible(method);
ReflectionUtils.invokeMethod(method, null);
}
private void performChecks(String minSpringVersion) {
Method method = ReflectionUtils.findMethod(SpringSecurityCoreVersion.class, "performVersionChecks",
String.class);
ReflectionUtils.makeAccessible(method);
ReflectionUtils.invokeMethod(method, null, minSpringVersion);
}
private void expectSpringSecurityVersionThenReturn(String version) {
this.springSecurityCoreVersion.when(SpringSecurityCoreVersion::getVersion).thenReturn(version);
}
private void expectSpringVersionThenReturn(String version) {
this.springVersion.when(SpringVersion::getVersion).thenReturn(version);
}
}
| SpringSecurityCoreVersionTests |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/initializers/annotation/FooConfig.java | {
"start": 893,
"end": 957
} | class ____ {
@Bean
String foo() {
return "foo";
}
}
| FooConfig |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/executiongraph/failover/partitionrelease/ConsumerRegionGroupExecutionViewMaintainerTest.java | {
"start": 1341,
"end": 4619
} | class ____ {
private TestingSchedulingPipelinedRegion producerRegion;
private TestingSchedulingPipelinedRegion consumerRegion;
private ConsumerRegionGroupExecutionView consumerRegionGroupExecutionView;
private ConsumerRegionGroupExecutionViewMaintainer consumerRegionGroupExecutionViewMaintainer;
@BeforeEach
void setup() {
createProducerAndConsumer();
createConsumerRegionGroupExecutionViewMaintainer();
}
@Test
void testRegionFinished() throws Exception {
consumerRegionGroupExecutionViewMaintainer.regionFinished(consumerRegion);
assertThat(consumerRegionGroupExecutionView.isFinished()).isTrue();
}
@Test
void testRegionUnfinished() throws Exception {
consumerRegionGroupExecutionViewMaintainer.regionFinished(consumerRegion);
consumerRegionGroupExecutionViewMaintainer.regionUnfinished(consumerRegion);
assertThat(consumerRegionGroupExecutionView.isFinished()).isFalse();
}
@Test
void testRegionFinishedMultipleTimes() throws Exception {
consumerRegionGroupExecutionViewMaintainer.regionFinished(consumerRegion);
consumerRegionGroupExecutionViewMaintainer.regionFinished(consumerRegion);
assertThat(consumerRegionGroupExecutionView.isFinished()).isTrue();
}
@Test
void testRegionUnfinishedMultipleTimes() throws Exception {
consumerRegionGroupExecutionViewMaintainer.regionUnfinished(consumerRegion);
consumerRegionGroupExecutionViewMaintainer.regionUnfinished(consumerRegion);
assertThat(consumerRegionGroupExecutionView.isFinished()).isFalse();
consumerRegionGroupExecutionViewMaintainer.regionFinished(consumerRegion);
assertThat(consumerRegionGroupExecutionView.isFinished()).isTrue();
}
@Test
void testFinishWrongRegion() {
consumerRegionGroupExecutionViewMaintainer.regionFinished(producerRegion);
assertThat(consumerRegionGroupExecutionView.isFinished()).isFalse();
}
@Test
void testUnfinishedWrongRegion() {
consumerRegionGroupExecutionViewMaintainer.regionUnfinished(producerRegion);
assertThat(consumerRegionGroupExecutionView.isFinished()).isFalse();
}
private void createProducerAndConsumer() {
TestingSchedulingExecutionVertex producer =
TestingSchedulingExecutionVertex.newBuilder().build();
TestingSchedulingExecutionVertex consumer =
TestingSchedulingExecutionVertex.newBuilder().build();
producerRegion = new TestingSchedulingPipelinedRegion(Collections.singleton(producer));
consumerRegion = new TestingSchedulingPipelinedRegion(Collections.singleton(consumer));
}
private void createConsumerRegionGroupExecutionViewMaintainer() {
consumerRegionGroupExecutionView = new ConsumerRegionGroupExecutionView();
consumerRegionGroupExecutionView.add(consumerRegion);
consumerRegionGroupExecutionViewMaintainer =
new ConsumerRegionGroupExecutionViewMaintainer();
consumerRegionGroupExecutionViewMaintainer.notifyNewRegionGroupExecutionViews(
Collections.singletonList(consumerRegionGroupExecutionView));
}
}
| ConsumerRegionGroupExecutionViewMaintainerTest |
java | quarkusio__quarkus | integration-tests/kafka-snappy/src/main/java/io/quarkus/it/kafka/codecs/PetCodec.java | {
"start": 226,
"end": 937
} | class ____ implements Serializer<Pet>, Deserializer<Pet> {
@Override
public Pet deserialize(String topic, byte[] bytes) {
String value = new String(bytes, StandardCharsets.UTF_8);
String[] segments = value.split("_");
Pet pet = new Pet();
pet.setKind(segments[0]);
pet.setName(segments[1]);
return pet;
}
@Override
public void configure(Map<String, ?> configs, boolean isKey) {
// no config
}
@Override
public byte[] serialize(String topic, Pet pet) {
return (pet.getKind() + "_" + pet.getName()).getBytes(StandardCharsets.UTF_8);
}
@Override
public void close() {
// do nothing.
}
}
| PetCodec |
java | apache__camel | core/camel-main/src/main/java/org/apache/camel/main/Otel2ConfigurationProperties.java | {
"start": 3518,
"end": 4910
} | class ____. Must not be null.
*/
public Otel2ConfigurationProperties withInstrumentationName(String instrumentationName) {
this.instrumentationName = instrumentationName;
return this;
}
/**
* To enable OpenTelemetry
*/
public Otel2ConfigurationProperties withEnabled(boolean enabled) {
this.enabled = enabled;
return this;
}
/**
* Sets whether the header keys need to be encoded (connector specific) or not. The value is a boolean. Dashes need
* for instances to be encoded for JMS property keys.
*/
public Otel2ConfigurationProperties withEncoding(boolean encoding) {
this.encoding = encoding;
return this;
}
/**
* Adds an exclude pattern that will disable tracing for Camel messages that matches the pattern. Multiple patterns
* can be separated by comma.
*/
public Otel2ConfigurationProperties withExcludePatterns(String excludePatterns) {
this.excludePatterns = excludePatterns;
return this;
}
/**
* Setting this to true will create new OpenTelemetry Spans for each Camel Processors. Use the excludePattern
* property to filter out Processors.
*/
public Otel2ConfigurationProperties withTraceProcessors(boolean traceProcessors) {
this.traceProcessors = traceProcessors;
return this;
}
}
| name |
java | elastic__elasticsearch | x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/plan/logical/Limit.java | {
"start": 491,
"end": 1552
} | class ____ extends UnaryPlan {
private final Expression limit;
public Limit(Source source, Expression limit, LogicalPlan child) {
super(source, child);
this.limit = limit;
}
@Override
protected NodeInfo<Limit> info() {
return NodeInfo.create(this, Limit::new, limit, child());
}
@Override
public Limit replaceChild(LogicalPlan newChild) {
return new Limit(source(), limit, newChild);
}
public Expression limit() {
return limit;
}
@Override
public boolean expressionsResolved() {
return limit.resolved();
}
@Override
public int hashCode() {
return Objects.hash(limit, child());
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
Limit other = (Limit) obj;
return Objects.equals(limit, other.limit) && Objects.equals(child(), other.child());
}
}
| Limit |
java | elastic__elasticsearch | modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureStorageCleanupThirdPartyTests.java | {
"start": 3450,
"end": 12530
} | class ____ extends AzureRepositoryPlugin {
public TestAzureRepositoryPlugin(Settings settings) {
super(settings);
}
@Override
AzureStorageService createAzureStorageService(
Settings settings,
AzureClientProvider azureClientProvider,
ClusterService clusterService,
ProjectResolver projectResolver
) {
final long blockSize = ByteSizeValue.ofKb(64L).getBytes() * randomIntBetween(1, 15);
return new AzureStorageService(settings, azureClientProvider, clusterService, projectResolver) {
@Override
long getUploadBlockSize() {
return blockSize;
}
};
}
}
@ClassRule
public static AzureHttpFixture fixture = new AzureHttpFixture(
USE_FIXTURE ? AzureHttpFixture.Protocol.HTTP : AzureHttpFixture.Protocol.NONE,
AZURE_ACCOUNT,
System.getProperty("test.azure.container"),
System.getProperty("test.azure.tenant_id"),
System.getProperty("test.azure.client_id"),
AzureHttpFixture.sharedKeyForAccountPredicate(AZURE_ACCOUNT),
MockAzureBlobStore.LeaseExpiryPredicate.NEVER_EXPIRE
);
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return pluginList(TestAzureRepositoryPlugin.class);
}
@Override
protected Settings nodeSettings() {
if (USE_FIXTURE) {
final String endpoint = "ignored;DefaultEndpointsProtocol=http;BlobEndpoint=" + fixture.getAddress();
return Settings.builder().put(super.nodeSettings()).put("azure.client.default.endpoint_suffix", endpoint).build();
}
return super.nodeSettings();
}
@Override
protected SecureSettings credentials() {
assertThat(System.getProperty("test.azure.account"), not(blankOrNullString()));
final boolean hasSasToken = Strings.hasText(System.getProperty("test.azure.sas_token"));
if (hasSasToken == false) {
assertThat(System.getProperty("test.azure.key"), not(blankOrNullString()));
} else {
assertThat(System.getProperty("test.azure.key"), blankOrNullString());
}
assertThat(System.getProperty("test.azure.container"), not(blankOrNullString()));
assertThat(System.getProperty("test.azure.base"), not(blankOrNullString()));
MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString("azure.client.default.account", System.getProperty("test.azure.account"));
if (hasSasToken) {
logger.info("--> Using SAS token authentication");
secureSettings.setString("azure.client.default.sas_token", System.getProperty("test.azure.sas_token"));
} else {
logger.info("--> Using key authentication");
secureSettings.setString("azure.client.default.key", System.getProperty("test.azure.key"));
}
return secureSettings;
}
@Override
protected void createRepository(String repoName) {
AcknowledgedResponse putRepositoryResponse = clusterAdmin().preparePutRepository(
TEST_REQUEST_TIMEOUT,
TEST_REQUEST_TIMEOUT,
repoName
)
.setType("azure")
.setSettings(
Settings.builder()
.put("container", System.getProperty("test.azure.container"))
.put("base_path", System.getProperty("test.azure.base") + randomAlphaOfLength(8))
.put("max_single_part_upload_size", ByteSizeValue.of(1, ByteSizeUnit.MB))
)
.get();
assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));
if (Strings.hasText(System.getProperty("test.azure.sas_token"))) {
ensureSasTokenPermissions();
}
}
private void ensureSasTokenPermissions() {
final BlobStoreRepository repository = getRepository();
final PlainActionFuture<Void> future = new PlainActionFuture<>();
repository.threadPool().generic().execute(ActionRunnable.wrap(future, l -> {
final AzureBlobStore blobStore = (AzureBlobStore) repository.blobStore();
final AzureBlobServiceClient azureBlobServiceClient = blobStore.getService()
.client(ProjectId.DEFAULT, "default", LocationMode.PRIMARY_ONLY, randomFrom(OperationPurpose.values()));
final BlobServiceClient client = azureBlobServiceClient.getSyncClient();
try {
final BlobContainerClient blobContainer = client.getBlobContainerClient(blobStore.toString());
blobContainer.exists();
future.onFailure(
new RuntimeException(
"The SAS token used in this test allowed for checking container existence. This test only supports tokens "
+ "that grant only the documented permission requirements for the Azure repository plugin."
)
);
} catch (BlobStorageException e) {
if (e.getStatusCode() == HttpURLConnection.HTTP_FORBIDDEN) {
future.onResponse(null);
} else {
future.onFailure(e);
}
}
}));
future.actionGet();
}
public void testMultiBlockUpload() throws Exception {
final BlobStoreRepository repo = getRepository();
assertThat(
asInstanceOf(AzureBlobStore.class, repo.blobStore()).getLargeBlobThresholdInBytes(),
equalTo(ByteSizeUnit.MB.toBytes(1L))
);
assertThat(asInstanceOf(AzureBlobStore.class, repo.blobStore()).getUploadBlockSize(), lessThan(ByteSizeUnit.MB.toBytes(1L)));
// The configured threshold for this test suite is 1mb
final long blobSize = randomLongBetween(ByteSizeUnit.MB.toBytes(2), ByteSizeUnit.MB.toBytes(4));
final int bufferSize = 8192;
final var file = createTempFile();
final long expectedChecksum;
try (var output = new CheckedOutputStream(new BufferedOutputStream(Files.newOutputStream(file)), new CRC32())) {
long remaining = blobSize;
while (remaining > 0L) {
final var buffer = randomByteArrayOfLength(Math.toIntExact(Math.min(bufferSize, remaining)));
output.write(buffer);
remaining -= buffer.length;
}
output.flush();
expectedChecksum = output.getChecksum().getValue();
}
PlainActionFuture<Void> future = new PlainActionFuture<>();
repo.threadPool().generic().execute(ActionRunnable.run(future, () -> {
final BlobContainer blobContainer = repo.blobStore().blobContainer(repo.basePath().add("large_write"));
try {
final var blobName = UUIDs.base64UUID();
if (randomBoolean()) {
try (var input = new BufferedInputStream(Files.newInputStream(file))) {
blobContainer.writeBlob(randomPurpose(), blobName, input, blobSize, false);
}
} else {
assertThat(blobContainer.supportsConcurrentMultipartUploads(), equalTo(true));
blobContainer.writeBlobAtomic(randomPurpose(), blobName, blobSize, (offset, length) -> {
var channel = Files.newByteChannel(file);
if (offset > 0L) {
if (channel.size() <= offset) {
throw new AssertionError();
}
channel.position(offset);
}
assert channel.position() == offset;
return new BufferedInputStream(limitStream(Channels.newInputStream(channel), length));
}, false);
}
long bytesCount = 0L;
try (var input = new CheckedInputStream(blobContainer.readBlob(OperationPurpose.INDICES, blobName), new CRC32())) {
var buffer = new byte[bufferSize];
int bytesRead;
while ((bytesRead = input.read(buffer)) != -1) {
bytesCount += bytesRead;
}
assertThat(bytesCount, equalTo(blobSize));
assertThat(input.getChecksum().getValue(), equalTo(expectedChecksum));
}
} finally {
blobContainer.delete(randomPurpose());
}
}));
future.get();
}
public void testReadFromPositionLargerThanBlobLength() {
testReadFromPositionLargerThanBlobLength(
e -> asInstanceOf(BlobStorageException.class, ExceptionsHelper.unwrap(e, HttpResponseException.class))
.getStatusCode() == RestStatus.REQUESTED_RANGE_NOT_SATISFIED.getStatus()
);
}
}
| TestAzureRepositoryPlugin |
java | spring-projects__spring-security | oauth2/oauth2-authorization-server/src/main/java/org/springframework/security/oauth2/server/authorization/jackson/JsonNodeUtils.java | {
"start": 983,
"end": 2054
} | class ____ {
static final TypeReference<Set<String>> STRING_SET = new TypeReference<>() {
};
static final TypeReference<Map<String, Object>> STRING_OBJECT_MAP = new TypeReference<>() {
};
static String findStringValue(JsonNode jsonNode, String fieldName) {
if (jsonNode == null) {
return null;
}
JsonNode value = jsonNode.findValue(fieldName);
return (value != null && value.isString()) ? value.stringValue() : null;
}
static <T> T findValue(JsonNode jsonNode, String fieldName, TypeReference<T> valueTypeReference,
DeserializationContext context) {
if (jsonNode == null) {
return null;
}
JsonNode value = jsonNode.findValue(fieldName);
return (value != null && value.isContainer())
? context.readTreeAsValue(value, context.getTypeFactory().constructType(valueTypeReference)) : null;
}
static JsonNode findObjectNode(JsonNode jsonNode, String fieldName) {
if (jsonNode == null) {
return null;
}
JsonNode value = jsonNode.findValue(fieldName);
return (value != null && value.isObject()) ? value : null;
}
}
| JsonNodeUtils |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/collection/forged/ErroneousNonMappableMapSource.java | {
"start": 228,
"end": 515
} | class ____ {
private Map<Foo, Foo> nonMappableMap;
public Map<Foo, Foo> getNonMappableMap() {
return nonMappableMap;
}
public void setNonMappableMap(Map<Foo, Foo> nonMappableMap) {
this.nonMappableMap = nonMappableMap;
}
}
| ErroneousNonMappableMapSource |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchAction.java | {
"start": 530,
"end": 821
} | class ____ extends ActionType<DeleteWatchResponse> {
public static final DeleteWatchAction INSTANCE = new DeleteWatchAction();
public static final String NAME = "cluster:admin/xpack/watcher/watch/delete";
private DeleteWatchAction() {
super(NAME);
}
}
| DeleteWatchAction |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/CannotMockFinalClassTest.java | {
"start": 1576,
"end": 2301
} | class ____ {}
// BUG: Diagnostic contains: Mockito cannot mock
@Mock FinalClass impossible;
public void method() {
// BUG: Diagnostic contains: Mockito cannot mock
FinalClass local = Mockito.mock(FinalClass.class);
}
}\
""")
.doTest();
}
@Test
public void positiveCase_record() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.Mock;
import org.mockito.Mockito;
@RunWith(JUnit4.class)
public | FinalClass |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/async/AsyncEndpointCustomRoutePolicyTest.java | {
"start": 1417,
"end": 1670
} | class ____ extends ContextTestSupport {
private static String beforeThreadName;
private static String afterThreadName;
private final MyCustomRoutePolicy policy = new MyCustomRoutePolicy();
private static | AsyncEndpointCustomRoutePolicyTest |
java | micronaut-projects__micronaut-core | aop/src/main/java/io/micronaut/aop/Introduction.java | {
"start": 1790,
"end": 1965
} | interface ____ {
/**
* Additional interfaces that the introduction advice should implement. Note that if introduction advise is applied
* to a concrete | Introduction |
java | netty__netty | handler/src/main/java/io/netty/handler/ssl/AsyncRunnable.java | {
"start": 667,
"end": 755
} | interface ____ extends Runnable {
void run(Runnable completionCallback);
}
| AsyncRunnable |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/single/SingleObserveOn.java | {
"start": 884,
"end": 1339
} | class ____<T> extends Single<T> {
final SingleSource<T> source;
final Scheduler scheduler;
public SingleObserveOn(SingleSource<T> source, Scheduler scheduler) {
this.source = source;
this.scheduler = scheduler;
}
@Override
protected void subscribeActual(final SingleObserver<? super T> observer) {
source.subscribe(new ObserveOnSingleObserver<>(observer, scheduler));
}
static final | SingleObserveOn |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java | {
"start": 86321,
"end": 89778
} | class ____ extends ParserRuleContext {
public Token qualifier;
public FieldNamePatternContext name;
public List<TerminalNode> OPENING_BRACKET() { return getTokens(EsqlBaseParser.OPENING_BRACKET); }
public TerminalNode OPENING_BRACKET(int i) {
return getToken(EsqlBaseParser.OPENING_BRACKET, i);
}
public List<TerminalNode> CLOSING_BRACKET() { return getTokens(EsqlBaseParser.CLOSING_BRACKET); }
public TerminalNode CLOSING_BRACKET(int i) {
return getToken(EsqlBaseParser.CLOSING_BRACKET, i);
}
public TerminalNode DOT() { return getToken(EsqlBaseParser.DOT, 0); }
public FieldNamePatternContext fieldNamePattern() {
return getRuleContext(FieldNamePatternContext.class,0);
}
public TerminalNode ID_PATTERN() { return getToken(EsqlBaseParser.ID_PATTERN, 0); }
@SuppressWarnings("this-escape")
public QualifiedNamePatternContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_qualifiedNamePattern; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterQualifiedNamePattern(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitQualifiedNamePattern(this);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor<? extends T>)visitor).visitQualifiedNamePattern(this);
else return visitor.visitChildren(this);
}
}
public final QualifiedNamePatternContext qualifiedNamePattern() throws RecognitionException {
QualifiedNamePatternContext _localctx = new QualifiedNamePatternContext(_ctx, getState());
enterRule(_localctx, 58, RULE_qualifiedNamePattern);
int _la;
try {
setState(410);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,22,_ctx) ) {
case 1:
enterOuterAlt(_localctx, 1);
{
setState(398);
if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()");
setState(399);
match(OPENING_BRACKET);
setState(401);
_errHandler.sync(this);
_la = _input.LA(1);
if (_la==ID_PATTERN) {
{
setState(400);
((QualifiedNamePatternContext)_localctx).qualifier = match(ID_PATTERN);
}
}
setState(403);
match(CLOSING_BRACKET);
setState(404);
match(DOT);
setState(405);
match(OPENING_BRACKET);
setState(406);
((QualifiedNamePatternContext)_localctx).name = fieldNamePattern();
setState(407);
match(CLOSING_BRACKET);
}
break;
case 2:
enterOuterAlt(_localctx, 2);
{
setState(409);
((QualifiedNamePatternContext)_localctx).name = fieldNamePattern();
}
break;
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
@SuppressWarnings("CheckReturnValue")
public static | QualifiedNamePatternContext |
java | junit-team__junit5 | junit-platform-commons/src/main/java/org/junit/platform/commons/support/AnnotationSupport.java | {
"start": 6426,
"end": 6584
} | class ____ hierarchy).
*
* <p>If the annotation still has not been found, this method will optionally
* search recursively through the enclosing | inheritance |
java | quarkusio__quarkus | extensions/devui/deployment/src/main/java/io/quarkus/devui/deployment/BuildTimeContentProcessor.java | {
"start": 3688,
"end": 71798
} | class ____ {
private static final Logger log = Logger.getLogger(BuildTimeContentProcessor.class);
private static final String UNDERSCORE = "_";
private static final String SLASH = "/";
private static final String BUILD_TIME_PATH = "dev-ui-templates/build-time";
private static final String ES_MODULE_SHIMS = "es-module-shims";
private static final String FLAG_ICONS = "flag-icons";
final Config config = ConfigProvider.getConfig();
/**
 * Creates import-map entries for the Dev UI's own internal javascript assets so that they can be
 * imported by reference (e.g. {@code import 'qwc-no-data';}) rather than by full path.
 * The produced {@link InternalImportMapBuildItem} is merged into the final import map used by index.html.
 */
@BuildStep(onlyIf = IsLocalDevelopment.class)
void createKnownInternalImportMap(BuildProducer<InternalImportMapBuildItem> internalImportMapProducer,
        BuildProducer<DevContextBuildItem> devContextProducer,
        NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem,
        DevUIConfig config) {
    // Optional custom context root for the Dev UI; blank means the default root.
    String devUIContext = config.contextRoot().orElse("");
    if (!devUIContext.isBlank())
        devContextProducer.produce(new DevContextBuildItem(devUIContext));
    // All internal assets are served below <devUIContext><nonApplicationRoot>/dev-ui/
    String contextRoot = devUIContext + nonApplicationRootPathBuildItem.getNonApplicationRootPath()
            + Constants.DEV_UI + SLASH;
    InternalImportMapBuildItem internalImportMapBuildItem = new InternalImportMapBuildItem();
    internalImportMapBuildItem.add("devui/", contextRoot);
    // Quarkus Web Components
    internalImportMapBuildItem.add("qwc/", contextRoot + "qwc/");
    internalImportMapBuildItem.add("qwc-no-data", contextRoot + "qwc/qwc-no-data.js");
    internalImportMapBuildItem.add("qwc-hot-reload-element", contextRoot + "qwc/qwc-hot-reload-element.js");
    internalImportMapBuildItem.add("qwc-abstract-log-element", contextRoot + "qwc/qwc-abstract-log-element.js");
    internalImportMapBuildItem.add("qwc-server-log", contextRoot + "qwc/qwc-server-log.js");
    internalImportMapBuildItem.add("qwc-footer-log", contextRoot + "qwc/qwc-footer-log.js");
    internalImportMapBuildItem.add("qwc-extension-link", contextRoot + "qwc/qwc-extension-link.js");
    // Quarkus UI elements
    internalImportMapBuildItem.add("qui-ide-link", contextRoot + "qui/qui-ide-link.js");
    internalImportMapBuildItem.add("qui-themed-code-block", contextRoot + "qui/qui-themed-code-block.js");
    internalImportMapBuildItem.add("qui-assistant-warning", contextRoot + "qui/qui-assistant-warning.js");
    internalImportMapBuildItem.add("qui-assistant-button", contextRoot + "qui/qui-assistant-button.js");
    // Echarts chart wrappers
    internalImportMapBuildItem.add("echarts/", contextRoot + "echarts/");
    internalImportMapBuildItem.add("echarts-gauge-grade", contextRoot + "echarts/echarts-gauge-grade.js");
    internalImportMapBuildItem.add("echarts-pie", contextRoot + "echarts/echarts-pie.js");
    internalImportMapBuildItem.add("echarts-horizontal-stacked-bar",
            contextRoot + "echarts/echarts-horizontal-stacked-bar.js");
    internalImportMapBuildItem.add("echarts-force-graph",
            contextRoot + "echarts/echarts-force-graph.js");
    internalImportMapBuildItem.add("echarts-bar-stack",
            contextRoot + "echarts/echarts-bar-stack.js");
    // Other assets (icons etc.)
    internalImportMapBuildItem.add("icon/", contextRoot + "icon/");
    // Client-side controllers
    internalImportMapBuildItem.add("controller/", contextRoot + "controller/");
    internalImportMapBuildItem.add("log-controller", contextRoot + "controller/log-controller.js");
    internalImportMapBuildItem.add("storage-controller", contextRoot + "controller/storage-controller.js");
    internalImportMapBuildItem.add("router-controller", contextRoot + "controller/router-controller.js");
    internalImportMapBuildItem.add("notifier", contextRoot + "controller/notifier.js");
    internalImportMapBuildItem.add("jsonrpc", contextRoot + "controller/jsonrpc.js");
    // Shared client-side state
    internalImportMapBuildItem.add("state/", contextRoot + "state/");
    internalImportMapBuildItem.add("theme-state", contextRoot + "state/theme-state.js");
    internalImportMapBuildItem.add("connection-state", contextRoot + "state/connection-state.js");
    internalImportMapBuildItem.add("assistant-state", contextRoot + "state/assistant-state.js");
    internalImportMapBuildItem.add("devui-state", contextRoot + "state/devui-state.js");
    // Internationalization
    internalImportMapBuildItem.add("i18n/", contextRoot + "i18n/");
    internalImportMapBuildItem.add("localization", contextRoot + "i18n/localization.js");
    internalImportMapProducer.produce(internalImportMapBuildItem);
}
/**
 * Maps legacy import specifiers onto their current module names so that extensions
 * written against the old names keep resolving (backward compatibility).
 */
@BuildStep(onlyIf = IsLocalDevelopment.class)
RelocationImportMapBuildItem createRelocationMap() {
    // { old specifier, new specifier } pairs.
    String[][] legacyMappings = {
            { "@quarkus-webcomponents/codeblock/", "@qomponent/qui-code-block/" },
            { "@quarkus-webcomponents/codeblock", "@qomponent/qui-code-block" },
            { "qui-badge", "@qomponent/qui-badge" },
            { "qui/qui-badge.js", "@qomponent/qui-badge" },
            { "qui-alert", "@qomponent/qui-alert" },
            { "qui/qui-alert.js", "@qomponent/qui-alert" },
            { "qui-card", "@qomponent/qui-card" },
            { "qui/qui-card.js", "@qomponent/qui-card" }
    };
    RelocationImportMapBuildItem relocations = new RelocationImportMapBuildItem();
    for (String[] mapping : legacyMappings) {
        relocations.add(mapping[0], mapping[1]);
    }
    return relocations;
}
/**
 * Publishes the build-time data declared by every extension page (cards, menus,
 * footers, settings and unlisted pages) as {@link BuildTimeConstBuildItem}s,
 * one per extension namespace that actually has data.
 */
@BuildStep(onlyIf = IsLocalDevelopment.class)
void mapPageBuildTimeData(List<CardPageBuildItem> cards,
        List<MenuPageBuildItem> menus,
        List<FooterPageBuildItem> footers,
        List<SettingPageBuildItem> settings,
        List<UnlistedPageBuildItem> unlisteds,
        CurateOutcomeBuildItem curateOutcomeBuildItem,
        BuildProducer<BuildTimeConstBuildItem> buildTimeConstProducer) {
    // Cards are special: a custom card additionally exposes its pages as data.
    for (CardPageBuildItem card : cards) {
        produceIfNotEmpty(buildTimeConstProducer, card.getExtensionPathName(curateOutcomeBuildItem),
                getBuildTimeDataForCard(curateOutcomeBuildItem, card));
    }
    for (MenuPageBuildItem menu : menus) {
        produceIfNotEmpty(buildTimeConstProducer, menu.getExtensionPathName(curateOutcomeBuildItem),
                getBuildTimeDataForPage(menu));
    }
    for (FooterPageBuildItem footer : footers) {
        produceIfNotEmpty(buildTimeConstProducer, footer.getExtensionPathName(curateOutcomeBuildItem),
                getBuildTimeDataForPage(footer));
    }
    for (SettingPageBuildItem setting : settings) {
        produceIfNotEmpty(buildTimeConstProducer, setting.getExtensionPathName(curateOutcomeBuildItem),
                getBuildTimeDataForPage(setting));
    }
    for (UnlistedPageBuildItem unlisted : unlisteds) {
        produceIfNotEmpty(buildTimeConstProducer, unlisted.getExtensionPathName(curateOutcomeBuildItem),
                getBuildTimeDataForPage(unlisted));
    }
}

// Produces a BuildTimeConstBuildItem for the namespace, but only when there is data to publish.
private void produceIfNotEmpty(BuildProducer<BuildTimeConstBuildItem> producer, String extensionPathName,
        Map<String, BuildTimeData> buildTimeData) {
    if (!buildTimeData.isEmpty()) {
        producer.produce(new BuildTimeConstBuildItem(extensionPathName, buildTimeData));
    }
}
/**
 * Registers all deployment-side JSON-RPC methods and subscriptions contributed by extensions.
 * <p>
 * Every method name is namespaced as {@code <extensionPathName>_<methodName>}. Runtime-invokable
 * actions are registered with {@link DevConsoleManager} under that full name; recorded values
 * are only renamed and collected.
 *
 * @return a build item holding all discovered methods, subscriptions and their recorded variants
 */
@BuildStep(onlyIf = IsLocalDevelopment.class)
DeploymentMethodBuildItem mapDeploymentMethods(
        List<BuildTimeActionBuildItem> buildTimeActions,
        CurateOutcomeBuildItem curateOutcomeBuildItem,
        Capabilities capabilities) {
    // NOTE(review): a previous revision computed capabilities.isPresent(Capability.ASSISTANT)
    // into an unused local (dead store, removed here). If assistant availability is meant to
    // gate the registration of assistant actions below, that check must be wired in - confirm.
    Map<String, DeploymentJsonRpcMethod> methods = new HashMap<>();
    Map<String, DeploymentJsonRpcMethod> subscriptions = new HashMap<>();
    Map<String, RecordedJsonRpcMethod> recordedMethods = new HashMap<>();
    Map<String, RecordedJsonRpcMethod> recordedSubscriptions = new HashMap<>();
    for (BuildTimeActionBuildItem actions : buildTimeActions) {
        String extensionPathName = actions.getExtensionPathName(curateOutcomeBuildItem);
        // Build-time methods: register the runnable action (plain or assistant) and rename.
        for (DeploymentJsonRpcMethod deploymentJsonRpcMethod : actions.getDeploymentActions()) {
            String fullName = extensionPathName + UNDERSCORE + deploymentJsonRpcMethod.getMethodName();
            if (deploymentJsonRpcMethod.hasAction()) {
                DevConsoleManager.register(fullName, deploymentJsonRpcMethod.getAction());
            } else if (deploymentJsonRpcMethod.hasAssistantAction()) {
                DevConsoleManager.register(fullName, deploymentJsonRpcMethod.getAssistantAction());
            }
            deploymentJsonRpcMethod.setMethodName(fullName);
            methods.put(fullName, deploymentJsonRpcMethod);
        }
        // Build-time recorded values: no runnable action, just namespaced and collected.
        for (RecordedJsonRpcMethod recordedJsonRpcMethod : actions.getRecordedActions()) {
            String fullName = extensionPathName + UNDERSCORE + recordedJsonRpcMethod.getMethodName();
            recordedJsonRpcMethod.setMethodName(fullName);
            recordedMethods.put(fullName, recordedJsonRpcMethod);
        }
        // Build-time subscriptions: same registration scheme as methods.
        for (DeploymentJsonRpcMethod deploymentJsonRpcSubscription : actions.getDeploymentSubscriptions()) {
            String fullName = extensionPathName + UNDERSCORE + deploymentJsonRpcSubscription.getMethodName();
            if (deploymentJsonRpcSubscription.hasAction()) {
                DevConsoleManager.register(fullName, deploymentJsonRpcSubscription.getAction());
            } else if (deploymentJsonRpcSubscription.hasAssistantAction()) {
                DevConsoleManager.register(fullName, deploymentJsonRpcSubscription.getAssistantAction());
            }
            deploymentJsonRpcSubscription.setMethodName(fullName);
            subscriptions.put(fullName, deploymentJsonRpcSubscription);
        }
        // Build-time recorded subscriptions.
        for (RecordedJsonRpcMethod recordedJsonRpcSubscription : actions.getRecordedSubscriptions()) {
            String fullName = extensionPathName + UNDERSCORE + recordedJsonRpcSubscription.getMethodName();
            recordedJsonRpcSubscription.setMethodName(fullName);
            recordedSubscriptions.put(fullName, recordedJsonRpcSubscription);
        }
    }
    return new DeploymentMethodBuildItem(methods, subscriptions, recordedMethods, recordedSubscriptions);
}
// Returns a mutable copy of the page's build-time data, or an empty map when it has none.
private Map<String, BuildTimeData> getBuildTimeDataForPage(AbstractPageBuildItem pageBuildItem) {
    return pageBuildItem.hasBuildTimeData()
            ? new HashMap<>(pageBuildItem.getBuildTimeData())
            : new HashMap<>();
}
// Returns the card's build-time data; when the extension supplies a custom card,
// the extension's pages are additionally exposed under the "pages" key so the
// custom card can render them itself.
private Map<String, BuildTimeData> getBuildTimeDataForCard(CurateOutcomeBuildItem curateOutcomeBuildItem,
        CardPageBuildItem pageBuildItem) {
    Map<String, BuildTimeData> data = getBuildTimeDataForPage(pageBuildItem);
    if (pageBuildItem.getOptionalCard().isEmpty()) {
        return data;
    }
    String path = pageBuildItem.getExtensionPathName(curateOutcomeBuildItem);
    List<Page> pages = new ArrayList<>();
    for (PageBuilder pageBuilder : pageBuildItem.getPages()) {
        pageBuilder.namespace(path);
        pageBuilder.extension(path);
        pages.add(pageBuilder.build());
    }
    data.put("pages", new BuildTimeData(pages));
    return data;
}
/**
 * Here we find all build time data and make them available via a const.
 *
 * js components can import the const with "import {constName} from '{ext}-data';"
 *
 * Each extension with data gets a generated {ext}-data.js file (rendered from the
 * build-time-data.js Qute template) plus an import-map entry pointing to it.
 */
@BuildStep(onlyIf = IsLocalDevelopment.class)
void createBuildTimeConstJsTemplate(DevUIConfig config,
        CurateOutcomeBuildItem curateOutcomeBuildItem,
        NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem,
        List<BuildTimeConstBuildItem> buildTimeConstBuildItems,
        BuildProducer<QuteTemplateBuildItem> quteTemplateProducer,
        BuildProducer<InternalImportMapBuildItem> internalImportMapProducer) {
    String contextRoot = config.contextRoot().orElse("") + nonApplicationRootPathBuildItem.getNonApplicationRootPath()
            + Constants.DEV_UI + SLASH;
    QuteTemplateBuildItem quteTemplateBuildItem = new QuteTemplateBuildItem(
            QuteTemplateBuildItem.DEV_UI);
    InternalImportMapBuildItem internalImportMapBuildItem = new InternalImportMapBuildItem();
    var mapper = DatabindCodec.mapper().writerWithDefaultPrettyPrinter();
    // NOTE(review): these three maps are declared outside the extension loop, so each
    // extension's template also receives the (namespaced) entries of all previously
    // processed extensions - confirm whether this accumulation is intentional.
    Map<String, String> descriptions = new HashMap<>();
    Map<String, String> mcpDefaultEnabled = new HashMap<>();
    Map<String, String> contentTypes = new HashMap<>();
    for (BuildTimeConstBuildItem buildTimeConstBuildItem : buildTimeConstBuildItems) {
        Map<String, Object> data = new HashMap<>();
        if (buildTimeConstBuildItem.hasBuildTimeData()) {
            for (Map.Entry<String, BuildTimeData> pageData : buildTimeConstBuildItem.getBuildTimeData().entrySet()) {
                try {
                    // Keys are namespaced as <extensionPathName>_<key>.
                    String ns = buildTimeConstBuildItem.getExtensionPathName(curateOutcomeBuildItem);
                    String key = pageData.getKey();
                    // Values are serialized to (pretty-printed) JSON for inclusion in the js file.
                    String value = mapper.writeValueAsString(pageData.getValue().getContent());
                    String fullName = ns + UNDERSCORE + key;
                    String description = pageData.getValue().getDescription();
                    if (description != null) {
                        descriptions.put(fullName, description);
                    }
                    boolean isEnabledByDefault = pageData.getValue().isMcpEnabledByDefault();
                    mcpDefaultEnabled.put(fullName, String.valueOf(isEnabledByDefault));
                    String contentType = pageData.getValue().getContentType();
                    if (contentType != null) {
                        contentTypes.put(fullName, contentType);
                    }
                    data.put(key, value);
                } catch (JsonProcessingException ex) {
                    // Skip this entry but keep processing the rest of the page data.
                    log.error("Could not create Json Data for Dev UI page", ex);
                }
            }
        }
        if (!data.isEmpty()) {
            Map<String, Object> qutedata = new HashMap<>();
            qutedata.put("buildTimeData", data);
            // Generated file is importable as "<extensionPathName>-data".
            String ref = buildTimeConstBuildItem.getExtensionPathName(curateOutcomeBuildItem) + "-data";
            String file = ref + ".js";
            quteTemplateBuildItem.add("build-time-data.js", file, qutedata, descriptions, mcpDefaultEnabled, contentTypes);
            internalImportMapBuildItem.add(ref, contextRoot + file);
        }
    }
    quteTemplateProducer.produce(quteTemplateBuildItem);
    internalImportMapProducer.produce(internalImportMapBuildItem);
}
/**
 * Locates all jars (typically from mvnpm.org) that ship a Dev UI import map,
 * identified by the presence of a resource at the well-known import-map path.
 */
@BuildStep(onlyIf = IsLocalDevelopment.class)
void gatherMvnpmJars(BuildProducer<MvnpmBuildItem> mvnpmProducer, CurateOutcomeBuildItem curateOutcomeBuildItem) {
    ClassLoader tccl = Thread.currentThread().getContextClassLoader();
    Set<URL> mvnpmJars = new HashSet<>();
    try {
        Enumeration<URL> importMapResources = tccl.getResources(Location.IMPORTMAP_PATH);
        while (importMapResources.hasMoreElements()) {
            // Resolve the resource URL back to the jar that contains it.
            JarURLConnection connection = (JarURLConnection) importMapResources.nextElement().openConnection();
            mvnpmJars.add(connection.getJarFileURL());
        }
        mvnpmProducer.produce(new MvnpmBuildItem(mvnpmJars));
    } catch (IOException ex) {
        throw new UncheckedIOException(ex);
    }
}
/**
 * Here we create index.html.
 * We aggregate all import maps into one.
 * This includes import maps from 3rd party libs from mvnpm.org and internal ones defined above.
 *
 * @return The QuteTemplate Build item that will create the end result
 */
@BuildStep(onlyIf = IsLocalDevelopment.class)
QuteTemplateBuildItem createIndexHtmlTemplate(DevUIConfig config,
        MvnpmBuildItem mvnpmBuildItem,
        ThemeVarsBuildItem themeVarsBuildItem,
        NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem,
        List<InternalImportMapBuildItem> internalImportMapBuildItems,
        RelocationImportMapBuildItem relocationImportMapBuildItem) {
    QuteTemplateBuildItem quteTemplateBuildItem = new QuteTemplateBuildItem(
            QuteTemplateBuildItem.DEV_UI);
    // Merge the mvnpm jar import maps with the internal mappings collected above.
    Aggregator aggregator = new Aggregator(mvnpmBuildItem.getMvnpmJars());
    for (InternalImportMapBuildItem importMapBuildItem : internalImportMapBuildItems) {
        Map<String, String> importMap = importMapBuildItem.getImportMap();
        aggregator.addMappings(importMap);
    }
    String devUIContext = config.contextRoot().orElse("");
    Imports imports = aggregator.aggregate(devUIContext + nonApplicationRootPathBuildItem.getNonApplicationRootPath(),
            false);
    // Apply backward-compatibility relocations: the old specifier resolves to
    // wherever the new specifier already points in the aggregated map.
    Map<String, String> currentImportMap = imports.getImports();
    Map<String, String> relocationMap = relocationImportMapBuildItem.getRelocationMap();
    for (Map.Entry<String, String> relocation : relocationMap.entrySet()) {
        String from = relocation.getKey();
        String to = relocation.getValue();
        if (currentImportMap.containsKey(to)) {
            String newTo = currentImportMap.get(to);
            currentImportMap.put(from, newTo);
        } else {
            log.warn("Could not relocate " + from + " as " + to + " does not exist in the importmap");
        }
    }
    // NOTE(review): Map.of below rejects null values, so if either artifact is missing
    // from the mvnpm jars this throws a NullPointerException - confirm both are always present.
    Map<String, String> jsVersions = extractJsVersionsFor(mvnpmBuildItem.getMvnpmJars(), ES_MODULE_SHIMS, FLAG_ICONS);
    String importmap = aggregator.aggregateAsJson(imports);
    aggregator.reset();
    String themeVars = themeVarsBuildItem.getTemplateValue();
    String nonApplicationRoot = nonApplicationRootPathBuildItem.getNonApplicationRootPath();
    String contextRoot = devUIContext + nonApplicationRoot + Constants.DEV_UI + SLASH;
    // Template model for index.html.
    Map<String, Object> data = Map.of(
            "nonApplicationRoot", nonApplicationRoot,
            "contextRoot", contextRoot,
            "importmap", importmap,
            "themeVars", themeVars,
            "esModuleShimsVersion", jsVersions.get(ES_MODULE_SHIMS),
            "flagsVersion", jsVersions.get(FLAG_ICONS));
    quteTemplateBuildItem.add("index.html", data);
    return quteTemplateBuildItem;
}
// Renders every registered Qute template with its data and publishes the results
// as static Dev UI content, one StaticContentBuildItem per template build item.
@BuildStep(onlyIf = IsLocalDevelopment.class)
void loadAllBuildTimeTemplates(BuildProducer<StaticContentBuildItem> buildTimeContentProducer,
        List<QuteTemplateBuildItem> templates) {
    ClassLoader cl = Thread.currentThread().getContextClassLoader();
    for (QuteTemplateBuildItem template : templates) {
        List<DevUIContent> contentPerExtension = new ArrayList<>();
        for (QuteTemplateBuildItem.TemplateData templateData : template.getTemplateDatas()) {
            // Template files live below BUILD_TIME_PATH on the classpath.
            String resourceName = BUILD_TIME_PATH + SLASH + templateData.getTemplateName();
            // TODO: What if we find more than one ?
            try (InputStream templateStream = cl.getResourceAsStream(resourceName)) {
                if (templateStream == null) {
                    continue; // no such template on the classpath; skip silently
                }
                byte[] templateContent = IoUtil.readBytes(templateStream);
                // Internal runs on "naked" namespace
                contentPerExtension.add(DevUIContent.builder()
                        .fileName(templateData.getFileName())
                        .template(templateContent)
                        .addData(templateData.getData())
                        .descriptions(templateData.getDescriptions())
                        .mcpDefaultEnables(templateData.getMcpDefaultEnables())
                        .contentTypes(templateData.getContentTypes())
                        .build());
            } catch (IOException ioe) {
                throw new UncheckedIOException("An error occurred while processing " + resourceName, ioe);
            }
        }
        buildTimeContentProducer.produce(new StaticContentBuildItem(
                StaticContentBuildItem.DEV_UI, contentPerExtension));
    }
}
/**
 * Creates the Dev UI's own (internal) build-time data that is exposed to Javascript.
 */
@BuildStep(onlyIf = IsLocalDevelopment.class)
void createBuildTimeData(BuildProducer<BuildTimeConstBuildItem> buildTimeConstProducer,
        BuildProducer<ThemeVarsBuildItem> themeVarsProducer,
        CurateOutcomeBuildItem curateOutcomeBuildItem,
        List<InternalPageBuildItem> internalPages,
        ExtensionsBuildItem extensionsBuildItem,
        NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem,
        LaunchModeBuildItem launchModeBuildItem,
        Optional<EffectiveIdeBuildItem> effectiveIdeBuildItem,
        DevUIConfig devUIConfig) {
    BuildTimeConstBuildItem internalData = new BuildTimeConstBuildItem(AbstractDevUIBuildItem.DEV_UI);
    // Each helper contributes one named slice of the internal build-time data.
    addThemeBuildTimeData(internalData, devUIConfig, themeVarsProducer);
    addMenuSectionBuildTimeData(internalData, internalPages, extensionsBuildItem);
    addFooterTabBuildTimeData(internalData, extensionsBuildItem, devUIConfig);
    addSettingTabBuildTimeData(internalData, extensionsBuildItem);
    addUnlistedPageBuildTimeData(internalData, extensionsBuildItem);
    addApplicationInfoBuildTimeData(internalData, curateOutcomeBuildItem, nonApplicationRootPathBuildItem);
    addIdeBuildTimeData(internalData, effectiveIdeBuildItem, launchModeBuildItem);
    buildTimeConstProducer.produce(internalData);
}
/**
 * Extracts the version segment that follows an artifact name in the given jar URLs.
 * Assumes mvnpm-style paths of the form {@code .../<artifact>/<version>/...}.
 *
 * @param urls the jar URLs to scan
 * @param artifacts the artifact names to look for
 * @return artifact name to version; artifacts that are not found (or have no
 *         version segment after their name) are simply absent from the map
 */
private Map<String, String> extractJsVersionsFor(Set<URL> urls, String... artifacts) {
    Map<String, String> rm = new HashMap<>();
    for (URL u : urls) {
        String path = u.getPath();
        for (String artifact : artifacts) {
            int idx = path.indexOf(artifact);
            if (idx >= 0) {
                // Skip past the artifact name and the separator that follows it.
                int versionStart = idx + artifact.length() + 1;
                // Guard: a path that ends exactly at the artifact name has no version
                // segment; the previous substring(versionStart) threw
                // StringIndexOutOfBoundsException in that case.
                if (versionStart < path.length()) {
                    String versionOnward = path.substring(versionStart);
                    rm.put(artifact, versionOnward.split(SLASH)[0]);
                }
            }
        }
    }
    return rm;
}
// Computes the light/dark color maps for the configured base theme and publishes
// them as the "themes" build-time data plus a default set of theme CSS vars.
private void addThemeBuildTimeData(BuildTimeConstBuildItem internalBuildTimeData, DevUIConfig devUIConfig,
        BuildProducer<ThemeVarsBuildItem> themeVarsProducer) {
    Map<String, Map<String, String>> themes = new HashMap<>();
    Map<String, String> dark = new HashMap<>();
    Map<String, String> light = new HashMap<>();
    Optional<DevUIConfig.Theme> overrides = devUIConfig.theme();
    switch (devUIConfig.baseTheme()) {
        case red:
            computeRedColors(themes, dark, light, overrides);
            break;
        case blue:
            computeBlueColors(themes, dark, light, overrides);
            break;
        default:
            computeDefaultColors(themes, dark, light, overrides);
            break;
    }
    internalBuildTimeData.addBuildTimeData("themes", themes);
    // Also set at least one there for a default
    themeVarsProducer.produce(new ThemeVarsBuildItem(light.keySet(), QUARKUS_BLUE.toString()));
}
// Builds the left-hand menu: internal pages (sorted by position) first,
// then menu pages contributed by extensions.
private void addMenuSectionBuildTimeData(BuildTimeConstBuildItem internalBuildTimeData,
        List<InternalPageBuildItem> internalPages,
        ExtensionsBuildItem extensionsBuildItem) {
    List<Page> sectionMenu = new ArrayList<>();
    // Internal pages are ordered by their declared position.
    Collections.sort(internalPages, (a, b) -> Integer.compare(a.getPosition(), b.getPosition()));
    for (InternalPageBuildItem internalPageBuildItem : internalPages) {
        for (Page page : internalPageBuildItem.getPages()) {
            // Propagate an optional menu action component onto each of the item's pages.
            if (internalPageBuildItem.getMenuActionComponent() != null) {
                page.setMenuActionComponent(internalPageBuildItem.getMenuActionComponent());
            }
            sectionMenu.add(page);
        }
        internalBuildTimeData.addAllBuildTimeData(internalPageBuildItem.getBuildTimeData());
    }
    // Menu pages contributed by extensions come after the internal ones.
    for (Extension extension : extensionsBuildItem.getSectionMenuExtensions()) {
        sectionMenu.addAll(extension.getMenuPages());
    }
    internalBuildTimeData.addBuildTimeData("menuItems", sectionMenu);
}
// Collects the setting tabs contributed by extensions and publishes them as "settingTabs".
private void addSettingTabBuildTimeData(BuildTimeConstBuildItem internalBuildTimeData,
        ExtensionsBuildItem extensionsBuildItem) {
    List<Page> settingTabs = new ArrayList<>();
    extensionsBuildItem.getSettingTabsExtensions()
            .forEach(extension -> settingTabs.addAll(extension.getSettingPages()));
    internalBuildTimeData.addBuildTimeData("settingTabs", settingTabs);
}
// Collects the unlisted (not shown in any menu) pages contributed by extensions
// and publishes them as "unlistedPages".
private void addUnlistedPageBuildTimeData(BuildTimeConstBuildItem internalBuildTimeData,
        ExtensionsBuildItem extensionsBuildItem) {
    List<Page> unlistedPages = new ArrayList<>();
    extensionsBuildItem.getUnlistedExtensions()
            .forEach(extension -> unlistedPages.addAll(extension.getUnlistedPages()));
    internalBuildTimeData.addBuildTimeData("unlistedPages", unlistedPages);
}
// Assembles the footer tabs (log viewers etc.) shown at the bottom of the Dev UI.
private void addFooterTabBuildTimeData(BuildTimeConstBuildItem internalBuildTimeData,
        ExtensionsBuildItem extensionsBuildItem, DevUIConfig devUIConfig) {
    List<Page> footerTabs = new ArrayList<>();
    // Built-in tabs: server log and continuous testing.
    footerTabs.add(Page.webComponentPageBuilder().internal()
            .namespace("devui-logstream")
            .title("Server")
            .icon("font-awesome-solid:server")
            .componentLink("qwc-server-log.js").build());
    footerTabs.add(Page.webComponentPageBuilder().internal()
            .namespace("devui-continuous-testing")
            .title("Testing")
            .icon("font-awesome-solid:flask-vial")
            .componentLink("qwc-test-log.js").build());
    // The JSON-RPC message log is only useful to extension developers, so it is
    // included only when building Quarkus from source, or when explicitly enabled.
    if (Version.getVersion().equalsIgnoreCase("999-SNAPSHOT") || devUIConfig.showJsonRpcLog()) {
        footerTabs.add(Page.webComponentPageBuilder().internal()
                .namespace("devui-jsonrpcstream")
                .title("Dev UI")
                .icon("font-awesome-solid:satellite-dish")
                .componentLink("qwc-jsonrpc-messages.js").build());
    }
    // Footer tabs contributed by extensions come after the built-in ones.
    for (Extension extension : extensionsBuildItem.getFooterTabsExtensions()) {
        footerTabs.addAll(extension.getFooterPages());
    }
    internalBuildTimeData.addBuildTimeData("footerTabs", footerTabs);
    internalBuildTimeData.addBuildTimeData("loggerLevels", LEVELS, "All the available logger levels", true);
}
// Publishes application metadata (GAV, context root, Quarkus and application versions)
// as the "applicationInfo" build-time data.
private void addApplicationInfoBuildTimeData(BuildTimeConstBuildItem internalBuildTimeData,
        CurateOutcomeBuildItem curateOutcomeBuildItem,
        NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem) {
    ApplicationModel applicationModel = curateOutcomeBuildItem.getApplicationModel();
    ResolvedDependency appArtifact = applicationModel.getAppArtifact();
    Map<String, String> applicationInfo = new HashMap<>();
    applicationInfo.put("groupId", appArtifact.getGroupId());
    applicationInfo.put("artifactId", appArtifact.getArtifactId());
    applicationInfo.put("contextRoot",
            nonApplicationRootPathBuildItem.getNonApplicationRootPath() + Constants.DEV_UI + SLASH);
    applicationInfo.put("quarkusVersion", Version.getVersion());
    applicationInfo.put("applicationName",
            config.getOptionalValue("quarkus.application.name", String.class).orElse(""));
    applicationInfo.put("applicationVersion",
            config.getOptionalValue("quarkus.application.version", String.class).orElse(""));
    internalBuildTimeData.addBuildTimeData("applicationInfo", applicationInfo);
}
// Publishes IDE integration info (IDE name and user source packages) used by the
// "open in IDE" links; disabled for non-local (e.g. remote) dev sessions.
private void addIdeBuildTimeData(BuildTimeConstBuildItem internalBuildTimeData,
        Optional<EffectiveIdeBuildItem> effectiveIdeBuildItem,
        LaunchModeBuildItem launchModeBuildItem) {
    boolean disable = launchModeBuildItem.getDevModeType().orElse(DevModeType.LOCAL) != DevModeType.LOCAL;
    Map<String, Object> ideInfo = new HashMap<>();
    ideInfo.put("disable", disable);
    if (!disable && effectiveIdeBuildItem.isPresent()) {
        Ide ide = effectiveIdeBuildItem.get().getIde();
        ideInfo.put("ideName", ide.name());
        ideInfo.put("idePackages", getAllUserPackages());
    }
    internalBuildTimeData.addBuildTimeData("ideInfo", ideInfo);
}
// Collects the user's root packages across all configured source directories.
private List<String> getAllUserPackages() {
    List<String> packages = new ArrayList<>();
    for (Path sourceRoot : DevConsoleManager.getHotReplacementContext().getSourcesDir()) {
        packages.addAll(sourcePackagesForRoot(sourceRoot));
    }
    return packages;
}
/**
 * Return the most general packages used in the application.
 * <p>
 * For each first-level directory under the language root, walks the tree, collects the
 * directories that contain files, and reduces them to their longest common package prefix.
 * <p>
 * TODO: this likely covers almost all typical use cases, but probably needs some tweaks for extreme corner cases
 *
 * @param langPath a language source root (e.g. src/main/java)
 * @return the root package names; a single empty string when the root has no subdirectories
 */
private List<String> sourcePackagesForRoot(Path langPath) {
    if (!Files.exists(langPath)) {
        return Collections.emptyList();
    }
    File[] rootFiles = langPath.toFile().listFiles();
    List<Path> rootPackages = new ArrayList<>(1);
    if (rootFiles != null) {
        for (File rootFile : rootFiles) {
            if (rootFile.isDirectory()) {
                rootPackages.add(rootFile.toPath());
            }
        }
    }
    // No subdirectories: everything lives in the default package.
    if (rootPackages.isEmpty()) {
        return List.of("");
    }
    List<String> result = new ArrayList<>(rootPackages.size());
    for (Path rootPackage : rootPackages) {
        // Paths of all directories (under this root package) that contain files.
        List<String> paths = new ArrayList<>();
        SimpleFileVisitor<Path> simpleFileVisitor = new DetectPackageFileVisitor(paths);
        try {
            Files.walkFileTree(rootPackage, simpleFileVisitor);
            if (paths.isEmpty()) {
                continue;
            }
            // Longest common directory prefix, converted to dotted package form
            // relative to the language root.
            String commonPath = commonPath(paths);
            String rootPackageStr = commonPath.replace(langPath.toAbsolutePath().toString(), "")
                    .replace(File.separator, ".");
            // Strip a leading/trailing dot left over from the separator replacement.
            if (rootPackageStr.startsWith(".")) {
                rootPackageStr = rootPackageStr.substring(1);
            }
            if (rootPackageStr.endsWith(".")) {
                rootPackageStr = rootPackageStr.substring(0, rootPackageStr.length() - 1);
            }
            result.add(rootPackageStr);
        } catch (IOException e) {
            log.debug("Unable to determine the sources directories", e);
            // just ignore it as it's not critical for the DevUI functionality
        }
    }
    return result;
}
/**
 * Computes the longest common leading directory path shared by all given paths.
 * The result ends with {@link File#separator} unless it is empty.
 * <p>
 * Package-private static (it reads no instance state) so it can be unit tested.
 *
 * @param paths the candidate paths, split on the platform file separator
 * @return the common prefix (with trailing separator), or "" when there is none
 */
static String commonPath(List<String> paths) {
    if (paths.isEmpty()) {
        return "";
    }
    List<String[]> dirs = new ArrayList<>(paths.size());
    for (String path : paths) {
        dirs.add(path.split(Pattern.quote(File.separator)));
    }
    StringBuilder commonPath = new StringBuilder();
    for (int j = 0; j < dirs.get(0).length; j++) {
        String thisDir = dirs.get(0)[j]; // next directory name in the first path
        boolean allMatched = true;
        for (int i = 1; i < dirs.size() && allMatched; i++) {
            // "<= j" (not "< j") guards paths that are a strict prefix of the first
            // path; the previous check allowed an out-of-bounds access at index j.
            if (dirs.get(i).length <= j) {
                allMatched = false;
                break;
            }
            allMatched = dirs.get(i)[j].equals(thisDir);
        }
        if (!allMatched) {
            break;
        }
        commonPath.append(thisDir).append(File.separator);
    }
    return commonPath.toString();
}
// All logger level names (JBoss Logging and java.util.logging) offered in the
// Dev UI log level dropdown, exposed as the "loggerLevels" build-time data.
private static final List<String> LEVELS = List.of(
        OFF.getName(),
        SEVERE.getName(),
        ERROR.getName(),
        FATAL.getName(),
        WARNING.getName(),
        WARN.getName(),
        INFO.getName(),
        DEBUG.getName(),
        TRACE.getName(),
        CONFIG.getName(),
        FINE.getName(),
        FINER.getName(),
        FINEST.getName(),
        ALL.getName());
// Adds the Quarkus brand colors to both theme maps.
private static void addQuarkusLogoColors(Map<String, String> dark,
        Map<String, String> light) {
    // Colors that are identical in both themes.
    for (Map<String, String> theme : List.of(light, dark)) {
        theme.put("--quarkus-blue", QUARKUS_BLUE.toString());
        theme.put("--quarkus-red", QUARKUS_RED.toString());
        theme.put("--quarkus-assistant", QUARKUS_ASSISTANT.toString());
    }
    // The logo center flips so it keeps contrast against the background.
    light.put("--quarkus-center", QUARKUS_DARK.toString());
    dark.put("--quarkus-center", QUARKUS_LIGHT.toString());
}
/**
* To get back to the original, add this
* %dev.quarkus.dev-ui.theme.dark.base-color-light=hsla(0, 100%, 100%, 1)
* %dev.quarkus.dev-ui.theme.dark.base-color-dark=hsla(210, 10%, 23%, 1)
* %dev.quarkus.dev-ui.theme.dark.contrast-5pct-light=hsla(214, 61%, 25%, 0.05)
* %dev.quarkus.dev-ui.theme.dark.contrast-5pct-dark=hsla(214, 65%, 85%, 0.06)
* %dev.quarkus.dev-ui.theme.dark.contrast-10pct-light=hsla(214, 57%, 24%, 0.1)
* %dev.quarkus.dev-ui.theme.dark.contrast-10pct-dark=hsla(214, 60%, 80%, 0.14)
* %dev.quarkus.dev-ui.theme.dark.contrast-20pct-light=hsla(214, 53%, 23%, 0.16)
* %dev.quarkus.dev-ui.theme.dark.contrast-20pct-dark=hsla(214, 64%, 82%, 0.23)
* %dev.quarkus.dev-ui.theme.dark.contrast-30pct-light=hsla(214, 50%, 22%, 0.26)
* %dev.quarkus.dev-ui.theme.dark.contrast-30pct-dark=hsla(214, 69%, 84%, 0.32)
* %dev.quarkus.dev-ui.theme.dark.contrast-40pct-light=hsla(214, 47%, 21%, 0.38)
* %dev.quarkus.dev-ui.theme.dark.contrast-40pct-dark=hsla(214, 73%, 86%, 0.41)
* %dev.quarkus.dev-ui.theme.dark.contrast-50pct-light=hsla(214, 45%, 20%, 0.52)
* %dev.quarkus.dev-ui.theme.dark.contrast-50pct-dark=hsla(214, 78%, 88%, 0.50)
* %dev.quarkus.dev-ui.theme.dark.contrast-60pct-light=hsla(214, 43%, 19%, 0.6)
* %dev.quarkus.dev-ui.theme.dark.contrast-60pct-dark=hsla(214, 82%, 90%, 0.6)
* %dev.quarkus.dev-ui.theme.dark.contrast-70pct-light=hsla(214, 42%, 18%, 0.69)
* %dev.quarkus.dev-ui.theme.dark.contrast-70pct-dark=hsla(214, 87%, 92%, 0.7)
* %dev.quarkus.dev-ui.theme.dark.contrast-80pct-light=hsla(214, 41%, 17%, 0.83)
* %dev.quarkus.dev-ui.theme.dark.contrast-80pct-dark=hsla(214, 91%, 94%, 0.8)
* %dev.quarkus.dev-ui.theme.dark.contrast-90pct-light=hsla(214, 40%, 16%, 0.94)
* %dev.quarkus.dev-ui.theme.dark.contrast-90pct-dark=hsla(214, 96%, 96%, 0.9)
* %dev.quarkus.dev-ui.theme.dark.contrast-light=hsla(214, 35%, 15%, 1)
* %dev.quarkus.dev-ui.theme.dark.contrast-dark=hsla(214, 100%, 98%, 1)
* %dev.quarkus.dev-ui.theme.dark.error-color-light=hsla(3, 85%, 48%, 1)
* %dev.quarkus.dev-ui.theme.dark.error-color-dark=hsla(3, 90%, 63%, 1)
* %dev.quarkus.dev-ui.theme.dark.header-text-color-light=hsla(214, 35%, 15%, 1)
* %dev.quarkus.dev-ui.theme.dark.header-text-color-dark=hsla(214, 100%, 98%, 1)
*/
/**
 * Computes the "default" (Quarkus-branded) Dev UI theme palette.
 * <p>
 * For every Lumo CSS custom property, a value is resolved from the user-provided
 * theme configuration when present; otherwise a built-in default is used. The
 * resulting palettes are registered in {@code themes} under the keys
 * {@code "light"} and {@code "dark"}.
 *
 * @param themes receives the finished palettes, keyed by {@code "light"} / {@code "dark"}
 * @param dark   mutable map populated with the dark-mode CSS custom properties
 * @param light  mutable map populated with the light-mode CSS custom properties
 * @param theme  optional user theme overrides from configuration
 */
private static void computeDefaultColors(Map<String, Map<String, String>> themes,
        Map<String, String> dark,
        Map<String, String> light,
        Optional<DevUIConfig.Theme> theme) {
    addQuarkusLogoColors(dark, light);
    // Base Colors
    light.put("--lumo-base-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::baseColor, Color.from(0, 0, 100).toString()));
    dark.put("--lumo-base-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark, DevUIConfig.ThemeMode::baseColor,
            Color.from(0, 0, 13).toString()));
    // Contrast Colors
    light.put("--lumo-contrast", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light, DevUIConfig.ThemeMode::contrast,
            Color.from(0, 0, 13).toString()));
    dark.put("--lumo-contrast", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark, DevUIConfig.ThemeMode::contrast,
            Color.from(0, 0, 100).toString()));
    // Primary Colors
    light.put("--lumo-primary-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::primaryColor, QUARKUS_BLUE.toString()));
    dark.put("--lumo-primary-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::primaryColor, QUARKUS_BLUE.toString()));
    light.put("--lumo-primary-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::primaryTextColor, Color.from(210, 90, 60).toString()));
    dark.put("--lumo-primary-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::primaryTextColor, Color.from(210, 90, 60).toString()));
    light.put("--lumo-primary-contrast-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::primaryContrastColor, Color.from(0, 0, 100).toString()));
    dark.put("--lumo-primary-contrast-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::primaryContrastColor, Color.from(0, 0, 100).toString()));
    // Error Colors
    // Fixed: previously resolved via ThemeMode::errorTextColor, which ignored a
    // configured error-color and misapplied error-text-color here.
    light.put("--lumo-error-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::errorColor, QUARKUS_RED.toString()));
    dark.put("--lumo-error-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::errorColor, QUARKUS_RED.toString()));
    light.put("--lumo-error-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::errorTextColor, Color.from(3, 90, 42).toString()));
    dark.put("--lumo-error-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::errorTextColor, Color.from(3, 90, 63).toString()));
    light.put("--lumo-error-contrast-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::errorContrastColor, Color.from(0, 0, 100).toString()));
    dark.put("--lumo-error-contrast-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::errorContrastColor, Color.from(0, 0, 100).toString()));
    // Warning Colors
    light.put("--lumo-warning-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::warningColor, Color.from(30, 100, 50).toString()));
    dark.put("--lumo-warning-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::warningColor, Color.from(30, 100, 50).toString()));
    light.put("--lumo-warning-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::warningTextColor, Color.from(30, 89, 42).toString()));
    dark.put("--lumo-warning-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::warningTextColor, Color.from(30, 100, 67).toString()));
    light.put("--lumo-warning-contrast-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::warningContrastColor, Color.from(0, 0, 100).toString()));
    dark.put("--lumo-warning-contrast-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::warningContrastColor, Color.from(0, 0, 100).toString()));
    // Success Colors
    light.put("--lumo-success-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::successColor, Color.from(145, 72, 30).toString()));
    dark.put("--lumo-success-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::successColor, Color.from(145, 65, 42).toString()));
    light.put("--lumo-success-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::successTextColor, Color.from(145, 85, 25).toString()));
    dark.put("--lumo-success-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::successTextColor, Color.from(145, 85, 47).toString()));
    light.put("--lumo-success-contrast-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::successContrastColor, Color.from(0, 0, 100).toString()));
    dark.put("--lumo-success-contrast-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::successContrastColor, Color.from(0, 0, 100).toString()));
    // Text Colors
    light.put("--lumo-header-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::headerTextColor, Color.from(0, 0, 13).toString()));
    dark.put("--lumo-header-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::headerTextColor, Color.from(0, 0, 100).toString()));
    light.put("--lumo-body-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::bodyTextColor, Color.from(0, 0, 20, 0.94).toString()));
    dark.put("--lumo-body-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::bodyTextColor, Color.from(0, 0, 90, 0.9).toString()));
    light.put("--lumo-secondary-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::secondaryTextColor, Color.from(0, 0, 40, 0.69).toString()));
    dark.put("--lumo-secondary-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::secondaryTextColor, Color.from(0, 0, 70, 0.7).toString()));
    light.put("--lumo-tertiary-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::tertiaryTextColor, Color.from(0, 0, 50, 0.52).toString()));
    dark.put("--lumo-tertiary-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::tertiaryTextColor, Color.from(0, 0, 60, 0.5).toString()));
    light.put("--lumo-disabled-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::disabledTextColor, Color.from(0, 0, 60, 0.26).toString()));
    dark.put("--lumo-disabled-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::disabledTextColor, Color.from(0, 0, 50, 0.32).toString()));
    // Grayscale Adjustments: --lumo-contrast-5pct .. --lumo-contrast-90pct, where
    // the percentage doubles as the alpha of the contrast color.
    for (int i = 5; i <= 90; i += 5) {
        double opacity = i / 100.0;
        String key = "--lumo-contrast-" + i + "pct";
        // Retrieve from theme if available, otherwise use default computed color
        String lightContrast = getThemeSettingOrDefault(theme,
                DevUIConfig.Theme::light,
                getContrastPct(i),
                Color.from(0, 0, 13, opacity).toString());
        String darkContrast = getThemeSettingOrDefault(theme,
                DevUIConfig.Theme::dark,
                getContrastPct(i),
                Color.from(0, 0, 100, opacity).toString());
        light.put(key, lightContrast);
        dark.put(key, darkContrast);
    }
    themes.put("dark", dark);
    themes.put("light", light);
}
/**
 * Computes the red (color-vision-friendly) Dev UI theme palette.
 * <p>
 * For every Lumo CSS custom property, a value is resolved from the user-provided
 * theme configuration when present; otherwise a built-in default is used. The
 * resulting palettes are registered in {@code themes} under the keys
 * {@code "light"} and {@code "dark"}.
 *
 * @param themes receives the finished palettes, keyed by {@code "light"} / {@code "dark"}
 * @param dark   mutable map populated with the dark-mode CSS custom properties
 * @param light  mutable map populated with the light-mode CSS custom properties
 * @param theme  optional user theme overrides from configuration
 */
private static void computeRedColors(Map<String, Map<String, String>> themes,
        Map<String, String> dark,
        Map<String, String> light,
        Optional<DevUIConfig.Theme> theme) {
    addQuarkusLogoColors(dark, light);
    // Base Colors
    light.put("--lumo-base-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::baseColor, Color.from(0, 0, 100).toString()));
    dark.put("--lumo-base-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark, DevUIConfig.ThemeMode::baseColor,
            Color.from(0, 0, 10).toString()));
    // Contrast Colors
    light.put("--lumo-contrast", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light, DevUIConfig.ThemeMode::contrast,
            Color.from(0, 0, 10).toString()));
    dark.put("--lumo-contrast", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark, DevUIConfig.ThemeMode::contrast,
            Color.from(0, 0, 100).toString()));
    // Primary Colors
    light.put("--lumo-primary-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::primaryColor, Color.from(0, 100, 47).toString()));
    dark.put("--lumo-primary-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::primaryColor, Color.from(0, 100, 47).toString()));
    light.put("--lumo-primary-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::primaryTextColor, Color.from(0, 100, 47).toString()));
    dark.put("--lumo-primary-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::primaryTextColor, Color.from(0, 100, 47).toString()));
    light.put("--lumo-primary-contrast-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::primaryContrastColor, Color.from(0, 0, 100).toString()));
    dark.put("--lumo-primary-contrast-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::primaryContrastColor, Color.from(0, 0, 100).toString()));
    // Error Colors
    light.put("--lumo-error-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::errorColor, Color.from(0, 100, 47).toString()));
    dark.put("--lumo-error-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::errorColor, Color.from(0, 100, 47).toString()));
    light.put("--lumo-error-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::errorTextColor, Color.from(0, 100, 40).toString()));
    dark.put("--lumo-error-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::errorTextColor, Color.from(0, 100, 55).toString()));
    light.put("--lumo-error-contrast-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::errorContrastColor, Color.from(0, 0, 100).toString()));
    dark.put("--lumo-error-contrast-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::errorContrastColor, Color.from(0, 0, 100).toString()));
    // Warning Colors
    light.put("--lumo-warning-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::warningColor, Color.from(0, 0, 96).toString()));
    dark.put("--lumo-warning-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::warningColor, Color.from(0, 0, 96).toString()));
    light.put("--lumo-warning-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::warningTextColor, Color.from(0, 0, 80).toString()));
    dark.put("--lumo-warning-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::warningTextColor, Color.from(0, 0, 85).toString()));
    light.put("--lumo-warning-contrast-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::warningContrastColor, Color.from(0, 0, 100).toString()));
    dark.put("--lumo-warning-contrast-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::warningContrastColor, Color.from(0, 0, 100).toString()));
    // Success Colors
    light.put("--lumo-success-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::successColor, Color.from(207, 100, 25).toString()));
    dark.put("--lumo-success-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::successColor, Color.from(207, 100, 25).toString()));
    light.put("--lumo-success-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::successTextColor, Color.from(207, 100, 30).toString()));
    dark.put("--lumo-success-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::successTextColor, Color.from(207, 100, 35).toString()));
    light.put("--lumo-success-contrast-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::successContrastColor, Color.from(0, 0, 100).toString()));
    dark.put("--lumo-success-contrast-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::successContrastColor, Color.from(0, 0, 100).toString()));
    // Text Colors
    // Fixed: previously every text-color property resolved its override via
    // ThemeMode::successContrastColor (copy-paste error), so configured text
    // colors were silently ignored for this theme.
    light.put("--lumo-header-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::headerTextColor, Color.from(0, 0, 10).toString()));
    dark.put("--lumo-header-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::headerTextColor, Color.from(0, 0, 100).toString()));
    light.put("--lumo-body-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::bodyTextColor, Color.from(0, 0, 20, 0.94).toString()));
    dark.put("--lumo-body-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::bodyTextColor, Color.from(0, 0, 90, 0.9).toString()));
    light.put("--lumo-secondary-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::secondaryTextColor, Color.from(0, 0, 40, 0.69).toString()));
    dark.put("--lumo-secondary-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::secondaryTextColor, Color.from(0, 0, 70, 0.7).toString()));
    light.put("--lumo-tertiary-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::tertiaryTextColor, Color.from(0, 0, 50, 0.52).toString()));
    dark.put("--lumo-tertiary-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::tertiaryTextColor, Color.from(0, 0, 60, 0.5).toString()));
    light.put("--lumo-disabled-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::disabledTextColor, Color.from(0, 0, 60, 0.26).toString()));
    dark.put("--lumo-disabled-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::disabledTextColor, Color.from(0, 0, 50, 0.32).toString()));
    // Grayscale Adjustments: --lumo-contrast-5pct .. --lumo-contrast-90pct, where
    // the percentage doubles as the alpha of the contrast color.
    for (int i = 5; i <= 90; i += 5) {
        double opacity = i / 100.0;
        String key = "--lumo-contrast-" + i + "pct";
        // Retrieve from theme if available, otherwise use default computed color
        String lightContrast = getThemeSettingOrDefault(theme,
                DevUIConfig.Theme::light,
                getContrastPct(i),
                Color.from(0, 0, 10, opacity).toString());
        String darkContrast = getThemeSettingOrDefault(theme,
                DevUIConfig.Theme::dark,
                getContrastPct(i),
                Color.from(0, 0, 100, opacity).toString());
        light.put(key, lightContrast);
        dark.put(key, darkContrast);
    }
    themes.put("dark", dark);
    themes.put("light", light);
}
/**
 * Computes the blue (color-vision-friendly) Dev UI theme palette.
 * <p>
 * For every Lumo CSS custom property, a value is resolved from the user-provided
 * theme configuration when present; otherwise a built-in default is used. The
 * resulting palettes are registered in {@code themes} under the keys
 * {@code "light"} and {@code "dark"}.
 *
 * @param themes receives the finished palettes, keyed by {@code "light"} / {@code "dark"}
 * @param dark   mutable map populated with the dark-mode CSS custom properties
 * @param light  mutable map populated with the light-mode CSS custom properties
 * @param theme  optional user theme overrides from configuration
 */
private static void computeBlueColors(Map<String, Map<String, String>> themes,
        Map<String, String> dark,
        Map<String, String> light,
        Optional<DevUIConfig.Theme> theme) {
    addQuarkusLogoColors(dark, light);
    // Base Colors
    light.put("--lumo-base-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::baseColor, Color.from(0, 0, 100).toString()));
    dark.put("--lumo-base-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark, DevUIConfig.ThemeMode::baseColor,
            Color.from(0, 0, 9).toString()));
    // Contrast Colors
    light.put("--lumo-contrast", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light, DevUIConfig.ThemeMode::contrast,
            Color.from(0, 0, 9).toString()));
    dark.put("--lumo-contrast", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark, DevUIConfig.ThemeMode::contrast,
            Color.from(0, 0, 100).toString()));
    // Primary Colors
    light.put("--lumo-primary-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::primaryColor, Color.from(212, 90, 20).toString()));
    dark.put("--lumo-primary-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::primaryColor, Color.from(212, 90, 20).toString()));
    light.put("--lumo-primary-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::primaryTextColor, Color.from(212, 90, 20).toString()));
    dark.put("--lumo-primary-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::primaryTextColor, Color.from(212, 53, 48).toString()));
    light.put("--lumo-primary-contrast-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::primaryContrastColor, Color.from(0, 0, 100).toString()));
    dark.put("--lumo-primary-contrast-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::primaryContrastColor, Color.from(0, 0, 100).toString()));
    // Error Colors
    light.put("--lumo-error-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::errorColor, Color.from(212, 90, 20).toString()));
    dark.put("--lumo-error-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::errorColor, Color.from(212, 90, 20).toString()));
    light.put("--lumo-error-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::errorTextColor, Color.from(212, 90, 30).toString()));
    dark.put("--lumo-error-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::errorTextColor, Color.from(212, 90, 40).toString()));
    light.put("--lumo-error-contrast-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::errorContrastColor, Color.from(0, 0, 100).toString()));
    dark.put("--lumo-error-contrast-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::errorContrastColor, Color.from(0, 0, 100).toString()));
    // Warning Colors
    light.put("--lumo-warning-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::warningColor, Color.from(0, 0, 96).toString()));
    dark.put("--lumo-warning-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::warningColor, Color.from(0, 0, 96).toString()));
    light.put("--lumo-warning-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::warningTextColor, Color.from(0, 0, 80).toString()));
    dark.put("--lumo-warning-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::warningTextColor, Color.from(0, 0, 85).toString()));
    light.put("--lumo-warning-contrast-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::warningContrastColor, Color.from(0, 0, 100).toString()));
    dark.put("--lumo-warning-contrast-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::warningContrastColor, Color.from(0, 0, 100).toString()));
    // Success Colors
    light.put("--lumo-success-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::successColor, Color.from(214, 49, 50).toString()));
    dark.put("--lumo-success-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::successColor, Color.from(214, 49, 50).toString()));
    light.put("--lumo-success-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::successTextColor, Color.from(214, 49, 55).toString()));
    dark.put("--lumo-success-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::successTextColor, Color.from(214, 49, 60).toString()));
    light.put("--lumo-success-contrast-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::successContrastColor, Color.from(0, 0, 100).toString()));
    dark.put("--lumo-success-contrast-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::successContrastColor, Color.from(0, 0, 100).toString()));
    // Text colors
    // Fixed: previously every text-color property resolved its override via
    // ThemeMode::successContrastColor (copy-paste error), so configured text
    // colors were silently ignored for this theme.
    light.put("--lumo-header-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::headerTextColor, Color.from(0, 0, 9).toString()));
    dark.put("--lumo-header-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::headerTextColor, Color.from(0, 0, 100).toString()));
    light.put("--lumo-body-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::bodyTextColor, Color.from(0, 0, 20, 0.94).toString()));
    dark.put("--lumo-body-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::bodyTextColor, Color.from(0, 0, 90, 0.9).toString()));
    light.put("--lumo-secondary-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::secondaryTextColor, Color.from(0, 0, 40, 0.69).toString()));
    dark.put("--lumo-secondary-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::secondaryTextColor, Color.from(0, 0, 70, 0.7).toString()));
    light.put("--lumo-tertiary-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::tertiaryTextColor, Color.from(0, 0, 50, 0.52).toString()));
    dark.put("--lumo-tertiary-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::tertiaryTextColor, Color.from(0, 0, 60, 0.5).toString()));
    light.put("--lumo-disabled-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::light,
            DevUIConfig.ThemeMode::disabledTextColor, Color.from(0, 0, 60, 0.26).toString()));
    dark.put("--lumo-disabled-text-color", getThemeSettingOrDefault(theme, DevUIConfig.Theme::dark,
            DevUIConfig.ThemeMode::disabledTextColor, Color.from(0, 0, 50, 0.32).toString()));
    // Grayscale Adjustments: --lumo-contrast-5pct .. --lumo-contrast-90pct, where
    // the percentage doubles as the alpha of the contrast color.
    for (int i = 5; i <= 90; i += 5) {
        double opacity = i / 100.0;
        String key = "--lumo-contrast-" + i + "pct";
        // Retrieve from theme if available, otherwise use default computed color.
        // Light default lightness is 9 to match --lumo-contrast above (was 10,
        // apparently copied from the red theme).
        String lightContrast = getThemeSettingOrDefault(theme,
                DevUIConfig.Theme::light,
                getContrastPct(i),
                Color.from(0, 0, 9, opacity).toString());
        String darkContrast = getThemeSettingOrDefault(theme,
                DevUIConfig.Theme::dark,
                getContrastPct(i),
                Color.from(0, 0, 100, opacity).toString());
        light.put(key, lightContrast);
        dark.put(key, darkContrast);
    }
    themes.put("dark", dark);
    themes.put("light", light);
}
/**
 * Maps a contrast percentage (5, 10, ..., 90) to the accessor that reads the
 * corresponding {@code contrast-N-pct} override from a {@link DevUIConfig.ThemeMode}.
 * Any other value yields an accessor that always returns {@link Optional#empty()}.
 *
 * @param percentage the contrast step, expected to be a multiple of 5 between 5 and 90
 * @return accessor for the matching contrast override
 */
private static Function<DevUIConfig.ThemeMode, Optional<String>> getContrastPct(int percentage) {
    switch (percentage) {
        case 5:
            return DevUIConfig.ThemeMode::contrast5pct;
        case 10:
            return DevUIConfig.ThemeMode::contrast10pct;
        case 15:
            return DevUIConfig.ThemeMode::contrast15pct;
        case 20:
            return DevUIConfig.ThemeMode::contrast20pct;
        case 25:
            return DevUIConfig.ThemeMode::contrast25pct;
        case 30:
            return DevUIConfig.ThemeMode::contrast30pct;
        case 35:
            return DevUIConfig.ThemeMode::contrast35pct;
        case 40:
            return DevUIConfig.ThemeMode::contrast40pct;
        case 45:
            return DevUIConfig.ThemeMode::contrast45pct;
        case 50:
            return DevUIConfig.ThemeMode::contrast50pct;
        case 55:
            return DevUIConfig.ThemeMode::contrast55pct;
        case 60:
            return DevUIConfig.ThemeMode::contrast60pct;
        case 65:
            return DevUIConfig.ThemeMode::contrast65pct;
        case 70:
            return DevUIConfig.ThemeMode::contrast70pct;
        case 75:
            return DevUIConfig.ThemeMode::contrast75pct;
        case 80:
            return DevUIConfig.ThemeMode::contrast80pct;
        case 85:
            return DevUIConfig.ThemeMode::contrast85pct;
        case 90:
            return DevUIConfig.ThemeMode::contrast90pct;
        default:
            // Should never happen: callers iterate in steps of 5 within [5, 90].
            return mode -> Optional.empty();
    }
}
// Quarkus brand palette (HSLA) used as built-in defaults for the "default" theme.
private static final Color QUARKUS_BLUE = Color.from(210, 90, 60);
private static final Color QUARKUS_RED = Color.from(4, 90, 58);
private static final Color QUARKUS_DARK = Color.from(0, 0, 13);
private static final Color QUARKUS_LIGHT = Color.from(0, 0, 100);
private static final Color QUARKUS_ASSISTANT = Color.from(320, 100, 71);
/**
 * Resolves a single theme setting: drills from the (optional) user theme into the
 * requested mode (light/dark), then into the specific setting; falls back to
 * {@code defaultValue} when any level is absent.
 *
 * @param theme              optional user theme configuration
 * @param themeModeExtractor selects the light or dark mode section of the theme
 * @param settingExtractor   selects the individual setting within that mode
 * @param defaultValue       value to use when no override is configured
 * @return the configured value, or {@code defaultValue} if not present
 */
private static String getThemeSettingOrDefault(Optional<DevUIConfig.Theme> theme,
        Function<DevUIConfig.Theme, Optional<DevUIConfig.ThemeMode>> themeModeExtractor,
        Function<DevUIConfig.ThemeMode, Optional<String>> settingExtractor,
        String defaultValue) {
    Optional<String> configured = theme
            .flatMap(themeModeExtractor)
            .flatMap(settingExtractor);
    return configured.orElse(defaultValue);
}
/**
* This represents a HSLA color
* see https://www.w3schools.com/html/html_colors_hsl.asp
*/
static | BuildTimeContentProcessor |
java | elastic__elasticsearch | x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/BinaryDvConfirmedQuery.java | {
"start": 8651,
"end": 8822
} | interface ____ {
boolean matchesBinaryDV(ByteArrayStreamInput bytes, BytesRef bytesRef, BytesRef scratch) throws IOException;
}
private static | BinaryDVMatcher |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.