language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
spring-projects__spring-boot
|
smoke-test/spring-boot-smoke-test-property-validation/src/main/java/smoketest/propertyvalidation/SampleProperties.java
|
{
"start": 898,
"end": 1288
}
|
class ____ {
/**
* Sample host.
*/
private @Nullable String host;
/**
* Sample port.
*/
private Integer port = 8080;
public @Nullable String getHost() {
return this.host;
}
public void setHost(@Nullable String host) {
this.host = host;
}
public Integer getPort() {
return this.port;
}
public void setPort(Integer port) {
this.port = port;
}
}
|
SampleProperties
|
java
|
apache__camel
|
components/camel-netty/src/main/java/org/apache/camel/component/netty/util/SubnetUtils.java
|
{
"start": 1119,
"end": 3677
}
|
class ____ {
private static final String IP_ADDRESS = "(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})";
private static final String SLASH_FORMAT = IP_ADDRESS + "/(\\d{1,3})";
private static final Pattern ADDRESS_PATTERN = Pattern.compile(IP_ADDRESS);
private static final Pattern CIDR_PATTERN = Pattern.compile(SLASH_FORMAT);
private static final int NBITS = 32;
private int netmask;
private int address;
private int network;
private int broadcast;
/** Whether the broadcast/network address are included in host count */
private boolean inclusiveHostCount;
/**
* Constructor that takes a CIDR-notation string, e.g. "192.168.0.1/16"
*
* @param cidrNotation A CIDR-notation string, e.g. "192.168.0.1/16"
* @throws IllegalArgumentException if the parameter is invalid, i.e. does not match n.n.n.n/m where n=1-3 decimal
* digits, m = 1-3 decimal digits in range 1-32
*/
public SubnetUtils(String cidrNotation) {
calculate(cidrNotation);
}
/**
* Constructor that takes a dotted decimal address and a dotted decimal mask.
*
* @param address An IP address, e.g. "192.168.0.1"
* @param mask A dotted decimal netmask e.g. "255.255.0.0"
* @throws IllegalArgumentException if the address or mask is invalid, i.e. does not match n.n.n.n where n=1-3
* decimal digits and the mask is not all zeros
*/
public SubnetUtils(String address, String mask) {
calculate(toCidrNotation(address, mask));
}
/**
* Returns <code>true</code> if the return value of {@link SubnetInfo#getAddressCount()} includes the network and
* broadcast addresses.
*
* @since 2.2
* @return true if the hostcount includes the network and broadcast addresses
*/
public boolean isInclusiveHostCount() {
return inclusiveHostCount;
}
/**
* Set to <code>true</code> if you want the return value of {@link SubnetInfo#getAddressCount()} to include the
* network and broadcast addresses.
*
* @param inclusiveHostCount true if network and broadcast addresses are to be included
* @since 2.2
*/
public void setInclusiveHostCount(boolean inclusiveHostCount) {
this.inclusiveHostCount = inclusiveHostCount;
}
/**
* Convenience container for subnet summary information.
*
*/
public final
|
SubnetUtils
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/testutils/CancelableInvokable.java
|
{
"start": 1197,
"end": 1873
}
|
class ____ extends AbstractInvokable {
private volatile boolean canceled;
protected CancelableInvokable(Environment environment) {
super(environment);
}
@Override
public void invoke() throws Exception {
try {
doInvoke();
} catch (Exception e) {
throw e;
}
}
protected abstract void doInvoke() throws Exception;
@Override
public void cancel() {
canceled = true;
}
protected void waitUntilCancelled() throws InterruptedException {
synchronized (this) {
while (!canceled) {
wait();
}
}
}
}
|
CancelableInvokable
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/test/java/org/springframework/boot/context/config/ConfigDataLocationResolversTests.java
|
{
"start": 11703,
"end": 12345
}
|
class ____ extends ConfigDataResource {
private final TestResolver resolver;
private final ConfigDataLocation location;
private final boolean profileSpecific;
TestConfigDataResource(boolean optional, TestResolver resolver, ConfigDataLocation location,
boolean profileSpecific) {
super(optional);
this.resolver = resolver;
this.location = location;
this.profileSpecific = profileSpecific;
}
TestResolver getResolver() {
return this.resolver;
}
ConfigDataLocation getLocation() {
return this.location;
}
boolean isProfileSpecific() {
return this.profileSpecific;
}
}
}
|
TestConfigDataResource
|
java
|
quarkusio__quarkus
|
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/producer/primitive/PrimitiveWrapperProducerTest.java
|
{
"start": 3420,
"end": 4024
}
|
class ____ {
@Inject
boolean bool = true;
@Inject
byte b = 1;
@Inject
short s = 1;
int i = 1;
long l = 1L;
float f = 1.0F;
double d = 1.0;
char c = 'a';
@Inject
Injection(int i, long l) {
this.i = i;
this.l = l;
}
@Inject
void doubleParamInit(float f, double d) {
this.f = f;
this.d = d;
}
@Inject
void singleParamInit(char c) {
this.c = c;
}
}
@Dependent
static
|
Injection
|
java
|
apache__avro
|
lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestMetadataFiltering.java
|
{
"start": 1059,
"end": 1927
}
|
class ____ {
@Test
void metadataFiltering() throws Exception {
JobConf job = new JobConf();
job.set(AvroTrevniOutputFormat.META_PREFIX + "test1", "1");
job.set(AvroTrevniOutputFormat.META_PREFIX + "test2", "2");
job.set("test3", "3");
job.set(AvroJob.TEXT_PREFIX + "test4", "4");
job.set(AvroTrevniOutputFormat.META_PREFIX + "test5", "5");
ColumnFileMetaData metadata = AvroTrevniOutputFormat.filterMetadata(job);
assertTrue(metadata.get("test1") != null);
assertEquals(new String(metadata.get("test1")), "1");
assertTrue(metadata.get("test2") != null);
assertEquals(new String(metadata.get("test2")), "2");
assertTrue(metadata.get("test5") != null);
assertEquals(new String(metadata.get("test5")), "5");
assertNull(metadata.get("test3"));
assertNull(metadata.get("test4"));
}
}
|
TestMetadataFiltering
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/streaming/api/functions/windowing/AllWindowFunction.java
|
{
"start": 1113,
"end": 1378
}
|
interface ____ functions that are evaluated over non-keyed windows.
*
* @param <IN> The type of the input value.
* @param <OUT> The type of the output value.
* @param <W> The type of {@code Window} that this window function can be applied on.
*/
@Public
public
|
for
|
java
|
apache__kafka
|
trogdor/src/main/java/org/apache/kafka/trogdor/workload/ConsumeBenchWorker.java
|
{
"start": 4125,
"end": 8892
}
|
class ____ implements Runnable {
@Override
public void run() {
try {
List<Future<Void>> consumeTasks = new ArrayList<>();
for (ConsumeMessages task : consumeTasks()) {
consumeTasks.add(executor.submit(task));
}
executor.submit(new CloseStatusUpdater(consumeTasks));
} catch (Throwable e) {
WorkerUtils.abort(log, "Prepare", e, doneFuture);
}
}
private List<ConsumeMessages> consumeTasks() {
List<ConsumeMessages> tasks = new ArrayList<>();
String consumerGroup = consumerGroup();
int consumerCount = spec.threadsPerWorker();
Map<String, List<TopicPartition>> partitionsByTopic = spec.materializeTopics();
boolean toUseGroupPartitionAssignment = partitionsByTopic.values().stream().allMatch(List::isEmpty);
if (!toUseGroupPartitionAssignment && !toUseRandomConsumeGroup() && consumerCount > 1)
throw new ConfigException("You may not specify an explicit partition assignment when using multiple consumers in the same group."
+ "Please leave the consumer group unset, specify topics instead of partitions or use a single consumer.");
consumer = consumer(consumerGroup, clientId(0));
if (toUseGroupPartitionAssignment) {
Set<String> topics = partitionsByTopic.keySet();
tasks.add(new ConsumeMessages(consumer, spec.recordProcessor(), topics));
for (int i = 0; i < consumerCount - 1; i++) {
tasks.add(new ConsumeMessages(consumer(consumerGroup(), clientId(i + 1)), spec.recordProcessor(), topics));
}
} else {
List<TopicPartition> partitions = populatePartitionsByTopic(consumer.consumer(), partitionsByTopic)
.values().stream().flatMap(List::stream).toList();
tasks.add(new ConsumeMessages(consumer, spec.recordProcessor(), partitions));
for (int i = 0; i < consumerCount - 1; i++) {
tasks.add(new ConsumeMessages(consumer(consumerGroup(), clientId(i + 1)), spec.recordProcessor(), partitions));
}
}
return tasks;
}
private String clientId(int idx) {
return String.format("consumer.%s-%d", id, idx);
}
/**
* Creates a new KafkaConsumer instance
*/
private ThreadSafeConsumer consumer(String consumerGroup, String clientId) {
Properties props = new Properties();
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, spec.bootstrapServers());
props.put(ConsumerConfig.CLIENT_ID_CONFIG, clientId);
props.put(ConsumerConfig.GROUP_ID_CONFIG, consumerGroup);
props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
props.put(ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG, 100000);
// these defaults maybe over-written by the user-specified commonClientConf or consumerConf
WorkerUtils.addConfigsToProperties(props, spec.commonClientConf(), spec.consumerConf());
return new ThreadSafeConsumer(new KafkaConsumer<>(props, new ByteArrayDeserializer(), new ByteArrayDeserializer()), clientId);
}
private String consumerGroup() {
return toUseRandomConsumeGroup()
? "consume-bench-" + UUID.randomUUID()
: spec.consumerGroup();
}
private boolean toUseRandomConsumeGroup() {
return spec.consumerGroup().isEmpty();
}
private Map<String, List<TopicPartition>> populatePartitionsByTopic(KafkaConsumer<byte[], byte[]> consumer,
Map<String, List<TopicPartition>> materializedTopics) {
// fetch partitions for topics who do not have any listed
for (Map.Entry<String, List<TopicPartition>> entry : materializedTopics.entrySet()) {
String topicName = entry.getKey();
List<TopicPartition> partitions = entry.getValue();
if (partitions.isEmpty()) {
List<TopicPartition> fetchedPartitions = consumer.partitionsFor(topicName).stream()
.map(partitionInfo -> new TopicPartition(partitionInfo.topic(), partitionInfo.partition()))
.toList();
partitions.addAll(fetchedPartitions);
}
materializedTopics.put(topicName, partitions);
}
return materializedTopics;
}
}
public
|
Prepare
|
java
|
spring-projects__spring-boot
|
module/spring-boot-tomcat/src/test/java/org/springframework/boot/tomcat/autoconfigure/metrics/TomcatMetricsAutoConfigurationTests.java
|
{
"start": 6942,
"end": 7125
}
|
class ____ {
@Bean
TomcatMetricsBinder customTomcatMetricsBinder(MeterRegistry meterRegistry) {
return new TomcatMetricsBinder(meterRegistry);
}
}
}
|
CustomTomcatMetricsBinder
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/audit/AbstractAuditingTest.java
|
{
"start": 2662,
"end": 2712
}
|
class ____ auditor unit tests.
*/
public abstract
|
for
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/indices/AliasFilterParsingException.java
|
{
"start": 690,
"end": 1033
}
|
class ____ extends ElasticsearchException {
public AliasFilterParsingException(Index index, String name, String desc, Throwable ex) {
super("[" + name + "], " + desc, ex);
setIndex(index);
}
public AliasFilterParsingException(StreamInput in) throws IOException {
super(in);
}
}
|
AliasFilterParsingException
|
java
|
apache__logging-log4j2
|
log4j-web/src/main/java/org/apache/logging/log4j/web/appender/ServletAppender.java
|
{
"start": 1785,
"end": 5589
}
|
class ____<B extends Builder<B>> extends AbstractAppender.Builder<B>
implements org.apache.logging.log4j.core.util.Builder<ServletAppender> {
@PluginBuilderAttribute
@SuppressWarnings("log4j.public.setter") // The setter is not assignable.
private boolean logThrowables;
@Override
public ServletAppender build() {
final String name = getName();
if (name == null) {
LOGGER.error("No name provided for ServletAppender");
}
final ServletContext servletContext = WebLoggerContextUtils.getServletContext();
if (servletContext == null) {
LOGGER.error("No servlet context is available");
return null;
}
final Layout<? extends Serializable> layout = getOrCreateLayout();
if (!(layout instanceof StringLayout)) {
LOGGER.error("Layout must be a StringLayout to log to ServletContext");
return null;
}
return new ServletAppender(name, layout, getFilter(), servletContext, isIgnoreExceptions(), logThrowables);
}
/**
* Logs with {@link ServletContext#log(String, Throwable)} if true and with {@link ServletContext#log(String)} if false.
*
* @return whether to log a Throwable with the servlet context.
*/
public boolean isLogThrowables() {
return logThrowables;
}
/**
* Logs with {@link ServletContext#log(String, Throwable)} if true and with {@link ServletContext#log(String)} if false.
*/
public void setLogThrowables(final boolean logThrowables) {
this.logThrowables = logThrowables;
}
}
@PluginBuilderFactory
public static <B extends Builder<B>> B newBuilder() {
return new Builder<B>().asBuilder();
}
private final ServletContext servletContext;
private final boolean logThrowables;
private ServletAppender(
final String name,
final Layout<? extends Serializable> layout,
final Filter filter,
final ServletContext servletContext,
final boolean ignoreExceptions,
final boolean logThrowables) {
super(name, filter, layout, ignoreExceptions, Property.EMPTY_ARRAY);
this.servletContext = servletContext;
this.logThrowables = logThrowables;
}
@Override
public void append(final LogEvent event) {
final String serialized = ((AbstractStringLayout) getLayout()).toSerializable(event);
if (logThrowables) {
servletContext.log(serialized, event.getThrown());
} else {
servletContext.log(serialized);
}
}
/**
* Creates a Servlet Appender.
* @param layout The layout to use (required). Must extend {@link AbstractStringLayout}.
* @param filter The Filter or null.
* @param name The name of the Appender (required).
* @param ignoreExceptions If {@code true} (default) exceptions encountered when appending events are logged;
* otherwise they are propagated to the caller.
* @return The ServletAppender.
* @deprecated Use {@link #newBuilder()}.
*/
@Deprecated
public static ServletAppender createAppender(
final Layout<? extends Serializable> layout,
final Filter filter,
final String name,
final boolean ignoreExceptions) {
// @formatter:off
return newBuilder()
.setFilter(filter)
.setIgnoreExceptions(ignoreExceptions)
.setLayout(layout)
.setName(name)
.build();
// @formatter:on
}
}
|
Builder
|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableConcatWithSingle.java
|
{
"start": 1813,
"end": 3158
}
|
class ____<T>
extends SinglePostCompleteSubscriber<T, T>
implements SingleObserver<T> {
private static final long serialVersionUID = -7346385463600070225L;
final AtomicReference<Disposable> otherDisposable;
SingleSource<? extends T> other;
ConcatWithSubscriber(Subscriber<? super T> actual, SingleSource<? extends T> other) {
super(actual);
this.other = other;
this.otherDisposable = new AtomicReference<>();
}
@Override
public void onSubscribe(Disposable d) {
DisposableHelper.setOnce(otherDisposable, d);
}
@Override
public void onNext(T t) {
produced++;
downstream.onNext(t);
}
@Override
public void onError(Throwable t) {
downstream.onError(t);
}
@Override
public void onSuccess(T t) {
complete(t);
}
@Override
public void onComplete() {
upstream = SubscriptionHelper.CANCELLED;
SingleSource<? extends T> ss = other;
other = null;
ss.subscribe(this);
}
@Override
public void cancel() {
super.cancel();
DisposableHelper.dispose(otherDisposable);
}
}
}
|
ConcatWithSubscriber
|
java
|
netty__netty
|
common/src/test/java/io/netty/util/RunInFastThreadLocalThreadExtension.java
|
{
"start": 1403,
"end": 2267
}
|
class ____ implements InvocationInterceptor {
@Override
public void interceptTestMethod(
final Invocation<Void> invocation,
final ReflectiveInvocationContext<Method> invocationContext,
final ExtensionContext extensionContext) throws Throwable {
final AtomicReference<Throwable> throwable = new AtomicReference<Throwable>();
Thread thread = new FastThreadLocalThread(new Runnable() {
@Override
public void run() {
try {
invocation.proceed();
} catch (Throwable t) {
throwable.set(t);
}
}
});
thread.start();
thread.join();
Throwable t = throwable.get();
if (t != null) {
throw t;
}
}
}
|
RunInFastThreadLocalThreadExtension
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/presentation/StandardRepresentation_toStringOf_Test.java
|
{
"start": 6307,
"end": 22218
}
|
class ____[]");
}
@Test
void should_return_toString_of_Collection_of_String() {
// GIVEN
Collection<String> collection = list("s1", "s2");
// WHEN
String stringCollectionStandardRepresentation = STANDARD_REPRESENTATION.toStringOf(collection);
// THEN
then(stringCollectionStandardRepresentation).isEqualTo("[\"s1\", \"s2\"]");
}
@Test
void should_return_toString_of_Collection_of_arrays() {
// GIVEN
List<Boolean[]> collection = list(array(true, false), array(true, false, true));
// WHEN
String arrayCollectionStandardRepresentation = STANDARD_REPRESENTATION.toStringOf(collection);
// THEN
then(arrayCollectionStandardRepresentation).isEqualTo("[[true, false], [true, false, true]]");
}
@Test
void should_return_toString_of_Collection_of_arrays_up_to_the_maximum_allowed_elements() {
// GIVEN
List<Boolean[]> collection = list(array(true),
array(true, false, true, false, true),
array(true, true),
array(true),
array(true));
StandardRepresentation.setMaxElementsForPrinting(4);
// WHEN
String collectionStandardRepresentation = STANDARD_REPRESENTATION.toStringOf(collection);
// THEN
then(collectionStandardRepresentation).isEqualTo("[[true], [true, false, ... false, true], ... [true], [true]]");
}
@Test
void should_return_toString_of_Collection_of_Collections() {
// GIVEN
Collection<List<String>> collection = list(list("s1", "s2"), list("s3", "s4", "s5"));
// WHEN
String collectionOfCollectionStandardRepresentation = STANDARD_REPRESENTATION.toStringOf(collection);
// THEN
then(collectionOfCollectionStandardRepresentation).isEqualTo("[[\"s1\", \"s2\"], [\"s3\", \"s4\", \"s5\"]]");
}
@Test
void should_return_toString_of_Collection_of_Collections_up_to_the_maximum_allowed_elements() {
// GIVEN
Collection<List<String>> collection = list(list("s1"),
list("s2", "s3", "s4", "s5", "s6"),
list("s7", "s8"),
list("s9"),
list("s10"));
StandardRepresentation.setMaxElementsForPrinting(4);
// WHEN
String collectionOfCollectionStandardRepresentation = STANDARD_REPRESENTATION.toStringOf(collection);
// THEN
then(collectionOfCollectionStandardRepresentation).isEqualTo("[[\"s1\"], [\"s2\", \"s3\", ... \"s5\", \"s6\"], ... [\"s9\"], [\"s10\"]]");
}
@Test
void should_return_toString_of_Map() {
// GIVEN
Map<String, String> map = new LinkedHashMap<>();
map.put("key1", "value1");
map.put("key2", "value2");
// WHEN
String mapStandardRepresentation = STANDARD_REPRESENTATION.toStringOf(map);
// THEN
then(mapStandardRepresentation).isEqualTo("{\"key1\"=\"value1\", \"key2\"=\"value2\"}");
}
@Test
void should_return_toString_of_array() {
// GIVEN
String[] array = array("s1", "s2");
// WHEN
String arrayStandardRepresentation = STANDARD_REPRESENTATION.toStringOf(array);
// THEN
then(arrayStandardRepresentation).isEqualTo("[\"s1\", \"s2\"]");
}
@Test
void should_return_toString_of_array_of_arrays() {
// GIVEN
String[][] array = array(array("s1", "s2"), array("s3", "s4", "s5"));
// WHEN
String arrayStandardRepresentation = STANDARD_REPRESENTATION.toStringOf(array);
// THEN
then(arrayStandardRepresentation).isEqualTo("[[\"s1\", \"s2\"], [\"s3\", \"s4\", \"s5\"]]");
}
@Test
void should_return_toString_of_array_of_arrays_up_to_the_maximum_allowed_elements() {
// GIVEN
String[][] array = array(array("s1", "s2"),
array("s3", "s4", "s5", "s6", "s7"),
array("s8"),
array("s9"),
array("s10"));
StandardRepresentation.setMaxElementsForPrinting(4);
// WHEN
String arrayStandardRepresentation = STANDARD_REPRESENTATION.toStringOf(array);
// THEN
then(arrayStandardRepresentation).isEqualTo("[[\"s1\", \"s2\"], [\"s3\", \"s4\", ... \"s6\", \"s7\"], ... [\"s9\"], [\"s10\"]]");
}
@Test
void should_return_toString_of_array_of_Class() {
// GIVEN
Class<?>[] array = { String.class, File.class };
// WHEN
String arrayStandardRepresentation = STANDARD_REPRESENTATION.toStringOf(array);
// THEN
then(arrayStandardRepresentation).isEqualTo("[java.lang.String, java.io.File]");
}
@Test
void should_return_unambiguous_toString_of_calendar() {
// GIVEN
GregorianCalendar calendar = new GregorianCalendar(2011, Calendar.JANUARY, 18, 23, 53, 17);
// WHEN
String stringOfCalendar = STANDARD_REPRESENTATION.toStringOf(calendar);
// THEN
then(stringOfCalendar).isEqualTo("2011-01-18T23:53:17 (java.util.GregorianCalendar)");
}
@Test
void should_return_unambiguous_toString_of_date() {
// GIVEN
Date date = new GregorianCalendar(2011, Calendar.JUNE, 18, 23, 53, 17).getTime();
// WHEN
String dateRepresentation = STANDARD_REPRESENTATION.toStringOf(date);
// THEN
then(dateRepresentation).isEqualTo("2011-06-18T23:53:17.000 (java.util.Date)");
}
@Test
void should_return_unambiguous_toString_of_LocalDate() {
// GIVEN use Object to call toStringOf(Object) and not toStringOf(LocalDateTime)
Object localDate = LocalDate.of(2011, 6, 18);
// WHEN
String localDateRepresentation = STANDARD_REPRESENTATION.toStringOf(localDate);
// THEN
then(localDateRepresentation).isEqualTo("2011-06-18 (java.time.LocalDate)");
}
@Test
void should_return_unambiguous_toString_of_YearMonth() {
// GIVEN use Object to call toStringOf(Object) and not toStringOf(YearMonth)
Object yearMonth = YearMonth.of(2011, 6);
// WHEN
String localDateRepresentation = STANDARD_REPRESENTATION.toStringOf(yearMonth);
// THEN
then(localDateRepresentation).isEqualTo("2011-06 (java.time.YearMonth)");
}
@Test
void should_return_unambiguous_toString_of_LocalDateTime() {
// GIVEN use Object to call toStringOf(Object) and not toStringOf(LocalDateTime)
Object localDateTime = LocalDateTime.of(2011, 6, 18, 23, 53, 17);
// WHEN
String localDateTimeRepresentation = STANDARD_REPRESENTATION.toStringOf(localDateTime);
// THEN
then(localDateTimeRepresentation).isEqualTo("2011-06-18T23:53:17 (java.time.LocalDateTime)");
}
@Test
void should_return_unambiguous_toString_of_OffsetDateTime() {
// GIVEN use Object to call toStringOf(Object) and not toStringOf(LocalDateTime)
LocalDateTime localDateTime = LocalDateTime.of(2011, 6, 18, 23, 53, 17);
Object offsetDateTime = OffsetDateTime.of(localDateTime, ZoneOffset.UTC);
// WHEN
String offsetDateTimeRepresentation = STANDARD_REPRESENTATION.toStringOf(offsetDateTime);
// THEN
then(offsetDateTimeRepresentation).isEqualTo("2011-06-18T23:53:17Z (java.time.OffsetDateTime)");
}
@Test
void should_return_unambiguous_toString_of_ZonedDateTime() {
// GIVEN use Object to call toStringOf(Object) and not toStringOf(LocalDateTime)
LocalDateTime localDateTime = LocalDateTime.of(2011, 6, 18, 23, 53, 17);
Object offsetDateTime = ZonedDateTime.of(localDateTime, ZoneOffset.UTC);
// WHEN
String offsetDateTimeRepresentation = STANDARD_REPRESENTATION.toStringOf(offsetDateTime);
// THEN
then(offsetDateTimeRepresentation).isEqualTo("2011-06-18T23:53:17Z (java.time.ZonedDateTime)");
}
@Test
void should_return_toString_of_AtomicReference() {
// GIVEN
AtomicReference<String> atomicReference = new AtomicReference<>("actual");
// WHEN
String atomicReferenceRepresentation = STANDARD_REPRESENTATION.toStringOf(atomicReference);
// THEN
then(atomicReferenceRepresentation).isEqualTo("AtomicReference[\"actual\"]");
}
@Test
void should_return_toString_of_AtomicMarkableReference() {
// GIVEN
AtomicMarkableReference<String> atomicMarkableReference = new AtomicMarkableReference<>("actual", true);
// WHEN
String atomicMarkableReferenceRepresentation = STANDARD_REPRESENTATION.toStringOf(atomicMarkableReference);
// THEN
then(atomicMarkableReferenceRepresentation).isEqualTo("AtomicMarkableReference[marked=true, reference=\"actual\"]");
}
@Test
void should_return_toString_of_AtomicStampedReference() {
// GIVEN
AtomicStampedReference<String> atomicStampedReference = new AtomicStampedReference<>("actual", 123);
// WHEN
String atomicStampedReferenceRepresentation = STANDARD_REPRESENTATION.toStringOf(atomicStampedReference);
// THEN
then(atomicStampedReferenceRepresentation).isEqualTo("AtomicStampedReference[stamp=123, reference=\"actual\"]");
}
@Test
void should_return_toString_of_AtomicIntegerFieldUpdater() {
// GIVEN
AtomicIntegerFieldUpdater<Person> updater = AtomicIntegerFieldUpdater.newUpdater(Person.class, "age");
// WHEN
String atomicIntegerFieldUpdaterRepresentation = STANDARD_REPRESENTATION.toStringOf(updater);
// THEN
then(atomicIntegerFieldUpdaterRepresentation).isEqualTo("AtomicIntegerFieldUpdater");
}
@Test
void should_return_toString_of_AtomicLongFieldUpdater() {
// GIVEN
AtomicLongFieldUpdater<Person> updater = AtomicLongFieldUpdater.newUpdater(Person.class, "account");
// WHEN
String atomicLongFieldUpdaterRepresentation = STANDARD_REPRESENTATION.toStringOf(updater);
// THEN
then(atomicLongFieldUpdaterRepresentation).isEqualTo("AtomicLongFieldUpdater");
}
@Test
void should_return_toString_of_AtomicReferenceFieldUpdater() {
AtomicReferenceFieldUpdater<Person, String> updater = newUpdater(Person.class, String.class, "name");
// THEN
then(STANDARD_REPRESENTATION.toStringOf(updater)).isEqualTo("AtomicReferenceFieldUpdater");
}
@Test
void toString_with_anonymous_comparator() {
// GIVEN
Comparator<String> anonymousComparator = new Comparator<>() {
@Override
public int compare(String s1, String s2) {
return s1.length() - s2.length();
}
};
// WHEN
String representation = STANDARD_REPRESENTATION.toStringOf(anonymousComparator);
// THEN
then(representation).isEqualTo("'anonymous comparator class'");
}
@Test
void toString_with_anonymous_comparator_overriding_toString() {
// GIVEN
Comparator<String> anonymousComparator = new Comparator<>() {
@Override
public int compare(String s1, String s2) {
return s1.length() - s2.length();
}
@Override
public String toString() {
return "foo";
}
};
// WHEN
String representation = STANDARD_REPRESENTATION.toStringOf(anonymousComparator);
// THEN
then(representation).isEqualTo("foo");
}
@Test
void toString_with_comparator_not_overriding_toString() {
// GIVEN
StringTestComparator comparator = new StringTestComparator();
// WHEN
String comparatorStandardRepresentation = STANDARD_REPRESENTATION.toStringOf(comparator);
// THEN
then(comparatorStandardRepresentation).isEqualTo("StringTestComparator");
}
@Test
void toString_with_comparator_overriding_toString() {
// GIVEN
OtherStringTestComparator comparator = new OtherStringTestComparator();
// WHEN
String comparatorStandardRepresentation = STANDARD_REPRESENTATION.toStringOf(comparator);
// THEN
then(comparatorStandardRepresentation).isEqualTo("other String comparator");
}
@Test
void toString_with_comparator_overriding_toString_and_having_at() {
// GIVEN
OtherStringTestComparatorWithAt comparator = new OtherStringTestComparatorWithAt();
// WHEN
String comparatorStandardRepresentation = STANDARD_REPRESENTATION.toStringOf(comparator);
// THEN
then(comparatorStandardRepresentation).isEqualTo("other String comparator with @");
}
@Test
void should_format_byte() {
// GIVEN
byte b = 20;
// WHEN
String byteStandardRepresentation = STANDARD_REPRESENTATION.toStringOf(b);
// THEN
then(byteStandardRepresentation).isEqualTo("20");
}
@Test
void should_format_char() {
// GIVEN
char a = 'a';
// WHEN
String charStandardRepresentation = STANDARD_REPRESENTATION.toStringOf(a);
// THEN
then(charStandardRepresentation).isEqualTo("'a'");
}
@Test
void should_format_short() {
// GIVEN
short s = 20;
// WHEN
String shortStandardRepresentation = STANDARD_REPRESENTATION.toStringOf(s);
// THEN
then(shortStandardRepresentation).isEqualTo("20");
}
@Test
void should_format_int() {
// GIVEN
int i = 20;
// WHEN
String intStandardRepresentation = STANDARD_REPRESENTATION.toStringOf(i);
// THEN
then(intStandardRepresentation).isEqualTo("20");
}
@Test
void should_format_long() {
// GIVEN
long l = 20;
// WHEN
String longStandardRepresentation = STANDARD_REPRESENTATION.toStringOf(l);
// THEN
then(longStandardRepresentation).isEqualTo("20L");
}
@Test
void should_format_float() {
// GIVEN
float d = 20.0f;
// WHEN
String floatStandardRepresentation = STANDARD_REPRESENTATION.toStringOf(d);
// THEN
then(floatStandardRepresentation).isEqualTo("20.0f");
}
@Test
void should_format_double() {
// GIVEN
double d = 20.0;
// WHEN
String doubleStandardRepresentation = STANDARD_REPRESENTATION.toStringOf(d);
// THEN
then(doubleStandardRepresentation).isEqualTo("20.0");
}
@Test
void should_format_tuple() {
// GIVEN
Tuple tuple = tuple(1, 2, 3);
// WHEN
String tupleStandardRepresentation = STANDARD_REPRESENTATION.toStringOf(tuple);
// THEN
then(tupleStandardRepresentation).isEqualTo("(1, 2, 3)");
}
@Test
void should_format_tuples_up_to_the_maximum_allowed_elements() {
// GIVEN
StandardRepresentation.setMaxElementsForPrinting(4);
// WHEN
String tupleRepresentation = STANDARD_REPRESENTATION.toStringOf(tuple(1, 2, 3, 4, 5));
// THEN
then(tupleRepresentation).isEqualTo("(1, 2, ... 4, 5)");
}
@Test
void should_format_simple_date_format() {
// GIVEN
SimpleDateFormat sdf = new SimpleDateFormat("ddMMyyyy");
// WHEN
String sdfRepresentation = STANDARD_REPRESENTATION.toStringOf(sdf);
// THEN
then(sdfRepresentation).isEqualTo("ddMMyyyy");
}
@Test
void should_format_assertj_map_entry() {
// GIVEN
MapEntry<String, Integer> entry = entry("A", 1);
// WHEN
String entryRepresentation = STANDARD_REPRESENTATION.toStringOf(entry);
// THEN
then(entryRepresentation).isEqualTo("\"A\"=1");
}
@Test
void should_format_java_map_entry() {
// GIVEN
Entry<String, Integer> entry = newHashMap("key", 123).entrySet().iterator().next();
// WHEN
String javaEntryRepresentation = STANDARD_REPRESENTATION.toStringOf(entry);
// THEN
then(javaEntryRepresentation).isEqualTo("\"key\"=123");
}
@Test
void should_return_toStringOf_method() {
// GIVEN
Method method = Arrays.stream(GenericClass.class.getMethods()).filter(m -> m.getName().equals("someGenericMethod"))
.findAny()
.get();
// WHEN
String methodRepresentation = STANDARD_REPRESENTATION.toStringOf(method);
// THEN
then(methodRepresentation).isEqualTo(method.toGenericString());
}
@Test
void should_fix_1483() {
// GIVEN
VolatileSizeArrayList<String> list = new VolatileSizeArrayList<>();
list.add("abc");
list.add("def");
// WHEN
String toString = STANDARD_REPRESENTATION.toStringOf(list);
// THEN
then(toString).isEqualTo("[\"abc\", \"def\"]");
}
static
|
LocalClass
|
java
|
bumptech__glide
|
annotation/compiler/src/main/java/com/bumptech/glide/annotation/compiler/RequestOptionsExtensionGenerator.java
|
{
"start": 1256,
"end": 1663
}
|
class ____.
*/
List<ExecutableElement> getRequestOptionExtensionMethods(Set<String> glideExtensionClassNames) {
return processorUtil.findAnnotatedElementsInClasses(
glideExtensionClassNames, GlideOption.class);
}
/**
* Returns a list containing an override {@link MethodSpec} for all {@link GlideOption} annotated
* methods in the classes that correspond to the given extension
|
names
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/internal/LettuceClassUtils.java
|
{
"start": 113,
"end": 233
}
|
class ____ methods. Mainly for internal use within the framework.
*
* @author Mark Paluch
* @since 4.2
*/
public
|
utility
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/streams/AnalyticsStreamFactory.java
|
{
"start": 1800,
"end": 1929
}
|
class ____ instantiated during initialization of
* {@code S3AStore}, if fs.s3a.input.stream.type is set to Analytics.
*/
public
|
is
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/example/test/MyProjectClass.java
|
{
"start": 651,
"end": 828
}
|
class ____ {
private final Object value;
public MyProjectClass(Object value) {
this.value = value;
}
public Object getValue() {
return value;
}
}
|
MyProjectClass
|
java
|
quarkusio__quarkus
|
extensions/reactive-routes/deployment/src/test/java/io/quarkus/vertx/web/base/RouteBaseTest.java
|
{
"start": 428,
"end": 1394
}
|
class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar.addClasses(SimpleBean.class));
@Test
public void testPath() {
when().get("/hello").then().statusCode(200).body(is("Hello world!"));
when().get("/simple/hello").then().statusCode(200).body(is("Hello another world!"));
when().get("/simple").then().statusCode(200).body(is("Hello root!"));
when().get("/simple/").then().statusCode(200).body(is("Hello root!"));
when().get("/some/foo").then().statusCode(200).body(is("Hello foo!"));
}
@Test
public void testProduces() {
given().header("Accept", "application/json").when().get("/ping").then().statusCode(200).body(is("{\"ping\":\"pong\"}"));
given().header("Accept", "text/html").when().get("/ping").then().statusCode(200).body(is("<html>pong</html>"));
}
@RouteBase
static
|
RouteBaseTest
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/NatsEndpointBuilderFactory.java
|
{
"start": 67781,
"end": 86089
}
|
interface ____
extends
NatsEndpointConsumerBuilder,
NatsEndpointProducerBuilder {
default AdvancedNatsEndpointBuilder advanced() {
return (AdvancedNatsEndpointBuilder) this;
}
/**
* Timeout for connection attempts. (in milliseconds).
*
* The option is a: <code>int</code> type.
*
* Default: 2000
* Group: common
*
* @param connectionTimeout the value to set
* @return the dsl builder
*/
default NatsEndpointBuilder connectionTimeout(int connectionTimeout) {
doSetProperty("connectionTimeout", connectionTimeout);
return this;
}
/**
* Timeout for connection attempts. (in milliseconds).
*
* The option will be converted to a <code>int</code> type.
*
* Default: 2000
* Group: common
*
* @param connectionTimeout the value to set
* @return the dsl builder
*/
default NatsEndpointBuilder connectionTimeout(String connectionTimeout) {
doSetProperty("connectionTimeout", connectionTimeout);
return this;
}
/**
* Define if we want to flush connection when stopping or not.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: common
*
* @param flushConnection the value to set
* @return the dsl builder
*/
default NatsEndpointBuilder flushConnection(boolean flushConnection) {
doSetProperty("flushConnection", flushConnection);
return this;
}
/**
* Define if we want to flush connection when stopping or not.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: true
* Group: common
*
* @param flushConnection the value to set
* @return the dsl builder
*/
default NatsEndpointBuilder flushConnection(String flushConnection) {
doSetProperty("flushConnection", flushConnection);
return this;
}
/**
* Set the flush timeout (in milliseconds).
*
* The option is a: <code>int</code> type.
*
* Default: 1000
* Group: common
*
* @param flushTimeout the value to set
* @return the dsl builder
*/
default NatsEndpointBuilder flushTimeout(int flushTimeout) {
doSetProperty("flushTimeout", flushTimeout);
return this;
}
/**
* Set the flush timeout (in milliseconds).
*
* The option will be converted to a <code>int</code> type.
*
* Default: 1000
* Group: common
*
* @param flushTimeout the value to set
* @return the dsl builder
*/
default NatsEndpointBuilder flushTimeout(String flushTimeout) {
doSetProperty("flushTimeout", flushTimeout);
return this;
}
/**
* Sets whether to enable JetStream support for this endpoint.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param jetstreamEnabled the value to set
* @return the dsl builder
*/
default NatsEndpointBuilder jetstreamEnabled(boolean jetstreamEnabled) {
doSetProperty("jetstreamEnabled", jetstreamEnabled);
return this;
}
/**
* Sets whether to enable JetStream support for this endpoint.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param jetstreamEnabled the value to set
* @return the dsl builder
*/
default NatsEndpointBuilder jetstreamEnabled(String jetstreamEnabled) {
doSetProperty("jetstreamEnabled", jetstreamEnabled);
return this;
}
/**
* Sets the name of the JetStream stream to use.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param jetstreamName the value to set
* @return the dsl builder
*/
default NatsEndpointBuilder jetstreamName(String jetstreamName) {
doSetProperty("jetstreamName", jetstreamName);
return this;
}
/**
* maximum number of pings have not received a response allowed by the
* client.
*
* The option is a: <code>int</code> type.
*
* Default: 2
* Group: common
*
* @param maxPingsOut the value to set
* @return the dsl builder
*/
default NatsEndpointBuilder maxPingsOut(int maxPingsOut) {
doSetProperty("maxPingsOut", maxPingsOut);
return this;
}
/**
* maximum number of pings have not received a response allowed by the
* client.
*
* The option will be converted to a <code>int</code> type.
*
* Default: 2
* Group: common
*
* @param maxPingsOut the value to set
* @return the dsl builder
*/
default NatsEndpointBuilder maxPingsOut(String maxPingsOut) {
doSetProperty("maxPingsOut", maxPingsOut);
return this;
}
/**
* Max reconnection attempts.
*
* The option is a: <code>int</code> type.
*
* Default: 60
* Group: common
*
* @param maxReconnectAttempts the value to set
* @return the dsl builder
*/
default NatsEndpointBuilder maxReconnectAttempts(int maxReconnectAttempts) {
doSetProperty("maxReconnectAttempts", maxReconnectAttempts);
return this;
}
/**
* Max reconnection attempts.
*
* The option will be converted to a <code>int</code> type.
*
* Default: 60
* Group: common
*
* @param maxReconnectAttempts the value to set
* @return the dsl builder
*/
default NatsEndpointBuilder maxReconnectAttempts(String maxReconnectAttempts) {
doSetProperty("maxReconnectAttempts", maxReconnectAttempts);
return this;
}
/**
* Turn off echo. If supported by the gnatsd version you are connecting
* to this flag will prevent the server from echoing messages back to
* the connection if it has subscriptions on the subject being published
* to.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param noEcho the value to set
* @return the dsl builder
*/
default NatsEndpointBuilder noEcho(boolean noEcho) {
doSetProperty("noEcho", noEcho);
return this;
}
/**
* Turn off echo. If supported by the gnatsd version you are connecting
* to this flag will prevent the server from echoing messages back to
* the connection if it has subscriptions on the subject being published
* to.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param noEcho the value to set
* @return the dsl builder
*/
default NatsEndpointBuilder noEcho(String noEcho) {
doSetProperty("noEcho", noEcho);
return this;
}
/**
* Whether or not randomizing the order of servers for the connection
* attempts.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param noRandomizeServers the value to set
* @return the dsl builder
*/
default NatsEndpointBuilder noRandomizeServers(boolean noRandomizeServers) {
doSetProperty("noRandomizeServers", noRandomizeServers);
return this;
}
/**
* Whether or not randomizing the order of servers for the connection
* attempts.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param noRandomizeServers the value to set
* @return the dsl builder
*/
default NatsEndpointBuilder noRandomizeServers(String noRandomizeServers) {
doSetProperty("noRandomizeServers", noRandomizeServers);
return this;
}
/**
* Whether or not running in pedantic mode (this affects performance).
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param pedantic the value to set
* @return the dsl builder
*/
default NatsEndpointBuilder pedantic(boolean pedantic) {
doSetProperty("pedantic", pedantic);
return this;
}
/**
* Whether or not running in pedantic mode (this affects performance).
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param pedantic the value to set
* @return the dsl builder
*/
default NatsEndpointBuilder pedantic(String pedantic) {
doSetProperty("pedantic", pedantic);
return this;
}
/**
* Ping interval to be aware if connection is still alive (in
* milliseconds).
*
* The option is a: <code>int</code> type.
*
* Default: 120000
* Group: common
*
* @param pingInterval the value to set
* @return the dsl builder
*/
default NatsEndpointBuilder pingInterval(int pingInterval) {
doSetProperty("pingInterval", pingInterval);
return this;
}
/**
* Ping interval to be aware if connection is still alive (in
* milliseconds).
*
* The option will be converted to a <code>int</code> type.
*
* Default: 120000
* Group: common
*
* @param pingInterval the value to set
* @return the dsl builder
*/
default NatsEndpointBuilder pingInterval(String pingInterval) {
doSetProperty("pingInterval", pingInterval);
return this;
}
/**
* Whether or not using reconnection feature.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: common
*
* @param reconnect the value to set
* @return the dsl builder
*/
default NatsEndpointBuilder reconnect(boolean reconnect) {
doSetProperty("reconnect", reconnect);
return this;
}
/**
* Whether or not using reconnection feature.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: true
* Group: common
*
* @param reconnect the value to set
* @return the dsl builder
*/
default NatsEndpointBuilder reconnect(String reconnect) {
doSetProperty("reconnect", reconnect);
return this;
}
/**
* Waiting time before attempts reconnection (in milliseconds).
*
* The option is a: <code>int</code> type.
*
* Default: 2000
* Group: common
*
* @param reconnectTimeWait the value to set
* @return the dsl builder
*/
default NatsEndpointBuilder reconnectTimeWait(int reconnectTimeWait) {
doSetProperty("reconnectTimeWait", reconnectTimeWait);
return this;
}
/**
* Waiting time before attempts reconnection (in milliseconds).
*
* The option will be converted to a <code>int</code> type.
*
* Default: 2000
* Group: common
*
* @param reconnectTimeWait the value to set
* @return the dsl builder
*/
default NatsEndpointBuilder reconnectTimeWait(String reconnectTimeWait) {
doSetProperty("reconnectTimeWait", reconnectTimeWait);
return this;
}
/**
* Interval to clean up cancelled/timed out requests.
*
* The option is a: <code>int</code> type.
*
* Default: 5000
* Group: common
*
* @param requestCleanupInterval the value to set
* @return the dsl builder
*/
default NatsEndpointBuilder requestCleanupInterval(int requestCleanupInterval) {
doSetProperty("requestCleanupInterval", requestCleanupInterval);
return this;
}
/**
* Interval to clean up cancelled/timed out requests.
*
* The option will be converted to a <code>int</code> type.
*
* Default: 5000
* Group: common
*
* @param requestCleanupInterval the value to set
* @return the dsl builder
*/
default NatsEndpointBuilder requestCleanupInterval(String requestCleanupInterval) {
doSetProperty("requestCleanupInterval", requestCleanupInterval);
return this;
}
/**
* URLs to one or more NAT servers. Use comma to separate URLs when
* specifying multiple servers.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param servers the value to set
* @return the dsl builder
*/
default NatsEndpointBuilder servers(String servers) {
doSetProperty("servers", servers);
return this;
}
/**
* Whether or not running in verbose mode.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param verbose the value to set
* @return the dsl builder
*/
default NatsEndpointBuilder verbose(boolean verbose) {
doSetProperty("verbose", verbose);
return this;
}
/**
* Whether or not running in verbose mode.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param verbose the value to set
* @return the dsl builder
*/
default NatsEndpointBuilder verbose(String verbose) {
doSetProperty("verbose", verbose);
return this;
}
/**
* If we use useCredentialsFile to true we'll need to set the
* credentialsFilePath option. It can be loaded by default from
* classpath, but you can prefix with classpath:, file:, or http: to
* load the resource from different systems.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param credentialsFilePath the value to set
* @return the dsl builder
*/
default NatsEndpointBuilder credentialsFilePath(String credentialsFilePath) {
doSetProperty("credentialsFilePath", credentialsFilePath);
return this;
}
/**
* Set secure option indicating TLS is required.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: security
*
* @param secure the value to set
* @return the dsl builder
*/
default NatsEndpointBuilder secure(boolean secure) {
doSetProperty("secure", secure);
return this;
}
/**
* Set secure option indicating TLS is required.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: security
*
* @param secure the value to set
* @return the dsl builder
*/
default NatsEndpointBuilder secure(String secure) {
doSetProperty("secure", secure);
return this;
}
/**
* To configure security using SSLContextParameters.
*
* The option is a:
* <code>org.apache.camel.support.jsse.SSLContextParameters</code> type.
*
* Group: security
*
* @param sslContextParameters the value to set
* @return the dsl builder
*/
default NatsEndpointBuilder sslContextParameters(org.apache.camel.support.jsse.SSLContextParameters sslContextParameters) {
doSetProperty("sslContextParameters", sslContextParameters);
return this;
}
/**
* To configure security using SSLContextParameters.
*
* The option will be converted to a
* <code>org.apache.camel.support.jsse.SSLContextParameters</code> type.
*
* Group: security
*
* @param sslContextParameters the value to set
* @return the dsl builder
*/
default NatsEndpointBuilder sslContextParameters(String sslContextParameters) {
doSetProperty("sslContextParameters", sslContextParameters);
return this;
}
}
/**
* Advanced builder for endpoint for the Nats component.
*/
public
|
NatsEndpointBuilder
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AAWSCredentialsProvider.java
|
{
"start": 16023,
"end": 16458
}
|
class ____ extends AbstractProvider {
public static final String NO_AUTH = "No auth";
public static AwsCredentialsProvider create() {
throw new NoAuthWithAWSException(NO_AUTH);
}
}
@Test
public void testFactoryWrongType() throws Throwable {
expectProviderInstantiationFailure(
FactoryOfWrongType.class,
InstantiationIOException.CONSTRUCTOR_EXCEPTION);
}
static
|
AWSExceptionRaisingFactory
|
java
|
apache__flink
|
flink-formats/flink-avro/src/main/java/org/apache/flink/formats/avro/utils/DataOutputEncoder.java
|
{
"start": 1088,
"end": 5271
}
|
class ____ extends Encoder {
private DataOutput out;
public void setOut(DataOutput out) {
this.out = out;
}
@Override
public void flush() throws IOException {}
// --------------------------------------------------------------------------------------------
// primitives
// --------------------------------------------------------------------------------------------
@Override
public void writeNull() {}
@Override
public void writeBoolean(boolean b) throws IOException {
out.writeBoolean(b);
}
@Override
public void writeInt(int n) throws IOException {
out.writeInt(n);
}
@Override
public void writeLong(long n) throws IOException {
out.writeLong(n);
}
@Override
public void writeFloat(float f) throws IOException {
out.writeFloat(f);
}
@Override
public void writeDouble(double d) throws IOException {
out.writeDouble(d);
}
@Override
public void writeEnum(int e) throws IOException {
out.writeInt(e);
}
// --------------------------------------------------------------------------------------------
// bytes
// --------------------------------------------------------------------------------------------
@Override
public void writeFixed(byte[] bytes, int start, int len) throws IOException {
out.write(bytes, start, len);
}
@Override
public void writeBytes(byte[] bytes, int start, int len) throws IOException {
out.writeInt(len);
if (len > 0) {
out.write(bytes, start, len);
}
}
@Override
public void writeBytes(ByteBuffer bytes) throws IOException {
int num = bytes.remaining();
out.writeInt(num);
if (num > 0) {
writeFixed(bytes);
}
}
// --------------------------------------------------------------------------------------------
// strings
// --------------------------------------------------------------------------------------------
@Override
public void writeString(String str) throws IOException {
byte[] bytes = Utf8.getBytesFor(str);
writeBytes(bytes, 0, bytes.length);
}
@Override
public void writeString(Utf8 utf8) throws IOException {
writeBytes(utf8.getBytes(), 0, utf8.getByteLength());
}
// --------------------------------------------------------------------------------------------
// collection types
// --------------------------------------------------------------------------------------------
@Override
public void writeArrayStart() {}
@Override
public void setItemCount(long itemCount) throws IOException {
if (itemCount > 0) {
writeVarLongCount(out, itemCount);
}
}
@Override
public void startItem() {}
@Override
public void writeArrayEnd() throws IOException {
// write a single byte 0, shortcut for a var-length long of 0
out.write(0);
}
@Override
public void writeMapStart() {}
@Override
public void writeMapEnd() throws IOException {
// write a single byte 0, shortcut for a var-length long of 0
out.write(0);
}
// --------------------------------------------------------------------------------------------
// union
// --------------------------------------------------------------------------------------------
@Override
public void writeIndex(int unionIndex) throws IOException {
out.writeInt(unionIndex);
}
// --------------------------------------------------------------------------------------------
// utils
// --------------------------------------------------------------------------------------------
public static void writeVarLongCount(DataOutput out, long val) throws IOException {
if (val < 0) {
throw new IOException("Illegal count (must be non-negative): " + val);
}
while ((val & ~0x7FL) != 0) {
out.write(((int) val) | 0x80);
val >>>= 7;
}
out.write((int) val);
}
}
|
DataOutputEncoder
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/DocsV3Support.java
|
{
"start": 13904,
"end": 14097
}
|
interface ____ {
void write(Path dir, String name, String extension, String str, boolean kibana) throws IOException;
boolean supportsRendering();
}
public static
|
Callbacks
|
java
|
apache__rocketmq
|
client/src/main/java/org/apache/rocketmq/acl/common/AclClientRPCHook.java
|
{
"start": 1035,
"end": 2532
}
|
class ____ implements RPCHook {
private final SessionCredentials sessionCredentials;
public AclClientRPCHook(SessionCredentials sessionCredentials) {
this.sessionCredentials = sessionCredentials;
}
@Override
public void doBeforeRequest(String remoteAddr, RemotingCommand request) {
// Add AccessKey and SecurityToken into signature calculating.
request.addExtField(SessionCredentials.ACCESS_KEY, sessionCredentials.getAccessKey());
// The SecurityToken value is unnecessary,user can choose this one.
if (sessionCredentials.getSecurityToken() != null) {
request.addExtField(SessionCredentials.SECURITY_TOKEN, sessionCredentials.getSecurityToken());
}
byte[] total = AclUtils.combineRequestContent(request, parseRequestContent(request));
String signature = AclUtils.calSignature(total, sessionCredentials.getSecretKey());
request.addExtField(SessionCredentials.SIGNATURE, signature);
}
@Override
public void doAfterResponse(String remoteAddr, RemotingCommand request, RemotingCommand response) {
}
protected SortedMap<String, String> parseRequestContent(RemotingCommand request) {
request.makeCustomHeaderToNet();
Map<String, String> extFields = request.getExtFields();
// Sort property
return new TreeMap<>(extFields);
}
public SessionCredentials getSessionCredentials() {
return sessionCredentials;
}
}
|
AclClientRPCHook
|
java
|
apache__kafka
|
streams/src/main/java/org/apache/kafka/streams/state/internals/KeyValueStoreWrapper.java
|
{
"start": 1842,
"end": 6107
}
|
class ____<K, V> implements StateStore {
public static final long PUT_RETURN_CODE_IS_LATEST
= VersionedKeyValueStore.PUT_RETURN_CODE_VALID_TO_UNDEFINED;
private TimestampedKeyValueStore<K, V> timestampedStore = null;
private VersionedKeyValueStore<K, V> versionedStore = null;
// same as either timestampedStore or versionedStore above. kept merely as a convenience
// to simplify implementation for methods which do not depend on store type.
private StateStore store;
public KeyValueStoreWrapper(final ProcessorContext<?, ?> context, final String storeName) {
try {
// first try timestamped store
timestampedStore = context.getStateStore(storeName);
store = timestampedStore;
return;
} catch (final ClassCastException e) {
// ignore since could be versioned store instead
}
try {
// next try versioned store
versionedStore = context.getStateStore(storeName);
store = versionedStore;
} catch (final ClassCastException e) {
store = context.getStateStore(storeName);
final String storeType = store == null ? "null" : store.getClass().getName();
throw new InvalidStateStoreException("KTable source state store must implement either "
+ "TimestampedKeyValueStore or VersionedKeyValueStore. Got: " + storeType);
}
}
public ValueAndTimestamp<V> get(final K key) {
if (timestampedStore != null) {
return timestampedStore.get(key);
}
if (versionedStore != null) {
final VersionedRecord<V> versionedRecord = versionedStore.get(key);
return versionedRecord == null
? null
: ValueAndTimestamp.make(versionedRecord.value(), versionedRecord.timestamp());
}
throw new IllegalStateException("KeyValueStoreWrapper must be initialized with either timestamped or versioned store");
}
public ValueAndTimestamp<V> get(final K key, final long asOfTimestamp) {
if (!isVersionedStore()) {
throw new UnsupportedOperationException("get(key, timestamp) is only supported for versioned stores");
}
final VersionedRecord<V> versionedRecord = versionedStore.get(key, asOfTimestamp);
return versionedRecord == null ? null : ValueAndTimestamp.make(versionedRecord.value(), versionedRecord.timestamp());
}
/**
* @return {@code -1} if the put record is the latest for its key, and {@code Long.MIN_VALUE}
* if the put was rejected (i.e., due to grace period having elapsed for a versioned
* store). If neither, any other long value may be returned.
*/
public long put(final K key, final V value, final long timestamp) {
if (timestampedStore != null) {
timestampedStore.put(key, ValueAndTimestamp.make(value, timestamp));
return PUT_RETURN_CODE_IS_LATEST;
}
if (versionedStore != null) {
return versionedStore.put(key, value, timestamp);
}
throw new IllegalStateException("KeyValueStoreWrapper must be initialized with either timestamped or versioned store");
}
public StateStore store() {
return store;
}
public boolean isVersionedStore() {
return versionedStore != null;
}
@Override
public String name() {
return store.name();
}
@Override
public void init(final StateStoreContext stateStoreContext, final StateStore root) {
store.init(stateStoreContext, root);
}
@Override
public void flush() {
store.flush();
}
@Override
public void close() {
store.close();
}
@Override
public boolean persistent() {
return store.persistent();
}
@Override
public boolean isOpen() {
return store.isOpen();
}
@Override
public <R> QueryResult<R> query(final Query<R> query, final PositionBound positionBound, final QueryConfig config) {
return store.query(query, positionBound, config);
}
@Override
public Position getPosition() {
return store.getPosition();
}
}
|
KeyValueStoreWrapper
|
java
|
spring-projects__spring-boot
|
smoke-test/spring-boot-smoke-test-devtools/src/main/java/smoketest/devtools/SampleDevToolsApplication.java
|
{
"start": 806,
"end": 953
}
|
class ____ {
public static void main(String[] args) {
SpringApplication.run(SampleDevToolsApplication.class, args);
}
}
|
SampleDevToolsApplication
|
java
|
apache__camel
|
components/camel-cxf/camel-cxf-rest/src/test/java/org/apache/camel/component/cxf/jaxrs/CxfRsStreamCacheSynchronousTest.java
|
{
"start": 857,
"end": 1010
}
|
class ____ extends CxfRsStreamCacheTest {
@Override
protected boolean isSynchronous() {
return true;
}
}
|
CxfRsStreamCacheSynchronousTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/action/GoogleVertexAiUnifiedChatCompletionActionTests.java
|
{
"start": 2800,
"end": 10204
}
|
class ____ extends ESTestCase {
private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS);
private final MockWebServer webServer = new MockWebServer();
private ThreadPool threadPool;
private HttpClientManager clientManager;
@Before
public void init() throws Exception {
webServer.start();
threadPool = createThreadPool(inferenceUtilityExecutors());
clientManager = HttpClientManager.create(Settings.EMPTY, threadPool, mockClusterServiceEmpty(), mock(ThrottlerManager.class));
}
@After
public void shutdown() throws IOException {
clientManager.close();
terminate(threadPool);
webServer.close();
}
private static UnifiedChatInput createUnifiedChatInput(List<String> messages) {
boolean stream = true;
return new UnifiedChatInput(messages, "user", stream);
}
// Successful case would typically be tested via end-to-end notebook tests in AppEx repo
public void testExecute_ThrowsElasticsearchExceptionGoogleVertexAi() {
testExecute_ThrowsElasticsearchException(GoogleModelGardenProvider.GOOGLE);
}
public void testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalledGoogleVertexAi() {
testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalled(GoogleModelGardenProvider.GOOGLE);
}
public void testExecute_ThrowsExceptionGoogleVertexAi() {
testExecute_ThrowsIllegalArgumentException(GoogleModelGardenProvider.GOOGLE);
}
public void testExecute_ThrowsElasticsearchExceptionAnthropic() {
testExecute_ThrowsElasticsearchException(GoogleModelGardenProvider.ANTHROPIC);
}
public void testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalledAnthropic() {
testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalled(GoogleModelGardenProvider.ANTHROPIC);
}
public void testExecute_ThrowsExceptionAnthropic() {
testExecute_ThrowsIllegalArgumentException(GoogleModelGardenProvider.ANTHROPIC);
}
public void testExecute_ThrowsElasticsearchExceptionMeta() {
testExecute_ThrowsElasticsearchException(GoogleModelGardenProvider.META);
}
public void testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalledMeta() {
testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalled(GoogleModelGardenProvider.META);
}
public void testExecute_ThrowsExceptionMeta() {
testExecute_ThrowsIllegalArgumentException(GoogleModelGardenProvider.META);
}
public void testExecute_ThrowsElasticsearchExceptionMistral() {
testExecute_ThrowsElasticsearchException(GoogleModelGardenProvider.MISTRAL);
}
public void testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalledMistral() {
testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalled(GoogleModelGardenProvider.MISTRAL);
}
public void testExecute_ThrowsExceptionMistral() {
testExecute_ThrowsIllegalArgumentException(GoogleModelGardenProvider.MISTRAL);
}
public void testExecute_ThrowsElasticsearchExceptionHuggingFace() {
testExecute_ThrowsElasticsearchException(GoogleModelGardenProvider.HUGGING_FACE);
}
public void testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalledHuggingFace() {
testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalled(GoogleModelGardenProvider.HUGGING_FACE);
}
public void testExecute_ThrowsExceptionHuggingFace() {
testExecute_ThrowsIllegalArgumentException(GoogleModelGardenProvider.HUGGING_FACE);
}
public void testExecute_ThrowsElasticsearchExceptionAi21() {
testExecute_ThrowsElasticsearchException(GoogleModelGardenProvider.AI21);
}
public void testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalledAi21() {
testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalled(GoogleModelGardenProvider.AI21);
}
public void testExecute_ThrowsExceptionAi21() {
testExecute_ThrowsIllegalArgumentException(GoogleModelGardenProvider.AI21);
}
private void testExecute_ThrowsIllegalArgumentException(GoogleModelGardenProvider provider) {
testExecute_ThrowsException(
provider,
new IllegalArgumentException("failed"),
"Failed to send Google Vertex AI chat completion request. Cause: failed"
);
}
private void testExecute_ThrowsElasticsearchException(GoogleModelGardenProvider provider) {
testExecute_ThrowsException(provider, new ElasticsearchException("failed"), "failed");
}
private void testExecute_ThrowsException(GoogleModelGardenProvider provider, Exception exception, String expectedExceptionMessage) {
var sender = mock(Sender.class);
doThrow(exception).when(sender).send(any(), any(), any(), any());
var action = createAction(sender, provider, provider.getChatCompletionResponseHandler());
PlainActionFuture<InferenceServiceResults> listener = new PlainActionFuture<>();
action.execute(createUnifiedChatInput(List.of("test query")), InferenceAction.Request.DEFAULT_TIMEOUT, listener);
var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT));
assertThat(thrownException.getMessage(), is(expectedExceptionMessage));
}
private void testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalled(GoogleModelGardenProvider provider) {
var sender = mock(Sender.class);
doAnswer(invocation -> {
ActionListener<InferenceServiceResults> listenerArg = invocation.getArgument(3);
listenerArg.onFailure(new IllegalStateException("failed"));
return Void.TYPE;
}).when(sender).send(any(), any(), any(), any());
var action = createAction(sender, provider, provider.getChatCompletionResponseHandler());
PlainActionFuture<InferenceServiceResults> listener = new PlainActionFuture<>();
action.execute(createUnifiedChatInput(List.of("test query")), InferenceAction.Request.DEFAULT_TIMEOUT, listener);
var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT));
assertThat(thrownException.getMessage(), is("Failed to send Google Vertex AI chat completion request. Cause: failed"));
}
private ExecutableAction createAction(Sender sender, GoogleModelGardenProvider provider, ResponseHandler handler) {
var model = GoogleVertexAiChatCompletionModelTests.createCompletionModel(
null,
null,
null,
"api-key",
new RateLimitSettings(100),
new ThinkingConfig(256),
provider,
null,
123
);
var manager = new GenericRequestManager<>(
threadPool,
model,
handler,
inputs -> new GoogleVertexAiUnifiedChatCompletionRequest(new UnifiedChatInput(inputs, USER_ROLE), model),
ChatCompletionInput.class
);
var failedToSendRequestErrorMessage = constructFailedToSendRequestMessage("Google Vertex AI chat completion");
return new SenderExecutableAction(sender, manager, failedToSendRequestErrorMessage);
}
}
|
GoogleVertexAiUnifiedChatCompletionActionTests
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/json/JsonValue.java
|
{
"start": 936,
"end": 3276
}
|
interface ____ {
/**
* Execute any {@link io.lettuce.core.codec.RedisCodec} decoding and fetch the result.
*
* @return the {@link String} representation of this {@link JsonValue}
*/
String toString();
/**
* @return the raw JSON text as a {@link ByteBuffer}
*/
ByteBuffer asByteBuffer();
/**
* @return {@code true} if this {@link JsonValue} represents a JSON array
*/
boolean isJsonArray();
/**
* @return the {@link JsonArray} representation of this {@link JsonValue}, null if this is not a JSON array
* @see #isJsonArray()
*/
JsonArray asJsonArray();
/**
* @return {@code true} if this {@link JsonValue} represents a JSON object
*/
boolean isJsonObject();
/**
* @return the {@link JsonObject} representation of this {@link JsonValue}, null if this is not a JSON object
* @see #isJsonObject()
*/
JsonObject asJsonObject();
/**
* @return {@code true} if this {@link JsonValue} represents a JSON string
*/
boolean isString();
/**
* @return the {@link String} representation of this {@link JsonValue}, null if this is not a JSON string
* @see #isString()
*/
String asString();
/**
* @return {@code true} if this {@link JsonValue} represents a JSON number
*/
boolean isNumber();
/**
* @return the {@link Number} representation of this {@link JsonValue}, null if this is not a JSON number
* @see #isNumber()
*/
Number asNumber();
/**
* @return {@code true} if this {@link JsonValue} represents a JSON boolean value
*/
boolean isBoolean();
/**
* @return the {@link Boolean} representation of this {@link JsonValue}, null if this is not a JSON boolean value
* @see #isNumber()
*/
Boolean asBoolean();
/**
* @return {@code true} if this {@link JsonValue} represents the value of null
*/
boolean isNull();
/**
* Given a {@link Class} type, this method will attempt to convert the JSON value to the provided type.
*
* @return <T> the newly created instance of the provided type with the data from the JSON value
* @throws RedisJsonException if the provided type is not a valid JSON document
*/
<T> T toObject(Class<T> type);
}
|
JsonValue
|
java
|
apache__camel
|
components/camel-kamelet/src/test/java/org/apache/camel/component/kamelet/KameletRouteDumpTest.java
|
{
"start": 1153,
"end": 2323
}
|
class ____ extends CamelTestSupport {
@AfterEach
protected void setupDumpRouters() {
context.setDumpRoutes("xml");
}
@Test
public void canProduceToKamelet() {
String body = UUID.randomUUID().toString();
assertThat(
fluentTemplate.toF("direct:templateEmbedded", body).request(String.class)).isEqualTo("test");
}
// **********************************************
//
// test set-up
//
// **********************************************
@Override
protected RoutesBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
routeTemplate("setBody")
.templateParameter("bodyValue")
.from("kamelet:source")
.setBody().constant("{{bodyValue}}")
.to("kamelet:sink");
from("direct:templateEmbedded").id("test")
.kamelet("setBody?bodyValue=test")
.to("log:TEST?showAll=true&multiline=true");
}
};
}
}
|
KameletRouteDumpTest
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/deser/builder/BuilderViaUpdateTest.java
|
{
"start": 837,
"end": 2947
}
|
class ____
{
public int x, y;
public SimpleBuilderXY withX(int x0) {
this.x = x0;
return this;
}
public SimpleBuilderXY withY(int y0) {
this.y = y0;
return this;
}
public ValueClassXY build() {
return new ValueClassXY(x, y);
}
}
/*
/*****************************************************
/* Basic tests, potential (but not current) success cases
/*****************************************************
*/
private final static ObjectMapper MAPPER = new ObjectMapper();
// Tests where result value is passed as thing to update
@Test
public void testBuilderUpdateWithValue() throws Exception
{
try {
/*ValueClassXY value =*/ MAPPER.readerFor(ValueClassXY.class)
.withValueToUpdate(new ValueClassXY(6, 7))
.readValue(a2q("{'x':1,'y:'2'}"));
fail("Should not have passed");
} catch (InvalidDefinitionException e) {
verifyException(e, "Deserialization of");
verifyException(e, "by passing existing instance");
verifyException(e, "ValueClassXY");
}
}
/*
/*****************************************************
/* Failing test cases
/*****************************************************
*/
// and then test to ensure error handling works as expected if attempts
// is made to pass builder (API requires value, not builder)
@Test
public void testBuilderWithWrongType() throws Exception
{
try {
/* Object result =*/ MAPPER.readerFor(ValueClassXY.class)
.withValueToUpdate(new SimpleBuilderXY())
.readValue(a2q("{'x':1,'y:'2'}"));
fail("Should not have passed");
} catch (InvalidDefinitionException e) {
verifyException(e, "Deserialization of");
verifyException(e, "by passing existing Builder");
verifyException(e, "SimpleBuilderXY");
}
}
}
|
SimpleBuilderXY
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/bean/override/mockito/integration/MockitoSpyBeanAndCircularDependenciesWithAutowiredSettersIntegrationTests.java
|
{
"start": 2144,
"end": 2295
}
|
class ____ {
private One one;
@Autowired
void setOne(One one) {
this.one = one;
}
void callOne() {
this.one.doSomething();
}
}
}
|
Two
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/ImmutableCheckerTest.java
|
{
"start": 80283,
"end": 80765
}
|
class ____ {",
" public void method() {",
" GenericWithImmutableParam<? extends MutableClass> value = null;",
" }",
"}")
.doTest();
}
@Test
public void typeParameterExtendsImmutable_noViolation() {
withImmutableTypeParameterGeneric()
.addSourceLines(
"ImmutableClass.java",
"""
import com.google.errorprone.annotations.Immutable;
@Immutable
|
Test
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/metamodel/MixedIdAndIdClassHandling.java
|
{
"start": 2496,
"end": 2641
}
|
class ____ extends Employee {
@Column(name="SALARY")
private float salary;
public FullTimeEmployee() {
}
}
public static
|
FullTimeEmployee
|
java
|
apache__avro
|
lang/java/avro/src/test/java/org/apache/avro/reflect/TestReflectDatumReader.java
|
{
"start": 2134,
"end": 8315
}
|
class ____ is not trusted throws a
* SecurityException.
*/
@Test
void testNotSerializableClasses() {
ClassSecurityPredicate originalValidator = ClassSecurityValidator.getGlobal();
try {
ClassSecurityValidator.setGlobal(ClassSecurityValidator.builder().build());
assertThrows(SecurityException.class, () -> new ReflectDatumReader<>(PojoWithArray.class));
} finally {
ClassSecurityValidator.setGlobal(originalValidator);
}
}
@Test
void read_PojoWithList() throws IOException {
PojoWithList pojoWithList = new PojoWithList();
pojoWithList.setId(42);
pojoWithList.setRelatedIds(Arrays.asList(1, 2, 3));
byte[] serializedBytes = serializeWithReflectDatumWriter(pojoWithList, PojoWithList.class);
Decoder decoder = DecoderFactory.get().binaryDecoder(serializedBytes, null);
ReflectDatumReader<PojoWithList> reflectDatumReader = new ReflectDatumReader<>(PojoWithList.class);
PojoWithList deserialized = new PojoWithList();
reflectDatumReader.read(deserialized, decoder);
assertEquals(pojoWithList, deserialized);
}
@Test
void read_PojoWithArray() throws IOException {
PojoWithArray pojoWithArray = new PojoWithArray();
pojoWithArray.setId(42);
pojoWithArray.setRelatedIds(new int[] { 1, 2, 3 });
byte[] serializedBytes = serializeWithReflectDatumWriter(pojoWithArray, PojoWithArray.class);
Decoder decoder = DecoderFactory.get().binaryDecoder(serializedBytes, null);
ReflectDatumReader<PojoWithArray> reflectDatumReader = new ReflectDatumReader<>(PojoWithArray.class);
PojoWithArray deserialized = new PojoWithArray();
reflectDatumReader.read(deserialized, decoder);
assertEquals(pojoWithArray, deserialized);
}
@Test
public void testRead_PojoWithSet() throws IOException {
PojoWithSet pojoWithSet = new PojoWithSet();
pojoWithSet.setId(42);
Set<Integer> relatedIds = new HashSet<>();
relatedIds.add(1);
relatedIds.add(2);
relatedIds.add(3);
pojoWithSet.setRelatedIds(relatedIds);
byte[] serializedBytes = serializeWithReflectDatumWriter(pojoWithSet, PojoWithSet.class);
Decoder decoder = DecoderFactory.get().binaryDecoder(serializedBytes, null);
ReflectDatumReader<PojoWithSet> reflectDatumReader = new ReflectDatumReader<>(PojoWithSet.class);
PojoWithSet deserialized = new PojoWithSet();
reflectDatumReader.read(deserialized, decoder);
assertEquals(pojoWithSet, deserialized);
}
@Test
public void testRead_PojoWithMap() throws IOException {
PojoWithMap pojoWithMap = new PojoWithMap();
pojoWithMap.setId(42);
Map<Integer, Integer> relatedIds = new HashMap<>();
relatedIds.put(1, 11);
relatedIds.put(2, 22);
relatedIds.put(3, 33);
pojoWithMap.setRelatedIds(relatedIds);
byte[] serializedBytes = serializeWithReflectDatumWriter(pojoWithMap, PojoWithMap.class);
Decoder decoder = DecoderFactory.get().binaryDecoder(serializedBytes, null);
ReflectDatumReader<PojoWithMap> reflectDatumReader = new ReflectDatumReader<>(PojoWithMap.class);
PojoWithMap deserialized = new PojoWithMap();
reflectDatumReader.read(deserialized, decoder);
assertEquals(pojoWithMap, deserialized);
}
@Test
public void testRead_PojoWithOptional() throws IOException {
PojoWithOptional pojoWithOptional = new PojoWithOptional();
pojoWithOptional.setId(42);
pojoWithOptional.setRelatedId(Optional.of(13));
byte[] serializedBytes = serializeWithReflectDatumWriter(pojoWithOptional, PojoWithOptional.class);
Decoder decoder = DecoderFactory.get().binaryDecoder(serializedBytes, null);
ReflectDatumReader<PojoWithOptional> reflectDatumReader = new ReflectDatumReader<>(PojoWithOptional.class);
PojoWithOptional deserialized = new PojoWithOptional();
reflectDatumReader.read(deserialized, decoder);
assertEquals(pojoWithOptional, deserialized);
}
@Test
public void testRead_PojoWithEmptyOptional() throws IOException {
PojoWithOptional pojoWithOptional = new PojoWithOptional();
pojoWithOptional.setId(42);
pojoWithOptional.setRelatedId(Optional.empty());
byte[] serializedBytes = serializeWithReflectDatumWriter(pojoWithOptional, PojoWithOptional.class);
Decoder decoder = DecoderFactory.get().binaryDecoder(serializedBytes, null);
ReflectDatumReader<PojoWithOptional> reflectDatumReader = new ReflectDatumReader<>(PojoWithOptional.class);
PojoWithOptional deserialized = new PojoWithOptional();
reflectDatumReader.read(deserialized, decoder);
assertEquals(pojoWithOptional, deserialized);
}
@Test
public void testRead_PojoWithNullableAnnotation() throws IOException {
PojoWithBasicTypeNullableAnnotationV1 v1Pojo = new PojoWithBasicTypeNullableAnnotationV1();
int idValue = 1;
v1Pojo.setId(idValue);
byte[] serializedBytes = serializeWithReflectDatumWriter(v1Pojo, PojoWithBasicTypeNullableAnnotationV1.class);
Decoder decoder = DecoderFactory.get().binaryDecoder(serializedBytes, null);
ReflectData reflectData = ReflectData.get();
Schema schemaV1 = reflectData.getSchema(PojoWithBasicTypeNullableAnnotationV1.class);
Schema schemaV2 = reflectData.getSchema(PojoWithBasicTypeNullableAnnotationV2.class);
ReflectDatumReader<PojoWithBasicTypeNullableAnnotationV2> reflectDatumReader = new ReflectDatumReader<>(schemaV1,
schemaV2);
PojoWithBasicTypeNullableAnnotationV2 v2Pojo = new PojoWithBasicTypeNullableAnnotationV2();
reflectDatumReader.read(v2Pojo, decoder);
assertEquals(v1Pojo.id, v2Pojo.id);
assertEquals(v2Pojo.id, idValue);
assertEquals(v2Pojo.intId, FieldAccess.INT_DEFAULT_VALUE);
assertEquals(v2Pojo.floatId, FieldAccess.FLOAT_DEFAULT_VALUE);
assertEquals(v2Pojo.shortId, FieldAccess.SHORT_DEFAULT_VALUE);
assertEquals(v2Pojo.byteId, FieldAccess.BYTE_DEFAULT_VALUE);
assertEquals(v2Pojo.booleanId, FieldAccess.BOOLEAN_DEFAULT_VALUE);
assertEquals(v2Pojo.charId, FieldAccess.CHAR_DEFAULT_VALUE);
assertEquals(v2Pojo.longId, FieldAccess.LONG_DEFAULT_VALUE);
assertEquals(v2Pojo.doubleId, FieldAccess.DOUBLE_DEFAULT_VALUE);
}
public static
|
that
|
java
|
apache__kafka
|
streams/integration-tests/src/test/java/org/apache/kafka/streams/integration/GlobalKTableEOSIntegrationTest.java
|
{
"start": 3178,
"end": 21552
}
|
class ____ {
private static final int NUM_BROKERS = 1;
private static final Properties BROKER_CONFIG;
static {
BROKER_CONFIG = new Properties();
BROKER_CONFIG.put("transaction.state.log.replication.factor", (short) 1);
BROKER_CONFIG.put("transaction.state.log.min.isr", 1);
}
public static final EmbeddedKafkaCluster CLUSTER =
new EmbeddedKafkaCluster(NUM_BROKERS, BROKER_CONFIG);
@BeforeAll
public static void startCluster() throws IOException {
CLUSTER.start();
}
@AfterAll
public static void closeCluster() {
CLUSTER.stop();
}
private final MockTime mockTime = CLUSTER.time;
private final KeyValueMapper<String, Long, Long> keyMapper = (key, value) -> value;
private final ValueJoiner<Long, String, String> joiner = (value1, value2) -> value1 + "+" + value2;
private final String globalStore = "globalStore";
private final Map<String, String> results = new HashMap<>();
private StreamsBuilder builder;
private Properties streamsConfiguration;
private KafkaStreams kafkaStreams;
private String globalTableTopic;
private String streamTopic;
private GlobalKTable<Long, String> globalTable;
private KStream<String, Long> stream;
private ForeachAction<String, String> foreachAction;
@BeforeEach
public void before(final TestInfo testInfo) throws Exception {
builder = new StreamsBuilder();
final String safeTestName = safeUniqueTestName(testInfo);
createTopics(safeTestName);
streamsConfiguration = new Properties();
streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, "app-" + safeTestName);
streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
streamsConfiguration.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getPath());
streamsConfiguration.put(StreamsConfig.STATESTORE_CACHE_MAX_BYTES_CONFIG, 0L);
streamsConfiguration.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 100L);
streamsConfiguration.put(StreamsConfig.TASK_TIMEOUT_MS_CONFIG, 1L);
streamsConfiguration.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
streamsConfiguration.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, 1000);
streamsConfiguration.put(ConsumerConfig.HEARTBEAT_INTERVAL_MS_CONFIG, 300);
streamsConfiguration.put(ConsumerConfig.REQUEST_TIMEOUT_MS_CONFIG, 5000);
globalTable = builder.globalTable(
globalTableTopic,
Consumed.with(Serdes.Long(), Serdes.String()),
Materialized.<Long, String, KeyValueStore<Bytes, byte[]>>as(globalStore)
.withKeySerde(Serdes.Long())
.withValueSerde(Serdes.String()));
final Consumed<String, Long> stringLongConsumed = Consumed.with(Serdes.String(), Serdes.Long());
stream = builder.stream(streamTopic, stringLongConsumed);
foreachAction = results::put;
}
@AfterEach
public void after() throws Exception {
if (kafkaStreams != null) {
kafkaStreams.close(Duration.ofSeconds(60));
}
IntegrationTestUtils.purgeLocalStreamsState(streamsConfiguration);
}
@Test
public void shouldKStreamGlobalKTableLeftJoin() throws Exception {
final KStream<String, String> streamTableJoin = stream.leftJoin(globalTable, keyMapper, joiner);
streamTableJoin.foreach(foreachAction);
produceInitialGlobalTableValues();
startStreams();
produceTopicValues(streamTopic);
final Map<String, String> expected = new HashMap<>();
expected.put("a", "1+A");
expected.put("b", "2+B");
expected.put("c", "3+C");
expected.put("d", "4+D");
expected.put("e", "5+null");
TestUtils.waitForCondition(
() -> results.equals(expected),
30_000L,
() -> "waiting for initial values;" +
"\n expected: " + expected +
"\n received: " + results
);
produceGlobalTableValues();
final ReadOnlyKeyValueStore<Long, String> replicatedStore = IntegrationTestUtils
.getStore(globalStore, kafkaStreams, QueryableStoreTypes.keyValueStore());
assertNotNull(replicatedStore);
final Map<Long, String> expectedState = new HashMap<>();
expectedState.put(1L, "F");
expectedState.put(2L, "G");
expectedState.put(3L, "H");
expectedState.put(4L, "I");
expectedState.put(5L, "J");
final Map<Long, String> globalState = new HashMap<>();
TestUtils.waitForCondition(
() -> {
globalState.clear();
try (final KeyValueIterator<Long, String> it = replicatedStore.all()) {
it.forEachRemaining(pair -> globalState.put(pair.key, pair.value));
}
return globalState.equals(expectedState);
},
30_000L,
() -> "waiting for data in replicated store" +
"\n expected: " + expectedState +
"\n received: " + globalState
);
produceTopicValues(streamTopic);
expected.put("a", "1+F");
expected.put("b", "2+G");
expected.put("c", "3+H");
expected.put("d", "4+I");
expected.put("e", "5+J");
TestUtils.waitForCondition(
() -> results.equals(expected),
30_000L,
() -> "waiting for final values" +
"\n expected: " + expected +
"\n received: " + results
);
}
@Test
public void shouldKStreamGlobalKTableJoin() throws Exception {
final KStream<String, String> streamTableJoin = stream.join(globalTable, keyMapper, joiner);
streamTableJoin.foreach(foreachAction);
produceInitialGlobalTableValues();
startStreams();
produceTopicValues(streamTopic);
final Map<String, String> expected = new HashMap<>();
expected.put("a", "1+A");
expected.put("b", "2+B");
expected.put("c", "3+C");
expected.put("d", "4+D");
TestUtils.waitForCondition(
() -> results.equals(expected),
30_000L,
() -> "waiting for initial values" +
"\n expected: " + expected +
"\n received: " + results
);
produceGlobalTableValues();
final ReadOnlyKeyValueStore<Long, String> replicatedStore = IntegrationTestUtils
.getStore(globalStore, kafkaStreams, QueryableStoreTypes.keyValueStore());
assertNotNull(replicatedStore);
final Map<Long, String> expectedState = new HashMap<>();
expectedState.put(1L, "F");
expectedState.put(2L, "G");
expectedState.put(3L, "H");
expectedState.put(4L, "I");
expectedState.put(5L, "J");
final Map<Long, String> globalState = new HashMap<>();
TestUtils.waitForCondition(
() -> {
globalState.clear();
try (final KeyValueIterator<Long, String> it = replicatedStore.all()) {
it.forEachRemaining(pair -> globalState.put(pair.key, pair.value));
}
return globalState.equals(expectedState);
},
30_000L,
() -> "waiting for data in replicated store" +
"\n expected: " + expectedState +
"\n received: " + globalState
);
produceTopicValues(streamTopic);
expected.put("a", "1+F");
expected.put("b", "2+G");
expected.put("c", "3+H");
expected.put("d", "4+I");
expected.put("e", "5+J");
TestUtils.waitForCondition(
() -> results.equals(expected),
30_000L,
() -> "waiting for final values" +
"\n expected: " + expected +
"\n received: " + results
);
}
@Test
public void shouldRestoreTransactionalMessages() throws Exception {
produceInitialGlobalTableValues();
startStreams();
final Map<Long, String> expected = new HashMap<>();
expected.put(1L, "A");
expected.put(2L, "B");
expected.put(3L, "C");
expected.put(4L, "D");
final ReadOnlyKeyValueStore<Long, String> store = IntegrationTestUtils
.getStore(globalStore, kafkaStreams, QueryableStoreTypes.keyValueStore());
assertNotNull(store);
final Map<Long, String> result = new HashMap<>();
TestUtils.waitForCondition(
() -> {
result.clear();
try (final KeyValueIterator<Long, String> it = store.all()) {
it.forEachRemaining(kv -> result.put(kv.key, kv.value));
}
return result.equals(expected);
},
30_000L,
() -> "waiting for initial values" +
"\n expected: " + expected +
"\n received: " + result
);
}
@Test
public void shouldSkipOverTxMarkersOnRestore() throws Exception {
shouldSkipOverTxMarkersAndAbortedMessagesOnRestore(false);
}
@Test
public void shouldSkipOverAbortedMessagesOnRestore() throws Exception {
shouldSkipOverTxMarkersAndAbortedMessagesOnRestore(true);
}
private void shouldSkipOverTxMarkersAndAbortedMessagesOnRestore(final boolean appendAbortedMessages) throws Exception {
// records with key 1L, 2L, and 4L are written into partition-0
// record with key 3L is written into partition-1
produceInitialGlobalTableValues();
final String stateDir = streamsConfiguration.getProperty(StreamsConfig.STATE_DIR_CONFIG);
final File globalStateDir = new File(
stateDir
+ File.separator
+ streamsConfiguration.getProperty(StreamsConfig.APPLICATION_ID_CONFIG)
+ File.separator
+ "global");
assertTrue(globalStateDir.mkdirs());
final OffsetCheckpoint checkpoint = new OffsetCheckpoint(new File(globalStateDir, ".checkpoint"));
// set the checkpointed offset to the commit marker of partition-1
// even if `poll()` won't return any data for partition-1, we should still finish the restore
checkpoint.write(Collections.singletonMap(new TopicPartition(globalTableTopic, 1), 1L));
if (appendAbortedMessages) {
final AtomicReference<Exception> error = new AtomicReference<>();
startStreams(new StateRestoreListener() {
@Override
public void onRestoreStart(final TopicPartition topicPartition,
final String storeName,
final long startingOffset,
final long endingOffset) {
// we need to write aborted messages only after we init the `highWatermark`
// to move the `endOffset` beyond the `highWatermark
//
// we cannot write committed messages because we want to test the case that
// poll() returns no records
//
// cf. GlobalStateManagerImpl#restoreState()
try {
produceAbortedMessages();
} catch (final Exception fatal) {
error.set(fatal);
}
}
@Override
public void onBatchRestored(final TopicPartition topicPartition,
final String storeName,
final long batchEndOffset,
final long numRestored) { }
@Override
public void onRestoreEnd(final TopicPartition topicPartition,
final String storeName,
final long totalRestored) { }
});
final Exception fatal = error.get();
if (fatal != null) {
throw fatal;
}
} else {
startStreams();
}
final Map<Long, String> expected = new HashMap<>();
expected.put(1L, "A");
expected.put(2L, "B");
// skip record <3L, "C"> because we won't read it (cf checkpoint file above)
expected.put(4L, "D");
final ReadOnlyKeyValueStore<Long, String> store = IntegrationTestUtils
.getStore(globalStore, kafkaStreams, QueryableStoreTypes.keyValueStore());
assertNotNull(store);
final Map<Long, String> storeContent = new HashMap<>();
TestUtils.waitForCondition(
() -> {
storeContent.clear();
try (final KeyValueIterator<Long, String> it = store.all()) {
it.forEachRemaining(kv -> storeContent.put(kv.key, kv.value));
}
return storeContent.equals(expected);
},
30_000L,
() -> "waiting for initial values" +
"\n expected: " + expected +
"\n received: " + storeContent
);
}
@Test
public void shouldNotRestoreAbortedMessages() throws Exception {
produceAbortedMessages();
produceInitialGlobalTableValues();
produceAbortedMessages();
startStreams();
final Map<Long, String> expected = new HashMap<>();
expected.put(1L, "A");
expected.put(2L, "B");
expected.put(3L, "C");
expected.put(4L, "D");
final ReadOnlyKeyValueStore<Long, String> store = IntegrationTestUtils
.getStore(globalStore, kafkaStreams, QueryableStoreTypes.keyValueStore());
assertNotNull(store);
final Map<Long, String> storeContent = new HashMap<>();
TestUtils.waitForCondition(
() -> {
storeContent.clear();
try (final KeyValueIterator<Long, String> it = store.all()) {
it.forEachRemaining(pair -> storeContent.put(pair.key, pair.value));
}
return storeContent.equals(expected);
},
30_000L,
() -> "waiting for initial values" +
"\n expected: " + expected +
"\n received: " + storeContent
);
}
private void createTopics(final String safeTestName) throws Exception {
streamTopic = "stream-" + safeTestName;
globalTableTopic = "globalTable-" + safeTestName;
CLUSTER.createTopics(streamTopic);
CLUSTER.createTopic(globalTableTopic, 2, 1);
}
private void startStreams() {
startStreams(null);
}
private void startStreams(final StateRestoreListener stateRestoreListener) {
streamsConfiguration.put(StreamsConfig.PROCESSING_GUARANTEE_CONFIG, StreamsConfig.EXACTLY_ONCE_V2);
kafkaStreams = new KafkaStreams(builder.build(), streamsConfiguration);
kafkaStreams.setGlobalStateRestoreListener(stateRestoreListener);
kafkaStreams.start();
}
private void produceTopicValues(final String topic) {
final Properties config = new Properties();
config.setProperty(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, "true");
IntegrationTestUtils.produceKeyValuesSynchronously(
topic,
Arrays.asList(
new KeyValue<>("a", 1L),
new KeyValue<>("b", 2L),
new KeyValue<>("c", 3L),
new KeyValue<>("d", 4L),
new KeyValue<>("e", 5L)
),
TestUtils.producerConfig(
CLUSTER.bootstrapServers(),
StringSerializer.class,
LongSerializer.class,
config
),
mockTime
);
}
private void produceAbortedMessages() throws Exception {
final Properties properties = new Properties();
properties.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG, "someid");
IntegrationTestUtils.produceAbortedKeyValuesSynchronouslyWithTimestamp(
globalTableTopic, Arrays.asList(
new KeyValue<>(1L, "A"),
new KeyValue<>(2L, "B"),
new KeyValue<>(3L, "C"),
new KeyValue<>(4L, "D")
),
TestUtils.producerConfig(
CLUSTER.bootstrapServers(),
LongSerializer.class,
StringSerializer.class,
properties
),
mockTime.milliseconds()
);
}
private void produceInitialGlobalTableValues() {
final Properties properties = new Properties();
properties.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG, "someid");
IntegrationTestUtils.produceKeyValuesSynchronously(
globalTableTopic,
Arrays.asList(
new KeyValue<>(1L, "A"),
new KeyValue<>(2L, "B"),
new KeyValue<>(3L, "C"),
new KeyValue<>(4L, "D")
),
TestUtils.producerConfig(
CLUSTER.bootstrapServers(),
LongSerializer.class,
StringSerializer.class,
properties
),
mockTime,
true
);
}
private void produceGlobalTableValues() {
final Properties config = new Properties();
config.setProperty(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, "true");
IntegrationTestUtils.produceKeyValuesSynchronously(
globalTableTopic,
Arrays.asList(
new KeyValue<>(1L, "F"),
new KeyValue<>(2L, "G"),
new KeyValue<>(3L, "H"),
new KeyValue<>(4L, "I"),
new KeyValue<>(5L, "J")
),
TestUtils.producerConfig(
CLUSTER.bootstrapServers(),
LongSerializer.class,
StringSerializer.class,
config
),
mockTime
);
}
}
|
GlobalKTableEOSIntegrationTest
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/selection/methodgenerics/plain/ReturnTypeIsRawTypeMapper.java
|
{
"start": 603,
"end": 831
}
|
class ____ {
private final Set<Integer> prop;
public Source(Set<Integer> prop) {
this.prop = prop;
}
public Set<Integer> getProp() {
return prop;
}
}
|
Source
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/query/sql/spi/NonSelectInterpretationsKey.java
|
{
"start": 428,
"end": 1341
}
|
class ____ implements QueryInterpretationCache.Key {
private final String sql;
private final Collection<String> querySpaces;
public NonSelectInterpretationsKey(String sql, Collection<String> querySpaces) {
this.sql = sql;
this.querySpaces = querySpaces == null ? emptySet() : querySpaces;
}
@Override
public String getQueryString() {
return sql;
}
@Override
public QueryInterpretationCache.Key prepareForStore() {
return new NonSelectInterpretationsKey( sql,
querySpaces.isEmpty() ? emptySet() : new HashSet<>( querySpaces ) );
}
@Override
public boolean equals(Object o) {
if ( this == o ) {
return true;
}
if ( !(o instanceof NonSelectInterpretationsKey that) ) {
return false;
}
return sql.equals( that.sql )
&& querySpaces.equals( that.querySpaces );
}
@Override
public int hashCode() {
return Objects.hash( sql, querySpaces );
}
}
|
NonSelectInterpretationsKey
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/type/TimestampArrayTest.java
|
{
"start": 9462,
"end": 10011
}
|
class ____ {
@Id
private Long id;
@Column( name = "the_array" )
private LocalDateTime[] theArray;
public TableWithTimestampArrays() {
}
public TableWithTimestampArrays(Long id, LocalDateTime[] theArray) {
this.id = id;
this.theArray = theArray;
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public LocalDateTime[] getTheArray() {
return theArray;
}
public void setTheArray(LocalDateTime[] theArray) {
this.theArray = theArray;
}
}
}
|
TableWithTimestampArrays
|
java
|
greenrobot__EventBus
|
EventBusTestJava/src/main/java/org/greenrobot/eventbus/EventBusNoSubscriberEventTest.java
|
{
"start": 2291,
"end": 2442
}
|
class ____ {
@SuppressWarnings("unused")
@Subscribe
public void onEvent(String dummy) {
}
}
public
|
DummySubscriber
|
java
|
spring-projects__spring-framework
|
spring-aop/src/testFixtures/java/org/springframework/aop/testfixture/aspectj/CommonPointcuts.java
|
{
"start": 780,
"end": 878
}
|
class ____ {
@Pointcut("execution(* getAge())")
public void getAgeExecution() {}
}
|
CommonPointcuts
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/cluster/coordination/JoinValidationService.java
|
{
"start": 13187,
"end": 18057
}
|
class ____ extends ActionRunnable<Void> {
private final DiscoveryNode discoveryNode;
private final Transport.Connection connection;
JoinValidation(DiscoveryNode discoveryNode, Transport.Connection connection, ActionListener<Void> listener) {
super(listener);
this.discoveryNode = discoveryNode;
this.connection = connection;
}
@Override
protected void doRun() {
// NB these things never run concurrently to each other, or to the cache cleaner (see IMPLEMENTATION NOTES above) so it is safe
// to do these (non-atomic) things to the (unsynchronized) statesByVersion map.
var transportVersion = connection.getTransportVersion();
var cachedBytes = statesByVersion.get(transportVersion);
var bytes = maybeSerializeClusterState(cachedBytes, discoveryNode, transportVersion);
if (bytes == null) {
// Normally if we're not the master then the Coordinator sends a ping message just to validate connectivity instead of
// getting here. But if we were the master when the Coordinator checked then we might not be the master any more, so we
// get a null and fall back to a ping here too.
// noinspection ConstantConditions
assert cachedBytes == null;
transportService.sendRequest(
connection,
JoinHelper.JOIN_PING_ACTION_NAME,
new JoinHelper.JoinPingRequest(),
REQUEST_OPTIONS,
TransportResponseHandler.empty(responseExecutor, listener)
);
return;
}
bytes.mustIncRef();
transportService.sendRequest(
connection,
JOIN_VALIDATE_ACTION_NAME,
new BytesTransportRequest(bytes, transportVersion),
REQUEST_OPTIONS,
new CleanableResponseHandler<>(
listener.map(ignored -> null),
in -> ActionResponse.Empty.INSTANCE,
responseExecutor,
bytes::decRef
)
);
try {
if (cachedBytes == null) {
transportService.getThreadPool().schedule(new Runnable() {
@Override
public void run() {
execute(cacheClearer);
}
@Override
public String toString() {
return cacheClearer + " after timeout";
}
}, cacheTimeout, responseExecutor);
}
} catch (Exception e) {
assert e instanceof EsRejectedExecutionException esre && esre.isExecutorShutdown() : e;
// we're shutting down, so clear the cache (and handle the shutdown) right away
execute(cacheClearer);
}
}
@Override
public String toString() {
return "send cached join validation request to " + discoveryNode;
}
}
@Nullable // if we are not the master according to the current cluster state
private ReleasableBytesReference maybeSerializeClusterState(
ReleasableBytesReference cachedBytes,
DiscoveryNode discoveryNode,
TransportVersion version
) {
if (cachedBytes != null) {
return cachedBytes;
}
final var clusterState = clusterStateSupplier.get();
if (clusterState == null) {
return null;
}
assert clusterState.nodes().isLocalNodeElectedMaster();
try (var bytesStream = transportService.newNetworkBytesStream()) {
try (
var stream = new OutputStreamStreamOutput(
CompressorFactory.COMPRESSOR.threadLocalOutputStream(Streams.flushOnCloseStream(bytesStream))
)
) {
stream.setTransportVersion(version);
clusterState.writeTo(stream);
} catch (IOException e) {
throw new ElasticsearchException("failed to serialize cluster state for publishing to node {}", e, discoveryNode);
}
logger.trace(
"serialized join validation cluster state version [{}] for transport version [{}] with size [{}]",
clusterState.version(),
version,
bytesStream.position()
);
var newBytes = bytesStream.moveToBytesReference();
final var previousBytes = statesByVersion.put(version, newBytes);
assert previousBytes == null;
return newBytes;
}
}
}
|
JoinValidation
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/JiraEndpointBuilderFactory.java
|
{
"start": 57127,
"end": 57434
}
|
class ____ extends AbstractEndpointBuilder implements JiraEndpointBuilder, AdvancedJiraEndpointBuilder {
public JiraEndpointBuilderImpl(String path) {
super(componentName, path);
}
}
return new JiraEndpointBuilderImpl(path);
}
}
|
JiraEndpointBuilderImpl
|
java
|
apache__camel
|
components/camel-aws/camel-aws2-iam/src/test/java/org/apache/camel/component/aws2/iam/IAMComponentClientRegistryTest.java
|
{
"start": 1129,
"end": 2511
}
|
class ____ extends CamelTestSupport {
@Test
public void createEndpointWithMinimalECSClientConfiguration() throws Exception {
AmazonIAMClientMock clientMock = new AmazonIAMClientMock();
context.getRegistry().bind("amazonIamClient", clientMock);
IAM2Component component = context.getComponent("aws2-iam", IAM2Component.class);
IAM2Endpoint endpoint = (IAM2Endpoint) component.createEndpoint("aws2-iam://TestDomain");
assertNotNull(endpoint.getConfiguration().getIamClient());
}
@Test
public void createEndpointWithMinimalECSClientMisconfiguration() {
IAM2Component component = context.getComponent("aws2-iam", IAM2Component.class);
assertThrows(IllegalArgumentException.class, () -> {
component.createEndpoint("aws2-iam://TestDomain");
});
}
@Test
public void createEndpointWithAutowire() throws Exception {
AmazonIAMClientMock clientMock = new AmazonIAMClientMock();
context.getRegistry().bind("amazonIamClient", clientMock);
IAM2Component component = context.getComponent("aws2-iam", IAM2Component.class);
IAM2Endpoint endpoint = (IAM2Endpoint) component.createEndpoint("aws2-iam://TestDomain?accessKey=xxx&secretKey=yyy");
assertSame(clientMock, endpoint.getConfiguration().getIamClient());
}
}
|
IAMComponentClientRegistryTest
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/main/java/org/springframework/boot/convert/Delimiter.java
|
{
"start": 1189,
"end": 1550
}
|
interface ____ {
/**
* A delimiter value used to indicate that no delimiter is required and the result
* should be a single element containing the entire string.
*/
String NONE = "";
/**
* The delimiter to use or {@code NONE} if the entire contents should be treated as a
* single element.
* @return the delimiter
*/
String value();
}
|
Delimiter
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/pool/ha/selector/RandomDataSourceSelector.java
|
{
"start": 1073,
"end": 10841
}
|
class ____ implements DataSourceSelector {
private static final String PROP_PREFIX = "druid.ha.random.";
public static final String PROP_CHECKING_INTERVAL = PROP_PREFIX + "checkingIntervalSeconds";
public static final String PROP_RECOVERY_INTERVAL = PROP_PREFIX + "recoveryIntervalSeconds";
public static final String PROP_VALIDATION_SLEEP = PROP_PREFIX + "validationSleepSeconds";
public static final String PROP_BLACKLIST_THRESHOLD = PROP_PREFIX + "blacklistThreshold";
private static final Log LOG = LogFactory.getLog(RandomDataSourceSelector.class);
private Random random = new Random();
private List<DataSource> blacklist = new CopyOnWriteArrayList<DataSource>();
private HighAvailableDataSource highAvailableDataSource;
private RandomDataSourceValidateThread validateThread;
private RandomDataSourceRecoverThread recoverThread;
private Thread runningValidateThread;
private Thread runningRecoverThread;
private int checkingIntervalSeconds = RandomDataSourceValidateThread.DEFAULT_CHECKING_INTERVAL_SECONDS;
private int recoveryIntervalSeconds = RandomDataSourceRecoverThread.DEFAULT_RECOVER_INTERVAL_SECONDS;
private int validationSleepSeconds;
private int blacklistThreshold = RandomDataSourceValidateThread.DEFAULT_BLACKLIST_THRESHOLD;
public RandomDataSourceSelector(HighAvailableDataSource highAvailableDataSource) {
this.highAvailableDataSource = highAvailableDataSource;
}
@Override
public void init() {
if (highAvailableDataSource == null) {
LOG.warn("highAvailableDataSource is NULL!");
return;
}
if (!highAvailableDataSource.isTestOnBorrow() && !highAvailableDataSource.isTestOnReturn()) {
loadProperties();
initThreads();
} else {
LOG.info("testOnBorrow or testOnReturn has been set to true, ignore validateThread");
}
}
/**
* Interrupt Threads if needed.
*/
@Override
public void destroy() {
if (runningValidateThread != null) {
runningValidateThread.interrupt();
validateThread.setSelector(null);
}
if (runningRecoverThread != null) {
runningRecoverThread.interrupt();
recoverThread.setSelector(null);
}
}
@Override
public String getName() {
return DataSourceSelectorEnum.RANDOM.getName();
}
@Override
public DataSource get() {
Map<String, DataSource> dataSourceMap = getDataSourceMap();
if (dataSourceMap == null || dataSourceMap.isEmpty()) {
return null;
}
Collection<DataSource> targetDataSourceSet = removeBlackList(dataSourceMap);
removeBusyDataSource(targetDataSourceSet);
DataSource dataSource = getRandomDataSource(targetDataSourceSet);
return dataSource;
}
@Override
public void setTarget(String name) {
// do nothing
}
public Map<String, DataSource> getFullDataSourceMap() {
if (highAvailableDataSource != null) {
return highAvailableDataSource.getDataSourceMap();
}
return new HashMap<String, DataSource>();
}
public Map<String, DataSource> getDataSourceMap() {
if (highAvailableDataSource != null) {
return highAvailableDataSource.getAvailableDataSourceMap();
}
return new HashMap<String, DataSource>();
}
public List<DataSource> getBlacklist() {
return blacklist;
}
public boolean containInBlacklist(DataSource dataSource) {
return dataSource != null && blacklist.contains(dataSource);
}
public void addBlacklist(DataSource dataSource) {
if (dataSource != null && !blacklist.contains(dataSource)) {
blacklist.add(dataSource);
if (dataSource instanceof DruidDataSource) {
((DruidDataSource) dataSource).setTestOnReturn(true);
}
}
}
public void removeBlacklist(DataSource dataSource) {
if (containInBlacklist(dataSource)) {
blacklist.remove(dataSource);
if (dataSource instanceof DruidDataSource) {
((DruidDataSource) dataSource).setTestOnReturn(highAvailableDataSource.isTestOnReturn());
}
}
}
private void loadProperties() {
checkingIntervalSeconds = loadInteger(PROP_CHECKING_INTERVAL, checkingIntervalSeconds);
recoveryIntervalSeconds = loadInteger(PROP_RECOVERY_INTERVAL, recoveryIntervalSeconds);
validationSleepSeconds = loadInteger(PROP_VALIDATION_SLEEP, validationSleepSeconds);
blacklistThreshold = loadInteger(PROP_BLACKLIST_THRESHOLD, blacklistThreshold);
}
private int loadInteger(String name, int defaultValue) {
if (name == null) {
return defaultValue;
}
Properties properties = highAvailableDataSource.getConnectProperties();
int value = defaultValue;
try {
if (properties.containsKey(name)) {
value = Integer.parseInt(properties.getProperty(name));
}
} catch (Exception e) {
LOG.error("Exception occurred while parsing " + name, e);
}
return value;
}
private void initThreads() {
if (validateThread == null) {
validateThread = new RandomDataSourceValidateThread(this);
validateThread.setCheckingIntervalSeconds(checkingIntervalSeconds);
validateThread.setValidationSleepSeconds(validationSleepSeconds);
validateThread.setBlacklistThreshold(blacklistThreshold);
} else {
validateThread.setSelector(this);
}
if (runningValidateThread != null) {
runningValidateThread.interrupt();
}
runningValidateThread = new Thread(validateThread, "RandomDataSourceSelector-validate-thread");
runningValidateThread.start();
if (recoverThread == null) {
recoverThread = new RandomDataSourceRecoverThread(this);
recoverThread.setRecoverIntervalSeconds(recoveryIntervalSeconds);
recoverThread.setValidationSleepSeconds(validationSleepSeconds);
} else {
recoverThread.setSelector(this);
}
if (runningRecoverThread != null) {
runningRecoverThread.interrupt();
}
runningRecoverThread = new Thread(recoverThread, "RandomDataSourceSelector-recover-thread");
runningRecoverThread.start();
}
private Collection<DataSource> removeBlackList(Map<String, DataSource> dataSourceMap) {
Collection<DataSource> dataSourceSet;
if (blacklist == null || blacklist.isEmpty() || blacklist.size() >= dataSourceMap.size()) {
dataSourceSet = dataSourceMap.values();
} else {
dataSourceSet = new HashSet<DataSource>(dataSourceMap.values());
for (DataSource b : blacklist) {
dataSourceSet.remove(b);
}
LOG.info(blacklist.size() + " Blacklist DataSource removed, return "
+ dataSourceSet.size() + " DataSource(s).");
}
return dataSourceSet;
}
private void removeBusyDataSource(Collection<DataSource> dataSourceSet) {
Collection<DataSource> busyDataSourceSet = new HashSet<DataSource>();
for (DataSource ds : dataSourceSet) {
if (ds instanceof DruidDataSource && ((DruidDataSource) ds).getPoolingCount() <= 0) {
busyDataSourceSet.add(ds);
}
}
if (!busyDataSourceSet.isEmpty() && busyDataSourceSet.size() < dataSourceSet.size()) {
LOG.info("Busy DataSouces: " + busyDataSourceSet.size() + "/" + dataSourceSet.size());
for (DataSource ds : busyDataSourceSet) {
dataSourceSet.remove(ds);
}
}
}
private DataSource getRandomDataSource(Collection<DataSource> dataSourceSet) {
DataSource[] dataSources = dataSourceSet.toArray(new DataSource[]{});
if (dataSources != null && dataSources.length > 0) {
return dataSources[random.nextInt(dataSourceSet.size())];
}
return null;
}
public HighAvailableDataSource getHighAvailableDataSource() {
return highAvailableDataSource;
}
public RandomDataSourceValidateThread getValidateThread() {
return validateThread;
}
public void setValidateThread(RandomDataSourceValidateThread validateThread) {
this.validateThread = validateThread;
}
public RandomDataSourceRecoverThread getRecoverThread() {
return recoverThread;
}
public void setRecoverThread(RandomDataSourceRecoverThread recoverThread) {
this.recoverThread = recoverThread;
}
public int getCheckingIntervalSeconds() {
return checkingIntervalSeconds;
}
public void setCheckingIntervalSeconds(int checkingIntervalSeconds) {
this.checkingIntervalSeconds = checkingIntervalSeconds;
}
public int getRecoveryIntervalSeconds() {
return recoveryIntervalSeconds;
}
public void setRecoveryIntervalSeconds(int recoveryIntervalSeconds) {
this.recoveryIntervalSeconds = recoveryIntervalSeconds;
}
public int getValidationSleepSeconds() {
return validationSleepSeconds;
}
public void setValidationSleepSeconds(int validationSleepSeconds) {
this.validationSleepSeconds = validationSleepSeconds;
}
public int getBlacklistThreshold() {
return blacklistThreshold;
}
public void setBlacklistThreshold(int blacklistThreshold) {
this.blacklistThreshold = blacklistThreshold;
}
}
|
RandomDataSourceSelector
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/propertyref/inheritence/joined/Person.java
|
{
"start": 192,
"end": 793
}
|
class ____ {
private Long id;
private String name;
private BankAccount bankAccount;
/**
* @return Returns the id.
*/
public Long getId() {
return id;
}
/**
* @param id The id to set.
*/
public void setId(Long id) {
this.id = id;
}
/**
* @return Returns the name.
*/
public String getName() {
return name;
}
/**
* @param name The name to set.
*/
public void setName(String name) {
this.name = name;
}
public BankAccount getBankAccount() {
return bankAccount;
}
public void setBankAccount(BankAccount bankAccount) {
this.bankAccount = bankAccount;
}
}
|
Person
|
java
|
quarkusio__quarkus
|
extensions/vertx-http/deployment/src/test/java/io/quarkus/vertx/http/cors/CORSFluentApiOriginRegexTest.java
|
{
"start": 2153,
"end": 2446
}
|
class ____ {
void configure(@Observes HttpSecurity httpSecurity) {
httpSecurity.cors(Set.of(
"/https:\\/\\/(?:[a-z0-9\\-]+\\.)*domain\\.com/",
"/https://([a-z0-9\\-_]+)\\.app\\.mydomain\\.com/"));
}
}
}
|
CorsProgrammaticConfig
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/fastjson/deserializer/javabean/ConvertDO.java
|
{
"start": 503,
"end": 1457
}
|
class ____ implements Serializable {
private static final long serialVersionUID = 3987648902475498726L;
private int inta;
private Integer intb;
private long longa;
private Long longb;
private boolean booleana;
private Boolean booleanb;
private Character character;
private String str;
private String[] arrayStr;
private long[] arrayLong;
private List<String> list;
private Date date;
// private DateTime dateTime;
// private LocalDate localDate;
// private LocalTime localTime;
// private LocalDateTime localDateTime;
private Map<String, Object> map;
private JSONObject jsonObject;
private JSONArray jsonArray;
private List<JSONObject> jsonObjectList;
private Map<String, Object> strToMap;
private JSONObject strToJsonObject;
private JSONArray strToJsonArray;
private List<JSONObject> strToJsonObjectList;
private ConvertEnum convertEnum;
}
|
ConvertDO
|
java
|
google__dagger
|
javatests/dagger/functional/producers/subcomponent/ModuleSubcomponentsInterop.java
|
{
"start": 1153,
"end": 1221
}
|
interface ____ {
@ProductionSubcomponent.Builder
|
ProductionChild
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/MultibindingTest.java
|
{
"start": 7859,
"end": 8350
}
|
interface ____ {",
" @Binds",
" @IntoMap",
" @StringKey(\"key1\")",
" fun bind1(myImpl: MyImpl): MyInterface",
"}");
Source component =
CompilerTests.javaSource(
"test.TestComponent",
"package test;",
"",
"import dagger.Component;",
"import javax.inject.Provider;",
"",
"@Component(modules = TestModule.class)",
"
|
TestModule
|
java
|
apache__flink
|
flink-core/src/test/java/org/apache/flink/api/java/typeutils/runtime/kryo/KryoPojosForMigrationTests.java
|
{
"start": 4184,
"end": 4625
}
|
class ____ extends Serializer<Dog> implements Serializable {
private static final long serialVersionUID = 1L;
@Override
public void write(Kryo kryo, Output output, Dog object) {
output.writeString(object.getName());
}
@Override
public Dog read(Kryo kryo, Input input, Class<? extends Dog> type) {
return new Dog(input.readString());
}
}
}
|
DogV2KryoSerializer
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQueryTests.java
|
{
"start": 1114,
"end": 2584
}
|
class ____ extends ESTestCase {
public void testSimple() throws Exception {
try (IndexWriter writer = new IndexWriter(new ByteBuffersDirectory(), new IndexWriterConfig(Lucene.STANDARD_ANALYZER))) {
Document doc = new Document();
doc.add(new Field("field", "aaa bbb ccc ddd", TextField.TYPE_NOT_STORED));
writer.addDocument(doc);
try (IndexReader reader = DirectoryReader.open(writer)) {
IndexSearcher searcher = newSearcher(reader);
MultiPhrasePrefixQuery query = new MultiPhrasePrefixQuery("field");
query.add(new Term("field", "aa"));
assertThat(searcher.count(query), equalTo(1));
query = new MultiPhrasePrefixQuery("field");
query.add(new Term("field", "aaa"));
query.add(new Term("field", "bb"));
assertThat(searcher.count(query), equalTo(1));
query = new MultiPhrasePrefixQuery("field");
query.setSlop(1);
query.add(new Term("field", "aaa"));
query.add(new Term("field", "cc"));
assertThat(searcher.count(query), equalTo(1));
query = new MultiPhrasePrefixQuery("field");
query.setSlop(1);
query.add(new Term("field", "xxx"));
assertThat(searcher.count(query), equalTo(0));
}
}
}
}
|
MultiPhrasePrefixQueryTests
|
java
|
spring-projects__spring-security
|
web/src/test/java/org/springframework/security/web/access/DelegatingAccessDeniedHandlerTests.java
|
{
"start": 1456,
"end": 3384
}
|
class ____ {
@Mock
private AccessDeniedHandler handler1;
@Mock
private AccessDeniedHandler handler2;
@Mock
private AccessDeniedHandler handler3;
@Mock
private HttpServletRequest request;
@Mock
private HttpServletResponse response;
private LinkedHashMap<Class<? extends AccessDeniedException>, AccessDeniedHandler> handlers;
private DelegatingAccessDeniedHandler handler;
@BeforeEach
public void setup() {
this.handlers = new LinkedHashMap<>();
}
@Test
public void moreSpecificDoesNotInvokeLessSpecific() throws Exception {
this.handlers.put(CsrfException.class, this.handler1);
this.handler = new DelegatingAccessDeniedHandler(this.handlers, this.handler3);
AccessDeniedException accessDeniedException = new AccessDeniedException("");
this.handler.handle(this.request, this.response, accessDeniedException);
verify(this.handler1, never()).handle(any(HttpServletRequest.class), any(HttpServletResponse.class),
any(AccessDeniedException.class));
verify(this.handler3).handle(this.request, this.response, accessDeniedException);
}
@Test
public void matchesDoesNotInvokeDefault() throws Exception {
this.handlers.put(InvalidCsrfTokenException.class, this.handler1);
this.handlers.put(MissingCsrfTokenException.class, this.handler2);
this.handler = new DelegatingAccessDeniedHandler(this.handlers, this.handler3);
AccessDeniedException accessDeniedException = new MissingCsrfTokenException("123");
this.handler.handle(this.request, this.response, accessDeniedException);
verify(this.handler1, never()).handle(any(HttpServletRequest.class), any(HttpServletResponse.class),
any(AccessDeniedException.class));
verify(this.handler2).handle(this.request, this.response, accessDeniedException);
verify(this.handler3, never()).handle(any(HttpServletRequest.class), any(HttpServletResponse.class),
any(AccessDeniedException.class));
}
}
|
DelegatingAccessDeniedHandlerTests
|
java
|
junit-team__junit5
|
platform-tests/src/test/java/org/junit/platform/launcher/core/LauncherDiscoveryRequestBuilderTests.java
|
{
"start": 2418,
"end": 6500
}
|
class ____ {
@Test
void modulesAreStoredInDiscoveryRequest() {
// @formatter:off
var discoveryRequest = LauncherDiscoveryRequestBuilder.request()
.selectors(
selectModule("java.base")
).build();
// @formatter:on
var packageSelectors = discoveryRequest.getSelectorsByType(ModuleSelector.class).stream().map(
ModuleSelector::getModuleName).toList();
assertThat(packageSelectors).contains("java.base");
}
@Test
void packagesAreStoredInDiscoveryRequest() {
// @formatter:off
var discoveryRequest = discoveryRequest()
.selectors(
selectPackage("org.junit.platform.engine")
).build();
// @formatter:on
var packageSelectors = discoveryRequest.getSelectorsByType(PackageSelector.class).stream().map(
PackageSelector::getPackageName).toList();
assertThat(packageSelectors).contains("org.junit.platform.engine");
}
@Test
void classesAreStoredInDiscoveryRequest() {
// @formatter:off
var discoveryRequest = discoveryRequest()
.selectors(
selectClass(LauncherDiscoveryRequestBuilderTests.class.getName()),
selectClass(SampleTestClass.class)
)
.build();
// @formatter:on
@SuppressWarnings("rawtypes")
List<Class> classes = discoveryRequest.getSelectorsByType(ClassSelector.class).stream()//
.map(ClassSelector::getJavaClass).map(Class.class::cast).toList();
assertThat(classes).contains(SampleTestClass.class, LauncherDiscoveryRequestBuilderTests.class);
}
@Test
void methodsByFullyQualifiedNameAreStoredInDiscoveryRequest() {
// @formatter:off
var discoveryRequest = discoveryRequest()
.selectors(selectMethod(fullyQualifiedMethodName()))
.build();
// @formatter:on
var methodSelectors = discoveryRequest.getSelectorsByType(MethodSelector.class);
assertThat(methodSelectors).hasSize(1);
var methodSelector = methodSelectors.getFirst();
assertThat(methodSelector.getJavaClass()).isEqualTo(LauncherDiscoveryRequestBuilderTests.class);
assertThat(methodSelector.getJavaMethod()).isEqualTo(fullyQualifiedMethod());
}
@Test
void methodsByNameAreStoredInDiscoveryRequest() throws Exception {
Class<?> testClass = SampleTestClass.class;
var testMethod = testClass.getDeclaredMethod("test");
// @formatter:off
var discoveryRequest = discoveryRequest()
.selectors(selectMethod(SampleTestClass.class.getName(), "test"))
.build();
// @formatter:on
var methodSelectors = discoveryRequest.getSelectorsByType(MethodSelector.class);
assertThat(methodSelectors).hasSize(1);
var methodSelector = methodSelectors.getFirst();
assertThat(methodSelector.getJavaClass()).isEqualTo(testClass);
assertThat(methodSelector.getJavaMethod()).isEqualTo(testMethod);
}
@Test
void methodsByClassAreStoredInDiscoveryRequest() throws Exception {
Class<?> testClass = SampleTestClass.class;
var testMethod = testClass.getDeclaredMethod("test");
// @formatter:off
var discoveryRequest = (DefaultDiscoveryRequest) discoveryRequest()
.selectors(
selectMethod(testClass, "test")
).build();
// @formatter:on
var methodSelectors = discoveryRequest.getSelectorsByType(MethodSelector.class);
assertThat(methodSelectors).hasSize(1);
var methodSelector = methodSelectors.getFirst();
assertThat(methodSelector.getJavaClass()).isEqualTo(testClass);
assertThat(methodSelector.getJavaMethod()).isEqualTo(testMethod);
}
@Test
void uniqueIdsAreStoredInDiscoveryRequest() {
var id1 = UniqueId.forEngine("engine").append("foo", "id1");
var id2 = UniqueId.forEngine("engine").append("foo", "id2");
// @formatter:off
var discoveryRequest = discoveryRequest()
.selectors(
selectUniqueId(id1),
selectUniqueId(id2)
).build();
// @formatter:on
var uniqueIds = discoveryRequest.getSelectorsByType(UniqueIdSelector.class).stream().map(
UniqueIdSelector::getUniqueId).map(Object::toString).toList();
assertThat(uniqueIds).contains(id1.toString(), id2.toString());
}
}
@Nested
|
DiscoverySelectionTests
|
java
|
apache__commons-lang
|
src/main/java/org/apache/commons/lang3/reflect/MethodUtils.java
|
{
"start": 7677,
"end": 7833
}
|
class
____ Class<?>[] interfaces = cls.getInterfaces();
for (final Class<?> anInterface : interfaces) {
// Is this
|
final
|
java
|
apache__flink
|
flink-core/src/main/java/org/apache/flink/core/failure/FailureEnricherFactory.java
|
{
"start": 1074,
"end": 1343
}
|
interface ____ extends Plugin {
/**
* Construct a FailureEnricher.
*
* @param conf configuration for this failure enricher
* @return the FailureEnricher
*/
FailureEnricher createFailureEnricher(Configuration conf);
}
|
FailureEnricherFactory
|
java
|
apache__avro
|
lang/java/tools/src/test/compiler/output-string/avro/examples/baseball/JetBrainsNullSafeAnnotationsFieldsTest.java
|
{
"start": 501,
"end": 2381
}
|
class ____ extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
private static final long serialVersionUID = 4199351420542345651L;
public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"JetBrainsNullSafeAnnotationsFieldsTest\",\"namespace\":\"avro.examples.baseball\",\"doc\":\"Test that @org.jetbrains.annotations.Nullable and @org.jetbrains.annotations.NotNull annotations are created for all fields\",\"fields\":[{\"name\":\"name\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"nullable_name\",\"type\":[{\"type\":\"string\",\"avro.java.string\":\"String\"},\"null\"]},{\"name\":\"favorite_number\",\"type\":\"int\"},{\"name\":\"nullable_favorite_number\",\"type\":[\"int\",\"null\"]}]}");
public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
private static final SpecificData MODEL$ = new SpecificData();
private static final BinaryMessageEncoder<JetBrainsNullSafeAnnotationsFieldsTest> ENCODER =
new BinaryMessageEncoder<>(MODEL$, SCHEMA$);
private static final BinaryMessageDecoder<JetBrainsNullSafeAnnotationsFieldsTest> DECODER =
new BinaryMessageDecoder<>(MODEL$, SCHEMA$);
/**
* Return the BinaryMessageEncoder instance used by this class.
* @return the message encoder used by this class
*/
public static BinaryMessageEncoder<JetBrainsNullSafeAnnotationsFieldsTest> getEncoder() {
return ENCODER;
}
/**
* Return the BinaryMessageDecoder instance used by this class.
* @return the message decoder used by this class
*/
public static BinaryMessageDecoder<JetBrainsNullSafeAnnotationsFieldsTest> getDecoder() {
return DECODER;
}
/**
* Create a new BinaryMessageDecoder instance for this
|
JetBrainsNullSafeAnnotationsFieldsTest
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/web/servlet/samples/spr/CustomRequestAttributesRequestContextHolderTests.java
|
{
"start": 4471,
"end": 4650
}
|
class ____ implements WebMvcConfigurer {
@Bean
SingletonController singletonController() {
return new SingletonController();
}
}
@RestController
private static
|
WebConfig
|
java
|
google__dagger
|
javatests/artifacts/dagger/build-tests/src/test/java/buildtests/TransitiveSubcomponentModulesTest.java
|
{
"start": 7575,
"end": 8283
}
|
interface ____ {}")
.addSrcFile(subcomponent);
GradleModule.create(projectDir, "library2")
.addBuildFile(
"plugins {",
" id 'java'",
" id 'java-library'",
"}",
"dependencies {",
" implementation \"com.google.dagger:dagger:$dagger_version\"",
" annotationProcessor \"com.google.dagger:dagger-compiler:$dagger_version\"",
"}")
.addSrcFile(
"TransitiveModule.java",
"package library2;",
"",
"import dagger.Module;",
"import dagger.Provides;",
"",
"@Module",
"public
|
IncludesTransitiveModule
|
java
|
resilience4j__resilience4j
|
resilience4j-circuitbreaker/src/main/java/io/github/resilience4j/circuitbreaker/CircuitBreakerConfig.java
|
{
"start": 14412,
"end": 52710
}
|
class ____ {
@Nullable
private Predicate<Throwable> recordExceptionPredicate;
@Nullable
private Predicate<Throwable> ignoreExceptionPredicate;
private Function<Clock, Long> currentTimestampFunction = DEFAULT_TIMESTAMP_FUNCTION;
private TimeUnit timestampUnit = DEFAULT_TIMESTAMP_UNIT;
@SuppressWarnings("unchecked")
private Class<? extends Throwable>[] recordExceptions = new Class[0];
@SuppressWarnings("unchecked")
private Class<? extends Throwable>[] ignoreExceptions = new Class[0];
private float failureRateThreshold = DEFAULT_FAILURE_RATE_THRESHOLD;
private int minimumNumberOfCalls = DEFAULT_MINIMUM_NUMBER_OF_CALLS;
private boolean writableStackTraceEnabled = DEFAULT_WRITABLE_STACK_TRACE_ENABLED;
private int permittedNumberOfCallsInHalfOpenState = DEFAULT_PERMITTED_CALLS_IN_HALF_OPEN_STATE;
private int slidingWindowSize = DEFAULT_SLIDING_WINDOW_SIZE;
private Predicate<Object> recordResultPredicate = DEFAULT_RECORD_RESULT_PREDICATE;
private IntervalFunction waitIntervalFunctionInOpenState = IntervalFunction
.of(Duration.ofSeconds(DEFAULT_SLOW_CALL_DURATION_THRESHOLD));
private Function<Either<Object, Throwable>, TransitionCheckResult> transitionOnResult
= DEFAULT_TRANSITION_ON_RESULT;
private boolean automaticTransitionFromOpenToHalfOpenEnabled = false;
private State initialState = State.CLOSED;
private SlidingWindowType slidingWindowType = DEFAULT_SLIDING_WINDOW_TYPE;
private SlidingWindowSynchronizationStrategy slidingWindowSynchronizationStrategy =
DEFAULT_SLIDING_WINDOW_SYNCHRONIZATION_STRATEGY;
private float slowCallRateThreshold = DEFAULT_SLOW_CALL_RATE_THRESHOLD;
private Duration slowCallDurationThreshold = Duration
.ofSeconds(DEFAULT_SLOW_CALL_DURATION_THRESHOLD);
private Duration maxWaitDurationInHalfOpenState = Duration
.ofSeconds(DEFAULT_WAIT_DURATION_IN_HALF_OPEN_STATE);
private State transitionToStateAfterWaitDuration = DEFAULT_TRANSITION_TO_STATE_AFTER_WAIT_DURATION;
private byte createWaitIntervalFunctionCounter = 0;
private Clock clock = DEFAULT_CLOCK;
private boolean ignoreExceptionsPrecedenceEnabled = false;
public Builder(CircuitBreakerConfig baseConfig) {
this.waitIntervalFunctionInOpenState = baseConfig.waitIntervalFunctionInOpenState;
this.transitionOnResult = baseConfig.transitionOnResult;
this.permittedNumberOfCallsInHalfOpenState = baseConfig.permittedNumberOfCallsInHalfOpenState;
this.slidingWindowSize = baseConfig.slidingWindowSize;
this.slidingWindowType = baseConfig.slidingWindowType;
this.slidingWindowSynchronizationStrategy = baseConfig.slidingWindowSynchronizationStrategy;
this.minimumNumberOfCalls = baseConfig.minimumNumberOfCalls;
this.failureRateThreshold = baseConfig.failureRateThreshold;
this.ignoreExceptions = baseConfig.ignoreExceptions;
this.recordExceptions = baseConfig.recordExceptions;
this.recordExceptionPredicate = baseConfig.recordExceptionPredicate;
this.ignoreExceptionPredicate = baseConfig.ignoreExceptionPredicate;
this.currentTimestampFunction = baseConfig.currentTimestampFunction;
this.timestampUnit = baseConfig.timestampUnit;
this.automaticTransitionFromOpenToHalfOpenEnabled = baseConfig.automaticTransitionFromOpenToHalfOpenEnabled;
this.initialState = baseConfig.initialState;
this.slowCallRateThreshold = baseConfig.slowCallRateThreshold;
this.slowCallDurationThreshold = baseConfig.slowCallDurationThreshold;
this.maxWaitDurationInHalfOpenState = baseConfig.maxWaitDurationInHalfOpenState;
this.transitionToStateAfterWaitDuration = baseConfig.transitionToStateAfterWaitDuration;
this.writableStackTraceEnabled = baseConfig.writableStackTraceEnabled;
this.recordResultPredicate = baseConfig.recordResultPredicate;
this.clock = baseConfig.clock;
this.ignoreExceptionsPrecedenceEnabled = baseConfig.ignoreExceptionsPrecedenceEnabled;
}
public Builder() {
}
/**
* Configures the failure rate threshold in percentage. If the failure rate is equal to or
* greater than the threshold, the CircuitBreaker transitions to open and starts
* short-circuiting calls.
* <p>
* The threshold must be greater than 0 and not greater than 100. Default value is 50
* percentage.
*
* @param failureRateThreshold the failure rate threshold in percentage
* @return the CircuitBreakerConfig.Builder
* @throws IllegalArgumentException if {@code failureRateThreshold <= 0 ||
* failureRateThreshold > 100}
*/
public Builder failureRateThreshold(float failureRateThreshold) {
if (failureRateThreshold <= 0 || failureRateThreshold > 100) {
throw new IllegalArgumentException(
"failureRateThreshold must be greater than 0 and lower than 100, but was " + failureRateThreshold);
}
this.failureRateThreshold = failureRateThreshold;
return this;
}
/**
* Configures a threshold in percentage. The CircuitBreaker considers a call as slow when
* the call duration is greater than {@link #slowCallDurationThreshold(Duration)}. When the
* percentage of slow calls is equal to or greater than the threshold, the CircuitBreaker
* transitions to open and starts short-circuiting calls.
*
* <p>
* The threshold must be greater than 0 and not greater than 100. Default value is 100
* percentage which means that all recorded calls must be slower than {@link
* #slowCallDurationThreshold(Duration)}.
*
* @param slowCallRateThreshold the slow calls threshold in percentage
* @return the CircuitBreakerConfig.Builder
* @throws IllegalArgumentException if {@code slowCallRateThreshold <= 0 ||
* slowCallRateThreshold > 100}
*/
public Builder slowCallRateThreshold(float slowCallRateThreshold) {
if (slowCallRateThreshold <= 0 || slowCallRateThreshold > 100) {
throw new IllegalArgumentException(
"slowCallRateThreshold must be greater than 0 and not greater than 100, but was " + slowCallRateThreshold);
}
this.slowCallRateThreshold = slowCallRateThreshold;
return this;
}
/**
* Enables writable stack traces. When set to false, {@link Exception#getStackTrace()}
* returns a zero length array. This may be used to reduce log spam when the circuit breaker
* is open as the cause of the exceptions is already known (the circuit breaker is
* short-circuiting calls).
*
* @param writableStackTraceEnabled the flag to enable writable stack traces.
* @return the CircuitBreakerConfig.Builder
*/
public Builder writableStackTraceEnabled(boolean writableStackTraceEnabled) {
this.writableStackTraceEnabled = writableStackTraceEnabled;
return this;
}
/**
* Configures an interval function with a fixed wait duration which controls how long the
* CircuitBreaker should stay open, before it switches to half open. Default value is 60
* seconds.
* <p>
* Do not use with {@link #waitIntervalFunctionInOpenState(IntervalFunction)}!
* Please, when using, make sure not to override the value set earlier from the
* {@link #waitIntervalFunctionInOpenState(IntervalFunction)}
*
* @param waitDurationInOpenState the wait duration which specifies how long the
* CircuitBreaker should stay open
* @return the CircuitBreakerConfig.Builder
* @throws IllegalArgumentException if {@code waitDurationInOpenState.toMillis() < 1}
*/
public Builder waitDurationInOpenState(Duration waitDurationInOpenState) {
long waitDurationInMillis = waitDurationInOpenState.toMillis();
if (waitDurationInMillis < 1) {
throw new IllegalArgumentException(
"waitDurationInOpenState must be at least 1[ms]");
}
this.waitIntervalFunctionInOpenState = IntervalFunction.of(waitDurationInMillis);
createWaitIntervalFunctionCounter++;
return this;
}
/**
* Configures an interval function which controls how long the CircuitBreaker should stay
* open, before it switches to half open. The default interval function returns a fixed wait
* duration of 60 seconds.
* <p>
* A custom interval function is useful if you need an exponential backoff algorithm.
* <p>
* Do not use with {@link #waitDurationInOpenState(Duration)}!
* Please, when using, make sure not to override the value set earlier from the
* {@link #waitDurationInOpenState(Duration)}
*
* @param waitIntervalFunctionInOpenState Interval function that returns wait time as a
* function of attempts
* @return the CircuitBreakerConfig.Builder
*/
        public Builder waitIntervalFunctionInOpenState(
            IntervalFunction waitIntervalFunctionInOpenState) {
            this.waitIntervalFunctionInOpenState = waitIntervalFunctionInOpenState;
            // Counted so build() can reject conflicting use with waitDurationInOpenState(Duration).
            createWaitIntervalFunctionCounter++;
            return this;
        }
/**
* Configures a function which can decide if the circuit breaker should transition to a different
* state base on the result of the protected function.
*
* @param transitionOnResult function which instructs the circuit breaker if it should transition
* to a different state
* @return the CircuitBreakerConfig.Builder
*/
        public Builder transitionOnResult(
            Function<Either<Object, Throwable>, TransitionCheckResult> transitionOnResult) {
            // Stored as-is; consulted by the breaker to decide state transitions
            // from a call's result/exception (see javadoc above).
            this.transitionOnResult = transitionOnResult;
            return this;
        }
/**
* Configures the duration threshold above which calls are considered as slow and increase
* the slow calls percentage. Default value is 60 seconds.
*
* @param slowCallDurationThreshold the duration above which calls are considered as slow
* @return the CircuitBreakerConfig.Builder
* @throws IllegalArgumentException if {@code slowCallDurationThreshold.toNanos() < 1}
*/
public Builder slowCallDurationThreshold(Duration slowCallDurationThreshold) {
if (slowCallDurationThreshold.toNanos() < 1) {
throw new IllegalArgumentException(
"slowCallDurationThreshold must be at least 1[ns]");
}
this.slowCallDurationThreshold = slowCallDurationThreshold;
return this;
}
/**
* Configures CircuitBreaker with a fixed wait duration which controls how long the
* CircuitBreaker should stay in Half Open state, before it switches to open. This is an
* optional parameter.
*
* By default CircuitBreaker will stay in Half Open state until
* {@code minimumNumberOfCalls} is completed with either success or failure.
*
* @param maxWaitDurationInHalfOpenState the wait duration which specifies how long the
* CircuitBreaker should stay in Half Open
* @return the CircuitBreakerConfig.Builder
* @throws IllegalArgumentException if {@code maxWaitDurationInHalfOpenState.toMillis() < 0}
*/
public Builder maxWaitDurationInHalfOpenState(Duration maxWaitDurationInHalfOpenState) {
if (maxWaitDurationInHalfOpenState.toMillis() < 0) {
throw new IllegalArgumentException(
"maxWaitDurationInHalfOpenState must be greater than or equal to 0[ms]");
}
this.maxWaitDurationInHalfOpenState = maxWaitDurationInHalfOpenState;
return this;
}
/**
* Configures CircuitBreaker transition after max wait duration in half open state. Possible values are
* OPEN or CLOSE, by default the value is OPEN.
*
* @param transitionToStateAfterWaitDuration transition to state after wait duration in half open state
* @return the CircuitBreakerConfig.Builder
* @throws IllegalArgumentException if {@code transitionToStateAfterWaitDuration == null ||
* transitionToStateAfterWaitDuration != State.OPEN && transitionToStateAfterWaitDuration != State.CLOSED}
*/
        public Builder transitionToStateAfterWaitDuration(State transitionToStateAfterWaitDuration) {
            if (transitionToStateAfterWaitDuration == null) {
                throw new IllegalArgumentException("transitionToStateAfterWaitDuration must not be null");
            }
            // Only OPEN and CLOSED are meaningful targets after the half-open wait elapses.
            if (transitionToStateAfterWaitDuration != State.OPEN && transitionToStateAfterWaitDuration != State.CLOSED) {
                throw new IllegalArgumentException("transitionToStateAfterWaitDuration must be either OPEN or CLOSED");
            }
            this.transitionToStateAfterWaitDuration = transitionToStateAfterWaitDuration;
            return this;
        }
/**
* Configures the number of permitted calls when the CircuitBreaker is half open.
* <p>
* The size must be greater than 0. Default size is 10.
*
* @param permittedNumberOfCallsInHalfOpenState the permitted number of calls when the
* CircuitBreaker is half open
* @return the CircuitBreakerConfig.Builder
* @throws IllegalArgumentException if {@code permittedNumberOfCallsInHalfOpenState < 1}
*/
public Builder permittedNumberOfCallsInHalfOpenState(
int permittedNumberOfCallsInHalfOpenState) {
if (permittedNumberOfCallsInHalfOpenState < 1) {
throw new IllegalArgumentException(
"permittedNumberOfCallsInHalfOpenState must be greater than 0");
}
this.permittedNumberOfCallsInHalfOpenState = permittedNumberOfCallsInHalfOpenState;
return this;
}
/**
* Configures the sliding window which is used to record the outcome of calls when the
* CircuitBreaker is closed. {@code slidingWindowSize} configures the size of the sliding
* window. Sliding window can either be count-based or time-based, specified by {@code
* slidingWindowType}. {@code minimumNumberOfCalls} configures the minimum number of calls
* which are required (per sliding window period) before the CircuitBreaker can calculate
* the error rate. For example, if {@code minimumNumberOfCalls} is 10, then at least 10
* calls must be recorded, before the failure rate can be calculated. If only 9 calls have
* been recorded, the CircuitBreaker will not transition to open, even if all 9 calls have
* failed.
* <p>
* If {@code slidingWindowSize} is 100 and {@code slidingWindowType} is COUNT_BASED, the
* last 100 calls are recorded and aggregated. If {@code slidingWindowSize} is 10 and {@code
* slidingWindowType} is TIME_BASED, the calls of the last 10 seconds are recorded and
* aggregated.
* <p>
* The {@code slidingWindowSize} must be greater than 0. The {@code minimumNumberOfCalls}
* must be greater than 0. If the {@code slidingWindowType} is COUNT_BASED, the {@code
* minimumNumberOfCalls} may not be greater than {@code slidingWindowSize}. If a greater
* value is provided, {@code minimumNumberOfCalls} will be equal to {@code
* slidingWindowSize}. If the {@code slidingWindowType} is TIME_BASED, the {@code
* minimumNumberOfCalls} may be any amount.
* <p>
* Default slidingWindowSize is 100, minimumNumberOfCalls is 100 and slidingWindowType is
* COUNT_BASED.
* <p>
* The sliding window has two types of synchronization strategies,
* for more details check {@link SlidingWindowSynchronizationStrategy}. Default is SYNCHRONIZED.
*
* @param slidingWindowSize the size of the sliding window when the CircuitBreaker is
* closed.
* @param minimumNumberOfCalls the minimum number of calls that must be recorded before the
* failure rate can be calculated.
* @param slidingWindowType the type of the sliding window. Either COUNT_BASED or
* TIME_BASED.
* @param slidingWindowSynchronizationStrategy the type of synchronization for the sliding window.
* @return the CircuitBreakerConfig.Builder
* @throws IllegalArgumentException if {@code slidingWindowSize < 1 || minimumNumberOfCalls
* < 1}
* or if {@code slidingWindowType == TIME_BASED
* && slidingWindowSynchronizationStrategy == LOCK_FREE
* && slidingWindowSize < 2}
*/
public Builder slidingWindow(
int slidingWindowSize,
int minimumNumberOfCalls,
SlidingWindowType slidingWindowType,
SlidingWindowSynchronizationStrategy slidingWindowSynchronizationStrategy
) {
if (slidingWindowSize < 1) {
throw new IllegalArgumentException("slidingWindowSize must be greater than 0");
}
if (minimumNumberOfCalls < 1) {
throw new IllegalArgumentException("minimumNumberOfCalls must be greater than 0");
}
if (slidingWindowType == SlidingWindowType.COUNT_BASED) {
this.minimumNumberOfCalls = Math.min(minimumNumberOfCalls, slidingWindowSize);
} else {
this.minimumNumberOfCalls = minimumNumberOfCalls;
}
if(slidingWindowType == SlidingWindowType.TIME_BASED && slidingWindowSynchronizationStrategy == SlidingWindowSynchronizationStrategy.LOCK_FREE && slidingWindowSize < 2) {
throw new IllegalArgumentException("For TIME_BASED with LOCK_FREE strategy, slidingWindowSize must be at least 2");
}
this.slidingWindowSize = slidingWindowSize;
this.slidingWindowType = slidingWindowType;
this.slidingWindowSynchronizationStrategy = slidingWindowSynchronizationStrategy;
return this;
}
/**
* Configures the sliding window which is used to record the outcome of calls when the
* CircuitBreaker is closed. {@code slidingWindowSize} configures the size of the sliding
* window. Sliding window can either be count-based or time-based, specified by {@code
* slidingWindowType}. {@code minimumNumberOfCalls} configures the minimum number of calls
* which are required (per sliding window period) before the CircuitBreaker can calculate
* the error rate. For example, if {@code minimumNumberOfCalls} is 10, then at least 10
* calls must be recorded, before the failure rate can be calculated. If only 9 calls have
* been recorded, the CircuitBreaker will not transition to open, even if all 9 calls have
* failed.
* <p>
* If {@code slidingWindowSize} is 100 and {@code slidingWindowType} is COUNT_BASED, the
* last 100 calls are recorded and aggregated. If {@code slidingWindowSize} is 10 and {@code
* slidingWindowType} is TIME_BASED, the calls of the last 10 seconds are recorded and
* aggregated.
* <p>
* The {@code slidingWindowSize} must be greater than 0. The {@code minimumNumberOfCalls}
* must be greater than 0. If the {@code slidingWindowType} is COUNT_BASED, the {@code
* minimumNumberOfCalls} may not be greater than {@code slidingWindowSize}. If a greater
* value is provided, {@code minimumNumberOfCalls} will be equal to {@code
* slidingWindowSize}. If the {@code slidingWindowType} is TIME_BASED, the {@code
* minimumNumberOfCalls} may be any amount.
* <p>
* Default slidingWindowSize is 100, minimumNumberOfCalls is 100 and slidingWindowType is
* COUNT_BASED.
* <p>
* Uses {@link SlidingWindowSynchronizationStrategy#SYNCHRONIZED} strategy by default.
*
* @see #slidingWindow(int, int, SlidingWindowType, SlidingWindowSynchronizationStrategy)
*
* @param slidingWindowSize the size of the sliding window when the CircuitBreaker is
* closed.
* @param minimumNumberOfCalls the minimum number of calls that must be recorded before the
* failure rate can be calculated.
* @param slidingWindowType the type of the sliding window. Either COUNT_BASED or
* TIME_BASED.
* @return the CircuitBreakerConfig.Builder
* @throws IllegalArgumentException if {@code slidingWindowSize < 1 || minimumNumberOfCalls
* < 1}
*/
        public Builder slidingWindow(
            int slidingWindowSize,
            int minimumNumberOfCalls,
            SlidingWindowType slidingWindowType
        ) {
            // Convenience overload: delegates with the default (SYNCHRONIZED) strategy.
            return slidingWindow(slidingWindowSize, minimumNumberOfCalls, slidingWindowType, DEFAULT_SLIDING_WINDOW_SYNCHRONIZATION_STRATEGY);
        }
/**
* Configures the size of the sliding window which is used to record the outcome of calls
* when the CircuitBreaker is closed. {@code slidingWindowSize} configures the size of the
* sliding window.
* <p>
* The {@code slidingWindowSize} must be greater than 0.
* <p>
* Default slidingWindowSize is 100.
*
* @param slidingWindowSize the size of the sliding window when the CircuitBreaker is
* closed.
* @return the CircuitBreakerConfig.Builder
* @throws IllegalArgumentException if {@code slidingWindowSize < 1}
* @see #slidingWindow(int, int, SlidingWindowType, SlidingWindowSynchronizationStrategy)
*/
public Builder slidingWindowSize(int slidingWindowSize) {
if (slidingWindowSize < 1) {
throw new IllegalArgumentException("slidingWindowSize must be greater than 0");
}
this.slidingWindowSize = slidingWindowSize;
return this;
}
/**
* Configures the minimum number of calls which are required (per sliding window period)
* before the CircuitBreaker can calculate the error rate. For example, if {@code
* minimumNumberOfCalls} is 10, then at least 10 calls must be recorded, before the failure
* rate can be calculated. If only 9 calls have been recorded, the CircuitBreaker will not
* transition to open, even if all 9 calls have failed.
* <p>
* Default minimumNumberOfCalls is 100
*
* @param minimumNumberOfCalls the minimum number of calls that must be recorded before the
* failure rate can be calculated.
* @return the CircuitBreakerConfig.Builder
* @throws IllegalArgumentException if {@code minimumNumberOfCalls < 1}
* @see #slidingWindow(int, int, SlidingWindowType, SlidingWindowSynchronizationStrategy)
*/
public Builder minimumNumberOfCalls(int minimumNumberOfCalls) {
if (minimumNumberOfCalls < 1) {
throw new IllegalArgumentException("minimumNumberOfCalls must be greater than 0");
}
this.minimumNumberOfCalls = minimumNumberOfCalls;
return this;
}
/**
* Configures the type of the sliding window which is used to record the outcome of calls
* when the CircuitBreaker is closed. Sliding window can either be count-based or
* time-based.
* <p>
* Default slidingWindowType is COUNT_BASED.
*
* @param slidingWindowType the type of the sliding window. Either COUNT_BASED or
* TIME_BASED.
* @return the CircuitBreakerConfig.Builder
* @see #slidingWindow(int, int, SlidingWindowType, SlidingWindowSynchronizationStrategy)
*/
        public Builder slidingWindowType(SlidingWindowType slidingWindowType) {
            // COUNT_BASED or TIME_BASED; default is COUNT_BASED (see javadoc above).
            this.slidingWindowType = slidingWindowType;
            return this;
        }
/**
* Configures the synchronization strategy for the sliding window.
* For more details check {@link SlidingWindowSynchronizationStrategy}.
* <p>
* Default slidingWindowSynchonizationStrategy is SYNCHRONIZED.
*
* @param slidingWindowSynchronizationStrategy the synchronization strategy
* @return the CircuitBreakerConfig.Builder
* @see #slidingWindow(int, int, SlidingWindowType, SlidingWindowSynchronizationStrategy)
*/
        public Builder slidingWindowSynchronizationStrategy(
            SlidingWindowSynchronizationStrategy slidingWindowSynchronizationStrategy
        ) {
            // Default is SYNCHRONIZED (see javadoc above).
            this.slidingWindowSynchronizationStrategy = slidingWindowSynchronizationStrategy;
            return this;
        }
/**
* Configures a Predicate which evaluates if an exception should be recorded as a failure
* and thus increase the failure rate. The Predicate must return true if the exception
* should count as a failure. The Predicate must return false, if the exception should count
* as a success, unless the exception is explicitly ignored by {@link
* #ignoreExceptions(Class[])} or {@link #ignoreException(Predicate)}.
*
* @param predicate the Predicate which evaluates if an exception should count as a failure
* @return the CircuitBreakerConfig.Builder
*/
        public Builder recordException(Predicate<Throwable> predicate) {
            // Combined with recordExceptions(Class[]) in createRecordExceptionPredicate().
            this.recordExceptionPredicate = predicate;
            return this;
        }
/**
* Configures a function that returns current timestamp for CircuitBreaker.
* Default implementation uses System.nanoTime() to compute current timestamp.
* Configure currentTimestampFunction to provide different implementation to compute current timestamp.
* <p>
*
* @param currentTimestampFunction function that computes current timestamp.
* @param timeUnit TimeUnit of timestamp returned by the function.
* @return the CircuitBreakerConfig.Builder
*/
        public Builder currentTimestampFunction(Function<Clock, Long> currentTimestampFunction, TimeUnit timeUnit) {
            // The unit describes the values produced by the timestamp function.
            this.timestampUnit = timeUnit;
            this.currentTimestampFunction = currentTimestampFunction;
            return this;
        }
/**
* Configures a Predicate which evaluates if the result of the protected function call
* should be recorded as a failure and thus increase the failure rate.
* The Predicate must return true if the result should count as a failure.
* The Predicate must return false, if the result should count
* as a success.
*
* @param predicate the Predicate which evaluates if a result should count as a failure
* @return the CircuitBreakerConfig.Builder
*/
        public Builder recordResult(Predicate<Object> predicate) {
            // true => the call's result counts as a failure (see javadoc above).
            this.recordResultPredicate = predicate;
            return this;
        }
/**
* Configures a Predicate which evaluates if an exception should be ignored and neither
* count as a failure nor success. The Predicate must return true if the exception should be
* ignored. The Predicate must return false, if the exception should count as a failure.
*
* @param predicate the Predicate which evaluates if an exception should count as a failure
* @return the CircuitBreakerConfig.Builder
*/
        public Builder ignoreException(Predicate<Throwable> predicate) {
            // Combined with ignoreExceptions(Class[]) in createIgnoreFailurePredicate().
            this.ignoreExceptionPredicate = predicate;
            return this;
        }
/**
* Configures a list of error classes that are recorded as a failure and thus increase the
* failure rate. Any exception matching or inheriting from one of the list should count as a
* failure, unless ignored via {@link #ignoreExceptions(Class[])} or {@link
* #ignoreException(Predicate)}.
*
* @param errorClasses the error classes that are recorded
* @return the CircuitBreakerConfig.Builder
* @see #ignoreExceptions(Class[]) ). Ignoring an exception has priority over recording an
* exception.
* <p>
* Example: recordExceptions(Throwable.class) and ignoreExceptions(RuntimeException.class)
* would capture all Errors and checked Exceptions, and ignore RuntimeExceptions.
* <p>
* For a more sophisticated exception management use the
* @see #recordException(Predicate) method
*/
        @SuppressWarnings("unchecked")
        @SafeVarargs
        public final Builder recordExceptions(
            @Nullable Class<? extends Throwable>... errorClasses) {
            // Normalize null varargs to an empty array so later code need not null-check.
            this.recordExceptions = errorClasses != null ? errorClasses : new Class[0];
            return this;
        }
/**
* Configures a list of error classes that are ignored and thus neither count as a failure
* nor success. Any exception matching or inheriting from one of the list will not count as
* a failure nor success, even if marked via {@link #recordExceptions(Class[])} or {@link
* #recordException(Predicate)}.
*
* @param errorClasses the error classes that are ignored
* @return the CircuitBreakerConfig.Builder
* @see #recordExceptions(Class[]) . Ignoring an exception has priority over recording an
* exception.
* <p>
* Example: ignoreExceptions(Throwable.class) and recordExceptions(Exception.class) would
* capture nothing.
* <p>
* Example: ignoreExceptions(Exception.class) and recordExceptions(Throwable.class) would
* capture Errors.
* <p>
* For a more sophisticated exception management use the
* @see #ignoreException(Predicate) method
*/
        @SuppressWarnings("unchecked")
        @SafeVarargs
        public final Builder ignoreExceptions(
            @Nullable Class<? extends Throwable>... errorClasses) {
            // Normalize null varargs to an empty array so later code need not null-check.
            this.ignoreExceptions = errorClasses != null ? errorClasses : new Class[0];
            return this;
        }
/**
* Enables automatic transition from OPEN to HALF_OPEN state once the
* waitDurationInOpenState has passed.
*
* @return the CircuitBreakerConfig.Builder
*/
        public Builder enableAutomaticTransitionFromOpenToHalfOpen() {
            // Shorthand for automaticTransitionFromOpenToHalfOpenEnabled(true).
            this.automaticTransitionFromOpenToHalfOpenEnabled = true;
            return this;
        }
/**
* Enables automatic transition from OPEN to HALF_OPEN state once the
* waitDurationInOpenState has passed.
*
* @param enableAutomaticTransitionFromOpenToHalfOpen the flag to enable the automatic
* transitioning.
* @return the CircuitBreakerConfig.Builder
*/
        public Builder automaticTransitionFromOpenToHalfOpenEnabled(
            boolean enableAutomaticTransitionFromOpenToHalfOpen) {
            // When enabled, the breaker moves OPEN -> HALF_OPEN automatically
            // once waitDurationInOpenState has elapsed (see javadoc above).
            this.automaticTransitionFromOpenToHalfOpenEnabled = enableAutomaticTransitionFromOpenToHalfOpen;
            return this;
        }
        /**
         * Configures the state the CircuitBreaker starts in.
         *
         * @param state the initial {@link State}
         * @return the CircuitBreakerConfig.Builder
         */
        public Builder initialState(State state){
            this.initialState = state;
            return this;
        }
/**
* Configures a custom Clock instance to use for time measurements.
* Default value is Clock.systemUTC().
*
* @param clock the Clock to use
* @return the CircuitBreakerConfig.Builder
*/
public Builder clock(Clock clock) {
if (clock == null) {
this.clock = DEFAULT_CLOCK;
} else {
this.clock = clock;
}
return this;
}
/**
* Enables ignoreExceptions to take precedence over recordExceptions.
* When enabled, if an exception matches both recordExceptions and ignoreExceptions,
* it will be ignored. When disabled (default), the legacy behavior is used where
* recordExceptions takes precedence for backward compatibility.
*
* @param enableIgnoreExceptionsPrecedence true to enable new behavior, false for legacy behavior (default)
* @return the CircuitBreakerConfig.Builder
*/
        public Builder ignoreExceptionsPrecedenceEnabled(boolean enableIgnoreExceptionsPrecedence) {
            // Consulted by createRecordExceptionPredicate(): when true, ignore wins
            // over record for exceptions matching both lists.
            this.ignoreExceptionsPrecedenceEnabled = enableIgnoreExceptionsPrecedence;
            return this;
        }
/**
* Enables ignoreExceptions to take precedence over recordExceptions.
* This changes the default behavior where recordExceptions takes precedence.
* When enabled, exceptions that match both recordExceptions and ignoreExceptions
* will be ignored.
*
* @return the CircuitBreakerConfig.Builder
*/
        public Builder enableIgnoreExceptionsPrecedence() {
            // Shorthand for ignoreExceptionsPrecedenceEnabled(true).
            this.ignoreExceptionsPrecedenceEnabled = true;
            return this;
        }
/**
* Builds a CircuitBreakerConfig
*
* @return the CircuitBreakerConfig
* @throws IllegalStateException when the parameter is invalid
*/
        public CircuitBreakerConfig build() {
            CircuitBreakerConfig config = new CircuitBreakerConfig();
            // Fails fast if waitDurationInOpenState and waitIntervalFunctionInOpenState
            // were both configured (see validateWaitIntervalFunctionInOpenState).
            config.waitIntervalFunctionInOpenState = validateWaitIntervalFunctionInOpenState();
            config.transitionOnResult = transitionOnResult;
            config.slidingWindowType = slidingWindowType;
            config.slidingWindowSynchronizationStrategy = slidingWindowSynchronizationStrategy;
            config.slowCallDurationThreshold = slowCallDurationThreshold;
            config.maxWaitDurationInHalfOpenState = maxWaitDurationInHalfOpenState;
            config.transitionToStateAfterWaitDuration = transitionToStateAfterWaitDuration;
            config.slowCallRateThreshold = slowCallRateThreshold;
            config.failureRateThreshold = failureRateThreshold;
            config.slidingWindowSize = slidingWindowSize;
            config.minimumNumberOfCalls = minimumNumberOfCalls;
            config.permittedNumberOfCallsInHalfOpenState = permittedNumberOfCallsInHalfOpenState;
            config.recordExceptions = recordExceptions;
            config.ignoreExceptions = ignoreExceptions;
            config.automaticTransitionFromOpenToHalfOpenEnabled = automaticTransitionFromOpenToHalfOpenEnabled;
            config.initialState = initialState;
            config.writableStackTraceEnabled = writableStackTraceEnabled;
            // Predicates are composed here so record/ignore precedence is resolved once.
            config.recordExceptionPredicate = createRecordExceptionPredicate();
            config.ignoreExceptionPredicate = createIgnoreFailurePredicate();
            config.ignoreExceptionsPrecedenceEnabled = ignoreExceptionsPrecedenceEnabled;
            config.currentTimestampFunction = currentTimestampFunction;
            config.timestampUnit = timestampUnit;
            config.recordResultPredicate = recordResultPredicate;
            config.clock = clock;
            return config;
        }
        // Combines the ignoreException predicate with the ignoreExceptions class list,
        // falling back to the default when neither was configured.
        private Predicate<Throwable> createIgnoreFailurePredicate() {
            return PredicateCreator.createExceptionsPredicate(ignoreExceptionPredicate, ignoreExceptions)
                .orElse(DEFAULT_IGNORE_EXCEPTION_PREDICATE);
        }
private Predicate<Throwable> createRecordExceptionPredicate() {
Predicate<Throwable> baseRecordExceptionPredicate = PredicateCreator.createExceptionsPredicate(recordExceptionPredicate, recordExceptions)
.orElse(DEFAULT_RECORD_EXCEPTION_PREDICATE);
if (ignoreExceptionsPrecedenceEnabled) {
return throwable ->
!createIgnoreFailurePredicate().test(throwable) &&
baseRecordExceptionPredicate.test(throwable);
}
return baseRecordExceptionPredicate;
}
        // The counter is incremented by both wait-interval setters; a value above 1
        // means they were combined (or one was called twice), which is ambiguous.
        private IntervalFunction validateWaitIntervalFunctionInOpenState() {
            if (createWaitIntervalFunctionCounter > 1) {
                throw new IllegalStateException("The waitIntervalFunction was configured multiple times " +
                    "which could result in an undesired state. Please verify that waitIntervalFunctionInOpenState " +
                    "and waitDurationInOpenState are not used together.");
            }
            return waitIntervalFunctionInOpenState;
        }
}
}
|
Builder
|
java
|
spring-projects__spring-framework
|
spring-context/src/main/java/org/springframework/context/ConfigurableApplicationContext.java
|
{
"start": 1124,
"end": 1700
}
|
interface ____ be implemented by most if not all application contexts.
* Provides facilities to configure an application context in addition
* to the application context client methods in the
* {@link org.springframework.context.ApplicationContext} interface.
*
* <p>Configuration and lifecycle methods are encapsulated here to avoid
* making them obvious to ApplicationContext client code. The present
* methods should only be used by startup and shutdown code.
*
* @author Juergen Hoeller
* @author Chris Beams
* @author Sam Brannen
* @since 03.11.2003
*/
public
|
to
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/collection/internal/StandardOrderedMapSemantics.java
|
{
"start": 658,
"end": 1890
}
|
class ____<K,V> extends AbstractMapSemantics<LinkedHashMap<K,V>,K,V> {
/**
* Singleton access
*/
public static final StandardOrderedMapSemantics<?,?> INSTANCE = new StandardOrderedMapSemantics<>();
private StandardOrderedMapSemantics() {
}
@Override
public CollectionClassification getCollectionClassification() {
return CollectionClassification.ORDERED_MAP;
}
@Override
public LinkedHashMap<K,V> instantiateRaw(
int anticipatedSize,
CollectionPersister collectionDescriptor) {
return anticipatedSize < 1 ? CollectionHelper.linkedMap() : CollectionHelper.linkedMapOfSize( anticipatedSize );
}
@Override
public PersistentCollection<V> instantiateWrapper(
Object key,
CollectionPersister collectionDescriptor,
SharedSessionContractImplementor session) {
return new PersistentMap<>( session );
}
@Override
public PersistentCollection<V> wrap(
LinkedHashMap<K,V> rawCollection,
CollectionPersister collectionDescriptor,
SharedSessionContractImplementor session) {
return new PersistentMap<>( session, rawCollection );
}
@Override
public Iterator<V> getElementIterator(LinkedHashMap<K,V> rawCollection) {
return rawCollection.values().iterator();
}
}
|
StandardOrderedMapSemantics
|
java
|
quarkusio__quarkus
|
devtools/gradle/gradle-model/src/main/java/io/quarkus/gradle/tooling/GradleApplicationModelBuilder.java
|
{
"start": 34212,
"end": 39353
}
|
class ____ a separate method to prevent that maybeConfigureKotlinJvmCompile() runs into
// a ClassNotFoundException due to actually using KotlinJvmCompile.class.
project.getTasks().withType(KotlinCompileTool.class, t -> configureCompileTask(t.getSources().getAsFileTree(),
t.getDestinationDirectory(), allClassesDirs, sourceDirs, t, sourceSet));
}
    // Records a src-dir -> classes-dir mapping for an enabled compile task whose
    // destination directory is one of the source set's classes dirs.
    private static void configureCompileTask(FileTree sources, DirectoryProperty destinationDirectory,
            FileCollection allClassesDirs, List<SourceDir> sourceDirs, Task task, SourceSet sourceSet) {
        if (!task.getEnabled() || sources.isEmpty()) {
            return;
        }
        final File destDir = destinationDirectory.getAsFile().get();
        if (!allClassesDirs.contains(destDir)) {
            return;
        }
        sources.visit(visitor -> {
            // we are looking for the root dirs containing sources
            if (visitor.getRelativePath().getSegments().length == 1) {
                final File srcDir = visitor.getFile().getParentFile();
                sourceDirs.add(new DefaultSourceDir(srcDir.toPath(), destDir.toPath(),
                        findGeneratedSourceDir(destDir, sourceSet),
                        Map.of("compiler", task.getName())));
            }
        });
    }
private static Path findGeneratedSourceDir(File destDir, SourceSet sourceSet) {
// destDir appears to be build/classes/java/main
if (destDir.getParentFile() == null) {
return null;
}
String language = destDir.getParentFile().getName(); // java
String sourceSetName = destDir.getName(); // main
// find the corresponding generated sources, same pattern, but under build/generated/sources/annotationProcessor/java/main
for (File generatedDir : sourceSet.getOutput().getGeneratedSourcesDirs().getFiles()) {
if (generatedDir.getParentFile() == null) {
continue;
}
if (generatedDir.getName().equals(sourceSetName)
&& generatedDir.getParentFile().getName().equals(language)) {
return generatedDir.toPath();
}
}
return null;
}
private void addSubstitutedProject(PathList.Builder paths, File projectFile) {
File mainResourceDirectory = new File(projectFile, MAIN_RESOURCES_OUTPUT);
if (mainResourceDirectory.exists()) {
paths.add(mainResourceDirectory.toPath());
}
File classesOutput = new File(projectFile, CLASSES_OUTPUT);
File[] languageDirectories = classesOutput.listFiles();
if (languageDirectories != null) {
for (File languageDirectory : languageDirectories) {
if (languageDirectory.isDirectory()) {
for (File sourceSet : languageDirectory.listFiles()) {
if (sourceSet.isDirectory() && sourceSet.getName().equals(SourceSet.MAIN_SOURCE_SET_NAME)) {
paths.add(sourceSet.toPath());
}
}
}
}
}
}
public static boolean isFlagOn(byte walkingFlags, byte flag) {
return (walkingFlags & flag) > 0;
}
public static byte clearFlag(byte flags, byte flag) {
if ((flags & flag) > 0) {
flags ^= flag;
}
return flags;
}
    // A resolved artifact counts as a dependency if it is a jar, an exe,
    // or an exploded directory.
    private static boolean isDependency(ResolvedArtifact a) {
        return ArtifactCoords.TYPE_JAR.equalsIgnoreCase(a.getExtension()) || "exe".equalsIgnoreCase(a.getExtension()) ||
                a.getFile().isDirectory();
    }
/**
* Creates an instance of Dependency and associates it with the ResolvedArtifact's path
*/
    // Convenience overload: resolves to the artifact's own file path, no module.
    static ResolvedDependencyBuilder toDependency(ResolvedArtifact a, int... flags) {
        return toDependency(a, PathList.of(a.getFile().toPath()), null, flags);
    }
    // Builds a dependency whose resolved paths are the source set's existing
    // classes dirs plus its resources dir (skipping any that do not exist).
    static ResolvedDependencyBuilder toDependency(ResolvedArtifact a, SourceSet s) {
        PathList.Builder resolvedPathBuilder = PathList.builder();
        for (File classesDir : s.getOutput().getClassesDirs()) {
            if (classesDir.exists()) {
                resolvedPathBuilder.add(classesDir.toPath());
            }
        }
        File resourceDir = s.getOutput().getResourcesDir();
        if (resourceDir != null && resourceDir.exists()) {
            resolvedPathBuilder.add(resourceDir.toPath());
        }
        return ResolvedDependencyBuilder
                .newInstance()
                .setResolvedPaths(resolvedPathBuilder.build())
                .setCoords(getArtifactCoords(a));
    }
static ResolvedDependencyBuilder toDependency(ResolvedArtifact a, PathCollection paths, DefaultWorkspaceModule module,
int... flags) {
int allFlags = 0;
for (int f : flags) {
allFlags |= f;
}
return ResolvedDependencyBuilder.newInstance()
.setCoords(getArtifactCoords(a))
.setResolvedPaths(paths)
.setWorkspaceModule(module)
.setFlags(allFlags);
}
}
|
in
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ApplicationMasterProtocol.java
|
{
"start": 4807,
"end": 4977
}
|
interface ____ an <code>ApplicationMaster</code> and the
* <code>ResourceManager</code>.
* </p>
*
* <p>
* The <code>ApplicationMaster</code> uses this
|
between
|
java
|
apache__flink
|
flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/kryo/KryoSerializer.java
|
{
"start": 19137,
"end": 28293
}
|
class ____ (coming from Twitter's Chill
// library).
// This will be true if Flink's Scala Api is used.
Class<?> chillInstantiatorClazz =
Class.forName("org.apache.flink.runtime.types.FlinkScalaKryoInstantiator");
Object chillInstantiator = chillInstantiatorClazz.newInstance();
// obtain a Kryo instance through Twitter Chill
Method m = chillInstantiatorClazz.getMethod("newKryo");
return Optional.of((Kryo) m.invoke(chillInstantiator));
} catch (ClassNotFoundException
| InstantiationException
| NoSuchMethodException
| IllegalAccessException
| InvocationTargetException e) {
if (LOG.isDebugEnabled()) {
LOG.info("Legacy kryo serializer scala extensions are not available.", e);
} else {
LOG.info("Legacy kryo serializer scala extensions are not available.");
}
return Optional.empty();
}
}
    /** Lazily builds and configures the Kryo instance on first use. */
    private void checkKryoInitialized() {
        if (this.kryo == null) {
            this.kryo = getKryoInstance();

            // Enable reference tracking.
            kryo.setReferences(true);

            // Throwable and all subclasses should be serialized via java serialization
            // Note: the registered JavaSerializer is Flink's own implementation, and not Kryo's.
            // This is due to a know issue with Kryo's JavaSerializer. See FLINK-6025 for
            // details.
            kryo.addDefaultSerializer(Throwable.class, new JavaSerializer());

            // Add default serializers first, so that the type registrations without a serializer
            // are registered with a default serializer
            for (Map.Entry<Class<?>, SerializableSerializer<?>> entry :
                    defaultSerializers.entrySet()) {
                kryo.addDefaultSerializer(entry.getKey(), entry.getValue().getSerializer());
            }

            for (Map.Entry<Class<?>, Class<? extends Serializer<?>>> entry :
                    defaultSerializerClasses.entrySet()) {
                kryo.addDefaultSerializer(entry.getKey(), entry.getValue());
            }

            // Apply explicit registrations after defaults, continuing from the chill
            // registrar's next id when one is present.
            KryoUtils.applyRegistrations(
                    this.kryo,
                    kryoRegistrations.values(),
                    flinkChillPackageRegistrar != null
                            ? flinkChillPackageRegistrar.getNextRegistrationId()
                            : kryo.getNextRegistrationId());

            kryo.setRegistrationRequired(false);
            kryo.setClassLoader(Thread.currentThread().getContextClassLoader());
        }
    }
// --------------------------------------------------------------------------------------------
// Serializer configuration snapshotting & compatibility
// --------------------------------------------------------------------------------------------
@Override
public TypeSerializerSnapshot<T> snapshotConfiguration() {
return new KryoSerializerSnapshot<>(
type, defaultSerializers, defaultSerializerClasses, kryoRegistrations);
}
// --------------------------------------------------------------------------------------------
// Utilities
// --------------------------------------------------------------------------------------------
/**
* Utility method that takes lists of registered types and their serializers, and resolve them
* into a single list such that the result will resemble the final registration result in Kryo.
*/
private static LinkedHashMap<String, KryoRegistration> buildKryoRegistrations(
Class<?> serializedType,
LinkedHashSet<Class<?>> registeredTypes,
LinkedHashMap<Class<?>, Class<? extends Serializer<?>>>
registeredTypesWithSerializerClasses,
LinkedHashMap<Class<?>, SerializableSerializer<?>> registeredTypesWithSerializers,
TernaryBoolean isForceAvroKryoEnabledOpt) {
final LinkedHashMap<String, KryoRegistration> kryoRegistrations = new LinkedHashMap<>();
kryoRegistrations.put(serializedType.getName(), new KryoRegistration(serializedType));
for (Class<?> registeredType : checkNotNull(registeredTypes)) {
kryoRegistrations.put(registeredType.getName(), new KryoRegistration(registeredType));
}
for (Map.Entry<Class<?>, Class<? extends Serializer<?>>>
registeredTypeWithSerializerClassEntry :
checkNotNull(registeredTypesWithSerializerClasses).entrySet()) {
kryoRegistrations.put(
registeredTypeWithSerializerClassEntry.getKey().getName(),
new KryoRegistration(
registeredTypeWithSerializerClassEntry.getKey(),
registeredTypeWithSerializerClassEntry.getValue()));
}
for (Map.Entry<Class<?>, SerializableSerializer<?>> registeredTypeWithSerializerEntry :
checkNotNull(registeredTypesWithSerializers).entrySet()) {
kryoRegistrations.put(
registeredTypeWithSerializerEntry.getKey().getName(),
new KryoRegistration(
registeredTypeWithSerializerEntry.getKey(),
registeredTypeWithSerializerEntry.getValue()));
}
// we always register avro to maintain backward compatibility if this option is not present.
if (isForceAvroKryoEnabledOpt.getAsBoolean() == null) {
// add Avro support if flink-avro is available; a dummy otherwise
AvroUtils.getAvroUtils().addAvroGenericDataArrayRegistration(kryoRegistrations);
} else if (isForceAvroKryoEnabledOpt.getAsBoolean()) {
// we only register if flink-avro is available. That is, we won't register the
// dummy serializer.
AvroUtils.tryGetAvroUtils()
.ifPresent(
avroUtils ->
avroUtils.addAvroGenericDataArrayRegistration(
kryoRegistrations));
}
return kryoRegistrations;
}
static void configureKryoLogging() {
// Kryo uses only DEBUG and TRACE levels
// we only forward TRACE level, because even DEBUG levels results in
// a logging for each object, which is infeasible in Flink.
if (LOG.isTraceEnabled()) {
com.esotericsoftware.minlog.Log.setLogger(new MinlogForwarder(LOG));
com.esotericsoftware.minlog.Log.TRACE();
}
}
// --------------------------------------------------------------------------------------------
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
in.defaultReadObject();
// kryoRegistrations may be null if this Kryo serializer is deserialized from an old version
if (kryoRegistrations == null) {
this.kryoRegistrations =
buildKryoRegistrations(
type,
registeredTypes,
registeredTypesWithSerializerClasses,
registeredTypesWithSerializers,
TernaryBoolean.UNDEFINED);
}
}
private SerializableSerializer<? extends Serializer<?>> deepCopySerializer(
SerializableSerializer<? extends Serializer<?>> original) {
try {
return InstantiationUtil.clone(
original, Thread.currentThread().getContextClassLoader());
} catch (IOException | ClassNotFoundException ex) {
throw new CloneFailedException(
"Could not clone serializer instance of class " + original.getClass(), ex);
}
}
// --------------------------------------------------------------------------------------------
// For testing
// --------------------------------------------------------------------------------------------
private void enterExclusiveThread() {
// we use simple get, check, set here, rather than CAS
// we don't need lock-style correctness, this is only a sanity-check and we thus
// favor speed at the cost of some false negatives in this check
Thread previous = currentThread;
Thread thisThread = Thread.currentThread();
if (previous == null) {
currentThread = thisThread;
} else if (previous != thisThread) {
throw new IllegalStateException(
"Concurrent access to KryoSerializer. Thread 1: "
+ thisThread.getName()
+ " , Thread 2: "
+ previous.getName());
}
}
private void exitExclusiveThread() {
currentThread = null;
}
@VisibleForTesting
public Kryo getKryo() {
checkKryoInitialized();
return this.kryo;
}
}
|
path
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/inheritance/OneToManyMappedByJoinedInheritanceTest.java
|
{
"start": 4955,
"end": 5226
}
|
class ____ {
@Id
@GeneratedValue
private long id;
@ManyToOne
@JoinColumn( name = "owner_id" )
private Company owner;
public void setOwner(Company owner) {
this.owner = owner;
}
}
@Entity( name = "CustomerComputerSystem" )
public static
|
ComputerSystem
|
java
|
elastic__elasticsearch
|
modules/reindex/src/test/java/org/elasticsearch/reindex/AsyncBulkByScrollActionTests.java
|
{
"start": 6006,
"end": 43420
}
|
class ____ extends ESTestCase {
private MyMockClient client;
private DummyAbstractBulkByScrollRequest testRequest;
private PlainActionFuture<BulkByScrollResponse> listener;
private String scrollId;
private ThreadPool threadPool;
private ThreadPool clientThreadPool;
private TaskManager taskManager;
private BulkByScrollTask testTask;
private WorkerBulkByScrollTaskState worker;
private Map<String, String> expectedHeaders = new HashMap<>();
private DiscoveryNode localNode;
private TaskId taskId;
@Before
public void setupForTest() {
// Fill the context with something random so we can make sure we inherited it appropriately.
expectedHeaders.clear();
expectedHeaders.put(randomSimpleString(random()), randomSimpleString(random()));
threadPool = new TestThreadPool(getTestName());
setupClient(threadPool);
testRequest = new DummyAbstractBulkByScrollRequest(new SearchRequest());
listener = new PlainActionFuture<>();
scrollId = null;
taskManager = new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet());
testTask = (BulkByScrollTask) taskManager.register("don'tcare", "hereeither", testRequest);
testTask.setWorker(testRequest.getRequestsPerSecond(), null);
worker = testTask.getWorkerState();
localNode = DiscoveryNodeUtils.builder("thenode").roles(emptySet()).build();
taskId = new TaskId(localNode.getId(), testTask.getId());
}
private void setupClient(ThreadPool threadPool) {
if (clientThreadPool != null) {
terminate(clientThreadPool);
}
clientThreadPool = threadPool;
client = new MyMockClient(new NoOpClient(threadPool));
client.threadPool().getThreadContext().putHeader(expectedHeaders);
}
@After
public void tearDownAndVerifyCommonStuff() throws Exception {
terminate(clientThreadPool);
clientThreadPool = null;
terminate(threadPool);
}
/**
* Generates a random scrollId and registers it so that when the test
* finishes we check that it was cleared. Subsequent calls reregister a new
* random scroll id so it is checked instead.
*/
private String scrollId() {
scrollId = randomSimpleString(random(), 1, 10); // Empty strings get special behavior we don't want
return scrollId;
}
public void testStartRetriesOnRejectionAndSucceeds() throws Exception {
client.searchesToReject = randomIntBetween(0, testRequest.getMaxRetries() - 1);
DummyAsyncBulkByScrollAction action = new DummyActionWithoutBackoff();
action.start();
assertBusy(() -> assertEquals(client.searchesToReject + 1, client.searchAttempts.get()));
if (listener.isDone()) {
Object result = listener.get();
fail("Expected listener not to be done but it was and had " + result);
}
assertBusy(() -> assertNotNull("There should be a search attempt pending that we didn't reject", client.lastSearch.get()));
assertEquals(client.searchesToReject, testTask.getStatus().getSearchRetries());
}
public void testStartRetriesOnRejectionButFailsOnTooManyRejections() throws Exception {
client.searchesToReject = testRequest.getMaxRetries() + randomIntBetween(1, 100);
DummyAsyncBulkByScrollAction action = new DummyActionWithoutBackoff();
action.start();
assertBusy(() -> assertEquals(testRequest.getMaxRetries() + 1, client.searchAttempts.get()));
ExecutionException e = expectThrows(ExecutionException.class, () -> listener.get());
assertThat(ExceptionsHelper.stackTrace(e), containsString(EsRejectedExecutionException.class.getSimpleName()));
assertNull("There shouldn't be a search attempt pending that we didn't reject", client.lastSearch.get());
assertEquals(testRequest.getMaxRetries(), testTask.getStatus().getSearchRetries());
}
public void testStartNextScrollRetriesOnRejectionAndSucceeds() throws Exception {
// this test primarily tests ClientScrollableHitSource but left it to test integration to status
client.scrollsToReject = randomIntBetween(0, testRequest.getMaxRetries() - 1);
// use fail() onResponse handler because mocked search never fires on listener.
ClientScrollableHitSource hitSource = new ClientScrollableHitSource(
logger,
buildTestBackoffPolicy(),
threadPool,
testTask.getWorkerState()::countSearchRetry,
r -> fail(),
ExceptionsHelper::reThrowIfNotNull,
new ParentTaskAssigningClient(client, localNode, testTask),
testRequest.getSearchRequest()
);
hitSource.setScroll(scrollId());
hitSource.startNextScroll(TimeValue.timeValueSeconds(0));
assertBusy(() -> assertEquals(client.scrollsToReject + 1, client.scrollAttempts.get()));
if (listener.isDone()) {
Object result = listener.get();
fail("Expected listener not to be done but it was and had " + result);
}
assertBusy(() -> assertNotNull("There should be a scroll attempt pending that we didn't reject", client.lastScroll.get()));
assertEquals(client.scrollsToReject, testTask.getStatus().getSearchRetries());
}
public void testStartNextScrollRetriesOnRejectionButFailsOnTooManyRejections() throws Exception {
// this test primarily tests ClientScrollableHitSource but left it to test integration to status
client.scrollsToReject = testRequest.getMaxRetries() + randomIntBetween(1, 100);
assertExactlyOnce(onFail -> {
Consumer<Exception> validingOnFail = e -> {
assertNotNull(ExceptionsHelper.unwrap(e, EsRejectedExecutionException.class));
onFail.run();
};
ClientScrollableHitSource hitSource = new ClientScrollableHitSource(
logger,
buildTestBackoffPolicy(),
threadPool,
testTask.getWorkerState()::countSearchRetry,
r -> fail(),
validingOnFail,
new ParentTaskAssigningClient(client, localNode, testTask),
testRequest.getSearchRequest()
);
hitSource.setScroll(scrollId());
hitSource.startNextScroll(TimeValue.timeValueSeconds(0));
assertBusy(() -> assertEquals(testRequest.getMaxRetries() + 1, client.scrollAttempts.get()));
});
assertNull("There shouldn't be a scroll attempt pending that we didn't reject", client.lastScroll.get());
assertEquals(testRequest.getMaxRetries(), testTask.getStatus().getSearchRetries());
}
public void testScrollResponseSetsTotal() {
// Default is 0, meaning unstarted
assertEquals(0, testTask.getStatus().getTotal());
long total = randomIntBetween(0, Integer.MAX_VALUE);
ScrollableHitSource.Response response = new ScrollableHitSource.Response(false, emptyList(), total, emptyList(), null);
simulateScrollResponse(new DummyAsyncBulkByScrollAction(), 0, 0, response);
assertEquals(total, testTask.getStatus().getTotal());
}
/**
* Tests that each scroll response is a batch and that the batch is launched properly.
*/
public void testScrollResponseBatchingBehavior() throws Exception {
int maxBatches = randomIntBetween(0, 100);
for (int batches = 1; batches < maxBatches; batches++) {
Hit hit = new ScrollableHitSource.BasicHit("index", "id", 0);
ScrollableHitSource.Response response = new ScrollableHitSource.Response(false, emptyList(), 1, singletonList(hit), null);
DummyAsyncBulkByScrollAction action = new DummyAsyncBulkByScrollAction();
simulateScrollResponse(action, System.nanoTime(), 0, response);
// Use assert busy because the update happens on another thread
final int expectedBatches = batches;
assertBusy(() -> assertEquals(expectedBatches, testTask.getStatus().getBatches()));
}
}
public void testBulkResponseSetsLotsOfStatus() throws Exception {
testRequest.setAbortOnVersionConflict(false);
int maxBatches = randomIntBetween(0, 100);
long versionConflicts = 0;
long created = 0;
long updated = 0;
long deleted = 0;
var action = new DummyAsyncBulkByScrollAction();
action.setScroll(scrollId());
for (int batches = 0; batches < maxBatches; batches++) {
BulkItemResponse[] responses = new BulkItemResponse[randomIntBetween(0, 100)];
for (int i = 0; i < responses.length; i++) {
ShardId shardId = new ShardId(new Index("name", "uid"), 0);
if (rarely()) {
versionConflicts++;
responses[i] = BulkItemResponse.failure(
i,
randomFrom(DocWriteRequest.OpType.values()),
new Failure(shardId.getIndexName(), "id" + i, new VersionConflictEngineException(shardId, "id", "test"))
);
continue;
}
boolean createdResponse;
DocWriteRequest.OpType opType;
switch (randomIntBetween(0, 2)) {
case 0 -> {
createdResponse = true;
opType = DocWriteRequest.OpType.CREATE;
created++;
}
case 1 -> {
createdResponse = false;
opType = randomFrom(DocWriteRequest.OpType.INDEX, DocWriteRequest.OpType.UPDATE);
updated++;
}
case 2 -> {
createdResponse = false;
opType = DocWriteRequest.OpType.DELETE;
deleted++;
}
default -> throw new RuntimeException("Bad scenario");
}
final int seqNo = randomInt(20);
final int primaryTerm = randomIntBetween(1, 16);
final IndexResponse response = new IndexResponse(shardId, "id" + i, seqNo, primaryTerm, randomInt(), createdResponse);
responses[i] = BulkItemResponse.success(i, opType, response);
}
assertExactlyOnce(onSuccess -> action.onBulkResponse(new BulkResponse(responses, 0), onSuccess));
assertEquals(versionConflicts, testTask.getStatus().getVersionConflicts());
assertEquals(updated, testTask.getStatus().getUpdated());
assertEquals(created, testTask.getStatus().getCreated());
assertEquals(deleted, testTask.getStatus().getDeleted());
assertEquals(versionConflicts, testTask.getStatus().getVersionConflicts());
}
}
public void testHandlesBulkWithNoScroll() {
// given a request that should not open scroll
var maxDocs = between(1, 100);
testRequest.setMaxDocs(maxDocs);
testRequest.getSearchRequest().source().size(100);
// when receiving bulk response
var responses = randomArray(0, maxDocs, BulkItemResponse[]::new, AsyncBulkByScrollActionTests::createBulkResponse);
new DummyAsyncBulkByScrollAction().onBulkResponse(new BulkResponse(responses, 0), () -> fail("should not be called"));
// then should refresh and finish
assertThat(listener.isDone(), equalTo(true));
var status = listener.actionGet().getStatus();
assertThat(status.getCreated() + status.getUpdated() + status.getDeleted(), equalTo((long) responses.length));
}
public void testHandlesBulkWhenMaxDocsIsReached() {
// given a request with max docs
var size = between(1, 10);
testRequest.setMaxDocs(size);
testRequest.getSearchRequest().source().size(100);
// when receiving bulk response with max docs
var responses = randomArray(size, size, BulkItemResponse[]::new, AsyncBulkByScrollActionTests::createBulkResponse);
new DummyAsyncBulkByScrollAction().onBulkResponse(new BulkResponse(responses, 0), () -> fail("should not be called"));
// then should refresh and finish
assertThat(listener.isDone(), equalTo(true));
var status = listener.actionGet().getStatus();
assertThat(status.getCreated() + status.getUpdated() + status.getDeleted(), equalTo((long) responses.length));
}
private static BulkItemResponse createBulkResponse() {
return BulkItemResponse.success(
0,
randomFrom(DocWriteRequest.OpType.values()),
new IndexResponse(
new ShardId(new Index("name", "uid"), 0),
"id",
randomInt(20),
randomIntBetween(1, 16),
randomIntBetween(0, Integer.MAX_VALUE),
true
)
);
}
/**
* Mimicks a ThreadPool rejecting execution of the task.
*/
public void testThreadPoolRejectionsAbortRequest() throws Exception {
worker.rethrottle(1);
setupClient(new TestThreadPool(getTestName()) {
@Override
public ScheduledCancellable schedule(Runnable command, TimeValue delay, Executor executor) {
// While we're here we can check that the sleep made it through
assertThat(delay.nanos(), greaterThan(0L));
assertThat(delay.seconds(), lessThanOrEqualTo(10L));
final EsRejectedExecutionException exception = new EsRejectedExecutionException("test");
if (command instanceof AbstractRunnable) {
((AbstractRunnable) command).onRejection(exception);
return null;
} else {
throw exception;
}
}
});
ScrollableHitSource.Response response = new ScrollableHitSource.Response(false, emptyList(), 0, emptyList(), null);
simulateScrollResponse(new DummyAsyncBulkByScrollAction(), System.nanoTime(), 10, response);
ExecutionException e = expectThrows(ExecutionException.class, () -> listener.get());
assertThat(e.getCause(), instanceOf(EsRejectedExecutionException.class));
assertThat(e.getCause(), hasToString(containsString("test")));
assertThat(client.scrollsCleared, contains(scrollId));
// When the task is rejected we don't increment the throttled timer
assertEquals(timeValueMillis(0), testTask.getStatus().getThrottled());
}
/**
* Mimicks shard search failures usually caused by the data node serving the
* scroll request going down.
*/
public void testShardFailuresAbortRequest() throws Exception {
SearchFailure shardFailure = new SearchFailure(new RuntimeException("test"));
ScrollableHitSource.Response scrollResponse = new ScrollableHitSource.Response(
false,
singletonList(shardFailure),
0,
emptyList(),
null
);
simulateScrollResponse(new DummyAsyncBulkByScrollAction(), System.nanoTime(), 0, scrollResponse);
BulkByScrollResponse response = listener.get();
assertThat(response.getBulkFailures(), empty());
assertThat(response.getSearchFailures(), contains(shardFailure));
assertFalse(response.isTimedOut());
assertNull(response.getReasonCancelled());
assertThat(client.scrollsCleared, contains(scrollId));
}
/**
* Mimicks search timeouts.
*/
public void testSearchTimeoutsAbortRequest() throws Exception {
ScrollableHitSource.Response scrollResponse = new ScrollableHitSource.Response(true, emptyList(), 0, emptyList(), null);
simulateScrollResponse(new DummyAsyncBulkByScrollAction(), System.nanoTime(), 0, scrollResponse);
BulkByScrollResponse response = listener.get();
assertThat(response.getBulkFailures(), empty());
assertThat(response.getSearchFailures(), empty());
assertTrue(response.isTimedOut());
assertNull(response.getReasonCancelled());
assertThat(client.scrollsCleared, contains(scrollId));
}
/**
* Mimicks bulk indexing failures.
*/
public void testBulkFailuresAbortRequest() throws Exception {
Failure failure = new Failure("index", "id", new RuntimeException("test"));
DummyAsyncBulkByScrollAction action = new DummyAsyncBulkByScrollAction();
BulkResponse bulkResponse = new BulkResponse(
new BulkItemResponse[] { BulkItemResponse.failure(0, DocWriteRequest.OpType.CREATE, failure) },
randomLong()
);
action.onBulkResponse(bulkResponse, Assert::fail);
BulkByScrollResponse response = listener.get();
assertThat(response.getBulkFailures(), contains(failure));
assertThat(response.getSearchFailures(), empty());
assertNull(response.getReasonCancelled());
}
/**
* Mimicks script failures or general wrongness by implementers.
*/
public void testBuildRequestThrowsException() throws Exception {
DummyAsyncBulkByScrollAction action = new DummyAsyncBulkByScrollAction() {
@Override
protected AbstractAsyncBulkByScrollAction.RequestWrapper<?> buildRequest(Hit doc) {
throw new RuntimeException("surprise");
}
};
ScrollableHitSource.BasicHit hit = new ScrollableHitSource.BasicHit("index", "id", 0);
hit.setSource(new BytesArray("{}"), XContentType.JSON);
ScrollableHitSource.Response response = new ScrollableHitSource.Response(false, emptyList(), 1, singletonList(hit), null);
simulateScrollResponse(action, System.nanoTime(), 0, response);
ExecutionException e = expectThrows(ExecutionException.class, () -> listener.get());
assertThat(e.getCause(), instanceOf(RuntimeException.class));
assertThat(e.getCause().getMessage(), equalTo("surprise"));
}
/**
* Mimicks bulk rejections. These should be retried and eventually succeed.
*/
public void testBulkRejectionsRetryWithEnoughRetries() throws Exception {
int bulksToTry = randomIntBetween(1, 10);
long retryAttempts = 0;
for (int i = 0; i < bulksToTry; i++) {
bulkRetryTestCase(false);
retryAttempts += testRequest.getMaxRetries();
assertEquals(retryAttempts, testTask.getStatus().getBulkRetries());
}
}
/**
* Mimicks bulk rejections. These should be retried but we fail anyway because we run out of retries.
*/
public void testBulkRejectionsRetryAndFailAnyway() throws Exception {
bulkRetryTestCase(true);
assertEquals(testRequest.getMaxRetries(), testTask.getStatus().getBulkRetries());
}
public void testScrollDelay() throws Exception {
/*
* Replace the thread pool with one that will save the delay sent for the command. We'll use that to check that we used a proper
* delay for throttling.
*/
AtomicReference<TimeValue> capturedDelay = new AtomicReference<>();
AtomicReference<Runnable> capturedCommand = new AtomicReference<>();
setupClient(new TestThreadPool(getTestName()) {
@Override
public ScheduledCancellable schedule(Runnable command, TimeValue delay, Executor executor) {
capturedDelay.set(delay);
capturedCommand.set(command);
return new ScheduledCancellable() {
private boolean cancelled = false;
@Override
public long getDelay(TimeUnit unit) {
return unit.convert(delay.millis(), TimeUnit.MILLISECONDS);
}
@Override
public int compareTo(Delayed o) {
return 0;
}
@Override
public boolean cancel() {
cancelled = true;
return true;
}
@Override
public boolean isCancelled() {
return cancelled;
}
};
}
});
// Set the base for the scroll to wait - this is added to the figure we calculate below
testRequest.getSearchRequest().scroll(timeValueSeconds(10));
DummyAsyncBulkByScrollAction action = new DummyAsyncBulkByScrollAction() {
@Override
protected RequestWrapper<?> buildRequest(Hit doc) {
return wrap(new IndexRequest().index("test"));
}
};
action.setScroll(scrollId());
// Set throttle to 1 request per second to make the math simpler
worker.rethrottle(1f);
action.start();
// create a simulated response.
SearchHit hit = SearchHit.unpooled(0, "id").sourceRef(new BytesArray("{}"));
SearchHits hits = SearchHits.unpooled(
IntStream.range(0, 100).mapToObj(i -> hit).toArray(SearchHit[]::new),
new TotalHits(0, TotalHits.Relation.EQUAL_TO),
0
);
SearchResponse searchResponse = SearchResponseUtils.response(hits).scrollId(scrollId()).shards(5, 4, 0).build();
try {
client.lastSearch.get().listener.onResponse(searchResponse);
assertEquals(0, capturedDelay.get().seconds());
capturedCommand.get().run();
// So the next request is going to have to wait an extra 100 seconds or so (base was 10 seconds, so 110ish)
assertThat(client.lastScroll.get().request.scroll().seconds(), either(equalTo(110L)).or(equalTo(109L)));
// Now we can simulate a response and check the delay that we used for the task
if (randomBoolean()) {
client.lastScroll.get().listener.onResponse(searchResponse);
assertEquals(99, capturedDelay.get().seconds());
} else {
// Let's rethrottle between the starting the scroll and getting the response
worker.rethrottle(10f);
client.lastScroll.get().listener.onResponse(searchResponse);
// The delay uses the new throttle
assertEquals(9, capturedDelay.get().seconds());
}
// Running the command ought to increment the delay counter on the task.
capturedCommand.get().run();
assertEquals(capturedDelay.get(), testTask.getStatus().getThrottled());
} finally {
searchResponse.decRef();
}
}
/**
* Execute a bulk retry test case. The total number of failures is random and the number of retries attempted is set to
* testRequest.getMaxRetries and controlled by the failWithRejection parameter.
*/
private void bulkRetryTestCase(boolean failWithRejection) throws Exception {
int totalFailures = randomIntBetween(1, testRequest.getMaxRetries());
int size = randomIntBetween(1, 100);
testRequest.setMaxRetries(totalFailures - (failWithRejection ? 1 : 0));
client.bulksToReject = client.bulksAttempts.get() + totalFailures;
DummyAsyncBulkByScrollAction action = new DummyActionWithoutBackoff();
action.setScroll(scrollId());
BulkRequest request = new BulkRequest();
for (int i = 0; i < size + 1; i++) {
request.add(new IndexRequest("index").id("id" + i));
}
if (failWithRejection) {
action.sendBulkRequest(request, Assert::fail);
BulkByScrollResponse response = listener.get();
assertThat(response.getBulkFailures(), hasSize(1));
assertEquals(response.getBulkFailures().get(0).getStatus(), RestStatus.TOO_MANY_REQUESTS);
assertThat(response.getSearchFailures(), empty());
assertNull(response.getReasonCancelled());
} else {
assertExactlyOnce(onSuccess -> action.sendBulkRequest(request, onSuccess));
}
}
/**
* The default retry time matches what we say it is in the javadoc for the request.
*/
public void testDefaultRetryTimes() {
Iterator<TimeValue> policy = new DummyAsyncBulkByScrollAction().buildBackoffPolicy().iterator();
long millis = 0;
while (policy.hasNext()) {
millis += policy.next().millis();
}
/*
* This is the total number of milliseconds that a reindex made with the default settings will backoff before attempting one final
* time. If that request is rejected then the whole process fails with a rejected exception.
*/
int defaultBackoffBeforeFailing = 59460;
assertEquals(defaultBackoffBeforeFailing, millis);
}
public void testRefreshIsFalseByDefault() throws Exception {
refreshTestCase(null, true, false);
}
public void testRefreshFalseDoesntExecuteRefresh() throws Exception {
refreshTestCase(false, true, false);
}
public void testRefreshTrueExecutesRefresh() throws Exception {
refreshTestCase(true, true, true);
}
public void testRefreshTrueSkipsRefreshIfNoDestinationIndexes() throws Exception {
refreshTestCase(true, false, false);
}
private void refreshTestCase(Boolean refresh, boolean addDestinationIndexes, boolean shouldRefresh) {
if (refresh != null) {
testRequest.setRefresh(refresh);
}
DummyAsyncBulkByScrollAction action = new DummyAsyncBulkByScrollAction();
if (addDestinationIndexes) {
action.addDestinationIndices(singleton("foo"));
}
action.refreshAndFinish(emptyList(), emptyList(), false);
if (shouldRefresh) {
assertArrayEquals(new String[] { "foo" }, client.lastRefreshRequest.get().indices());
} else {
assertNull("No refresh was attempted", client.lastRefreshRequest.get());
}
}
public void testCancelBeforeInitialSearch() throws Exception {
cancelTaskCase((DummyAsyncBulkByScrollAction action) -> action.start());
}
public void testCancelBeforeScrollResponse() throws Exception {
cancelTaskCase(
(DummyAsyncBulkByScrollAction action) -> simulateScrollResponse(
action,
System.nanoTime(),
1,
new ScrollableHitSource.Response(false, emptyList(), between(1, 100000), emptyList(), null)
)
);
}
public void testCancelBeforeSendBulkRequest() throws Exception {
cancelTaskCase((DummyAsyncBulkByScrollAction action) -> action.sendBulkRequest(new BulkRequest(), Assert::fail));
}
public void testCancelBeforeOnBulkResponse() throws Exception {
cancelTaskCase(
(DummyAsyncBulkByScrollAction action) -> action.onBulkResponse(new BulkResponse(new BulkItemResponse[0], 0), Assert::fail)
);
}
public void testCancelBeforeStartNextScroll() throws Exception {
long now = System.nanoTime();
cancelTaskCase((DummyAsyncBulkByScrollAction action) -> action.notifyDone(now, null, 0));
}
public void testCancelBeforeRefreshAndFinish() throws Exception {
// Refresh or not doesn't matter - we don't try to refresh.
testRequest.setRefresh(usually());
cancelTaskCase((DummyAsyncBulkByScrollAction action) -> action.refreshAndFinish(emptyList(), emptyList(), false));
assertNull("No refresh was attempted", client.lastRefreshRequest.get());
}
/**
* Tests that we can cancel the request during its throttling delay. This can't use {@link #cancelTaskCase(Consumer)} because it needs
* to send the request un-canceled and cancel it at a specific time.
*/
public void testCancelWhileDelayedAfterScrollResponse() throws Exception {
String reason = randomSimpleString(random());
/*
* Replace the thread pool with one that will cancel the task as soon as anything is scheduled, which reindex tries to do when there
* is a delay.
*/
setupClient(new TestThreadPool(getTestName()) {
@Override
public ScheduledCancellable schedule(Runnable command, TimeValue delay, Executor executor) {
/*
* This is called twice:
* 1. To schedule the throttling. When that happens we immediately cancel the task.
* 2. After the task is canceled.
* Both times we use delegate to the standard behavior so the task is scheduled as expected so it can be cancelled and all
* that good stuff.
*/
if (delay.nanos() > 0) {
generic().execute(() -> taskManager.cancel(testTask, reason, () -> {}));
}
return super.schedule(command, delay, executor);
}
});
// Send the scroll response which will trigger the custom thread pool above, canceling the request before running the response
DummyAsyncBulkByScrollAction action = new DummyAsyncBulkByScrollAction();
boolean previousScrollSet = usually();
if (previousScrollSet) {
action.setScroll(scrollId());
}
long total = randomIntBetween(0, Integer.MAX_VALUE);
ScrollableHitSource.Response response = new ScrollableHitSource.Response(false, emptyList(), total, emptyList(), null);
// Use a long delay here so the test will time out if the cancellation doesn't reschedule the throttled task
worker.rethrottle(1);
simulateScrollResponse(action, System.nanoTime(), 1000, response);
// Now that we've got our cancel we'll just verify that it all came through all right
assertEquals(reason, listener.get(10, TimeUnit.SECONDS).getReasonCancelled());
if (previousScrollSet) {
// Canceled tasks always start to clear the scroll before they die.
assertThat(client.scrollsCleared, contains(scrollId));
}
}
private void cancelTaskCase(Consumer<DummyAsyncBulkByScrollAction> testMe) throws Exception {
DummyAsyncBulkByScrollAction action = new DummyAsyncBulkByScrollAction();
boolean previousScrollSet = usually();
if (previousScrollSet) {
action.setScroll(scrollId());
}
String reason = randomSimpleString(random());
taskManager.cancel(testTask, reason, () -> {});
testMe.accept(action);
assertEquals(reason, listener.get().getReasonCancelled());
if (previousScrollSet) {
// Canceled tasks always start to clear the scroll before they die.
assertThat(client.scrollsCleared, contains(scrollId));
}
}
public void testScrollConsumableHitsResponseCanBeConsumedInChunks() {
List<ScrollableHitSource.BasicHit> hits = new ArrayList<>();
int numberOfHits = randomIntBetween(0, 300);
for (int i = 0; i < numberOfHits; i++) {
hits.add(new ScrollableHitSource.BasicHit("idx", "id-" + i, -1));
}
final ScrollableHitSource.Response scrollResponse = new ScrollableHitSource.Response(
false,
emptyList(),
hits.size(),
hits,
"scrollid"
);
final AbstractAsyncBulkByScrollAction.ScrollConsumableHitsResponse response =
new AbstractAsyncBulkByScrollAction.ScrollConsumableHitsResponse(new ScrollableHitSource.AsyncResponse() {
@Override
public ScrollableHitSource.Response response() {
return scrollResponse;
}
@Override
public void done(TimeValue extraKeepAlive) {}
});
assertThat(response.remainingHits(), equalTo(numberOfHits));
assertThat(response.hasRemainingHits(), equalTo(numberOfHits > 0));
int totalConsumedHits = 0;
while (response.hasRemainingHits()) {
final int numberOfHitsToConsume;
final List<? extends ScrollableHitSource.Hit> consumedHits;
if (randomBoolean()) {
numberOfHitsToConsume = numberOfHits - totalConsumedHits;
consumedHits = response.consumeRemainingHits();
} else {
numberOfHitsToConsume = randomIntBetween(1, numberOfHits - totalConsumedHits);
consumedHits = response.consumeHits(numberOfHitsToConsume);
}
assertThat(consumedHits.size(), equalTo(numberOfHitsToConsume));
assertThat(consumedHits, equalTo(hits.subList(totalConsumedHits, totalConsumedHits + numberOfHitsToConsume)));
totalConsumedHits += numberOfHitsToConsume;
assertThat(response.remainingHits(), equalTo(numberOfHits - totalConsumedHits));
}
assertThat(response.consumeRemainingHits().isEmpty(), equalTo(true));
}
public void testScrollConsumableHitsResponseErrorHandling() {
List<ScrollableHitSource.BasicHit> hits = new ArrayList<>();
int numberOfHits = randomIntBetween(2, 300);
for (int i = 0; i < numberOfHits; i++) {
hits.add(new ScrollableHitSource.BasicHit("idx", "id-" + i, -1));
}
final ScrollableHitSource.Response scrollResponse = new ScrollableHitSource.Response(
false,
emptyList(),
hits.size(),
hits,
"scrollid"
);
final AbstractAsyncBulkByScrollAction.ScrollConsumableHitsResponse response =
new AbstractAsyncBulkByScrollAction.ScrollConsumableHitsResponse(new ScrollableHitSource.AsyncResponse() {
@Override
public ScrollableHitSource.Response response() {
return scrollResponse;
}
@Override
public void done(TimeValue extraKeepAlive) {}
});
assertThat(response.remainingHits(), equalTo(numberOfHits));
assertThat(response.hasRemainingHits(), equalTo(true));
expectThrows(IllegalArgumentException.class, () -> response.consumeHits(-1));
expectThrows(IllegalArgumentException.class, () -> response.consumeHits(numberOfHits + 1));
if (randomBoolean()) {
response.consumeHits(numberOfHits - 1);
// Unable to consume more than remaining hits
expectThrows(IllegalArgumentException.class, () -> response.consumeHits(response.remainingHits() + 1));
response.consumeHits(1);
} else {
response.consumeRemainingHits();
}
expectThrows(IllegalArgumentException.class, () -> response.consumeHits(1));
}
public void testEnableScrollByDefault() {
var preparedSearchRequest = AbstractAsyncBulkByScrollAction.prepareSearchRequest(testRequest, false, false, false);
assertThat(preparedSearchRequest.scroll(), notNullValue());
}
public void testEnableScrollWhenMaxDocsIsGreaterThenScrollSize() {
testRequest.setMaxDocs(between(101, 1000));
testRequest.getSearchRequest().source().size(100);
var preparedSearchRequest = AbstractAsyncBulkByScrollAction.prepareSearchRequest(testRequest, false, false, false);
assertThat(preparedSearchRequest.scroll(), notNullValue());
}
public void testDisableScrollWhenMaxDocsIsLessThenScrollSize() {
testRequest.setMaxDocs(between(1, 100));
testRequest.getSearchRequest().source().size(100);
var preparedSearchRequest = AbstractAsyncBulkByScrollAction.prepareSearchRequest(testRequest, false, false, false);
assertThat(preparedSearchRequest.scroll(), nullValue());
}
public void testEnableScrollWhenProceedOnVersionConflict() {
testRequest.setMaxDocs(between(1, 110));
testRequest.getSearchRequest().source().size(100);
testRequest.setAbortOnVersionConflict(false);
var preparedSearchRequest = AbstractAsyncBulkByScrollAction.prepareSearchRequest(testRequest, false, false, false);
assertThat(preparedSearchRequest.scroll(), notNullValue());
}
/**
* Simulate a scroll response by setting the scroll id and firing the onScrollResponse method.
*/
private void simulateScrollResponse(
DummyAsyncBulkByScrollAction action,
long lastBatchTime,
int lastBatchSize,
ScrollableHitSource.Response response
) {
action.setScroll(scrollId());
action.onScrollResponse(
lastBatchTime,
lastBatchSize,
new AbstractAsyncBulkByScrollAction.ScrollConsumableHitsResponse(new ScrollableHitSource.AsyncResponse() {
@Override
public ScrollableHitSource.Response response() {
return response;
}
@Override
public void done(TimeValue extraKeepAlive) {
fail();
}
})
);
}
private
|
AsyncBulkByScrollActionTests
|
java
|
redisson__redisson
|
redisson/src/main/java/org/redisson/client/handler/ErrorsLoggingHandler.java
|
{
"start": 908,
"end": 1678
}
|
class ____ extends ChannelDuplexHandler {
private static final Logger log = LoggerFactory.getLogger(ErrorsLoggingHandler.class);
private static final Pattern IGNORABLE_ERROR_MESSAGE = Pattern.compile(
"^.*(?:connection.*(?:reset|closed|abort|broken)|broken.*pipe).*$", Pattern.CASE_INSENSITIVE);
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
if (cause instanceof IOException) {
String message = String.valueOf(cause.getMessage()).toLowerCase();
if (IGNORABLE_ERROR_MESSAGE.matcher(message).matches()) {
return;
}
}
log.error("Exception occured. Channel: {}", ctx.channel(), cause);
}
}
|
ErrorsLoggingHandler
|
java
|
apache__kafka
|
storage/src/main/java/org/apache/kafka/server/log/remote/quota/RLMQuotaManager.java
|
{
"start": 1636,
"end": 4438
}
|
class ____ {
private static final Logger LOGGER = LoggerFactory.getLogger(RLMQuotaManager.class);
private final RLMQuotaManagerConfig config;
private final Metrics metrics;
private final QuotaType quotaType;
private final String description;
private final Time time;
private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
private final SensorAccess sensorAccess;
private Quota quota;
public RLMQuotaManager(RLMQuotaManagerConfig config, Metrics metrics, QuotaType quotaType, String description, Time time) {
this.config = config;
this.metrics = metrics;
this.quotaType = quotaType;
this.description = description;
this.time = time;
this.quota = new Quota(config.quotaBytesPerSecond(), true);
this.sensorAccess = new SensorAccess(lock, metrics);
}
public void updateQuota(Quota newQuota) {
lock.writeLock().lock();
try {
this.quota = newQuota;
Map<MetricName, KafkaMetric> allMetrics = metrics.metrics();
MetricName quotaMetricName = metricName();
KafkaMetric metric = allMetrics.get(quotaMetricName);
if (metric != null) {
LOGGER.info("Sensor for quota-id {} already exists. Setting quota to {} in MetricConfig", quotaMetricName, newQuota);
metric.config(getQuotaMetricConfig(newQuota));
}
} finally {
lock.writeLock().unlock();
}
}
public long getThrottleTimeMs() {
Sensor sensorInstance = sensor();
try {
sensorInstance.checkQuotas();
} catch (QuotaViolationException qve) {
LOGGER.debug("Quota violated for sensor ({}), metric: ({}), metric-value: ({}), bound: ({})",
sensorInstance.name(), qve.metric().metricName(), qve.value(), qve.bound());
return QuotaUtils.throttleTime(qve, time.milliseconds());
}
return 0L;
}
public void record(double value) {
sensor().record(value, time.milliseconds(), false);
}
private MetricConfig getQuotaMetricConfig(Quota quota) {
return new MetricConfig()
.timeWindow(config.quotaWindowSizeSeconds(), TimeUnit.SECONDS)
.samples(config.numQuotaSamples())
.quota(quota);
}
private MetricName metricName() {
return metrics.metricName("byte-rate", quotaType.toString(), description, Map.of());
}
private Sensor sensor() {
return sensorAccess.getOrCreate(
quotaType.toString(),
RLMQuotaManagerConfig.INACTIVE_SENSOR_EXPIRATION_TIME_SECONDS,
sensor -> sensor.add(metricName(), new SimpleRate(), getQuotaMetricConfig(quota))
);
}
}
|
RLMQuotaManager
|
java
|
netty__netty
|
example/src/main/java/io/netty/example/stomp/websocket/StompVersion.java
|
{
"start": 816,
"end": 2115
}
|
enum ____ {
STOMP_V11("1.1", "v11.stomp"),
STOMP_V12("1.2", "v12.stomp");
public static final AttributeKey<StompVersion> CHANNEL_ATTRIBUTE_KEY = AttributeKey.valueOf("stomp_version");
public static final String SUB_PROTOCOLS;
static {
List<String> subProtocols = new ArrayList<String>(values().length);
for (StompVersion stompVersion : values()) {
subProtocols.add(stompVersion.subProtocol);
}
SUB_PROTOCOLS = StringUtil.join(",", subProtocols).toString();
}
private final String version;
private final String subProtocol;
StompVersion(String version, String subProtocol) {
this.version = version;
this.subProtocol = subProtocol;
}
public String version() {
return version;
}
public String subProtocol() {
return subProtocol;
}
public static StompVersion findBySubProtocol(String subProtocol) {
if (subProtocol != null) {
for (StompVersion stompVersion : values()) {
if (stompVersion.subProtocol().equals(subProtocol)) {
return stompVersion;
}
}
}
throw new IllegalArgumentException("Not found StompVersion for '" + subProtocol + "'");
}
}
|
StompVersion
|
java
|
elastic__elasticsearch
|
x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlAttributes.java
|
{
"start": 733,
"end": 3417
}
|
class ____ implements Releasable {
public static final String NAMEID_SYNTHENTIC_ATTRIBUTE = "nameid";
public static final String PERSISTENT_NAMEID_SYNTHENTIC_ATTRIBUTE = "nameid:persistent";
private final SamlNameId name;
private final String session;
private final List<SamlAttribute> attributes;
private final List<SamlPrivateAttribute> privateAttributes;
SamlAttributes(SamlNameId name, String session, List<SamlAttribute> attributes, List<SamlPrivateAttribute> privateAttributes) {
this.name = name;
this.session = session;
this.attributes = attributes;
this.privateAttributes = privateAttributes;
}
/**
* Finds all values for the specified attribute
*
* @param attributeId The name of the attribute - either its {@code name} or @{code friendlyName}
* @return A list of all matching attribute values (may be empty).
*/
List<String> getAttributeValues(String attributeId) {
if (Strings.isNullOrEmpty(attributeId)) {
return List.of();
}
if (attributeId.equals(NAMEID_SYNTHENTIC_ATTRIBUTE)) {
return name == null ? List.of() : List.of(name.value);
}
if (attributeId.equals(PERSISTENT_NAMEID_SYNTHENTIC_ATTRIBUTE) && name != null && NameIDType.PERSISTENT.equals(name.format)) {
return List.of(name.value);
}
return attributes.stream()
.filter(attr -> attributeId.equals(attr.name) || attributeId.equals(attr.friendlyName))
.flatMap(attr -> attr.values.stream())
.toList();
}
List<SecureString> getPrivateAttributeValues(String attributeId) {
if (Strings.isNullOrEmpty(attributeId)) {
return List.of();
}
return privateAttributes.stream()
.filter(attr -> attributeId.equals(attr.name) || attributeId.equals(attr.friendlyName))
.flatMap(attr -> attr.values.stream())
.toList();
}
List<SamlAttribute> attributes() {
return attributes;
}
List<SamlPrivateAttribute> privateAttributes() {
return privateAttributes;
}
boolean isEmpty() {
return attributes.isEmpty() && privateAttributes.isEmpty();
}
SamlNameId name() {
return name;
}
String session() {
return session;
}
@Override
public String toString() {
return getClass().getSimpleName() + "(" + name + ")[" + session + "]{" + attributes + "}{" + privateAttributes + "}";
}
@Override
public void close() {
IOUtils.closeWhileHandlingException(privateAttributes);
}
abstract static
|
SamlAttributes
|
java
|
alibaba__nacos
|
api/src/main/java/com/alibaba/nacos/api/naming/pojo/healthcheck/impl/Mysql.java
|
{
"start": 913,
"end": 2484
}
|
class ____ extends AbstractHealthChecker {
public static final String TYPE = "MYSQL";
private static final long serialVersionUID = 7928108094599401491L;
private String user;
private String pwd;
private String cmd;
public Mysql() {
super(Mysql.TYPE);
}
public String getCmd() {
return this.cmd;
}
public String getPwd() {
return this.pwd;
}
public String getUser() {
return this.user;
}
public void setUser(final String user) {
this.user = user;
}
public void setCmd(final String cmd) {
this.cmd = cmd;
}
public void setPwd(final String pwd) {
this.pwd = pwd;
}
@Override
public int hashCode() {
return Objects.hash(user, pwd, cmd);
}
@Override
public boolean equals(final Object obj) {
if (!(obj instanceof Mysql)) {
return false;
}
final Mysql other = (Mysql) obj;
if (!StringUtils.equals(user, other.getUser())) {
return false;
}
if (!StringUtils.equals(pwd, other.getPwd())) {
return false;
}
return StringUtils.equals(cmd, other.getCmd());
}
@Override
public Mysql clone() throws CloneNotSupportedException {
final Mysql config = new Mysql();
config.setUser(getUser());
config.setPwd(getPwd());
config.setCmd(getCmd());
return config;
}
}
|
Mysql
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/MiloServerEndpointBuilderFactory.java
|
{
"start": 1956,
"end": 7808
}
|
interface ____
extends
EndpointConsumerBuilder {
default MiloServerEndpointConsumerBuilder basic() {
return (MiloServerEndpointConsumerBuilder) this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer (advanced)
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default AdvancedMiloServerEndpointConsumerBuilder bridgeErrorHandler(boolean bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: consumer (advanced)
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default AdvancedMiloServerEndpointConsumerBuilder bridgeErrorHandler(String bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option is a: <code>org.apache.camel.spi.ExceptionHandler</code>
* type.
*
* Group: consumer (advanced)
*
* @param exceptionHandler the value to set
* @return the dsl builder
*/
default AdvancedMiloServerEndpointConsumerBuilder exceptionHandler(org.apache.camel.spi.ExceptionHandler exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option will be converted to a
* <code>org.apache.camel.spi.ExceptionHandler</code> type.
*
* Group: consumer (advanced)
*
* @param exceptionHandler the value to set
* @return the dsl builder
*/
default AdvancedMiloServerEndpointConsumerBuilder exceptionHandler(String exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option is a: <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*
* @param exchangePattern the value to set
* @return the dsl builder
*/
default AdvancedMiloServerEndpointConsumerBuilder exchangePattern(org.apache.camel.ExchangePattern exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option will be converted to a
* <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*
* @param exchangePattern the value to set
* @return the dsl builder
*/
default AdvancedMiloServerEndpointConsumerBuilder exchangePattern(String exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
}
/**
* Builder for endpoint producers for the OPC UA Server component.
*/
public
|
AdvancedMiloServerEndpointConsumerBuilder
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/type/format/jackson/JacksonIntegration.java
|
{
"start": 3633,
"end": 3805
}
|
class ____ in a Jakarta EE deployment.
// We have to check if the ObjectMapper that is visible to Hibernate ORM is the same that is visible
// to the application
|
loader
|
java
|
spring-projects__spring-framework
|
spring-websocket/src/main/java/org/springframework/web/socket/handler/PerConnectionWebSocketHandler.java
|
{
"start": 1446,
"end": 1755
}
|
class ____ create for each connection, and then pass it to any
* API method that expects a {@link WebSocketHandler}.
*
* <p>If initializing the target {@link WebSocketHandler} type requires a Spring
* BeanFactory, then the {@link #setBeanFactory(BeanFactory)} property accordingly. Simply
* declaring this
|
to
|
java
|
apache__flink
|
flink-libraries/flink-cep/src/main/java/org/apache/flink/cep/nfa/NFAStateSerializerSnapshot.java
|
{
"start": 1351,
"end": 4465
}
|
class ____
extends CompositeTypeSerializerSnapshot<NFAState, NFAStateSerializer> {
private static final int CURRENT_VERSION = 2;
private static final int FIRST_VERSION_WITH_PREVIOUS_TIMESTAMP = 2;
private boolean supportsPreviousTimestamp = true;
/** Constructor for read instantiation. */
public NFAStateSerializerSnapshot() {}
/** Constructor to create the snapshot for writing. */
public NFAStateSerializerSnapshot(NFAStateSerializer serializerInstance) {
super(serializerInstance);
supportsPreviousTimestamp = serializerInstance.isSupportsPreviousTimestamp();
}
@Override
protected int getCurrentOuterSnapshotVersion() {
return CURRENT_VERSION;
}
@Override
protected void readOuterSnapshot(
int readOuterSnapshotVersion, DataInputView in, ClassLoader userCodeClassLoader)
throws IOException {
if (readOuterSnapshotVersion < FIRST_VERSION_WITH_PREVIOUS_TIMESTAMP) {
supportsPreviousTimestamp = false;
} else {
supportsPreviousTimestamp = in.readBoolean();
}
}
@Override
protected void writeOuterSnapshot(DataOutputView out) throws IOException {
out.writeBoolean(supportsPreviousTimestamp);
}
@Override
protected OuterSchemaCompatibility resolveOuterSchemaCompatibility(
TypeSerializerSnapshot<NFAState> oldSerializerSnapshot) {
if (!(oldSerializerSnapshot instanceof NFAStateSerializerSnapshot)) {
return OuterSchemaCompatibility.INCOMPATIBLE;
}
if (supportsPreviousTimestamp
!= ((NFAStateSerializerSnapshot) oldSerializerSnapshot).supportsPreviousTimestamp) {
return OuterSchemaCompatibility.COMPATIBLE_AFTER_MIGRATION;
}
return OuterSchemaCompatibility.COMPATIBLE_AS_IS;
}
@Override
protected TypeSerializer<?>[] getNestedSerializers(NFAStateSerializer outerSerializer) {
TypeSerializer<DeweyNumber> versionSerializer = outerSerializer.getVersionSerializer();
TypeSerializer<NodeId> nodeIdSerializer = outerSerializer.getNodeIdSerializer();
TypeSerializer<EventId> eventIdSerializer = outerSerializer.getEventIdSerializer();
return new TypeSerializer[] {versionSerializer, nodeIdSerializer, eventIdSerializer};
}
@Override
protected NFAStateSerializer createOuterSerializerWithNestedSerializers(
TypeSerializer<?>[] nestedSerializers) {
@SuppressWarnings("unchecked")
TypeSerializer<DeweyNumber> versionSerializer =
(TypeSerializer<DeweyNumber>) nestedSerializers[0];
@SuppressWarnings("unchecked")
TypeSerializer<NodeId> nodeIdSerializer = (TypeSerializer<NodeId>) nestedSerializers[1];
@SuppressWarnings("unchecked")
TypeSerializer<EventId> eventIdSerializer = (TypeSerializer<EventId>) nestedSerializers[2];
return new NFAStateSerializer(
versionSerializer, nodeIdSerializer, eventIdSerializer, supportsPreviousTimestamp);
}
}
|
NFAStateSerializerSnapshot
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/tofix/ObjectIdDeserializationFailTest.java
|
{
"start": 1186,
"end": 1277
}
|
class ____ {
public List<DefensiveEmployee> employees;
static
|
DefensiveCompany
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/deser/creators/TestCustomValueInstDefaults.java
|
{
"start": 15083,
"end": 23037
}
|
class ____ extends SimpleModule {
public ClassWith32Module() {
super("test", Version.unknownVersion());
}
@Override
public void setupModule(SetupContext context) {
context.addValueInstantiators(new ValueInstantiators.Base() {
@Override
public ValueInstantiator modifyValueInstantiator(DeserializationConfig config,
BeanDescription.Supplier beanDescRef, ValueInstantiator defaultInstantiator) {
if (beanDescRef.getBeanClass() == ClassWith32Props.class) {
return new VerifyingValueInstantiator((StdValueInstantiator)
defaultInstantiator);
}
return defaultInstantiator;
}
});
}
}
/*
/**********************************************************
/* Test methods
/**********************************************************
*/
// When all values are in the source, no defaults should be used.
@Test
public void testAllPresent() throws Exception
{
ObjectMapper mapper = jsonMapperBuilder()
.addModule(new BucketModule())
.build();
Bucket allPresent = mapper.readValue(
"{\"a\":8,\"b\":9,\"c\":\"y\",\"d\":\"z\"}",
Bucket.class);
assertEquals(8, allPresent.a);
assertEquals(9, allPresent.b);
assertEquals("y", allPresent.c);
assertEquals("z", allPresent.d);
}
// When no values are in the source, all defaults should be used.
@Test
public void testAllAbsent() throws Exception
{
ObjectMapper mapper = jsonMapperBuilder()
.addModule(new BucketModule())
.build();
Bucket allAbsent = mapper.readValue(
"{}",
Bucket.class);
assertEquals(Bucket.DEFAULT_A, allAbsent.a);
assertEquals(Bucket.DEFAULT_B, allAbsent.b);
assertEquals(Bucket.DEFAULT_C, allAbsent.c);
assertEquals(Bucket.DEFAULT_D, allAbsent.d);
}
// When some values are in the source and some are not, defaults should only
// be used for the missing values.
@Test
public void testMixedPresentAndAbsent() throws Exception
{
ObjectMapper mapper = jsonMapperBuilder()
.addModule(new BucketModule())
.build();
Bucket aAbsent = mapper.readValue(
"{\"b\":9,\"c\":\"y\",\"d\":\"z\"}",
Bucket.class);
assertEquals(Bucket.DEFAULT_A, aAbsent.a);
assertEquals(9, aAbsent.b);
assertEquals("y", aAbsent.c);
assertEquals("z", aAbsent.d);
Bucket bAbsent = mapper.readValue(
"{\"a\":8,\"c\":\"y\",\"d\":\"z\"}",
Bucket.class);
assertEquals(8, bAbsent.a);
assertEquals(Bucket.DEFAULT_B, bAbsent.b);
assertEquals("y", bAbsent.c);
assertEquals("z", bAbsent.d);
Bucket cAbsent = mapper.readValue(
"{\"a\":8,\"b\":9,\"d\":\"z\"}",
Bucket.class);
assertEquals(8, cAbsent.a);
assertEquals(9, cAbsent.b);
assertEquals(Bucket.DEFAULT_C, cAbsent.c);
assertEquals("z", cAbsent.d);
Bucket dAbsent = mapper.readValue(
"{\"a\":8,\"b\":9,\"c\":\"y\"}",
Bucket.class);
assertEquals(8, dAbsent.a);
assertEquals(9, dAbsent.b);
assertEquals("y", dAbsent.c);
assertEquals(Bucket.DEFAULT_D, dAbsent.d);
}
// Ensure that 0 is not mistaken for a missing int value.
@Test
public void testPresentZeroPrimitive() throws Exception
{
ObjectMapper mapper = jsonMapperBuilder()
.addModule(new BucketModule())
.build();
Bucket aZeroRestAbsent = mapper.readValue(
"{\"a\":0}",
Bucket.class);
assertEquals(0, aZeroRestAbsent.a);
assertEquals(Bucket.DEFAULT_B, aZeroRestAbsent.b);
assertEquals(Bucket.DEFAULT_C, aZeroRestAbsent.c);
assertEquals(Bucket.DEFAULT_D, aZeroRestAbsent.d);
}
// Ensure that null is not mistaken for a missing String value.
@Test
public void testPresentNullReference() throws Exception
{
ObjectMapper mapper = jsonMapperBuilder()
.addModule(new BucketModule())
.build();
Bucket cNullRestAbsent = mapper.readValue(
"{\"c\":null}",
Bucket.class);
assertEquals(Bucket.DEFAULT_A, cNullRestAbsent.a);
assertEquals(Bucket.DEFAULT_B, cNullRestAbsent.b);
assertEquals(null, cNullRestAbsent.c);
assertEquals(Bucket.DEFAULT_D, cNullRestAbsent.d);
}
// When we have more than 32 creator parameters, the buffer will use a
// BitSet instead of a primitive int to keep track of which parameters it
// has seen. Ensure that nothing breaks in that case.
@Test
public void testMoreThan32CreatorParams() throws Exception
{
ObjectMapper mapper = jsonMapperBuilder()
.addModule(new BucketModule())
.build();
BigBucket big = mapper.readValue(
"{\"i03\":0,\"i11\":1,\"s05\":null,\"s08\":\"x\"}",
BigBucket.class);
assertEquals(BigBucket.DEFAULT_I, big.i01);
assertEquals(BigBucket.DEFAULT_I, big.i02);
assertEquals(0, big.i03);
assertEquals(BigBucket.DEFAULT_I, big.i04);
assertEquals(BigBucket.DEFAULT_I, big.i05);
assertEquals(BigBucket.DEFAULT_I, big.i06);
assertEquals(BigBucket.DEFAULT_I, big.i07);
assertEquals(BigBucket.DEFAULT_I, big.i08);
assertEquals(BigBucket.DEFAULT_I, big.i09);
assertEquals(BigBucket.DEFAULT_I, big.i10);
assertEquals(1, big.i11);
assertEquals(BigBucket.DEFAULT_I, big.i12);
assertEquals(BigBucket.DEFAULT_I, big.i13);
assertEquals(BigBucket.DEFAULT_I, big.i14);
assertEquals(BigBucket.DEFAULT_I, big.i15);
assertEquals(BigBucket.DEFAULT_I, big.i16);
assertEquals(BigBucket.DEFAULT_S, big.s01);
assertEquals(BigBucket.DEFAULT_S, big.s02);
assertEquals(BigBucket.DEFAULT_S, big.s03);
assertEquals(BigBucket.DEFAULT_S, big.s04);
assertEquals(null, big.s05);
assertEquals(BigBucket.DEFAULT_S, big.s06);
assertEquals(BigBucket.DEFAULT_S, big.s07);
assertEquals("x", big.s08);
assertEquals(BigBucket.DEFAULT_S, big.s09);
assertEquals(BigBucket.DEFAULT_S, big.s10);
assertEquals(BigBucket.DEFAULT_S, big.s11);
assertEquals(BigBucket.DEFAULT_S, big.s12);
assertEquals(BigBucket.DEFAULT_S, big.s13);
assertEquals(BigBucket.DEFAULT_S, big.s14);
assertEquals(BigBucket.DEFAULT_S, big.s15);
assertEquals(BigBucket.DEFAULT_S, big.s16);
}
// [databind#1432]
@Test
public void testClassWith32CreatorParams() throws Exception
{
StringBuilder sb = new StringBuilder()
.append("{\n");
for (int i = 1; i <= 32; ++i) {
sb.append("\"p").append(i).append("\" : \"NotNull")
.append(i).append("\"");
if (i < 32) {
sb.append(",\n");
}
}
sb.append("\n}\n");
String json = sb.toString();
ObjectMapper mapper = jsonMapperBuilder()
.addModule(new ClassWith32Module())
.build();
ClassWith32Props result = mapper.readValue(json, ClassWith32Props.class);
// let's assume couple of first, last ones suffice
assertEquals("NotNull1", result.p1);
assertEquals("NotNull2", result.p2);
assertEquals("NotNull31", result.p31);
assertEquals("NotNull32", result.p32);
}
}
|
ClassWith32Module
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/entitygraph/LoadGraphMergeTest.java
|
{
"start": 1009,
"end": 3863
}
|
class ____ {
private static final Long PARENT_ID_1 = 1L;
private static final Long PARENT_ID_2 = 2L;
@BeforeAll
public static void init(EntityManagerFactoryScope scope) {
scope.inTransaction(
entityManager -> {
GrandChild grandChild = new GrandChild( 1L, "grand child 1" );
Child child = new Child( 1L, grandChild );
Parent parent = new Parent( PARENT_ID_1, child );
entityManager.persist( parent );
GrandChild grandChild2 = new GrandChild( 2L, "grand child 2" );
Child child2 = new Child( 2L, grandChild2 );
Parent parent2 = new Parent( PARENT_ID_2, child2 );
entityManager.persist( parent2 );
}
);
}
@Test
public void testGrandChildHasNotBeenInitializedByMerge(EntityManagerFactoryScope scope) {
Parent parent = scope.fromTransaction( entityManager ->
entityManager.find( LoadGraphMergeTest_.Parent_._parent_child, PARENT_ID_1 )
);
Parent parent2 = scope.fromTransaction( entityManager ->
entityManager.find( LoadGraphMergeTest_.Parent_._parent_child, PARENT_ID_2)
);
scope.inTransaction( entityManager -> {
assertTrue( Hibernate.isInitialized( parent.getChild() ) );
assertFalse( Hibernate.isInitialized( parent.getChild().getGrandChild() ) );
Session session = entityManager.unwrap( Session.class );
Parent mergedParent = session.merge( parent, LoadGraphMergeTest_.Parent_._parent_child );
Child child = mergedParent.getChild();
assertTrue( Hibernate.isInitialized( child ) );
assertFalse( Hibernate.isInitialized( child.getGrandChild() ),
"Merge has initialized `parent.child` lazy association" );
assertTrue( Hibernate.isInitialized( parent2.getChild() ) );
assertFalse( Hibernate.isInitialized( parent2.getChild().getGrandChild() ) );
Parent mergedParent2 = session.merge( parent2 );
Child child2 = mergedParent2.getChild();
assertTrue( Hibernate.isInitialized( child2 ) );
assertTrue( Hibernate.isInitialized( child2.getGrandChild() ) );
} );
}
@Test
public void testChildHasNotBeenInitializedByMerge(EntityManagerFactoryScope scope) {
Parent parent = scope.fromTransaction( entityManager ->
entityManager.find(
Parent.class,
PARENT_ID_1 )
);
scope.inTransaction( entityManager -> {
Child child1 = parent.getChild();
assertFalse( Hibernate.isInitialized( child1 ) );
Session session = entityManager.unwrap( Session.class );
Parent mergedParent = session.merge( parent, LoadGraphMergeTest_.Parent_._parent );
Child child = mergedParent.getChild();
assertFalse( Hibernate.isInitialized( child ),
"Merge has initialized `parent.child` lazy association" );
} );
}
@Entity(name = "Parent")
@NamedEntityGraph(
name = "parent.child",
attributeNodes = @NamedAttributeNode("child")
)
@NamedEntityGraph(
name = "parent"
)
public static
|
LoadGraphMergeTest
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/messages/FlinkJobNotFoundException.java
|
{
"start": 1032,
"end": 1281
}
|
class ____ extends FlinkException {
private static final long serialVersionUID = 2294698055059659025L;
public FlinkJobNotFoundException(JobID jobId) {
super("Could not find Flink job (" + jobId + ')');
}
}
|
FlinkJobNotFoundException
|
java
|
quarkusio__quarkus
|
integration-tests/cache/src/test/java/io/quarkus/it/cache/TreeITCase.java
|
{
"start": 115,
"end": 157
}
|
class ____ extends TreeTestCase {
}
|
TreeITCase
|
java
|
apache__flink
|
flink-core/src/test/java/org/apache/flink/api/java/typeutils/runtime/TupleSerializerUpgradeTest.java
|
{
"start": 1511,
"end": 2471
}
|
class ____
extends TypeSerializerUpgradeTestBase<
Tuple3<String, String, Integer>, Tuple3<String, String, Integer>> {
public Collection<TestSpecification<?, ?>> createTestSpecifications(FlinkVersion flinkVersion)
throws Exception {
ArrayList<TestSpecification<?, ?>> testSpecifications = new ArrayList<>();
testSpecifications.add(
new TestSpecification<>(
"tuple-serializer",
flinkVersion,
TupleSerializerSetup.class,
TupleSerializerVerifier.class));
return testSpecifications;
}
// ----------------------------------------------------------------------------------------------
// Specification for "tuple-serializer"
// ----------------------------------------------------------------------------------------------
public static final
|
TupleSerializerUpgradeTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/promql/TranslatePromqlToTimeSeriesAggregate.java
|
{
"start": 4251,
"end": 16787
}
|
class ____ extends OptimizerRules.OptimizerRule<PromqlCommand> {
public static final Duration DEFAULT_LOOKBACK = Duration.ofMinutes(5);
public TranslatePromqlToTimeSeriesAggregate() {
super(OptimizerRules.TransformDirection.UP);
}
@Override
protected LogicalPlan rule(PromqlCommand promqlCommand) {
// Safety check: this should never occur as the parser should reject PromQL when disabled,
// but we check here as an additional safety measure
if (PromqlFeatures.isEnabled() == false) {
throw new EsqlIllegalArgumentException(
"PromQL translation attempted but feature is disabled. This should have been caught by the parser."
);
}
// Extract the promqlPlan from the container
LogicalPlan promqlPlan = promqlCommand.promqlPlan();
// first replace the Placeholder relation with the child plan
promqlPlan = promqlPlan.transformUp(PlaceholderRelation.class, pr -> withTimestampFilter(promqlCommand, promqlCommand.child()));
// Translate based on plan type by converting the plan bottom-up
return map(promqlCommand, promqlPlan).plan();
}
private static LogicalPlan withTimestampFilter(PromqlCommand promqlCommand, LogicalPlan plan) {
// start and end are either both set or both null
if (promqlCommand.start().value() != null && promqlCommand.end().value() != null) {
Source promqlSource = promqlCommand.source();
Expression timestamp = promqlCommand.timestamp();
plan = new Filter(
promqlSource,
plan,
new And(
promqlSource,
new GreaterThanOrEqual(promqlSource, timestamp, promqlCommand.start()),
new LessThanOrEqual(promqlSource, timestamp, promqlCommand.end())
)
);
}
return plan;
}
private record MapResult(LogicalPlan plan, Map<String, Expression> extras) {}
// Will pattern match on PromQL plan types:
// - AcrossSeriesAggregate -> Aggregate over TimeSeriesAggregate
// - WithinSeriesAggregate -> TimeSeriesAggregate
// - Selector -> EsRelation + Filter
private static MapResult map(PromqlCommand promqlCommand, LogicalPlan p) {
if (p instanceof Selector selector) {
return mapSelector(selector);
}
if (p instanceof PromqlFunctionCall functionCall) {
return mapFunction(promqlCommand, functionCall);
}
throw new QlIllegalArgumentException("Unsupported PromQL plan node: {}", p);
}
private static MapResult mapSelector(Selector selector) {
// Create a placeholder relation to be replaced later
var matchers = selector.labelMatchers();
Expression matcherCondition = translateLabelMatchers(selector.source(), selector.labels(), matchers);
List<Expression> selectorConditions = new ArrayList<>();
// name into is not null
selectorConditions.add(new IsNotNull(selector.source(), selector.series()));
// convert the matchers into a filter expression
if (matcherCondition != null) {
selectorConditions.add(matcherCondition);
}
Map<String, Expression> extras = new HashMap<>();
extras.put("field", selector.series());
// return the condition as filter
LogicalPlan p = new Filter(selector.source(), selector.child(), Predicates.combineAnd(selectorConditions));
return new MapResult(p, extras);
}
private static MapResult mapFunction(PromqlCommand promqlCommand, PromqlFunctionCall functionCall) {
MapResult childResult = map(promqlCommand, functionCall.child());
Map<String, Expression> extras = childResult.extras;
MapResult result;
Expression target = extras.get("field"); // nested expression
if (functionCall instanceof WithinSeriesAggregate withinAggregate) {
// expects selector
Function esqlFunction = PromqlFunctionRegistry.INSTANCE.buildEsqlFunction(
withinAggregate.functionName(),
withinAggregate.source(),
List.of(target, promqlCommand.timestamp())
);
extras.put("field", esqlFunction);
result = new MapResult(childResult.plan, extras);
} else if (functionCall instanceof AcrossSeriesAggregate acrossAggregate) {
List<NamedExpression> aggs = new ArrayList<>();
List<Expression> groupings = new ArrayList<>(acrossAggregate.groupings().size());
Alias stepBucket = createStepBucketAlias(promqlCommand, acrossAggregate);
initAggregatesAndGroupings(acrossAggregate, target, aggs, groupings, stepBucket.toAttribute());
LogicalPlan p = childResult.plan;
p = new Eval(stepBucket.source(), p, List.of(stepBucket));
TimeSeriesAggregate tsAggregate = new TimeSeriesAggregate(acrossAggregate.source(), p, groupings, aggs, null);
p = tsAggregate;
// ToDouble conversion of the metric using an eval to ensure a consistent output type
Alias convertedValue = new Alias(
acrossAggregate.source(),
acrossAggregate.sourceText(),
new ToDouble(acrossAggregate.source(), p.output().getFirst().toAttribute()),
acrossAggregate.valueId()
);
p = new Eval(acrossAggregate.source(), p, List.of(convertedValue));
// Project to maintain the correct output order, as declared in AcrossSeriesAggregate#output:
// [value, step, ...groupings]
List<NamedExpression> projections = new ArrayList<>();
projections.add(convertedValue.toAttribute());
List<Attribute> output = tsAggregate.output();
for (int i = 1; i < output.size(); i++) {
projections.add(output.get(i));
}
p = new Project(acrossAggregate.source(), p, projections);
result = new MapResult(p, extras);
} else {
throw new QlIllegalArgumentException("Unsupported PromQL function call: {}", functionCall);
}
return result;
}
private static void initAggregatesAndGroupings(
AcrossSeriesAggregate acrossAggregate,
Expression target,
List<NamedExpression> aggs,
List<Expression> groupings,
Attribute stepBucket
) {
// main aggregation
Function esqlFunction = PromqlFunctionRegistry.INSTANCE.buildEsqlFunction(
acrossAggregate.functionName(),
acrossAggregate.source(),
List.of(target)
);
Alias value = new Alias(acrossAggregate.source(), acrossAggregate.sourceText(), esqlFunction);
aggs.add(value);
// timestamp/step
aggs.add(stepBucket);
groupings.add(stepBucket);
// additional groupings (by)
for (NamedExpression grouping : acrossAggregate.groupings()) {
aggs.add(grouping);
groupings.add(grouping.toAttribute());
}
}
private static Alias createStepBucketAlias(PromqlCommand promqlCommand, AcrossSeriesAggregate acrossAggregate) {
Expression timeBucketSize;
if (promqlCommand.isRangeQuery()) {
timeBucketSize = promqlCommand.step();
} else {
// use default lookback for instant queries
timeBucketSize = Literal.timeDuration(promqlCommand.source(), DEFAULT_LOOKBACK);
}
Bucket b = new Bucket(
promqlCommand.source(),
promqlCommand.timestamp(),
timeBucketSize,
null,
null,
ConfigurationAware.CONFIGURATION_MARKER
);
return new Alias(b.source(), "step", b, acrossAggregate.stepId());
}
/**
* Translates PromQL label matchers into ESQL filter expressions.
*
* Uses AutomatonUtils to detect optimizable patterns:
* - Exact match β field == "value"
* - Prefix pattern (prefix.*) β field STARTS_WITH "prefix"
* - Suffix pattern (.*suffix) β field ENDS_WITH "suffix"
* - Simple alternation (a|b|c) β field IN ("a", "b", "c")
* - Disjoint prefixes β field STARTS_WITH "p1" OR field STARTS_WITH "p2"
* - Disjoint suffixes β field ENDS_WITH "s1" OR field ENDS_WITH "s2"
* - Complex patterns β field RLIKE "pattern"
*
* @param source the source location for error reporting
* @param labelMatchers the PromQL label matchers to translate
* @return an ESQL Expression combining all label matcher conditions with AND
*/
static Expression translateLabelMatchers(Source source, List<Expression> fields, LabelMatchers labelMatchers) {
List<Expression> conditions = new ArrayList<>();
boolean hasNameMatcher = false;
var matchers = labelMatchers.matchers();
for (int i = 0, s = matchers.size(); i < s; i++) {
LabelMatcher matcher = matchers.get(i);
// special handling for name label
if (LabelMatcher.NAME.equals(matcher.name())) {
hasNameMatcher = true;
} else {
Expression field = fields.get(hasNameMatcher ? i - 1 : i); // adjust index if name matcher was seen
Expression condition = translateLabelMatcher(source, field, matcher);
if (condition != null) {
conditions.add(condition);
}
}
}
// could happen in case of an optimization that removes all matchers
if (conditions.isEmpty()) {
return null;
}
return Predicates.combineAnd(conditions);
}
/**
* Translates a single PromQL label matcher into an ESQL filter expression.
*
* @param source the source location
* @param matcher the label matcher to translate
* @return the ESQL Expression, or null if the matcher matches all or none
*/
private static Expression translateLabelMatcher(Source source, Expression field, LabelMatcher matcher) {
// Check for universal matchers
if (matcher.matchesAll()) {
return Literal.fromBoolean(source, true); // No filter needed (matches everything)
}
if (matcher.matchesNone()) {
// This is effectively FALSE - could use a constant false expression
return Literal.fromBoolean(source, false);
}
// Try to extract exact match
String exactMatch = AutomatonUtils.matchesExact(matcher.automaton());
if (exactMatch != null) {
return new Equals(source, field, Literal.keyword(source, exactMatch));
}
// Try to extract disjoint patterns (handles mixed prefix/suffix/exact)
List<AutomatonUtils.PatternFragment> fragments = AutomatonUtils.extractFragments(matcher.value());
if (fragments != null && fragments.isEmpty() == false) {
return translateDisjointPatterns(source, field, fragments);
}
// Fallback to RLIKE with the full automaton pattern
// Note: We need to ensure the pattern is properly anchored for PromQL semantics
return new RLike(source, field, new RLikePattern(matcher.toString()));
}
/**
* Translates disjoint pattern fragments into optimized ESQL expressions.
*
* Homogeneous patterns (all same type):
* - All EXACT β field IN ("a", "b", "c")
* - All PREFIX β field STARTS_WITH "p1" OR field STARTS_WITH "p2" ...
* - All SUFFIX β field ENDS_WITH "s1" OR field ENDS_WITH "s2" ...
*
* Heterogeneous patterns:
* - Mixed β (field == "exact") OR (field STARTS_WITH "prefix") OR (field ENDS_WITH "suffix") OR (field RLIKE "regex")
*
* Fragments are sorted by type for optimal query execution order:
* 1. EXACT (most selective, can use IN clause)
* 2. PREFIX (index-friendly)
* 3. SUFFIX (index-friendly)
* 4. REGEX (least selective, fallback)
*
* @param source the source location
* @param field the field attribute
* @param fragments the list of pattern fragments
* @return the ESQL Expression combining all fragments
*/
private static Expression translateDisjointPatterns(Source source, Expression field, List<AutomatonUtils.PatternFragment> fragments) {
// Sort fragments by type priority using
|
TranslatePromqlToTimeSeriesAggregate
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/test/java/org/hibernate/orm/test/envers/entities/components/Component1.java
|
{
"start": 269,
"end": 1301
}
|
class ____ {
private String str1;
private String str2;
public Component1(String str1, String str2) {
this.str1 = str1;
this.str2 = str2;
}
public Component1() {
}
public String getStr2() {
return str2;
}
public void setStr2(String str2) {
this.str2 = str2;
}
public String getStr1() {
return str1;
}
public void setStr1(String str1) {
this.str1 = str1;
}
public boolean equals(Object o) {
if ( this == o ) {
return true;
}
if ( !(o instanceof Component1) ) {
return false;
}
Component1 that = (Component1) o;
if ( str1 != null ? !str1.equals( that.str1 ) : that.str1 != null ) {
return false;
}
if ( str2 != null ? !str2.equals( that.str2 ) : that.str2 != null ) {
return false;
}
return true;
}
public int hashCode() {
int result;
result = (str1 != null ? str1.hashCode() : 0);
result = 31 * result + (str2 != null ? str2.hashCode() : 0);
return result;
}
public String toString() {
return "Comp1(str1 = " + str1 + ", " + str2 + ")";
}
}
|
Component1
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/UnnecessaryLambdaTest.java
|
{
"start": 4624,
"end": 5089
}
|
class ____ {
private void f(String x) {
System.err.println(x);
}
void g() {
Consumer<String> f = this::f;
f("world");
}
}
""")
.doTest();
}
@Test
public void variable_instance() {
testHelper
.addInputLines(
"Test.java",
"""
import java.util.function.Function;
|
Test
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/nullness/ReturnMissingNullableTest.java
|
{
"start": 1997,
"end": 2645
}
|
class ____ {
public String getMessage(boolean b) {
if (b) {
// BUG: Diagnostic contains: @Nullable
return (null);
} else {
return "negative";
}
}
}
""")
.doTest();
}
@Test
public void assignmentOfLiteralNullReturn() {
createCompilationTestHelper()
.addSourceLines(
"com/google/errorprone/bugpatterns/nullness/LiteralNullReturnTest.java",
"""
package com.google.errorprone.bugpatterns.nullness;
public
|
LiteralNullReturnTest
|
java
|
spring-projects__spring-framework
|
spring-webflux/src/main/java/org/springframework/web/reactive/function/client/WebClientResponseException.java
|
{
"start": 15825,
"end": 16379
}
|
class ____ extends WebClientResponseException {
UnprocessableContent(
String statusText, HttpHeaders headers, byte[] body, @Nullable Charset charset,
@Nullable HttpRequest request) {
super(HttpStatus.UNPROCESSABLE_CONTENT, statusText, headers, body, charset, request);
}
}
/**
* {@link WebClientResponseException} for status HTTP 422 Unprocessable Entity.
* @since 5.1
* @deprecated since 7.0 in favor of {@link UnprocessableContent}
*/
@SuppressWarnings("serial")
@Deprecated(since = "7.0")
public static
|
UnprocessableContent
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/util/RegularToMutableObjectIterator.java
|
{
"start": 996,
"end": 2028
}
|
class ____<T> implements MutableObjectIterator<T> {
private final Iterator<T> iterator;
private final TypeSerializer<T> serializer;
public RegularToMutableObjectIterator(Iterator<T> iterator, TypeSerializer<T> serializer) {
this.iterator = iterator;
this.serializer = serializer;
}
@Override
public T next(T reuse) {
// -----------------------------------------------------------------------------------------
// IMPORTANT: WE NEED TO COPY INTO THE REUSE OBJECT TO SIMULATE THE MUTABLE OBJECT RUNTIME
// -----------------------------------------------------------------------------------------
if (this.iterator.hasNext()) {
return this.serializer.copy(this.iterator.next(), reuse);
} else {
return null;
}
}
@Override
public T next() {
if (this.iterator.hasNext()) {
return this.iterator.next();
} else {
return null;
}
}
}
|
RegularToMutableObjectIterator
|
java
|
dropwizard__dropwizard
|
dropwizard-health/src/main/java/io/dropwizard/health/HealthCheckType.java
|
{
"start": 38,
"end": 84
}
|
enum ____ {
ALIVE,
READY
}
|
HealthCheckType
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/GoogleBigQueryEndpointBuilderFactory.java
|
{
"start": 6823,
"end": 9392
}
|
interface ____ {
/**
* Google BigQuery (camel-google-bigquery)
* Google BigQuery data warehouse for analytics.
*
* Category: cloud,bigdata
* Since: 2.20
* Maven coordinates: org.apache.camel:camel-google-bigquery
*
* @return the dsl builder for the headers' name.
*/
default GoogleBigQueryHeaderNameBuilder googleBigquery() {
return GoogleBigQueryHeaderNameBuilder.INSTANCE;
}
/**
* Google BigQuery (camel-google-bigquery)
* Google BigQuery data warehouse for analytics.
*
* Category: cloud,bigdata
* Since: 2.20
* Maven coordinates: org.apache.camel:camel-google-bigquery
*
* Syntax: <code>google-bigquery:projectId:datasetId:tableId</code>
*
* Path parameter: projectId (required)
* Google Cloud Project Id
*
* Path parameter: datasetId (required)
* BigQuery Dataset Id
*
* Path parameter: tableId
* BigQuery table id
*
* @param path projectId:datasetId:tableId
* @return the dsl builder
*/
default GoogleBigQueryEndpointBuilder googleBigquery(String path) {
return GoogleBigQueryEndpointBuilderFactory.endpointBuilder("google-bigquery", path);
}
/**
* Google BigQuery (camel-google-bigquery)
* Google BigQuery data warehouse for analytics.
*
* Category: cloud,bigdata
* Since: 2.20
* Maven coordinates: org.apache.camel:camel-google-bigquery
*
* Syntax: <code>google-bigquery:projectId:datasetId:tableId</code>
*
* Path parameter: projectId (required)
* Google Cloud Project Id
*
* Path parameter: datasetId (required)
* BigQuery Dataset Id
*
* Path parameter: tableId
* BigQuery table id
*
* @param componentName to use a custom component name for the endpoint
* instead of the default name
* @param path projectId:datasetId:tableId
* @return the dsl builder
*/
default GoogleBigQueryEndpointBuilder googleBigquery(String componentName, String path) {
return GoogleBigQueryEndpointBuilderFactory.endpointBuilder(componentName, path);
}
}
/**
* The builder of headers' name for the Google BigQuery component.
*/
public static
|
GoogleBigQueryBuilders
|
java
|
micronaut-projects__micronaut-core
|
http-server-netty/src/main/java/io/micronaut/http/server/netty/CompositeNettyServerCustomizer.java
|
{
"start": 823,
"end": 1805
}
|
class ____
extends AbstractCompositeCustomizer<NettyServerCustomizer, NettyServerCustomizer.ChannelRole>
implements NettyServerCustomizer {
private CompositeNettyServerCustomizer(List<NettyServerCustomizer> members) {
super(members);
}
CompositeNettyServerCustomizer() {
super();
}
@Override
protected NettyServerCustomizer specializeForChannel(NettyServerCustomizer member, Channel channel, ChannelRole role) {
return member.specializeForChannel(channel, role);
}
@Override
protected NettyServerCustomizer makeNewComposite(List<NettyServerCustomizer> members) {
return new CompositeNettyServerCustomizer(members);
}
@Override
public void onInitialPipelineBuilt() {
forEach(NettyServerCustomizer::onInitialPipelineBuilt);
}
@Override
public void onStreamPipelineBuilt() {
forEach(NettyServerCustomizer::onStreamPipelineBuilt);
}
}
|
CompositeNettyServerCustomizer
|
java
|
spring-projects__spring-boot
|
module/spring-boot-hateoas/src/test/java/org/springframework/boot/hateoas/autoconfigure/HypermediaWebMvcTestIntegrationTests.java
|
{
"start": 1528,
"end": 1936
}
|
class ____ {
@Autowired
private MockMvcTester mvc;
@Test
void plainResponse() {
assertThat(this.mvc.get().uri("/hateoas/plain")).hasContentType("application/json");
}
@Test
void hateoasResponse() {
assertThat(this.mvc.get().uri("/hateoas/resource")).hasContentType("application/hal+json");
}
@SpringBootConfiguration
@Import(HateoasController.class)
static
|
HypermediaWebMvcTestIntegrationTests
|
java
|
google__gson
|
gson/src/test/java/com/google/gson/GsonBuilderTest.java
|
{
"start": 4441,
"end": 4986
}
|
class ____ {
static final String NO_ARG_CONSTRUCTOR_VALUE = "default instance";
final String s;
public CustomClass3(String s) {
this.s = s;
}
public CustomClass3() {
this(NO_ARG_CONSTRUCTOR_VALUE);
}
}
@Test
public void testExcludeFieldsWithModifiers() {
Gson gson =
new GsonBuilder().excludeFieldsWithModifiers(Modifier.VOLATILE, Modifier.PRIVATE).create();
assertThat(gson.toJson(new HasModifiers())).isEqualTo("{\"d\":\"d\"}");
}
@SuppressWarnings("unused")
static
|
CustomClass3
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/metamodel/attributeInSuper/WorkOrderComponent.java
|
{
"start": 344,
"end": 642
}
|
class ____ {
@EmbeddedId
private WorkOrderComponentId id;
@ManyToOne
@JoinColumn(name = "WORK_ORDER", nullable = false, insertable = false, updatable = false)
@JoinColumn(name = "PLANT_ID", nullable = false, insertable = false, updatable = false)
private WorkOrder workOrder;
}
|
WorkOrderComponent
|
java
|
junit-team__junit5
|
platform-tests/src/test/java/org/junit/platform/engine/support/descriptor/DemoMethodTestDescriptor.java
|
{
"start": 863,
"end": 1760
}
|
class ____ extends AbstractTestDescriptor {
private final Method testMethod;
public DemoMethodTestDescriptor(UniqueId uniqueId, Method testMethod) {
super(uniqueId,
"%s(%s)".formatted(Preconditions.notNull(testMethod, "Method must not be null").getName(),
ClassUtils.nullSafeToString(Class::getSimpleName, testMethod.getParameterTypes())),
MethodSource.from(testMethod));
this.testMethod = testMethod;
}
@Override
public Set<TestTag> getTags() {
Set<TestTag> methodTags = findRepeatableAnnotations(this.testMethod, Tag.class).stream() //
.map(Tag::value) //
.filter(TestTag::isValid) //
.map(TestTag::create) //
.collect(toCollection(LinkedHashSet::new));
getParent().ifPresent(parentDescriptor -> methodTags.addAll(parentDescriptor.getTags()));
return methodTags;
}
@Override
public Type getType() {
return Type.TEST;
}
}
|
DemoMethodTestDescriptor
|
java
|
spring-projects__spring-framework
|
spring-web/src/main/java/org/springframework/http/client/reactive/HttpComponentsClientHttpConnector.java
|
{
"start": 4716,
"end": 5736
}
|
class ____ implements FutureCallback<Message<HttpResponse, Publisher<ByteBuffer>>> {
private final MonoSink<ClientHttpResponse> sink;
private final DataBufferFactory dataBufferFactory;
private final HttpClientContext context;
public ResponseCallback(MonoSink<ClientHttpResponse> sink,
DataBufferFactory dataBufferFactory, HttpClientContext context) {
this.sink = sink;
this.dataBufferFactory = dataBufferFactory;
this.context = context;
}
@Override
public void completed(Message<HttpResponse, Publisher<ByteBuffer>> result) {
this.sink.success(new HttpComponentsClientHttpResponse(this.dataBufferFactory, result, this.context));
}
@Override
public void failed(Exception ex) {
this.sink.error(ex instanceof HttpStreamResetException && ex.getCause() != null ? ex.getCause() : ex);
}
@Override
public void cancelled() {
this.sink.error(new CancellationException());
}
}
/**
* Callback that invoked when a request is executed.
*/
private static
|
ResponseCallback
|
java
|
apache__avro
|
lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectData.java
|
{
"start": 30862,
"end": 38299
}
|
class ____ its superclasses to serialize.
private static Field[] getCachedFields(Class<?> recordClass) {
return FIELDS_CACHE.computeIfAbsent(recordClass, rc -> getFields(rc, true));
}
private static Field[] getFields(Class<?> recordClass, boolean excludeJava) {
Field[] fieldsList;
Map<String, Field> fields = new LinkedHashMap<>();
Class<?> c = recordClass;
do {
if (excludeJava && c.getPackage() != null && c.getPackage().getName().startsWith("java."))
break; // skip java built-in classes
Field[] declaredFields = c.getDeclaredFields();
Arrays.sort(declaredFields, Comparator.comparing(Field::getName));
for (Field field : declaredFields)
if ((field.getModifiers() & (Modifier.TRANSIENT | Modifier.STATIC)) == 0)
if (fields.put(field.getName(), field) != null)
throw new AvroTypeException(c + " contains two fields named: " + field);
c = c.getSuperclass();
} while (c != null);
fieldsList = fields.values().toArray(new Field[0]);
return fieldsList;
}
/** Create a schema for a field. */
protected Schema createFieldSchema(Field field, Map<String, Schema> names) {
AvroEncode enc = ReflectionUtil.getAvroEncode(field);
if (enc != null)
try {
return enc.using().getDeclaredConstructor().newInstance().getSchema();
} catch (Exception e) {
throw new AvroRuntimeException("Could not create schema from custom serializer for " + field.getName());
}
AvroSchema explicit = field.getAnnotation(AvroSchema.class);
if (explicit != null) // explicit schema
return new Schema.Parser().parse(explicit.value());
Union union = field.getAnnotation(Union.class);
if (union != null)
return getAnnotatedUnion(union, names);
Schema schema = createSchema(field.getGenericType(), names);
if (field.isAnnotationPresent(Stringable.class)) { // Stringable
schema = Schema.create(Schema.Type.STRING);
}
if (field.isAnnotationPresent(Nullable.class)) // nullable
schema = makeNullable(schema);
return schema;
}
/**
* Return the protocol for a Java interface.
*
* <p>
* The correct name of the method parameters needs the <code>-parameters</code>
* java compiler argument. More info at https://openjdk.java.net/jeps/118
*/
@Override
public Protocol getProtocol(Class iface) {
Protocol protocol = new Protocol(simpleName(iface), iface.getPackage() == null ? "" : iface.getPackage().getName());
Map<String, Schema> names = new LinkedHashMap<>();
Map<String, Message> messages = protocol.getMessages();
Map<TypeVariable<?>, Type> genericTypeVariableMap = ReflectionUtil.resolveTypeVariables(iface);
for (Method method : iface.getMethods()) {
if ((method.getModifiers() & Modifier.STATIC) == 0) {
String name = method.getName();
if (messages.containsKey(name))
throw new AvroTypeException("Two methods with same name: " + name);
messages.put(name, getMessage(method, protocol, names, genericTypeVariableMap));
}
}
protocol.setTypes(new ArrayList<>(names.values()));
return protocol;
}
private Message getMessage(Method method, Protocol protocol, Map<String, Schema> names,
Map<? extends Type, Type> genericTypeMap) {
List<Schema.Field> fields = new ArrayList<>();
for (Parameter parameter : method.getParameters()) {
Schema paramSchema = getSchema(genericTypeMap.getOrDefault(parameter.getParameterizedType(), parameter.getType()),
names);
for (Annotation annotation : parameter.getAnnotations()) {
if (annotation instanceof AvroSchema) // explicit schema
paramSchema = new Schema.Parser().parse(((AvroSchema) annotation).value());
else if (annotation instanceof Union) // union
paramSchema = getAnnotatedUnion(((Union) annotation), names);
else if (annotation instanceof Nullable) // nullable
paramSchema = makeNullable(paramSchema);
}
fields.add(new Schema.Field(unmangle(parameter.getName()), paramSchema, null /* doc */, null));
}
Schema request = Schema.createRecord(fields);
Type genericReturnType = method.getGenericReturnType();
Type returnType = genericTypeMap.getOrDefault(genericReturnType, genericReturnType);
Union union = method.getAnnotation(Union.class);
Schema response = union == null ? getSchema(returnType, names) : getAnnotatedUnion(union, names);
if (method.isAnnotationPresent(Nullable.class)) // nullable
response = makeNullable(response);
AvroSchema explicit = method.getAnnotation(AvroSchema.class);
if (explicit != null) // explicit schema
response = new Schema.Parser().parse(explicit.value());
List<Schema> errs = new ArrayList<>();
errs.add(Protocol.SYSTEM_ERROR); // every method can throw
for (Type err : method.getGenericExceptionTypes())
errs.add(getSchema(err, names));
Schema errors = Schema.createUnion(errs);
return protocol.createMessage(method.getName(), null /* doc */, Collections.emptyMap() /* propMap */, request,
response, errors);
}
private Schema getSchema(Type type, Map<String, Schema> names) {
try {
return createSchema(type, names);
} catch (AvroTypeException e) { // friendly exception
throw new AvroTypeException("Error getting schema for " + type + ": " + e.getMessage(), e);
}
}
@Override
protected int compare(Object o1, Object o2, Schema s, boolean equals) {
switch (s.getType()) {
case ARRAY:
if (!o1.getClass().isArray())
break;
Schema elementType = s.getElementType();
int l1 = java.lang.reflect.Array.getLength(o1);
int l2 = java.lang.reflect.Array.getLength(o2);
int l = Math.min(l1, l2);
for (int i = 0; i < l; i++) {
int compare = compare(java.lang.reflect.Array.get(o1, i), java.lang.reflect.Array.get(o2, i), elementType,
equals);
if (compare != 0)
return compare;
}
return Integer.compare(l1, l2);
case BYTES:
if (!o1.getClass().isArray())
break;
byte[] b1 = (byte[]) o1;
byte[] b2 = (byte[]) o2;
return BinaryData.compareBytes(b1, 0, b1.length, b2, 0, b2.length);
}
return super.compare(o1, o2, s, equals);
}
@Override
protected Object getRecordState(Object record, Schema schema) {
return getFieldAccessors(record.getClass(), schema);
}
private void consumeAvroAliasAnnotation(Class<?> c, Schema schema) {
AvroAlias[] aliases = c.getAnnotationsByType(AvroAlias.class);
for (AvroAlias alias : aliases) {
String space = alias.space();
if (AvroAlias.NULL.equals(space))
space = null;
schema.addAlias(alias.alias(), space);
}
}
private void consumeFieldAlias(Field field, Schema.Field recordField) {
AvroAlias[] aliases = field.getAnnotationsByType(AvroAlias.class);
for (AvroAlias alias : aliases) {
if (!alias.space().equals(AvroAlias.NULL)) {
throw new AvroRuntimeException(
"Namespaces are not allowed on field aliases. " + "Offending field: " + recordField.name());
}
recordField.addAlias(alias.alias());
}
}
@Override
public Object createFixed(Object old, Schema schema) {
// SpecificData will try to instantiate the type returned by getClass, but
// that is the converted
|
and
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/basic/LocalDateTimeMappingTests.java
|
{
"start": 2033,
"end": 2420
}
|
class ____ {
@Id
private Integer id;
//tag::basic-localDateTime-example[]
// mapped as TIMESTAMP
private LocalDateTime localDateTime;
//end::basic-localDateTime-example[]
public EntityWithLocalDateTime() {
}
public EntityWithLocalDateTime(Integer id, LocalDateTime localDateTime) {
this.id = id;
this.localDateTime = localDateTime;
}
}
}
|
EntityWithLocalDateTime
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.