language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | junit-team__junit5 | platform-tests/src/test/java/org/junit/platform/commons/util/ReflectionUtilsTests.java | {
"start": 73502,
"end": 74006
} | class ____ extends SuperclassWithInstanceMethods {
// foo() is now special.
@Special
@Override
void foo() {
}
// No longer special.
// Simulates overriding a @Test method without redeclaring @Test.
@Override
void specialFoo(char ch) {
}
// No longer special.
// Simulates overriding a @TestFactory method without redeclaring @TestFactory.
@Override
String specialBaz() {
return super.specialBaz();
}
}
}
@Nested
| SubclassWithOverriddenInstanceMethods |
java | apache__avro | lang/java/protobuf/src/test/java/org/apache/avro/protobuf/noopt/Test.java | {
"start": 113909,
"end": 121061
} | enum ____ with the given numeric wire value.
*/
public static N forNumber(int value) {
switch (value) {
case 1:
return A;
default:
return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<N> internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<N> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<N>() {
public N findValueByNumber(int number) {
return N.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
return org.apache.avro.protobuf.noopt.Test.M.getDescriptor().getEnumTypes().get(0);
}
private static final N[] VALUES = values();
public static N valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}
private final int value;
private N(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:org.apache.avro.protobuf.noopt.M.N)
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1)
return true;
if (isInitialized == 0)
return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1)
return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.avro.protobuf.noopt.Test.M)) {
return super.equals(obj);
}
org.apache.avro.protobuf.noopt.Test.M other = (org.apache.avro.protobuf.noopt.Test.M) obj;
if (!getUnknownFields().equals(other.getUnknownFields()))
return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.avro.protobuf.noopt.Test.M parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.avro.protobuf.noopt.Test.M parseFrom(java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.avro.protobuf.noopt.Test.M parseFrom(com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.avro.protobuf.noopt.Test.M parseFrom(com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.avro.protobuf.noopt.Test.M parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.avro.protobuf.noopt.Test.M parseFrom(byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.avro.protobuf.noopt.Test.M parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input);
}
public static org.apache.avro.protobuf.noopt.Test.M parseFrom(java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException {
return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.avro.protobuf.noopt.Test.M parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.avro.protobuf.noopt.Test.M parseDelimitedFrom(java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException {
return com.google.protobuf.GeneratedMessage.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.avro.protobuf.noopt.Test.M parseFrom(com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input);
}
public static org.apache.avro.protobuf.noopt.Test.M parseFrom(com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException {
return com.google.protobuf.GeneratedMessage.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.avro.protobuf.noopt.Test.M prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* a nested enum
* </pre>
*
* Protobuf type {@code org.apache.avro.protobuf.noopt.M}
*/
public static final | associated |
java | apache__flink | flink-python/src/main/java/org/apache/flink/table/runtime/typeutils/serializers/python/ArrayDataSerializer.java | {
"start": 1980,
"end": 5022
} | class ____
extends org.apache.flink.table.runtime.typeutils.ArrayDataSerializer {
private static final long serialVersionUID = 1L;
private final LogicalType elementType;
private final TypeSerializer elementTypeSerializer;
private final ArrayData.ElementGetter elementGetter;
private final int elementSize;
private final BinaryArrayWriter.NullSetter nullSetter;
public ArrayDataSerializer(LogicalType eleType, TypeSerializer elementTypeSerializer) {
super(eleType);
this.elementType = eleType;
this.elementTypeSerializer = elementTypeSerializer;
this.elementSize = BinaryArrayData.calculateFixLengthPartSize(this.elementType);
this.elementGetter = ArrayData.createElementGetter(elementType);
this.nullSetter = BinaryArrayWriter.createNullSetter(eleType);
}
@Override
public void serialize(ArrayData array, DataOutputView target) throws IOException {
int len = array.size();
target.writeInt(len);
for (int i = 0; i < len; i++) {
if (array.isNullAt(i)) {
target.writeBoolean(false);
} else {
target.writeBoolean(true);
Object element = elementGetter.getElementOrNull(array, i);
elementTypeSerializer.serialize(element, target);
}
}
}
@Override
public ArrayData deserialize(DataInputView source) throws IOException {
BinaryArrayData array = new BinaryArrayData();
deserializeInternal(source, array);
return array;
}
@Override
public ArrayData deserialize(ArrayData reuse, DataInputView source) throws IOException {
return deserializeInternal(source, toBinaryArray(reuse));
}
private ArrayData deserializeInternal(DataInputView source, BinaryArrayData array)
throws IOException {
int len = source.readInt();
BinaryArrayWriter writer = new BinaryArrayWriter(array, len, elementSize);
for (int i = 0; i < len; i++) {
boolean isNonNull = source.readBoolean();
if (isNonNull) {
Object element = elementTypeSerializer.deserialize(source);
BinaryWriter.write(writer, i, element, elementType, elementTypeSerializer);
} else {
nullSetter.setNull(writer, i);
}
}
writer.complete();
return array;
}
@Override
public void copy(DataInputView source, DataOutputView target) throws IOException {
serialize(deserialize(source), target);
}
@Override
public TypeSerializer<ArrayData> duplicate() {
return new ArrayDataSerializer(elementType, elementTypeSerializer);
}
@Override
public TypeSerializerSnapshot<ArrayData> snapshotConfiguration() {
return new ArrayDataSerializerSnapshot(elementType, elementTypeSerializer);
}
/** {@link TypeSerializerSnapshot} for {@link ArrayDataSerializer}. */
public static final | ArrayDataSerializer |
java | google__guice | core/test/com/google/inject/ImplicitBindingTest.java | {
"start": 14429,
"end": 14507
} | class ____ {
@Inject B1 b;
@Inject Unresolved unresolved;
}
static | D1 |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/TwilioEndpointBuilderFactory.java | {
"start": 31380,
"end": 32197
} | interface ____
extends
TwilioEndpointConsumerBuilder,
TwilioEndpointProducerBuilder {
default AdvancedTwilioEndpointBuilder advanced() {
return (AdvancedTwilioEndpointBuilder) this;
}
/**
* Sets the name of a parameter to be passed in the exchange In Body.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param inBody the value to set
* @return the dsl builder
*/
default TwilioEndpointBuilder inBody(String inBody) {
doSetProperty("inBody", inBody);
return this;
}
}
/**
* Advanced builder for endpoint for the Twilio component.
*/
public | TwilioEndpointBuilder |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/cid/LittleGenius.java | {
"start": 523,
"end": 592
} | class ____ extends Child {
public String particularSkill;
}
| LittleGenius |
java | redisson__redisson | redisson/src/main/java/org/redisson/spring/support/RedissonLiveObjectRegistrationDefinitionParser.java | {
"start": 2085,
"end": 2595
} | class ____.", ex);
}
builder.addPropertyValue("targetObject", new RuntimeBeanReference(
helper.getAttribute(element,
RedissonNamespaceParserSupport.LIVE_OBJECT_SERVICE_REF_ATTRIBUTE)));
builder.addPropertyValue("targetMethod", "registerClass");
builder.addPropertyValue("arguments", new Object[] {apiClass});
}
@Override
protected Class<?> getBeanClass(Element element) {
return BeanMethodInvoker.class;
}
}
| path |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDatafeedAction.java | {
"start": 4408,
"end": 5097
} | class ____ from TransportMasterNodeAction for cluster state observing purposes.
The stop datafeed api also redirect the elected master node.
The master node will wait for the datafeed to be started by checking the persistent task's status and then return.
To ensure that a subsequent stop datafeed call will see that same task status (and sanity validation doesn't fail)
both start and stop datafeed apis redirect to the elected master node.
In case of instability persistent tasks checks may fail and that is ok, in that case all bets are off.
The start datafeed api is a low through put api, so the fact that we redirect to elected master node shouldn't be an issue.
*/
public | extends |
java | grpc__grpc-java | xds/src/test/java/io/grpc/xds/RingHashLoadBalancerTest.java | {
"start": 56398,
"end": 56938
} | class ____ extends SocketAddress {
private final String name;
FakeSocketAddress(String name) {
this.name = name;
}
@Override
public int hashCode() {
return name.hashCode();
}
@Override
public boolean equals(Object other) {
if (!(other instanceof FakeSocketAddress)) {
return false;
}
return name.equals(((FakeSocketAddress) other).name);
}
@Override
public String toString() {
return "FakeSocketAddress-" + name;
}
}
private | FakeSocketAddress |
java | apache__camel | components/camel-bindy/src/test/java/org/apache/camel/dataformat/bindy/fix/BindySimpleKeyValuePairSortedMarshallTest.java | {
"start": 3420,
"end": 3794
} | class ____ extends RouteBuilder {
BindyKeyValuePairDataFormat kvpBindyDataFormat
= new BindyKeyValuePairDataFormat(org.apache.camel.dataformat.bindy.model.fix.sorted.body.Order.class);
@Override
public void configure() {
from(URI_DIRECT_START).marshal(kvpBindyDataFormat).to(URI_MOCK_RESULT);
}
}
}
| ContextConfig |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/completable/CompletableError.java | {
"start": 783,
"end": 1076
} | class ____ extends Completable {
final Throwable error;
public CompletableError(Throwable error) {
this.error = error;
}
@Override
protected void subscribeActual(CompletableObserver observer) {
EmptyDisposable.error(error, observer);
}
}
| CompletableError |
java | apache__hadoop | hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/SlidingWindowHdrHistogram.java | {
"start": 1682,
"end": 12300
} | class ____ {
private static final Logger LOG = LoggerFactory.getLogger(
SlidingWindowHdrHistogram.class);
private static final int PERCENTILE_50 = 50;
private static final int PERCENTILE_90 = 90;
private static final int PERCENTILE_99 = 99;
// Configuration
private final long windowSizeMillis; // Total analysis window
private final long timeSegmentDurationMillis; // Subdivision on analysis window
private final int numSegments;
private final long highestTrackableValue;
private final int significantFigures;
// Ring buffer of immutable snapshots for completed time segments
private final Histogram[] completedSegments;
private final AtomicInteger currentIndex = new AtomicInteger(0);
// Active Time Segment
private volatile Recorder activeSegmentRecorder;
private Histogram currentSegmentAccumulation;
private volatile long currentSegmentStartMillis;
private final AtomicLong currentTotalCount = new AtomicLong(0L);
// Synchronization
// Writers never take locks. Readers (queries) and rotation use this lock
// to mutate currentAccumulation and ring-buffer pointers safely.
private final ReentrantLock rotateLock = new ReentrantLock();
// Reusable temp histograms to minimize allocations
private Histogram tmpForDelta;
private Histogram tmpForMerge;
private final AbfsRestOperationType operationType;
private boolean isAnalysisWindowFilled = false;
private int minSampleSize;
private double tailLatencyPercentile;
private int tailLatencyMinDeviation;
private double p50 = ZERO_D;
private double p90 = ZERO_D;
private double p99 = ZERO_D;
private double tailLatency = ZERO_D;
private int deviation = ZERO;
public SlidingWindowHdrHistogram(long windowSizeMillis,
int numberOfSegments,
int minSampleSize,
int tailLatencyPercentile,
int tailLatencyMinDeviation,
long highestTrackableValue,
int significantFigures,
final AbfsRestOperationType operationType) {
if (windowSizeMillis <= ZERO) {
throw new IllegalArgumentException("windowSizeMillis > 0");
}
if (numberOfSegments <= ZERO) {
throw new IllegalArgumentException("numberOfSegments > 0");
}
if (highestTrackableValue <= ZERO) {
throw new IllegalArgumentException("highestTrackableValue > 0");
}
if (significantFigures < 1 || significantFigures > 5) {
throw new IllegalArgumentException("significantFigures in [1,5]");
}
this.windowSizeMillis = windowSizeMillis;
this.numSegments = numberOfSegments;
this.timeSegmentDurationMillis = windowSizeMillis / numberOfSegments;
this.highestTrackableValue = highestTrackableValue;
this.significantFigures = significantFigures;
this.operationType = operationType;
this.minSampleSize = minSampleSize;
this.tailLatencyPercentile = adjustPercentile(tailLatencyPercentile);
this.tailLatencyMinDeviation = tailLatencyMinDeviation; // 5ms
this.completedSegments = new Histogram[numSegments];
long now = System.currentTimeMillis();
this.currentSegmentStartMillis = alignToSegmentDuration(now);
currentIndex.set(0);
this.activeSegmentRecorder = new Recorder(highestTrackableValue,
significantFigures);
this.currentSegmentAccumulation = new Histogram(highestTrackableValue,
significantFigures);
this.tmpForDelta = new Histogram(highestTrackableValue, significantFigures);
this.tmpForMerge = new Histogram(highestTrackableValue, significantFigures);
LOG.debug(
"[{}] Initialized SlidingWindowHdrHistogram with WindowSize {}, TimeSegmentDur: {}, "
+ "NumOfSegments: {}", operationType, windowSizeMillis, timeSegmentDurationMillis,
numSegments);
}
/**
* Record a single latency value (in your chosen time unit). Thread-safe and lock-free.
* @param value latency value to record
*/
public void recordValue(long value) {
if (value < 0 || value > highestTrackableValue) {
LOG.warn("[{}] Value {} outside of range [0, {}]. Ignoring",
operationType, value, highestTrackableValue);
return;
}
activeSegmentRecorder.recordValue(value);
currentTotalCount.incrementAndGet();
LOG.debug("[{}] Recorded latency value: {}. Current total count: {}",
operationType, value, currentTotalCount.get());
}
/**
* Get any percentile over the current sliding window.
*/
public void computeLatency() {
if (getCurrentTotalCount() < minSampleSize) {
LOG.debug(
"[{}] Not enough data to report percentiles. Current total count: {}",
operationType, getCurrentTotalCount());
return;
} else {
rotateLock.lock();
try {
tmpForMerge.reset();
for (int i = 0; i < numSegments; i++) {
Histogram h = completedSegments[i];
if (h != null && h.getTotalCount() > 0) {
tmpForMerge.add(h);
}
}
if (tmpForMerge.getTotalCount() == 0) {
return;
}
tailLatency = tmpForMerge.getValueAtPercentile(tailLatencyPercentile);
p50 = tmpForMerge.getValueAtPercentile(PERCENTILE_50);
p90 = tmpForMerge.getValueAtPercentile(PERCENTILE_90);
p99 = tmpForMerge.getValueAtPercentile(PERCENTILE_99);
if (p50 == ZERO || tailLatency < p50) {
deviation = ZERO;
} else {
deviation = (int) ((tailLatency - p50) / p50 * HUNDRED);
}
} finally {
rotateLock.unlock();
}
}
LOG.debug(
"[{}] Computed Latencies. p50: {}, p90: {}, p99: {}, tailLatency: {}, "
+ "deviation with p50: {} Current total count: {}",
operationType, p50, p90, p99, tailLatency, deviation,
getCurrentTotalCount());
}
private long alignToSegmentDuration(long timeMs) {
return timeMs - (timeMs % timeSegmentDurationMillis);
}
/**
* Ensure active bucket is aligned to current time; rotate if we've crossed a boundary.
*/
public void rotateIfNeeded() {
LOG.debug("[{}] Triggering Histogram Rotation", operationType);
long expectedStart = alignToSegmentDuration(System.currentTimeMillis());
if (expectedStart == currentSegmentStartMillis) {
LOG.debug(
"[{}] Current Time Segment Still Active at {}. Skipping Rotation",
operationType, expectedStart);
return; // still current
}
rotateLock.lock();
try {
// Re-check inside lock
expectedStart = alignToSegmentDuration(System.currentTimeMillis());
if (expectedStart == currentSegmentStartMillis) {
return;
}
// Finalize the current bucket:
// Pull any remaining deltas from active recorder and add to currentAccumulation
tmpForDelta.reset();
activeSegmentRecorder.getIntervalHistogramInto(tmpForDelta);
currentSegmentAccumulation.add(tmpForDelta);
if (currentSegmentAccumulation.getTotalCount() <= ZERO) {
currentSegmentStartMillis = alignToSegmentDuration(
System.currentTimeMillis());
LOG.debug(
"[{}] No data recorded in current time segment at {}. Skipping Rotation. Current Index is {}.",
operationType, currentSegmentStartMillis, currentIndex.get());
return;
}
LOG.debug(
"[{}] Rotating current segment with total count {} into slot {}",
operationType, currentSegmentAccumulation.getTotalCount(),
currentIndex.get());
// Place the finished currentAccumulation into the ring buffer slot ahead.
int currentIdx = (currentIndex.getAndIncrement()) % numSegments;
// Next slot is now going to be eradicated. Remove its count from total.
currentTotalCount.set(
currentTotalCount.get() - (completedSegments[currentIdx] == null
? ZERO
: completedSegments[currentIdx].getTotalCount()));
// Store an immutable snapshot (make sure we don't mutate the instance after storing)
completedSegments[currentIdx] = currentSegmentAccumulation;
currentSegmentStartMillis = alignToSegmentDuration(
System.currentTimeMillis());
// Start a fresh current bucket
currentSegmentAccumulation = new Histogram(highestTrackableValue,
significantFigures);
activeSegmentRecorder = new Recorder(highestTrackableValue,
significantFigures);
if (currentIndex.get() >= numSegments) {
LOG.debug("[{}] Analysis window is now filled", operationType);
isAnalysisWindowFilled = true;
// Prevent overflow of currentIndex
currentIndex.set(currentIndex.get() % numSegments);
}
LOG.debug(
"[{}] Completed rotation. New current index {}, New segment start time {}, New total count {}",
operationType, currentIndex.get(), currentSegmentStartMillis,
currentTotalCount.get());
} finally {
rotateLock.unlock();
}
}
/**
* If percentile is configured to more than 100, adjust it to a decimal value.
* @param number configured percentile
* @return adjusted percentile
*/
public static double adjustPercentile(int number) {
if (number <= HUNDRED) {
return number; // No change for numbers ≤ 100
}
String numStr = String.valueOf(number);
String withDecimal = numStr.substring(0, 2) + "." + numStr.substring(2);
return Double.parseDouble(withDecimal);
}
@VisibleForTesting
public double getTailLatency() {
LOG.debug(
"[{}] Getting Tail Latency. Current total count: {}, Deviation: {}%, "
+ "p50: {}, Tail Latency: {}, isAnalysisWindowFilled: {}",
operationType, getCurrentTotalCount(), deviation, p50, tailLatency,
isAnalysisWindowFilled);
if (!isAnalysisWindowFilled()) {
LOG.debug(
"[{}] Analysis window not yet filled. Not reporting tail latency",
operationType);
return ZERO_D;
}
if (deviation < tailLatencyMinDeviation) {
LOG.debug(
"[{}] Tail latency deviation {}% is less than minimum required {}%. Not reporting tail latency",
operationType, deviation, tailLatencyMinDeviation);
return ZERO_D;
}
return tailLatency;
}
@VisibleForTesting
public long getCurrentTotalCount() {
return currentTotalCount.get();
}
@VisibleForTesting
public int getCurrentIndex() {
return currentIndex.get();
}
@VisibleForTesting
public double getP50() {
return p50;
}
@VisibleForTesting
public boolean isAnalysisWindowFilled() {
return isAnalysisWindowFilled;
}
}
| SlidingWindowHdrHistogram |
java | spring-projects__spring-framework | spring-context/src/testFixtures/java/org/springframework/context/testfixture/context/annotation/AutowiredCglibConfiguration.java | {
"start": 943,
"end": 1123
} | class ____ {
@Autowired
private Environment environment;
@Bean
public String text() {
return this.environment.getProperty("hello") + " World";
}
}
| AutowiredCglibConfiguration |
java | micronaut-projects__micronaut-core | inject/src/main/java/io/micronaut/inject/BeanConfiguration.java | {
"start": 956,
"end": 1491
} | interface ____ extends AnnotationMetadataProvider, BeanContextConditional {
/**
* @return The package for the bean configuration
*/
Package getPackage();
/**
* @return The package name this configuration
*/
String getName();
/**
* The version of this configuration. Note: returns null when called on a configuration not provided by a JAR.
*
* @return The version or null
*/
String getVersion();
/**
* Check whether the specified bean definition | BeanConfiguration |
java | apache__rocketmq | proxy/src/main/java/org/apache/rocketmq/proxy/service/relay/LocalProxyRelayService.java | {
"start": 1723,
"end": 5113
} | class ____ extends AbstractProxyRelayService {
private final BrokerController brokerController;
public LocalProxyRelayService(BrokerController brokerController, TransactionService transactionService) {
super(transactionService);
this.brokerController = brokerController;
}
@Override
public CompletableFuture<ProxyRelayResult<ConsumerRunningInfo>> processGetConsumerRunningInfo(
ProxyContext context, RemotingCommand command, GetConsumerRunningInfoRequestHeader header) {
CompletableFuture<ProxyRelayResult<ConsumerRunningInfo>> future = new CompletableFuture<>();
future.thenAccept(proxyOutResult -> {
RemotingServer remotingServer = this.brokerController.getRemotingServer();
if (remotingServer instanceof NettyRemotingAbstract) {
NettyRemotingAbstract nettyRemotingAbstract = (NettyRemotingAbstract) remotingServer;
RemotingCommand remotingCommand = RemotingCommand.createResponseCommand(null);
remotingCommand.setOpaque(command.getOpaque());
remotingCommand.setCode(proxyOutResult.getCode());
remotingCommand.setRemark(proxyOutResult.getRemark());
if (proxyOutResult.getCode() == ResponseCode.SUCCESS && proxyOutResult.getResult() != null) {
ConsumerRunningInfo consumerRunningInfo = proxyOutResult.getResult();
remotingCommand.setBody(consumerRunningInfo.encode());
}
SimpleChannel simpleChannel = new SimpleChannel(context.getRemoteAddress(), context.getLocalAddress());
nettyRemotingAbstract.processResponseCommand(simpleChannel.getChannelHandlerContext(), remotingCommand);
}
});
return future;
}
@Override
public CompletableFuture<ProxyRelayResult<ConsumeMessageDirectlyResult>> processConsumeMessageDirectly(
ProxyContext context, RemotingCommand command,
ConsumeMessageDirectlyResultRequestHeader header) {
CompletableFuture<ProxyRelayResult<ConsumeMessageDirectlyResult>> future = new CompletableFuture<>();
future.thenAccept(proxyOutResult -> {
RemotingServer remotingServer = this.brokerController.getRemotingServer();
if (remotingServer instanceof NettyRemotingAbstract) {
NettyRemotingAbstract nettyRemotingAbstract = (NettyRemotingAbstract) remotingServer;
RemotingCommand remotingCommand = RemotingCommand.createResponseCommand(null);
remotingCommand.setOpaque(command.getOpaque());
remotingCommand.setCode(proxyOutResult.getCode());
remotingCommand.setRemark(proxyOutResult.getRemark());
if (proxyOutResult.getCode() == ResponseCode.SUCCESS && proxyOutResult.getResult() != null) {
ConsumeMessageDirectlyResult consumeMessageDirectlyResult = proxyOutResult.getResult();
remotingCommand.setBody(consumeMessageDirectlyResult.encode());
}
SimpleChannel simpleChannel = new SimpleChannel(context.getRemoteAddress(), context.getLocalAddress());
nettyRemotingAbstract.processResponseCommand(simpleChannel.getChannelHandlerContext(), remotingCommand);
}
});
return future;
}
}
| LocalProxyRelayService |
java | micronaut-projects__micronaut-core | inject-java/src/test/groovy/io/micronaut/inject/configuration/HierarchyConfig.java | {
"start": 449,
"end": 1055
} | class ____<T> {
private String name;
public String getName() {
return name;
}
public final T withName(String name) {
this.name = name;
return getSubclass();
}
public final T withName(NameHolder name) {
this.name = name.name;
return getSubclass();
}
public NameHolder build() {
return new NameHolder(name);
}
@SuppressWarnings("unchecked")
protected final T getSubclass() {
return (T) this;
}
}
public static | Builder |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/client/WatchSourceBuilder.java | {
"start": 1703,
"end": 8060
} | class ____ implements ToXContentObject {
private Trigger trigger;
private Input input = NoneInput.INSTANCE;
private Condition condition = AlwaysCondition.INSTANCE;
private Transform transform = null;
private Map<String, TransformedAction> actions = new HashMap<>();
private TimeValue defaultThrottlePeriod = null;
private Map<String, Object> metadata;
public WatchSourceBuilder trigger(Trigger.Builder<? extends Trigger> trigger) {
return trigger(trigger.build());
}
public WatchSourceBuilder trigger(Trigger trigger) {
this.trigger = trigger;
return this;
}
public WatchSourceBuilder input(Input.Builder<? extends Input> input) {
return input(input.build());
}
public WatchSourceBuilder input(Input input) {
this.input = input;
return this;
}
public WatchSourceBuilder condition(Condition condition) {
this.condition = condition;
return this;
}
public WatchSourceBuilder transform(Transform transform) {
this.transform = transform;
return this;
}
public WatchSourceBuilder transform(Transform.Builder<? extends Transform> transform) {
return transform(transform.build());
}
public WatchSourceBuilder defaultThrottlePeriod(TimeValue throttlePeriod) {
this.defaultThrottlePeriod = throttlePeriod;
return this;
}
public WatchSourceBuilder addAction(String id, Action.Builder<? extends Action> action) {
return addAction(id, null, null, action.build());
}
public WatchSourceBuilder addAction(String id, TimeValue throttlePeriod, Action.Builder<? extends Action> action) {
return addAction(id, throttlePeriod, null, action.build());
}
public WatchSourceBuilder addAction(
String id,
Transform.Builder<? extends Transform> transformBuilder,
Action.Builder<? extends Action> action
) {
return addAction(id, null, transformBuilder.build(), action.build());
}
@SuppressWarnings("HiddenField")
public WatchSourceBuilder addAction(String id, Condition condition, Action.Builder<? extends Action> action) {
return addAction(id, null, condition, null, action.build());
}
public WatchSourceBuilder addAction(
String id,
TimeValue throttlePeriod,
Transform.Builder<? extends Transform> transformBuilder,
Action.Builder<? extends Action> action
) {
return addAction(id, throttlePeriod, transformBuilder.build(), action.build());
}
public WatchSourceBuilder addAction(String id, TimeValue throttlePeriod, Transform aTransform, Action action) {
actions.put(id, new TransformedAction(id, action, throttlePeriod, null, aTransform, null));
return this;
}
@SuppressWarnings("HiddenField")
public WatchSourceBuilder addAction(
String id,
TimeValue throttlePeriod,
Condition condition,
Transform.Builder<? extends Transform> transform,
Action.Builder<? extends Action> action
) {
return addAction(id, throttlePeriod, condition, transform.build(), action.build());
}
@SuppressWarnings("HiddenField")
public WatchSourceBuilder addAction(String id, TimeValue throttlePeriod, Condition condition, Transform transform, Action action) {
actions.put(id, new TransformedAction(id, action, throttlePeriod, condition, transform, null));
return this;
}
@SuppressWarnings("HiddenField")
public WatchSourceBuilder addAction(
String id,
TimeValue throttlePeriod,
Condition condition,
Transform transform,
String path,
Action action
) {
actions.put(id, new TransformedAction(id, action, throttlePeriod, condition, transform, path));
return this;
}
public WatchSourceBuilder metadata(Map<String, Object> metadata) {
this.metadata = metadata;
return this;
}
public XContentSource build() throws IOException {
try (XContentBuilder builder = jsonBuilder()) {
return new XContentSource(toXContent(builder, ToXContent.EMPTY_PARAMS));
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
if (trigger == null) {
throw Exceptions.illegalState("failed to build watch source. no trigger defined");
}
builder.startObject(WatchField.TRIGGER.getPreferredName()).field(trigger.type(), trigger, params).endObject();
builder.startObject(WatchField.INPUT.getPreferredName()).field(input.type(), input, params).endObject();
builder.startObject(WatchField.CONDITION.getPreferredName()).field(condition.type(), condition, params).endObject();
if (transform != null) {
builder.startObject(WatchField.TRANSFORM.getPreferredName()).field(transform.type(), transform, params).endObject();
}
if (defaultThrottlePeriod != null) {
builder.humanReadableField(
WatchField.THROTTLE_PERIOD.getPreferredName(),
WatchField.THROTTLE_PERIOD_HUMAN.getPreferredName(),
defaultThrottlePeriod
);
}
builder.startObject(WatchField.ACTIONS.getPreferredName());
for (Map.Entry<String, TransformedAction> entry : actions.entrySet()) {
builder.field(entry.getKey(), entry.getValue(), params);
}
builder.endObject();
if (metadata != null) {
builder.field(WatchField.METADATA.getPreferredName(), metadata);
}
return builder.endObject();
}
/**
* Returns a {@link org.elasticsearch.common.bytes.BytesReference}
* containing the {@link ToXContent} output in binary format. Builds the
* request as the provided <code>contentType</code>
*/
public final BytesReference buildAsBytes(XContentType contentType) {
try {
WatcherParams params = WatcherParams.builder().hideSecrets(false).build();
return XContentHelper.toXContent(this, contentType, params, false);
} catch (Exception e) {
throw new ElasticsearchException("Failed to build ToXContent", e);
}
}
static | WatchSourceBuilder |
java | spring-projects__spring-framework | spring-webflux/src/test/java/org/springframework/web/reactive/result/method/annotation/JacksonHintsIntegrationTests.java | {
"start": 7927,
"end": 8772
} | class ____ {
@JsonView(MyJacksonView1.class)
private String withView1;
@JsonView(MyJacksonView2.class)
private String withView2;
private String withoutView;
public JacksonViewBean() {
}
public JacksonViewBean(String withView1, String withView2, String withoutView) {
this.withView1 = withView1;
this.withView2 = withView2;
this.withoutView = withoutView;
}
public String getWithView1() {
return withView1;
}
public void setWithView1(String withView1) {
this.withView1 = withView1;
}
public String getWithView2() {
return withView2;
}
public void setWithView2(String withView2) {
this.withView2 = withView2;
}
public String getWithoutView() {
return withoutView;
}
public void setWithoutView(String withoutView) {
this.withoutView = withoutView;
}
}
}
| JacksonViewBean |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java | {
"start": 137597,
"end": 137691
} | class ____ multiple segments contained within a single
* file
*/
private | provisions |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/cobar/DMLUpdateParserTest.java | {
"start": 900,
"end": 3303
} | class ____ extends TestCase {
public void test_update_0() throws Exception {
String sql = "upDate LOw_PRIORITY IGNORE test.t1 sEt t1.col1=?, col2=DefaulT";
MySqlStatementParser parser = new MySqlStatementParser(sql);
SQLStatement stmt = parser.parseStatementList().get(0);
parser.match(Token.EOF);
String output = SQLUtils.toMySqlString(stmt);
assertEquals("UPDATE LOW_PRIORITY IGNORE test.t1\nSET t1.col1 = ?, col2 = DEFAULT", output);
}
public void test_update_1() throws Exception {
String sql = "upDate IGNORE (t1) set col2=DefaulT order bY t1.col2 ";
MySqlStatementParser parser = new MySqlStatementParser(sql);
SQLStatement stmt = parser.parseStatementList().get(0);
parser.match(Token.EOF);
String output = SQLUtils.toMySqlString(stmt);
assertEquals("UPDATE IGNORE t1\nSET col2 = DEFAULT\nORDER BY t1.col2", output);
}
public void test_update_2() throws Exception {
String sql = "upDate (test.t1) SET col2=DefaulT order bY t1.col2 limit ? offset 1";
MySqlStatementParser parser = new MySqlStatementParser(sql);
SQLStatement stmt = parser.parseStatementList().get(0);
parser.match(Token.EOF);
String output = SQLUtils.toMySqlString(stmt);
assertEquals("UPDATE test.t1\nSET col2 = DEFAULT\nORDER BY t1.col2\nLIMIT ? OFFSET 1", output);
}
public void test_update_3() throws Exception {
String sql = "upDate LOW_PRIORITY t1, test.t2 SET col2=DefaulT , col2='123''4'";
MySqlStatementParser parser = new MySqlStatementParser(sql);
SQLStatement stmt = parser.parseStatementList().get(0);
parser.match(Token.EOF);
String output = SQLUtils.toMySqlString(stmt);
assertEquals("UPDATE LOW_PRIORITY t1, test.t2\nSET col2 = DEFAULT, col2 = '123''4'", output);
}
public void test_update_4() throws Exception {
String sql = "upDate LOW_PRIORITY t1, test.t2 SET col2:=DefaulT , col2='123''4' where id='a'";
MySqlStatementParser parser = new MySqlStatementParser(sql);
SQLStatement stmt = parser.parseStatementList().get(0);
parser.match(Token.EOF);
String output = SQLUtils.toMySqlString(stmt);
assertEquals("UPDATE LOW_PRIORITY t1, test.t2\nSET col2 = DEFAULT, col2 = '123''4'\nWHERE id = 'a'", output);
}
}
| DMLUpdateParserTest |
java | spring-projects__spring-boot | module/spring-boot-jms/src/main/java/org/springframework/boot/jms/autoconfigure/JndiConnectionFactoryAutoConfiguration.java | {
"start": 3239,
"end": 3462
} | class ____ extends AnyNestedCondition {
JndiOrPropertyCondition() {
super(ConfigurationPhase.PARSE_CONFIGURATION);
}
@ConditionalOnJndi({ "java:/JmsXA", "java:/XAConnectionFactory" })
static | JndiOrPropertyCondition |
java | apache__rocketmq | broker/src/main/java/org/apache/rocketmq/broker/pop/PopConsumerRocksdbStore.java | {
"start": 1639,
"end": 7240
} | class ____ extends AbstractRocksDBStorage implements PopConsumerKVStore {
private static final Logger log = LoggerFactory.getLogger(LoggerName.ROCKETMQ_POP_LOGGER_NAME);
private static final byte[] COLUMN_FAMILY_NAME = "popState".getBytes(StandardCharsets.UTF_8);
private WriteOptions writeOptions;
private WriteOptions deleteOptions;
protected ColumnFamilyHandle columnFamilyHandle;
public PopConsumerRocksdbStore(String filePath) {
super(filePath);
}
// https://www.cnblogs.com/renjc/p/rocksdb-class-db.html
// https://github.com/johnzeng/rocksdb-doc-cn/blob/master/doc/RocksDB-Tuning-Guide.md
protected void initOptions() {
this.options = RocksDBOptionsFactory.createDBOptions();
this.writeOptions = new WriteOptions();
this.writeOptions.setSync(true);
this.writeOptions.setDisableWAL(false);
this.writeOptions.setNoSlowdown(false);
this.deleteOptions = new WriteOptions();
this.deleteOptions.setSync(true);
this.deleteOptions.setDisableWAL(false);
this.deleteOptions.setNoSlowdown(false);
this.compactRangeOptions = new CompactRangeOptions();
this.compactRangeOptions.setBottommostLevelCompaction(
CompactRangeOptions.BottommostLevelCompaction.kForce);
this.compactRangeOptions.setAllowWriteStall(true);
this.compactRangeOptions.setExclusiveManualCompaction(false);
this.compactRangeOptions.setChangeLevel(true);
this.compactRangeOptions.setTargetLevel(-1);
this.compactRangeOptions.setMaxSubcompactions(4);
}
@Override
protected boolean postLoad() {
try {
UtilAll.ensureDirOK(this.dbPath);
initOptions();
// init column family here
ColumnFamilyOptions defaultOptions = RocksDBOptionsFactory.createPopCFOptions();
ColumnFamilyOptions popStateOptions = RocksDBOptionsFactory.createPopCFOptions();
this.cfOptions.add(defaultOptions);
this.cfOptions.add(popStateOptions);
List<ColumnFamilyDescriptor> cfDescriptors = new ArrayList<>();
cfDescriptors.add(new ColumnFamilyDescriptor(RocksDB.DEFAULT_COLUMN_FAMILY, defaultOptions));
cfDescriptors.add(new ColumnFamilyDescriptor(COLUMN_FAMILY_NAME, popStateOptions));
this.open(cfDescriptors);
this.defaultCFHandle = cfHandles.get(0);
this.columnFamilyHandle = cfHandles.get(1);
log.debug("PopConsumerRocksdbStore init, filePath={}", this.dbPath);
} catch (final Exception e) {
log.error("PopConsumerRocksdbStore init error, filePath={}", this.dbPath, e);
return false;
}
return true;
}
public String getFilePath() {
return this.dbPath;
}
@Override
public void writeRecords(List<PopConsumerRecord> consumerRecordList) {
if (!consumerRecordList.isEmpty()) {
try (WriteBatch writeBatch = new WriteBatch()) {
for (PopConsumerRecord record : consumerRecordList) {
writeBatch.put(columnFamilyHandle, record.getKeyBytes(), record.getValueBytes());
}
this.db.write(writeOptions, writeBatch);
} catch (RocksDBException e) {
throw new RuntimeException("Write record error", e);
}
}
}
@Override
public void deleteRecords(List<PopConsumerRecord> consumerRecordList) {
if (!consumerRecordList.isEmpty()) {
try (WriteBatch writeBatch = new WriteBatch()) {
for (PopConsumerRecord record : consumerRecordList) {
writeBatch.delete(columnFamilyHandle, record.getKeyBytes());
}
this.db.write(deleteOptions, writeBatch);
} catch (RocksDBException e) {
throw new RuntimeException("Delete record error", e);
}
}
}
@Override
// https://github.com/facebook/rocksdb/issues/10300
public List<PopConsumerRecord> scanExpiredRecords(long lower, long upper, int maxCount) {
// In RocksDB, we can use SstPartitionerFixedPrefixFactory in cfOptions
// and new ColumnFamilyOptions().useFixedLengthPrefixExtractor() to
// configure prefix indexing to improve the performance of scans.
// However, in the current implementation, this is not the bottleneck.
List<PopConsumerRecord> consumerRecordList = new ArrayList<>();
try (ReadOptions scanOptions = new ReadOptions()
.setIterateLowerBound(new Slice(ByteBuffer.allocate(Long.BYTES).putLong(lower).array()))
.setIterateUpperBound(new Slice(ByteBuffer.allocate(Long.BYTES).putLong(upper).array()));
RocksIterator iterator = db.newIterator(this.columnFamilyHandle, scanOptions)) {
iterator.seek(ByteBuffer.allocate(Long.BYTES).putLong(lower).array());
while (iterator.isValid() && consumerRecordList.size() < maxCount) {
consumerRecordList.add(PopConsumerRecord.decode(iterator.value()));
iterator.next();
}
}
return consumerRecordList;
}
@Override
protected void preShutdown() {
if (this.writeOptions != null) {
this.writeOptions.close();
}
if (this.deleteOptions != null) {
this.deleteOptions.close();
}
if (this.columnFamilyHandle != null) {
this.columnFamilyHandle.close();
}
}
}
| PopConsumerRocksdbStore |
java | apache__logging-log4j2 | log4j-api-test/src/test/java/org/apache/logging/log4j/util/SystemPropertiesMain.java | {
"start": 1014,
"end": 1684
} | class ____ {
/**
* Prints system properties to the console.
*
* @param args
* unused
*/
public static void main(final String[] args) {
@SuppressWarnings("unchecked")
final Enumeration<String> keyEnum =
(Enumeration<String>) System.getProperties().propertyNames();
final List<String> list = new ArrayList<>();
while (keyEnum.hasMoreElements()) {
list.add(keyEnum.nextElement());
}
Collections.sort(list);
for (final String key : list) {
System.out.println(key + " = " + System.getProperty(key));
}
}
}
| SystemPropertiesMain |
java | netty__netty | microbench/src/main/java/io/netty/buffer/ByteBufUtilDecodeStringBenchmark.java | {
"start": 1305,
"end": 3771
} | enum ____ {
DIRECT {
@Override
ByteBuf newBuffer(byte[] bytes, int length) {
ByteBuf buffer = Unpooled.directBuffer(length);
buffer.writeBytes(bytes, 0, length);
return buffer;
}
},
HEAP_OFFSET {
@Override
ByteBuf newBuffer(byte[] bytes, int length) {
return Unpooled.wrappedBuffer(bytes, 1, length);
}
},
HEAP {
@Override
ByteBuf newBuffer(byte[] bytes, int length) {
return Unpooled.wrappedBuffer(bytes, 0, length);
}
},
COMPOSITE {
@Override
ByteBuf newBuffer(byte[] bytes, int length) {
CompositeByteBuf buffer = Unpooled.compositeBuffer();
int offset = 0;
// 8 buffers per composite.
int capacity = length / 8;
while (length > 0) {
buffer.addComponent(true, Unpooled.wrappedBuffer(bytes, offset, Math.min(length, capacity)));
length -= capacity;
offset += capacity;
}
return buffer;
}
};
abstract ByteBuf newBuffer(byte[] bytes, int length);
}
@Param({
"8",
"64",
"1024",
"10240",
"1073741824",
})
public int size;
@Param({
"US-ASCII",
"UTF-8",
})
public String charsetName;
@Param
public ByteBufType bufferType;
private ByteBuf buffer;
private Charset charset;
@Override
protected String[] jvmArgs() {
// Ensure we minimize the GC overhead by sizing the heap big enough.
return new String[] { "-XX:MaxDirectMemorySize=2g", "-Xmx8g", "-Xms8g", "-Xmn6g" };
}
@Setup
public void setup() {
byte[] bytes = new byte[size + 2];
Arrays.fill(bytes, (byte) 'a');
// Use an offset to not allow any optimizations because we use the exact passed in byte[] for heap buffers.
buffer = bufferType.newBuffer(bytes, size);
charset = Charset.forName(charsetName);
}
@TearDown
public void teardown() {
buffer.release();
}
@Benchmark
public String decodeString() {
return ByteBufUtil.decodeString(buffer, buffer.readerIndex(), size, charset);
}
}
| ByteBufType |
java | spring-projects__spring-boot | documentation/spring-boot-docs/src/main/java/org/springframework/boot/docs/testing/springbootapplications/autoconfiguredspringrestdocs/withwebtestclient/MyRestDocsConfiguration.java | {
"start": 1059,
"end": 1290
} | class ____ implements RestDocsWebTestClientConfigurationCustomizer {
@Override
public void customize(WebTestClientRestDocumentationConfigurer configurer) {
configurer.snippets().withEncoding("UTF-8");
}
}
| MyRestDocsConfiguration |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/type/descriptor/sql/DdlType.java | {
"start": 1145,
"end": 1807
} | interface ____ extends Serializable {
/**
* The {@linkplain SqlTypes SQL type code} of the descriptor.
*
* @return a SQL type code
*/
int getSqlTypeCode();
/**
* Return a type with length, precision, and scale specified by the given
* {@linkplain Size size object}. The given type may be used to
* determine additional aspects of the returned SQL type.
*
* @since 6.3
*/
default String getTypeName(Size columnSize, Type type, DdlTypeRegistry ddlTypeRegistry) {
return getTypeName( columnSize );
}
/**
* Returns the default type name without precision/length and scale parameters.
*
* @deprecated not appropriate for named | DdlType |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/validation/beanvalidation/SpringValidatorAdapterTests.java | {
"start": 11263,
"end": 12327
} | class ____ {
@Size(min = 8, max = 128)
private String password;
private String confirmPassword;
@Pattern(regexp = "[\\w.'-]{1,}@[\\w.'-]{1,}")
private String email;
@Pattern(regexp = "[\\p{L} -]*", message = "Email required")
private String confirmEmail;
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
public String getConfirmPassword() {
return confirmPassword;
}
public void setConfirmPassword(String confirmPassword) {
this.confirmPassword = confirmPassword;
}
public String getEmail() {
return email;
}
public void setEmail(String email) {
this.email = email;
}
public String getConfirmEmail() {
return confirmEmail;
}
public void setConfirmEmail(String confirmEmail) {
this.confirmEmail = confirmEmail;
}
}
@Documented
@Constraint(validatedBy = {SameValidator.class})
@Target({ElementType.TYPE, ElementType.ANNOTATION_TYPE})
@Retention(RetentionPolicy.RUNTIME)
@Repeatable(SameGroup.class)
@ | TestBean |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/engine/extension/ExtensionRegistrationViaParametersAndFieldsTests.java | {
"start": 30901,
"end": 30971
} | class ____ extends BaseParameterExtension<TestParameter> {
}
}
| Extension |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java | {
"start": 4592,
"end": 4990
} | class ____ {
private int dynamicMapperSize = 0;
public void add(int mapperSize) {
dynamicMapperSize += mapperSize;
}
public int get() {
return dynamicMapperSize;
}
}
/**
* Defines the scope parser is currently in.
* This is used for synthetic source related logic during parsing.
*/
private | DynamicMapperSize |
java | apache__camel | components/camel-spring-parent/camel-spring/src/main/java/org/apache/camel/spring/util/CamelContextResolverHelper.java | {
"start": 1071,
"end": 1522
} | class ____ {
private CamelContextResolverHelper() {
// The helper class
}
public static CamelContext getCamelContextWithId(ApplicationContext context, String contextId) {
try {
return context.getBean(contextId, CamelContext.class);
} catch (Exception e) {
throw new IllegalArgumentException("Cannot find the CamelContext with id " + contextId, e);
}
}
}
| CamelContextResolverHelper |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/pool/vendor/MySqlExceptionSorterTest.java | {
"start": 181,
"end": 1868
} | class ____ extends PoolTestCase {
public void test_true() throws Exception {
MySqlExceptionSorter sorter = new MySqlExceptionSorter();
assertTrue(sorter.isExceptionFatal(new SQLException("", "", 1040)));
}
public void test_true_1() throws Exception {
MySqlExceptionSorter sorter = new MySqlExceptionSorter();
assertTrue(sorter.isExceptionFatal(new SQLException("", "", 1042)));
}
public void test_true_2() throws Exception {
MySqlExceptionSorter sorter = new MySqlExceptionSorter();
assertTrue(sorter.isExceptionFatal(new SQLException("", "08xx", 0)));
}
public void test_false_2() throws Exception {
MySqlExceptionSorter sorter = new MySqlExceptionSorter();
assertFalse(sorter.isExceptionFatal(new SQLException("", null, 0)));
}
public void test_false() throws Exception {
MySqlExceptionSorter sorter = new MySqlExceptionSorter();
assertFalse(sorter.isExceptionFatal(new SQLException("", "", -10001)));
}
public void test_false_1() throws Exception {
MySqlExceptionSorter sorter = new MySqlExceptionSorter();
assertTrue(sorter.isExceptionFatal(new SQLException("", "", -8000)));
assertFalse(sorter.isExceptionFatal(new SQLException("", "", -9100)));
}
// public void test_true_3() throws Exception {
// MySqlExceptionSorter sorter = new MySqlExceptionSorter();
// Class clazz = Class.forName("new com.mysql.jdbc.CommunicationsException");
// clazz.getConstructor()
// assertTrue(sorter.isExceptionFatal(new com.mysql.jdbc.CommunicationsException(null, 0, 0, null)));
// }
}
| MySqlExceptionSorterTest |
java | alibaba__nacos | ai/src/main/java/com/alibaba/nacos/ai/service/McpExternalDataAdaptor.java | {
"start": 13921,
"end": 18521
} | class ____ {
private final String scheme;
private final String host;
private final int port;
private final String path;
public UrlComponents(String scheme, String host, int port, String path) {
this.scheme = scheme;
this.host = host;
this.port = port;
this.path = path;
}
public String getScheme() {
return scheme;
}
public String getHost() {
return host;
}
public int getPort() {
return port;
}
public String getPath() {
return path;
}
}
/**
* URL import wrapper: fetch contents from specified URL and adapt to Nacos mcp servers.
* Fetch specified contents from specified URL and adapt to Nacos mcp servers.
*
* @param urlData URL data to parse. Only support official mcp registry api.
* @param cursor Cursor for pagination
* @param limit Limit for pagination. Fetch all pages when limit = -1
* @param search fuzzy search keyword
* @return list of adapted mcp servers
* @throws Exception if adaptation failed
*/
private List<McpServerDetailInfo> adaptOfficialRegistryUrl(String urlData, String cursor, Integer limit, String search)
throws Exception {
if (StringUtils.isBlank(urlData)) {
throw new IllegalArgumentException("URL is blank");
}
// If limit = -1, fetch all pages
if (limit != null && limit == FETCH_ALL_LIMIT_MARK) {
return fetchUrlServersAll(urlData.trim(), search);
}
// Otherwise, fetch a single page using fetchUrlPage
UrlPageResult page = fetchUrlPage(urlData.trim(), cursor, limit, search);
return page.getServers();
}
/**
* File import wrapper: parse into a list of RegistryDetails and convert to
* Nacos servers.
*/
private List<McpServerDetailInfo> adaptOfficialSeedFile(String data) {
return unmarshaledSeedToServerList(data).stream()
.map(this::adaptOfficialMcpServer)
.filter(Objects::nonNull)
.collect(Collectors.toList());
}
private List<McpServerDetailInfo> adaptOfficialMcpServerJsonText(String data) {
McpRegistryServerDetail detail = JacksonUtils.toObj(data, McpRegistryServerDetail.class);
return Collections.singletonList(adaptOfficialMcpServer(detail));
}
private List<McpRegistryServerDetail> unmarshaledSeedToServerList(String data) {
return JacksonUtils.toObj(data, new TypeReference<>() { });
}
private HttpClient getHttpClient() {
if (httpClient == null) {
httpClient = HttpClient.newBuilder()
.followRedirects(HttpClient.Redirect.NORMAL)
.connectTimeout(Duration.ofSeconds(CONNECT_TIMEOUT_SECONDS))
.build();
}
return httpClient;
}
public void setHttpClient(HttpClient client) {
this.httpClient = client;
}
private String buildPageUrl(String base, String cursor, Integer limit, String search) {
StringBuilder url = new StringBuilder(base);
boolean hasQuery = base.contains(QUERY_MARK);
if (StringUtils.isNotBlank(cursor)) {
String enc = URLEncoder.encode(cursor, StandardCharsets.UTF_8);
url.append(hasQuery ? AMPERSAND : QUERY_MARK).append(CURSOR_QUERY_NAME).append("=").append(enc);
hasQuery = true;
}
if (limit != null && limit > 0) {
url.append(hasQuery ? AMPERSAND : QUERY_MARK).append(LIMIT_QUERY_NAME).append("=").append(limit);
hasQuery = true;
}
if (StringUtils.isNotBlank(search)) {
String encSearch = URLEncoder.encode(search, StandardCharsets.UTF_8);
url.append(hasQuery ? AMPERSAND : QUERY_MARK).append(SEARCH_QUERY_NAME).append("=").append(encSearch);
}
return url.toString();
}
private HttpRequest buildGetRequest(String url) {
return HttpRequest.newBuilder(URI.create(url))
.timeout(Duration.ofSeconds(READ_TIMEOUT_SECONDS))
.GET()
.header(HEADER_ACCEPT, HEADER_ACCEPT_JSON).build();
}
private boolean isSuccessStatus(int code) {
return code >= HTTP_STATUS_SUCCESS_MIN && code <= HTTP_STATUS_SUCCESS_MAX;
}
private String generateMcpServerId(String name) {
return UUID.nameUUIDFromBytes(name.getBytes(StandardCharsets.UTF_8)).toString();
}
} | UrlComponents |
java | apache__camel | components/camel-oaipmh/src/main/java/org/apache/camel/oaipmh/handler/ProducerResponseHandler.java | {
"start": 958,
"end": 1378
} | class ____ implements ResponseHandler {
private List<String> responseList;
public ProducerResponseHandler() {
this.responseList = new ArrayList<>();
}
@Override
public void process(OAIPMHResponse anyResponse) {
this.responseList.add(anyResponse.getRawResponse());
}
@Override
public List<String> flush() {
return this.responseList;
}
}
| ProducerResponseHandler |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ParameterNameTest.java | {
"start": 10691,
"end": 11109
} | class ____ {
void test(int x) {
test(
// newX =
// (x ^ 2)
x * x);
}
}
""")
.doTest();
}
@Test
public void namedParametersChecker_ignoresComment_wrongNameWithNoEquals() {
testHelper
.addSourceLines(
"Test.java",
"""
abstract | Test |
java | apache__camel | components/camel-infinispan/camel-infinispan/src/main/java/org/apache/camel/component/infinispan/remote/cluster/InfinispanRemoteClusterService.java | {
"start": 1232,
"end": 3586
} | class ____ extends InfinispanClusterService {
private InfinispanRemoteClusterConfiguration configuration;
public InfinispanRemoteClusterService() {
this.configuration = new InfinispanRemoteClusterConfiguration();
}
public InfinispanRemoteClusterService(InfinispanRemoteClusterConfiguration configuration) {
this.configuration = configuration.clone();
}
// *********************************************
// Properties
// *********************************************
public InfinispanRemoteClusterConfiguration getConfiguration() {
return configuration;
}
public void setConfiguration(InfinispanRemoteClusterConfiguration configuration) {
this.configuration = configuration.clone();
}
public void setConfigurationUri(String configurationUri) {
configuration.setConfigurationUri(configurationUri);
}
public RemoteCacheManager getCacheContainer() {
return configuration.getCacheContainer();
}
public void setCacheContainer(RemoteCacheManager cacheContainer) {
configuration.setCacheContainer(cacheContainer);
}
public Configuration getCacheContainerConfiguration() {
return configuration.getCacheContainerConfiguration();
}
public void setCacheContainerConfiguration(Configuration cacheContainerConfiguration) {
configuration.setCacheContainerConfiguration(cacheContainerConfiguration);
}
public long getLifespan() {
return configuration.getLifespan();
}
public void setLifespan(long lifespan) {
configuration.setLifespan(lifespan);
}
public TimeUnit getLifespanTimeUnit() {
return configuration.getLifespanTimeUnit();
}
public void setLifespanTimeUnit(TimeUnit lifespanTimeUnit) {
configuration.setLifespanTimeUnit(lifespanTimeUnit);
}
// *********************************************
// Impl
// *********************************************
@Override
protected InfinispanClusterView createView(String namespace) throws Exception {
// Validate parameters
ObjectHelper.notNull(getCamelContext(), "Camel Context");
ObjectHelper.notNull(getId(), "Cluster ID");
return new InfinispanRemoteClusterView(this, configuration, namespace);
}
}
| InfinispanRemoteClusterService |
java | apache__camel | core/camel-main/src/main/java/org/apache/camel/main/MainConfigurationProperties.java | {
"start": 1384,
"end": 23859
} | class ____ extends DefaultConfigurationProperties<MainConfigurationProperties>
implements BootstrapCloseable {
@Metadata(enums = "dev,test,prod")
private String profile;
private boolean autoConfigurationEnabled = true;
private boolean autoConfigurationEnvironmentVariablesEnabled = true;
private boolean autoConfigurationSystemPropertiesEnabled = true;
private boolean autoConfigurationFailFast = true;
private boolean autoConfigurationLogSummary = true;
private int durationHitExitCode;
private int extraShutdownTimeout = 15;
private String basePackageScan;
private boolean basePackageScanEnabled = true;
private String mainListenerClasses;
private String routesBuilderClasses;
private String configurationClasses;
private List<RoutesBuilder> routesBuilders = new ArrayList<>();
private List<CamelConfiguration> configurations = new ArrayList<>();
private List<MainListener> mainListeners = new ArrayList<>();
// extended configuration
private HealthConfigurationProperties healthConfigurationProperties;
private StartupConditionConfigurationProperties startupConditionConfigurationProperties;
private LraConfigurationProperties lraConfigurationProperties;
private OtelConfigurationProperties otelConfigurationProperties;
private Otel2ConfigurationProperties otel2ConfigurationProperties;
private MdcConfigurationProperties mdcConfigurationProperties;
private TelemetryDevConfigurationProperties telemetryDevConfigurationProperties;
private MetricsConfigurationProperties metricsConfigurationProperties;
private ThreadPoolConfigurationProperties threadPoolProperties;
private Resilience4jConfigurationProperties resilience4jConfigurationProperties;
private FaultToleranceConfigurationProperties faultToleranceConfigurationProperties;
private RestConfigurationProperties restConfigurationProperties;
private VaultConfigurationProperties vaultConfigurationProperties;
private HttpServerConfigurationProperties httpServerConfigurationProperties;
private HttpManagementServerConfigurationProperties httpManagementServerConfigurationProperties;
private SSLConfigurationProperties sslConfigurationProperties;
private DebuggerConfigurationProperties debuggerConfigurationProperties;
private TracerConfigurationProperties tracerConfigurationProperties;
private RouteControllerConfigurationProperties routeControllerConfigurationProperties;
@Override
public void close() {
if (healthConfigurationProperties != null) {
healthConfigurationProperties.close();
healthConfigurationProperties = null;
}
if (lraConfigurationProperties != null) {
lraConfigurationProperties.close();
lraConfigurationProperties = null;
}
if (otelConfigurationProperties != null) {
otelConfigurationProperties.close();
otelConfigurationProperties = null;
}
if (otel2ConfigurationProperties != null) {
otel2ConfigurationProperties.close();
otel2ConfigurationProperties = null;
}
if (telemetryDevConfigurationProperties != null) {
telemetryDevConfigurationProperties.close();
telemetryDevConfigurationProperties = null;
}
if (mdcConfigurationProperties != null) {
mdcConfigurationProperties.close();
mdcConfigurationProperties = null;
}
if (metricsConfigurationProperties != null) {
metricsConfigurationProperties.close();
metricsConfigurationProperties = null;
}
if (threadPoolProperties != null) {
threadPoolProperties.close();
threadPoolProperties = null;
}
if (resilience4jConfigurationProperties != null) {
resilience4jConfigurationProperties.close();
resilience4jConfigurationProperties = null;
}
if (faultToleranceConfigurationProperties != null) {
faultToleranceConfigurationProperties.close();
faultToleranceConfigurationProperties = null;
}
if (restConfigurationProperties != null) {
restConfigurationProperties.close();
restConfigurationProperties = null;
}
if (vaultConfigurationProperties != null) {
vaultConfigurationProperties.close();
vaultConfigurationProperties = null;
}
if (httpServerConfigurationProperties != null) {
httpServerConfigurationProperties.close();
httpServerConfigurationProperties = null;
}
if (httpManagementServerConfigurationProperties != null) {
httpManagementServerConfigurationProperties.close();
httpManagementServerConfigurationProperties = null;
}
if (sslConfigurationProperties != null) {
sslConfigurationProperties.close();
sslConfigurationProperties = null;
}
if (debuggerConfigurationProperties != null) {
debuggerConfigurationProperties.close();
debuggerConfigurationProperties = null;
}
if (tracerConfigurationProperties != null) {
tracerConfigurationProperties.close();
tracerConfigurationProperties = null;
}
if (routeControllerConfigurationProperties != null) {
routeControllerConfigurationProperties.close();
routeControllerConfigurationProperties = null;
}
if (routesBuilders != null) {
routesBuilders.clear();
routesBuilders = null;
}
if (configurations != null) {
configurations.clear();
configurations = null;
}
if (mainListeners != null) {
mainListeners.clear();
mainListeners = null;
}
}
// extended
// --------------------------------------------------------------
/**
* To configure Health Check
*/
public HealthConfigurationProperties health() {
if (healthConfigurationProperties == null) {
healthConfigurationProperties = new HealthConfigurationProperties(this);
}
return healthConfigurationProperties;
}
/**
* To configure startup conditions
*/
public StartupConditionConfigurationProperties startupCondition() {
if (startupConditionConfigurationProperties == null) {
startupConditionConfigurationProperties = new StartupConditionConfigurationProperties(this);
}
return startupConditionConfigurationProperties;
}
/**
* Whether there has been any health check configuration specified
*/
public boolean hasHealthCheckConfiguration() {
return healthConfigurationProperties != null;
}
/**
* To configure Saga LRA
*/
public LraConfigurationProperties lra() {
if (lraConfigurationProperties == null) {
lraConfigurationProperties = new LraConfigurationProperties(this);
}
return lraConfigurationProperties;
}
/**
* Whether there has been any Saga LRA configuration specified
*/
public boolean hasLraConfiguration() {
return lraConfigurationProperties != null;
}
/**
* To configure OpenTelemetry.
*/
public OtelConfigurationProperties otel() {
if (otelConfigurationProperties == null) {
otelConfigurationProperties = new OtelConfigurationProperties(this);
}
return otelConfigurationProperties;
}
/**
* Whether there has been any OpenTelemetry configuration specified
*/
public boolean hasOtelConfiguration() {
return otelConfigurationProperties != null;
}
/**
* Whether there has been any OpenTelemetry configuration specified
*/
public boolean hasOtel2Configuration() {
return otel2ConfigurationProperties != null;
}
/**
* Whether there has been any TelemetryDev configuration specified
*/
public boolean hasTelemetryDevConfiguration() {
return telemetryDevConfigurationProperties != null;
}
/**
* Whether there has been any MDC configuration specified
*/
public boolean hasMdcConfiguration() {
return mdcConfigurationProperties != null;
}
/**
* To configure Micrometer metrics.
*/
public MetricsConfigurationProperties metrics() {
if (metricsConfigurationProperties == null) {
metricsConfigurationProperties = new MetricsConfigurationProperties(this);
}
return metricsConfigurationProperties;
}
/**
* Whether there has been any Micrometer metrics configuration specified
*/
public boolean hasMetricsConfiguration() {
return metricsConfigurationProperties != null;
}
/**
* To configure embedded HTTP server (for standalone applications; not Spring Boot or Quarkus)
*/
public HttpServerConfigurationProperties httpServer() {
if (httpServerConfigurationProperties == null) {
httpServerConfigurationProperties = new HttpServerConfigurationProperties(this);
}
return httpServerConfigurationProperties;
}
/**
* To configure embedded HTTP management server (for standalone applications; not Spring Boot or Quarkus)
*/
public HttpManagementServerConfigurationProperties httpManagementServer() {
if (httpManagementServerConfigurationProperties == null) {
httpManagementServerConfigurationProperties = new HttpManagementServerConfigurationProperties(this);
}
return httpManagementServerConfigurationProperties;
}
/**
* Whether there has been any embedded HTTP server configuration specified
*/
public boolean hasHttpServerConfiguration() {
return httpServerConfigurationProperties != null;
}
/**
* Whether there has been any embedded HTTP management server configuration specified
*/
public boolean hasHttpManagementServerConfiguration() {
return httpManagementServerConfigurationProperties != null;
}
/**
* To configure SSL.
*/
public SSLConfigurationProperties sslConfig() {
if (sslConfigurationProperties == null) {
sslConfigurationProperties = new SSLConfigurationProperties(this);
}
return sslConfigurationProperties;
}
/**
* Whether there has been any SSL configuration specified.
*/
public boolean hasSslConfiguration() {
return sslConfigurationProperties != null;
}
/**
* To configure Debugger.
*/
public DebuggerConfigurationProperties debuggerConfig() {
if (debuggerConfigurationProperties == null) {
debuggerConfigurationProperties = new DebuggerConfigurationProperties(this);
}
return debuggerConfigurationProperties;
}
/**
* Whether there has been any Debugger configuration specified.
*/
public boolean hasDebuggerConfiguration() {
return debuggerConfigurationProperties != null;
}
/**
* To configure Tracer.
*/
public TracerConfigurationProperties tracerConfig() {
if (tracerConfigurationProperties == null) {
tracerConfigurationProperties = new TracerConfigurationProperties(this);
}
return tracerConfigurationProperties;
}
/**
* Whether there has been any Tracer configuration specified.
*/
public boolean hasTracerConfiguration() {
return tracerConfigurationProperties != null;
}
/**
* To configure Route Controller.
*/
public RouteControllerConfigurationProperties routeControllerConfig() {
if (routeControllerConfigurationProperties == null) {
routeControllerConfigurationProperties = new RouteControllerConfigurationProperties(this);
}
return routeControllerConfigurationProperties;
}
/**
* Whether there has been any Route Controller configuration specified.
*/
public boolean hasRouteControllerConfiguration() {
return routeControllerConfigurationProperties != null;
}
/**
* To configure thread pools
*/
public ThreadPoolConfigurationProperties threadPool() {
if (threadPoolProperties == null) {
threadPoolProperties = new ThreadPoolConfigurationProperties(this);
}
return threadPoolProperties;
}
/**
* Whether there has been any thread pool configuration specified
*/
public boolean hasThreadPoolConfiguration() {
return threadPoolProperties != null;
}
/**
* To configure Circuit Breaker EIP with Resilience4j
*/
public Resilience4jConfigurationProperties resilience4j() {
if (resilience4jConfigurationProperties == null) {
resilience4jConfigurationProperties = new Resilience4jConfigurationProperties(this);
}
return resilience4jConfigurationProperties;
}
/**
* Whether there has been any Resilience4j EIP configuration specified
*/
public boolean hasResilience4jConfiguration() {
return resilience4jConfigurationProperties != null;
}
/**
* To configure Circuit Breaker EIP with MicroProfile Fault Tolerance
*/
public FaultToleranceConfigurationProperties faultTolerance() {
if (faultToleranceConfigurationProperties == null) {
faultToleranceConfigurationProperties = new FaultToleranceConfigurationProperties(this);
}
return faultToleranceConfigurationProperties;
}
/**
* Whether there has been any MicroProfile Fault Tolerance EIP configuration specified
*/
public boolean hasFaultToleranceConfiguration() {
return faultToleranceConfigurationProperties != null;
}
/**
* To configure Rest DSL
*/
public RestConfigurationProperties rest() {
if (restConfigurationProperties == null) {
restConfigurationProperties = new RestConfigurationProperties(this);
}
return restConfigurationProperties;
}
/**
* Whether there has been any rest configuration specified
*/
public boolean hasRestConfiguration() {
return restConfigurationProperties != null;
}
/**
* To configure access to AWS vaults
*/
public VaultConfigurationProperties vault() {
if (vaultConfigurationProperties == null) {
vaultConfigurationProperties = new VaultConfigurationProperties(this);
}
return vaultConfigurationProperties;
}
/**
* Whether there has been any vault configuration specified
*/
public boolean hasVaultConfiguration() {
return vaultConfigurationProperties != null;
}
// getter and setters
// --------------------------------------------------------------
public String getProfile() {
return profile;
}
/**
* Camel profile to use when running.
*
* The dev profile is for development, which enables a set of additional developer focus functionality, tracing,
* debugging, and gathering additional runtime statistics that are useful during development. However, those
* additional features has a slight overhead cost, and are not enabled for production profile.
*
* The default profile is prod.
*/
public void setProfile(String profile) {
this.profile = profile;
}
public boolean isAutoConfigurationEnabled() {
return autoConfigurationEnabled;
}
/**
* Whether auto configuration of components, dataformats, languages is enabled or not. When enabled the
* configuration parameters are loaded from the properties component. You can prefix the parameters in the
* properties file with: - camel.component.name.option1=value1 - camel.component.name.option2=value2 -
* camel.dataformat.name.option1=value1 - camel.dataformat.name.option2=value2 - camel.language.name.option1=value1
* - camel.language.name.option2=value2 Where name is the name of the component, dataformat or language such as
* seda,direct,jaxb.
* <p/>
* The auto configuration also works for any options on components that is a complex type (not standard Java type)
* and there has been an explicit single bean instance registered to the Camel registry via the
* {@link org.apache.camel.spi.Registry#bind(String, Object)} method or by using the
* {@link org.apache.camel.BindToRegistry} annotation style.
* <p/>
* This option is default enabled.
*/
public void setAutoConfigurationEnabled(boolean autoConfigurationEnabled) {
this.autoConfigurationEnabled = autoConfigurationEnabled;
}
public boolean isAutoConfigurationEnvironmentVariablesEnabled() {
return autoConfigurationEnvironmentVariablesEnabled;
}
/**
* Whether auto configuration should include OS environment variables as well. When enabled this allows to overrule
* any configuration using an OS environment variable. For example to set a shutdown timeout of 5 seconds:
* CAMEL_MAIN_SHUTDOWNTIMEOUT=5.
* <p/>
* This option is default enabled.
*/
public void setAutoConfigurationEnvironmentVariablesEnabled(boolean autoConfigurationEnvironmentVariablesEnabled) {
this.autoConfigurationEnvironmentVariablesEnabled = autoConfigurationEnvironmentVariablesEnabled;
}
public boolean isAutoConfigurationSystemPropertiesEnabled() {
return autoConfigurationSystemPropertiesEnabled;
}
/**
* Whether auto configuration should include JVM system properties as well. When enabled this allows to overrule any
* configuration using a JVM system property. For example to set a shutdown timeout of 5 seconds: -D
* camel.main.shutdown-timeout=5.
* <p/>
* Note that JVM system properties take precedence over OS environment variables.
* <p/>
* This option is default enabled.
*/
public void setAutoConfigurationSystemPropertiesEnabled(boolean autoConfigurationSystemPropertiesEnabled) {
this.autoConfigurationSystemPropertiesEnabled = autoConfigurationSystemPropertiesEnabled;
}
public boolean isAutoConfigurationFailFast() {
return autoConfigurationFailFast;
}
/**
* Whether auto configuration should fail fast when configuring one ore more properties fails for whatever reason
* such as a invalid property name, etc.
* <p/>
* This option is default enabled.
*/
public void setAutoConfigurationFailFast(boolean autoConfigurationFailFast) {
this.autoConfigurationFailFast = autoConfigurationFailFast;
}
public boolean isAutoConfigurationLogSummary() {
return autoConfigurationLogSummary;
}
/**
* Whether auto configuration should log a summary with the configured properties.
* <p/>
* This option is default enabled.
*/
public void setAutoConfigurationLogSummary(boolean autoConfigurationLogSummary) {
this.autoConfigurationLogSummary = autoConfigurationLogSummary;
}
public String getBasePackageScan() {
return basePackageScan;
}
/**
* Package name to use as base (offset) for classpath scanning of {@link RouteBuilder},
* {@link org.apache.camel.TypeConverter}, {@link CamelConfiguration} classes, and also classes annotated with
* {@link org.apache.camel.Converter}, or {@link org.apache.camel.BindToRegistry}.
*
* If you are using Spring Boot then it is instead recommended to use Spring Boots component scanning and annotate
* your route builder classes with `@Component`. In other words only use this for Camel Main in standalone mode.
*/
public void setBasePackageScan(String basePackageScan) {
this.basePackageScan = basePackageScan;
}
public boolean isBasePackageScanEnabled() {
return basePackageScanEnabled;
}
/**
* Whether base package scan is enabled.
*/
public void setBasePackageScanEnabled(boolean basePackageScanEnabled) {
this.basePackageScanEnabled = basePackageScanEnabled;
}
public int getDurationHitExitCode() {
return durationHitExitCode;
}
/**
* Sets the exit code for the application if duration was hit
*/
public void setDurationHitExitCode(int durationHitExitCode) {
this.durationHitExitCode = durationHitExitCode;
}
public int getExtraShutdownTimeout() {
return extraShutdownTimeout;
}
/**
* Extra timeout in seconds to graceful shutdown Camel.
*
* When Camel is shutting down then Camel first shutdown all the routes (shutdownTimeout). Then additional services
* is shutdown (extraShutdownTimeout).
*/
public void setExtraShutdownTimeout(int extraShutdownTimeout) {
this.extraShutdownTimeout = extraShutdownTimeout;
}
// getter and setters - main listener
// --------------------------------------------------------------
public String getMainListenerClasses() {
return mainListenerClasses;
}
/**
* Sets classes names that will be used for {@link MainListener} that makes it possible to do custom logic during
* starting and stopping camel-main.
*/
public void setMainListenerClasses(String mainListenerClasses) {
this.mainListenerClasses = mainListenerClasses;
}
/**
* Adds {@link MainListener} object to the known list of main listener classes.
*/
@SuppressWarnings("unchecked")
private void addMainListenerClass(Class<? extends MainListener>... listener) {
StringJoiner existing = new StringJoiner(",");
if (mainListenerClasses != null && !mainListenerClasses.isEmpty()) {
existing.add(mainListenerClasses);
}
if (listener != null) {
for (Class<? extends MainListener> clazz : listener) {
existing.add(clazz.getName());
}
}
setMainListenerClasses(existing.toString());
}
/**
* Adds main listener object to the known list of listener objects.
*/
public void addMainListener(MainListener listener) {
mainListeners.add(listener);
}
/**
* Adds main listener | MainConfigurationProperties |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/testkit/WithPlayerData.java | {
"start": 781,
"end": 1905
} | class ____ {
protected static Player jordan;
protected static Player magic;
protected static Player kobe;
protected static Player duncan;
protected static List<Player> players;
@BeforeAll
public static void setUpOnce() {
jordan = new Player(new Name("Michael", "Jordan"), "Chicago Bulls");
jordan.setAssistsPerGame(8);
jordan.setPointsPerGame(30);
jordan.setReboundsPerGame(6);
jordan.setHighestScore(69);
magic = new Player(new Name("Magic", "Johnson"), "Los Angeles Lakers");
magic.setAssistsPerGame(11);
magic.setPointsPerGame(19);
magic.setReboundsPerGame(7);
magic.setHighestScore(56);
kobe = new Player(new Name("Kobe", "Bryant"), "Los Angeles Lakers");
kobe.setAssistsPerGame(5);
kobe.setPointsPerGame(25);
kobe.setReboundsPerGame(5);
kobe.setHighestScore(81);
duncan = new Player(new Name("Tim", "Duncan"), "San Antonio Spurs");
duncan.setAssistsPerGame(3);
duncan.setPointsPerGame(19);
duncan.setReboundsPerGame(11);
duncan.setHighestScore(53);
players = newArrayList(jordan, magic, kobe, duncan);
}
}
| WithPlayerData |
java | apache__dubbo | dubbo-rpc/dubbo-rpc-triple/src/main/java/org/apache/dubbo/rpc/protocol/tri/h12/CompressibleEncoder.java | {
"start": 1179,
"end": 1992
} | class ____ implements HttpMessageEncoder {
private final HttpMessageEncoder delegate;
private Compressor compressor = Compressor.NONE;
public CompressibleEncoder(HttpMessageEncoder delegate) {
this.delegate = delegate;
}
public void setCompressor(Compressor compressor) {
this.compressor = compressor;
}
public void encode(OutputStream outputStream, Object data, Charset charset) throws EncodeException {
delegate.encode(compressor.decorate(outputStream), data, charset);
}
public void encode(OutputStream outputStream, Object[] data, Charset charset) throws EncodeException {
delegate.encode(outputStream, data, charset);
}
@Override
public MediaType mediaType() {
return delegate.mediaType();
}
}
| CompressibleEncoder |
java | alibaba__nacos | plugin-default-impl/nacos-default-auth-plugin/src/main/java/com/alibaba/nacos/plugin/auth/impl/roles/NacosRoleServiceRemoteImpl.java | {
"start": 1884,
"end": 12783
} | class ____ extends AbstractCheckedRoleService implements NacosRoleService {
private static final Logger LOGGER = LoggerFactory.getLogger(NacosRoleServiceRemoteImpl.class);
private final NacosRestTemplate nacosRestTemplate;
private final AuthConfigs authConfigs;
public NacosRoleServiceRemoteImpl(AuthConfigs authConfigs) {
super(authConfigs);
this.authConfigs = authConfigs;
this.nacosRestTemplate = new DefaultHttpClientFactory(LOGGER).createNacosRestTemplate();
}
@Override
public void addPermission(String role, String resource, String action) {
Map<String, String> body = Map.of("role", role, "resource", resource, "action", action);
try {
HttpRestResult<String> result = nacosRestTemplate.postForm(
buildRemotePermissionUrlPath(AuthConstants.PERMISSION_PATH),
RemoteServerUtil.buildServerRemoteHeader(authConfigs), null, body, String.class);
RemoteServerUtil.singleCheckResult(result);
} catch (NacosException e) {
throw new NacosRuntimeException(e.getErrCode(), e.getErrMsg());
} catch (Exception unpectedException) {
throw new NacosRuntimeException(NacosException.SERVER_ERROR, unpectedException.getMessage());
}
}
@Override
public void deletePermission(String role, String resource, String action) {
Query query = Query.newInstance().addParam("role", role).addParam("resource", resource)
.addParam("action", action);
try {
HttpRestResult<String> result = nacosRestTemplate.delete(
buildRemotePermissionUrlPath(AuthConstants.PERMISSION_PATH),
RemoteServerUtil.buildServerRemoteHeader(authConfigs), query, String.class);
RemoteServerUtil.singleCheckResult(result);
} catch (NacosException e) {
throw new NacosRuntimeException(e.getErrCode(), e.getErrMsg());
} catch (Exception unpectedException) {
throw new NacosRuntimeException(NacosException.SERVER_ERROR, unpectedException.getMessage());
}
}
@Override
public List<PermissionInfo> getPermissions(String role) {
if (getCachedPermissionInfoMap().containsKey(role)) {
return getCachedPermissionInfoMap().get(role);
}
reload();
return getCachedPermissionInfoMap().get(role);
}
@Override
public Page<PermissionInfo> getPermissions(String role, int pageNo, int pageSize) {
Query query = Query.newInstance().addParam("role", role).addParam("pageNo", pageNo)
.addParam("pageSize", pageSize).addParam("search", "accurate");
return getPermissionInfoPageFromRemote(query);
}
@Override
public Page<PermissionInfo> findPermissions(String role, int pageNo, int pageSize) {
Query query = Query.newInstance().addParam("role", role).addParam("pageNo", pageNo)
.addParam("pageSize", pageSize).addParam("search", "blur");
return getPermissionInfoPageFromRemote(query);
}
@Override
public List<RoleInfo> getRoles(String username) {
if (getCachedRoleInfoMap().containsKey(username)) {
return getCachedRoleInfoMap().get(username);
}
reload();
return getCachedRoleInfoMap().get(username);
}
@Override
public Page<RoleInfo> getRoles(String username, String role, int pageNo, int pageSize) {
Query query = Query.newInstance().addParam("username", username).addParam("role", role)
.addParam("pageNo", pageNo).addParam("pageSize", pageSize).addParam("search", "accurate");
return getRoleInfoPageFromRemote(query);
}
@Override
public Page<RoleInfo> findRoles(String username, String role, int pageNo, int pageSize) {
Query query = Query.newInstance().addParam("username", username).addParam("role", role)
.addParam("pageNo", pageNo).addParam("pageSize", pageSize).addParam("search", "blur");
return getRoleInfoPageFromRemote(query);
}
@Override
public List<String> findRoleNames(String role) {
Query query = Query.newInstance().addParam("role", role);
try {
HttpRestResult<String> httpResult = nacosRestTemplate.get(
buildRemoteRoleUrlPath(AuthConstants.ROLE_PATH + "/search"),
RemoteServerUtil.buildServerRemoteHeader(authConfigs), query, String.class);
RemoteServerUtil.singleCheckResult(httpResult);
Result<List<String>> result = JacksonUtils.toObj(httpResult.getData(), new TypeReference<>() {
});
return result.getData();
} catch (NacosException e) {
throw new NacosRuntimeException(e.getErrCode(), e.getErrMsg());
} catch (Exception unpectedException) {
throw new NacosRuntimeException(NacosException.SERVER_ERROR, unpectedException.getMessage());
}
}
@Override
public List<RoleInfo> getAllRoles() {
return getRoles(StringUtils.EMPTY, StringUtils.EMPTY, DEFAULT_PAGE_NO, Integer.MAX_VALUE).getPageItems();
}
@Override
public void addRole(String role, String username) {
if (AuthConstants.GLOBAL_ADMIN_ROLE.equals(role)) {
throw new IllegalArgumentException(
"role '" + AuthConstants.GLOBAL_ADMIN_ROLE + "' is not permitted to create!");
}
Map<String, String> body = Map.of("role", role, "username", username);
try {
HttpRestResult<String> httpResult = nacosRestTemplate.postForm(
buildRemoteRoleUrlPath(AuthConstants.ROLE_PATH),
RemoteServerUtil.buildServerRemoteHeader(authConfigs), body, String.class);
RemoteServerUtil.singleCheckResult(httpResult);
getCachedRoleSet().add(role);
} catch (NacosException e) {
throw new NacosRuntimeException(e.getErrCode(), e.getErrMsg());
} catch (Exception unpectedException) {
throw new NacosRuntimeException(NacosException.SERVER_ERROR, unpectedException.getMessage());
}
}
@Override
public void deleteRole(String role, String userName) {
if (AuthConstants.GLOBAL_ADMIN_ROLE.equals(role)) {
throw new IllegalArgumentException(
"role '" + AuthConstants.GLOBAL_ADMIN_ROLE + "' is not permitted to delete!");
}
Query query = Query.newInstance().addParam("role", role).addParam("userName", userName);
try {
HttpRestResult<String> result = nacosRestTemplate.delete(buildRemoteRoleUrlPath(AuthConstants.ROLE_PATH),
RemoteServerUtil.buildServerRemoteHeader(authConfigs), query, String.class);
RemoteServerUtil.singleCheckResult(result);
} catch (NacosException e) {
throw new NacosRuntimeException(e.getErrCode(), e.getErrMsg());
} catch (Exception unpectedException) {
throw new NacosRuntimeException(NacosException.SERVER_ERROR, unpectedException.getMessage());
}
}
@Override
public void deleteRole(String role) {
if (AuthConstants.GLOBAL_ADMIN_ROLE.equals(role)) {
throw new IllegalArgumentException(
"role '" + AuthConstants.GLOBAL_ADMIN_ROLE + "' is not permitted to delete!");
}
Query query = Query.newInstance().addParam("role", role);
try {
HttpRestResult<String> result = nacosRestTemplate.delete(buildRemoteRoleUrlPath(AuthConstants.ROLE_PATH),
RemoteServerUtil.buildServerRemoteHeader(authConfigs), query, String.class);
RemoteServerUtil.singleCheckResult(result);
getCachedRoleSet().remove(role);
} catch (NacosException e) {
throw new NacosRuntimeException(e.getErrCode(), e.getErrMsg());
} catch (Exception unpectedException) {
throw new NacosRuntimeException(NacosException.SERVER_ERROR, unpectedException.getMessage());
}
}
@Override
public void addAdminRole(String username) {
// if has global admin role, means already synced admin role to console cached.
if (hasGlobalAdminRole()) {
return;
}
// No need to call add admin role. In {@link NacosUserServiceRemoteImpl#createUser},
// it will call create admin role which include add admin role operation.
getCachedRoleSet().add(AuthConstants.GLOBAL_ADMIN_ROLE);
authConfigs.setHasGlobalAdminRole(true);
}
private String buildRemotePermissionUrlPath(String apiPath) {
return RequestUrlConstants.HTTP_PREFIX + RemoteServerUtil.getOneNacosServerAddress()
+ RemoteServerUtil.getRemoteServerContextPath() + apiPath;
}
private Page<PermissionInfo> getPermissionInfoPageFromRemote(Query query) {
try {
HttpRestResult<String> httpResult = nacosRestTemplate.get(
buildRemotePermissionUrlPath(AuthConstants.PERMISSION_PATH + "/list"),
RemoteServerUtil.buildServerRemoteHeader(authConfigs), query, String.class);
RemoteServerUtil.singleCheckResult(httpResult);
Result<Page<PermissionInfo>> result = JacksonUtils.toObj(httpResult.getData(), new TypeReference<>() {
});
return result.getData();
} catch (NacosException e) {
throw new NacosRuntimeException(e.getErrCode(), e.getErrMsg());
} catch (Exception unpectedException) {
throw new NacosRuntimeException(NacosException.SERVER_ERROR, unpectedException.getMessage());
}
}
private String buildRemoteRoleUrlPath(String apiPath) {
return RequestUrlConstants.HTTP_PREFIX + RemoteServerUtil.getOneNacosServerAddress()
+ RemoteServerUtil.getRemoteServerContextPath() + apiPath;
}
private Page<RoleInfo> getRoleInfoPageFromRemote(Query query) {
try {
HttpRestResult<String> httpResult = nacosRestTemplate.get(
buildRemoteRoleUrlPath(AuthConstants.ROLE_PATH + "/list"),
RemoteServerUtil.buildServerRemoteHeader(authConfigs), query, String.class);
RemoteServerUtil.singleCheckResult(httpResult);
Result<Page<RoleInfo>> result = JacksonUtils.toObj(httpResult.getData(), new TypeReference<>() {
});
return result.getData();
} catch (NacosException e) {
throw new NacosRuntimeException(e.getErrCode(), e.getErrMsg());
} catch (Exception unpectedException) {
throw new NacosRuntimeException(NacosException.SERVER_ERROR, unpectedException.getMessage());
}
}
}
| NacosRoleServiceRemoteImpl |
java | apache__flink | flink-core/src/main/java/org/apache/flink/util/MdcAwareExecutorService.java | {
"start": 1312,
"end": 3757
} | class ____<S extends ExecutorService> extends MdcAwareExecutor<S>
implements ExecutorService {
public MdcAwareExecutorService(S delegate, Map<String, String> contextData) {
super(delegate, contextData);
}
@Override
public void shutdown() {
delegate.shutdown();
}
@Override
public List<Runnable> shutdownNow() {
return delegate.shutdownNow();
}
@Override
public boolean isShutdown() {
return delegate.isShutdown();
}
@Override
public boolean isTerminated() {
return delegate.isTerminated();
}
@Override
public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException {
return delegate.awaitTermination(timeout, unit);
}
@Override
public <T> Future<T> submit(Callable<T> task) {
return delegate.submit(wrapCallable(contextData, task));
}
@Override
public <T> Future<T> submit(Runnable task, T result) {
return delegate.submit(wrapRunnable(contextData, task), result);
}
@Override
public Future<?> submit(Runnable task) {
return delegate.submit(wrapRunnable(contextData, task));
}
@Override
public <T> List<Future<T>> invokeAll(Collection<? extends Callable<T>> tasks)
throws InterruptedException {
return delegate.invokeAll(wrapCallables(tasks));
}
@Override
public <T> List<Future<T>> invokeAll(
Collection<? extends Callable<T>> tasks, long timeout, TimeUnit unit)
throws InterruptedException {
return delegate.invokeAll(wrapCallables(tasks), timeout, unit);
}
@Override
public <T> T invokeAny(Collection<? extends Callable<T>> tasks)
throws InterruptedException, ExecutionException {
return delegate.invokeAny(wrapCallables(tasks));
}
@Override
public <T> T invokeAny(Collection<? extends Callable<T>> tasks, long timeout, TimeUnit unit)
throws InterruptedException, ExecutionException, TimeoutException {
return delegate.invokeAny(wrapCallables(tasks), timeout, unit);
}
private <T> List<Callable<T>> wrapCallables(Collection<? extends Callable<T>> tasks) {
List<Callable<T>> list = new ArrayList<>(tasks.size());
for (Callable<T> task : tasks) {
list.add(wrapCallable(contextData, task));
}
return list;
}
}
| MdcAwareExecutorService |
java | elastic__elasticsearch | x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtDateClaimValidatorTests.java | {
"start": 798,
"end": 6703
} | class ____ extends ESTestCase {
private Clock clock;
@Before
public void init() {
clock = mock(Clock.class);
}
public void testClaimIsNotDate() throws ParseException {
final String claimName = randomAlphaOfLengthBetween(10, 18);
final JwtDateClaimValidator validator = new JwtDateClaimValidator(
clock,
claimName,
TimeValue.ZERO,
randomFrom(JwtDateClaimValidator.Relationship.values()),
randomBoolean()
);
final JWTClaimsSet jwtClaimsSet = JWTClaimsSet.parse(Map.of(claimName, randomAlphaOfLengthBetween(3, 8)));
final IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> validator.validate(getJwsHeader(), jwtClaimsSet)
);
assertThat(e.getMessage(), containsString("cannot parse date claim"));
assertThat(e.getCause(), instanceOf(ParseException.class));
}
public void testClaimDoesNotExist() throws ParseException {
final String claimName = randomFrom(randomAlphaOfLengthBetween(3, 8), "iat", "nbf", "auth_time");
final JwtDateClaimValidator validator = new JwtDateClaimValidator(
clock,
claimName,
TimeValue.ZERO,
randomFrom(JwtDateClaimValidator.Relationship.values()),
false
);
final JWTClaimsSet jwtClaimsSet = JWTClaimsSet.parse(Map.of());
final IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> validator.validate(getJwsHeader(), jwtClaimsSet)
);
assertThat(e.getMessage(), containsString("missing required date claim"));
}
public void testClaimDoesNotExistIsOKWhenAllowNullIsTrue() throws ParseException {
final String claimName = randomFrom(randomAlphaOfLengthBetween(3, 8), "iat", "nbf", "auth_time");
final JwtDateClaimValidator validator = new JwtDateClaimValidator(
clock,
claimName,
TimeValue.ZERO,
randomFrom(JwtDateClaimValidator.Relationship.values()),
true
);
final JWTClaimsSet jwtClaimsSet = JWTClaimsSet.parse(Map.of());
try {
validator.validate(getJwsHeader(), jwtClaimsSet);
} catch (Exception e) {
throw new AssertionError("validation should have passed without exception", e);
}
}
public void testBeforeNow() throws ParseException {
final String claimName = randomFrom(randomAlphaOfLengthBetween(10, 18), "iat", "nbf", "auth_time");
final long allowedSkewInSeconds = randomLongBetween(0, 300);
final JwtDateClaimValidator validator = new JwtDateClaimValidator(
clock,
claimName,
TimeValue.timeValueSeconds(allowedSkewInSeconds),
JwtDateClaimValidator.Relationship.BEFORE_NOW,
false
);
final Instant now = Instant.now();
when(clock.instant()).thenReturn(now);
final Instant before = now.minusSeconds(randomLongBetween(1 - allowedSkewInSeconds, 600));
try {
validator.validate(getJwsHeader(), JWTClaimsSet.parse(Map.of(claimName, before.getEpochSecond())));
} catch (Exception e) {
throw new AssertionError("validation should have passed without exception", e);
}
final Instant after = now.plusSeconds(randomLongBetween(1 + allowedSkewInSeconds, 600));
final IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> validator.validate(getJwsHeader(), JWTClaimsSet.parse(Map.of(claimName, after.getEpochSecond())))
);
assertThat(
e.getMessage(),
containsString(
"date claim ["
+ claimName
+ "] value ["
+ after.getEpochSecond() * 1000
+ "] must be before now ["
+ now.toEpochMilli()
+ "]"
)
);
}
public void testAfterNow() throws ParseException {
final String claimName = randomFrom(randomAlphaOfLengthBetween(10, 18), "exp");
final long allowedSkewInSeconds = randomLongBetween(0, 300);
final JwtDateClaimValidator validator = new JwtDateClaimValidator(
clock,
claimName,
TimeValue.timeValueSeconds(allowedSkewInSeconds),
JwtDateClaimValidator.Relationship.AFTER_NOW,
false
);
final Instant now = Instant.now();
when(clock.instant()).thenReturn(now);
final Instant before = now.minusSeconds(randomLongBetween(1 + allowedSkewInSeconds, 600));
final IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> validator.validate(getJwsHeader(), JWTClaimsSet.parse(Map.of(claimName, before.getEpochSecond())))
);
assertThat(
e.getMessage(),
containsString(
"date claim ["
+ claimName
+ "] value ["
+ before.getEpochSecond() * 1000
+ "] must be after now ["
+ now.toEpochMilli()
+ "]"
)
);
final Instant after = now.plusSeconds(randomLongBetween(1 - allowedSkewInSeconds, 600));
try {
validator.validate(getJwsHeader(), JWTClaimsSet.parse(Map.of(claimName, after.getEpochSecond())));
} catch (Exception exception) {
throw new AssertionError("validation should have passed without exception", e);
}
}
private JWSHeader getJwsHeader() throws ParseException {
return JWSHeader.parse(Map.of("alg", randomAlphaOfLengthBetween(3, 8)));
}
}
| JwtDateClaimValidatorTests |
java | apache__spark | launcher/src/test/java/org/apache/spark/launcher/InProcessLauncherSuite.java | {
"start": 4696,
"end": 4997
} | class ____ extends InProcessLauncher {
@Override
Method findSparkSubmit() throws IOException {
try {
return InProcessLauncherSuite.class.getMethod("runTest", String[].class);
} catch (Exception e) {
throw new IOException(e);
}
}
}
}
| TestInProcessLauncher |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/Log4jWarningErrorMetricsAppender.java | {
"start": 1700,
"end": 13209
} | class ____ implements Comparable<PurgeElement> {
String message;
Long timestamp;
PurgeElement(String message, Long timestamp) {
this.message = message;
this.timestamp = timestamp;
}
public int compareTo(PurgeElement e) {
if (e == null) {
throw new NullPointerException("Null element passed to compareTo");
}
int ret = this.timestamp.compareTo(e.timestamp);
if (ret != 0) {
return ret;
}
return this.message.compareTo(e.message);
}
@Override
public boolean equals(Object e) {
if (!(e instanceof PurgeElement)) {
return false;
}
if (e == this) {
return true;
}
PurgeElement el = (PurgeElement) e;
return (this.message.equals(el.message))
&& (this.timestamp.equals(el.timestamp));
}
    // Consistent with equals(): elements that compare equal share a timestamp
    // and therefore the same hash. Messages are deliberately not mixed in, so
    // same-second entries collide.
    @Override
    public int hashCode() {
      return this.timestamp.hashCode();
    }
}
Map<String, SortedMap<Long, Integer>> errors;
Map<String, SortedMap<Long, Integer>> warnings;
SortedMap<Long, Integer> errorsTimestampCount;
SortedMap<Long, Integer> warningsTimestampCount;
SortedSet<PurgeElement> errorsPurgeInformation;
SortedSet<PurgeElement> warningsPurgeInformation;
Timer cleanupTimer;
long cleanupInterval;
long messageAgeLimitSeconds;
int maxUniqueMessages;
final Object lock = new Object();
/**
* Create an appender to keep track of the errors and warnings logged by the
* system. It will keep purge messages older than 2 days. It will store upto
* the last 500 unique errors and the last 500 unique warnings. The thread to
* purge message will run every 5 minutes, unless the 500 message limit is hit
* earlier.
*/
  public Log4jWarningErrorMetricsAppender() {
    // Defaults: 5 minute cleanup interval, 24 hour message age limit,
    // 250 unique messages (the forced purge in updateMessageDetails fires
    // once a store exceeds 2x this value).
    this(5 * 60, 24 * 60 * 60, 250);
  }
/**
* Create an appender to keep track of the errors and warnings logged by the
* system.
*
* @param cleanupIntervalSeconds
* the interval at which old messages are purged to prevent the
* message stores from growing unbounded
* @param messageAgeLimitSeconds
* the maximum age of a message in seconds before it is purged from
* the store
* @param maxUniqueMessages
* the maximum number of unique messages of each type we keep before
* we start purging
*/
public Log4jWarningErrorMetricsAppender(int cleanupIntervalSeconds,
long messageAgeLimitSeconds, int maxUniqueMessages) {
super();
errors = new HashMap<>();
warnings = new HashMap<>();
errorsTimestampCount = new TreeMap<>();
warningsTimestampCount = new TreeMap<>();
errorsPurgeInformation = new TreeSet<>();
warningsPurgeInformation = new TreeSet<>();
cleanupTimer = new Timer();
cleanupInterval = cleanupIntervalSeconds * 1000;
cleanupTimer.schedule(new ErrorAndWarningsCleanup(), cleanupInterval);
this.messageAgeLimitSeconds = messageAgeLimitSeconds;
this.maxUniqueMessages = maxUniqueMessages;
this.setName(LOG_METRICS_APPENDER);
this.setThreshold(Level.WARN);
}
/**
* {@inheritDoc}
*/
  @Override
  protected void append(LoggingEvent event) {
    String message = event.getRenderedMessage();
    String[] throwableStr = event.getThrowableStrRep();
    if (throwableStr != null) {
      // Fold the stack trace into the message key, then cap the length so
      // huge traces cannot blow up the per-message maps.
      message = message + "\n" + StringUtils.join("\n", throwableStr);
      message =
          org.apache.commons.lang3.StringUtils.left(message, MAX_MESSAGE_SIZE);
    }
    int level = event.getLevel().toInt();
    if (level == Level.WARN_INT || level == Level.ERROR_INT) {
      // store second level information
      Long eventTimeSeconds = event.getTimeStamp() / 1000;
      // Route WARN and ERROR events into their respective stores; all three
      // structures for a level are updated together under the shared lock.
      Map<String, SortedMap<Long, Integer>> map;
      SortedMap<Long, Integer> timestampsCount;
      SortedSet<PurgeElement> purgeInformation;
      if (level == Level.WARN_INT) {
        map = warnings;
        timestampsCount = warningsTimestampCount;
        purgeInformation = warningsPurgeInformation;
      } else {
        map = errors;
        timestampsCount = errorsTimestampCount;
        purgeInformation = errorsPurgeInformation;
      }
      updateMessageDetails(message, eventTimeSeconds, map, timestampsCount,
          purgeInformation);
    }
  }
  // Records one occurrence of a message at eventTimeSeconds under the shared
  // lock: bumps the per-message and per-timestamp counters and refreshes the
  // message's entry in the purge set so it reflects the latest occurrence.
  private void updateMessageDetails(String message, Long eventTimeSeconds,
      Map<String, SortedMap<Long, Integer>> map,
      SortedMap<Long, Integer> timestampsCount,
      SortedSet<PurgeElement> purgeInformation) {
    synchronized (lock) {
      if (map.containsKey(message)) {
        SortedMap<Long, Integer> tmp = map.get(message);
        Long lastMessageTime = tmp.lastKey();
        int value = 1;
        if (tmp.containsKey(eventTimeSeconds)) {
          value = tmp.get(eventTimeSeconds) + 1;
        }
        tmp.put(eventTimeSeconds, value);
        // Drop the stale purge entry keyed on the previous latest time; a
        // fresh entry for eventTimeSeconds is added below.
        purgeInformation.remove(new PurgeElement(message, lastMessageTime));
      } else {
        SortedMap<Long, Integer> value = new TreeMap<>();
        value.put(eventTimeSeconds, 1);
        map.put(message, value);
        // Too many unique messages: replace the timer and force an
        // immediate cleanup run (delay 0).
        if (map.size() > maxUniqueMessages * 2) {
          cleanupTimer.cancel();
          cleanupTimer = new Timer();
          cleanupTimer.schedule(new ErrorAndWarningsCleanup(), 0);
        }
      }
      purgeInformation.add(new PurgeElement(message, eventTimeSeconds));
      int newValue = 1;
      if (timestampsCount.containsKey(eventTimeSeconds)) {
        newValue = timestampsCount.get(eventTimeSeconds) + 1;
      }
      timestampsCount.put(eventTimeSeconds, newValue);
    }
  }
/**
* {@inheritDoc}
*/
@Override
public void close() {
cleanupTimer.cancel();
}
/**
* {@inheritDoc}
*/
@Override
public boolean requiresLayout() {
return false;
}
/**
* Get the counts of errors in the time periods provided. Note that the counts
* provided by this function may differ from the ones provided by
* getErrorMessagesAndCounts since the message store is purged at regular
* intervals to prevent it from growing without bounds, while the store for
* the counts is purged less frequently.
*
* @param cutoffs
* list of timestamp cutoffs(in seconds) for which the counts are
* desired
* @return list of error counts in the time periods corresponding to cutoffs
*/
public List<Integer> getErrorCounts(List<Long> cutoffs) {
return this.getCounts(errorsTimestampCount, cutoffs);
}
/**
* Get the counts of warnings in the time periods provided. Note that the
* counts provided by this function may differ from the ones provided by
* getWarningMessagesAndCounts since the message store is purged at regular
* intervals to prevent it from growing without bounds, while the store for
* the counts is purged less frequently.
*
* @param cutoffs
* list of timestamp cutoffs(in seconds) for which the counts are
* desired
* @return list of warning counts in the time periods corresponding to cutoffs
*/
public List<Integer> getWarningCounts(List<Long> cutoffs) {
return this.getCounts(warningsTimestampCount, cutoffs);
}
  // Sums event counts per requested cutoff window. Note: despite its name,
  // "largestCutoff" holds the *minimum* cutoff timestamp, i.e. the one
  // producing the largest window — tailMap over it covers every window, and
  // each timestamp is added into all windows it falls inside.
  private List<Integer> getCounts(SortedMap<Long, Integer> map,
      List<Long> cutoffs) {
    List<Integer> ret = new ArrayList<>();
    Long largestCutoff = Collections.min(cutoffs);
    for (int i = 0; i < cutoffs.size(); ++i) {
      ret.add(0);
    }
    synchronized (lock) {
      Map<Long, Integer> submap = map.tailMap(largestCutoff);
      for (Map.Entry<Long, Integer> entry : submap.entrySet()) {
        for (int i = 0; i < cutoffs.size(); ++i) {
          if (entry.getKey() >= cutoffs.get(i)) {
            int tmp = ret.get(i);
            ret.set(i, tmp + entry.getValue());
          }
        }
      }
    }
    return ret;
  }
/**
* Get the errors and the number of occurrences for each of the errors for the
* time cutoffs provided. Note that the counts provided by this function may
* differ from the ones provided by getErrorCounts since the message store is
* purged at regular intervals to prevent it from growing without bounds,
* while the store for the counts is purged less frequently.
*
* @param cutoffs
* list of timestamp cutoffs(in seconds) for which the counts are
* desired
* @return list of maps corresponding for each cutoff provided; each map
* contains the error and the number of times the error occurred in
* the time period
*/
public List<Map<String, Element>>
getErrorMessagesAndCounts(List<Long> cutoffs) {
return this.getElementsAndCounts(errors, cutoffs, errorsPurgeInformation);
}
/**
* Get the warning and the number of occurrences for each of the warnings for
* the time cutoffs provided. Note that the counts provided by this function
* may differ from the ones provided by getWarningCounts since the message
* store is purged at regular intervals to prevent it from growing without
* bounds, while the store for the counts is purged less frequently.
*
* @param cutoffs
* list of timestamp cutoffs(in seconds) for which the counts are
* desired
* @return list of maps corresponding for each cutoff provided; each map
* contains the warning and the number of times the error occurred in
* the time period
*/
public List<Map<String, Element>> getWarningMessagesAndCounts(
List<Long> cutoffs) {
return this.getElementsAndCounts(warnings, cutoffs, warningsPurgeInformation);
}
  // Builds, for each cutoff, a map of message -> (occurrence count, latest
  // occurrence time) covering occurrences at or after that cutoff.
  // NOTE(review): purgeInformation.size() and cleanupMessages run outside the
  // shared lock here while other methods mutate these structures under it —
  // looks racy; confirm whether the cleanup path takes the lock internally.
  private List<Map<String, Element>> getElementsAndCounts(
      Map<String, SortedMap<Long, Integer>> map, List<Long> cutoffs,
      SortedSet<PurgeElement> purgeInformation) {
    if (purgeInformation.size() > maxUniqueMessages) {
      // Opportunistic purge before reporting, using the configured age limit
      // converted to a seconds-based cutoff.
      ErrorAndWarningsCleanup cleanup = new ErrorAndWarningsCleanup();
      long cutoff = Time.now() - (messageAgeLimitSeconds * 1000);
      cutoff = (cutoff / 1000);
      cleanup.cleanupMessages(map, purgeInformation, cutoff, maxUniqueMessages);
    }
    List<Map<String, Element>> ret = new ArrayList<>(cutoffs.size());
    for (int i = 0; i < cutoffs.size(); ++i) {
      ret.add(new HashMap<String, Element>());
    }
    synchronized (lock) {
      for (Map.Entry<String, SortedMap<Long, Integer>> element : map.entrySet()) {
        for (int i = 0; i < cutoffs.size(); ++i) {
          Map<String, Element> retMap = ret.get(i);
          // All occurrences of this message at or after the cutoff.
          SortedMap<Long, Integer> qualifyingTimes =
              element.getValue().tailMap(cutoffs.get(i));
          long count = 0;
          for (Map.Entry<Long, Integer> entry : qualifyingTimes.entrySet()) {
            count += entry.getValue();
          }
          if (!qualifyingTimes.isEmpty()) {
            retMap.put(element.getKey(),
                new Element(count, qualifyingTimes.lastKey()));
          }
        }
      }
    }
    return ret;
  }
  // Getters and setters required by log4j so these values can be configured
  // declaratively from log4j.properties.
  public long getCleanupInterval() {
    return cleanupInterval;
  }
  public void setCleanupInterval(long cleanupInterval) {
    this.cleanupInterval = cleanupInterval;
  }
  public long getMessageAgeLimitSeconds() {
    return messageAgeLimitSeconds;
  }
  public void setMessageAgeLimitSeconds(long messageAgeLimitSeconds) {
    this.messageAgeLimitSeconds = messageAgeLimitSeconds;
  }
  public int getMaxUniqueMessages() {
    return maxUniqueMessages;
  }
  public void setMaxUniqueMessages(int maxUniqueMessages) {
    this.maxUniqueMessages = maxUniqueMessages;
  }
| PurgeElement |
java | google__dagger | javatests/artifacts/dagger-android/simple/app/src/sharedTest/java/dagger/android/simple/SimpleActivityTest.java | {
"start": 1519,
"end": 2106
} | class ____ {
@Test
public void testActivityInject() throws Exception {
try (ActivityScenario<SimpleActivity> scenario =
ActivityScenario.launch(SimpleActivity.class)) {
scenario.onActivity(
activity -> {
onView(withId(R.id.greeting))
.check(matches(withText("Hello, ProdUser! You are on build robolectric.")));
});
}
}
@Test
public void verifyApplicationInstance() {
assertThat((Context) ApplicationProvider.getApplicationContext())
.isInstanceOf(SimpleApplication.class);
}
}
| SimpleActivityTest |
java | spring-projects__spring-security | saml2/saml2-service-provider/src/main/java/org/springframework/security/saml2/provider/service/authentication/logout/BaseOpenSamlLogoutRequestValidator.java | {
"start": 1859,
"end": 6351
} | class ____ implements Saml2LogoutRequestValidator {
static {
OpenSamlInitializationService.initialize();
}
private final OpenSamlOperations saml;
BaseOpenSamlLogoutRequestValidator(OpenSamlOperations saml) {
this.saml = saml;
}
/**
* {@inheritDoc}
*/
@Override
public Saml2LogoutValidatorResult validate(Saml2LogoutRequestValidatorParameters parameters) {
Saml2LogoutRequest request = parameters.getLogoutRequest();
RelyingPartyRegistration registration = parameters.getRelyingPartyRegistration();
Authentication authentication = parameters.getAuthentication();
LogoutRequest logoutRequest = this.saml.deserialize(Saml2Utils.withEncoded(request.getSamlRequest())
.inflate(request.getBinding() == Saml2MessageBinding.REDIRECT)
.decode());
return Saml2LogoutValidatorResult.withErrors()
.errors(verifySignature(request, logoutRequest, registration))
.errors(validateRequest(logoutRequest, registration, authentication))
.build();
}
private Consumer<Collection<Saml2Error>> verifySignature(Saml2LogoutRequest request, LogoutRequest logoutRequest,
RelyingPartyRegistration registration) {
AssertingPartyMetadata details = registration.getAssertingPartyMetadata();
Collection<Saml2X509Credential> credentials = details.getVerificationX509Credentials();
VerificationConfigurer verify = this.saml.withVerificationKeys(credentials).entityId(details.getEntityId());
return (errors) -> {
if (logoutRequest.isSigned()) {
errors.addAll(verify.verify(logoutRequest));
}
else {
RedirectParameters params = new RedirectParameters(request.getParameters(),
request.getParametersQuery(), logoutRequest);
errors.addAll(verify.verify(params));
}
};
}
private Consumer<Collection<Saml2Error>> validateRequest(LogoutRequest request,
RelyingPartyRegistration registration, Authentication authentication) {
return (errors) -> {
validateIssuer(request, registration).accept(errors);
validateDestination(request, registration).accept(errors);
validateSubject(request, registration, authentication).accept(errors);
};
}
private Consumer<Collection<Saml2Error>> validateIssuer(LogoutRequest request,
RelyingPartyRegistration registration) {
return (errors) -> {
if (request.getIssuer() == null) {
errors.add(new Saml2Error(Saml2ErrorCodes.INVALID_ISSUER, "Failed to find issuer in LogoutRequest"));
return;
}
String issuer = request.getIssuer().getValue();
if (!issuer.equals(registration.getAssertingPartyMetadata().getEntityId())) {
errors
.add(new Saml2Error(Saml2ErrorCodes.INVALID_ISSUER, "Failed to match issuer to configured issuer"));
}
};
}
private Consumer<Collection<Saml2Error>> validateDestination(LogoutRequest request,
RelyingPartyRegistration registration) {
return (errors) -> {
if (request.getDestination() == null) {
errors.add(new Saml2Error(Saml2ErrorCodes.INVALID_DESTINATION,
"Failed to find destination in LogoutRequest"));
return;
}
String destination = request.getDestination();
if (!destination.equals(registration.getSingleLogoutServiceLocation())) {
errors.add(new Saml2Error(Saml2ErrorCodes.INVALID_DESTINATION,
"Failed to match destination to configured destination"));
}
};
}
private Consumer<Collection<Saml2Error>> validateSubject(LogoutRequest request,
RelyingPartyRegistration registration, Authentication authentication) {
return (errors) -> {
if (authentication == null) {
return;
}
NameID nameId = getNameId(request, registration);
if (nameId == null) {
errors
.add(new Saml2Error(Saml2ErrorCodes.SUBJECT_NOT_FOUND, "Failed to find subject in LogoutRequest"));
return;
}
validateNameId(nameId, authentication, errors);
};
}
private NameID getNameId(LogoutRequest request, RelyingPartyRegistration registration) {
this.saml.withDecryptionKeys(registration.getDecryptionX509Credentials()).decrypt(request);
return request.getNameID();
}
private void validateNameId(NameID nameId, Authentication authentication, Collection<Saml2Error> errors) {
String name = (authentication.getCredentials() instanceof Saml2ResponseAssertionAccessor assertion)
? assertion.getNameId() : authentication.getName();
if (!nameId.getValue().equals(name)) {
errors.add(new Saml2Error(Saml2ErrorCodes.INVALID_REQUEST,
"Failed to match subject in LogoutRequest with currently logged in user"));
}
}
}
| BaseOpenSamlLogoutRequestValidator |
java | elastic__elasticsearch | x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpRequest.java | {
"start": 10135,
"end": 16582
} | class ____ {
public static HttpRequest parse(XContentParser parser) throws IOException {
Builder builder = new Builder();
XContentParser.Token token;
String currentFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (Field.PROXY.match(currentFieldName, parser.getDeprecationHandler())) {
try {
builder.proxy(HttpProxy.parse(parser));
} catch (Exception e) {
throw new ElasticsearchParseException("could not parse http request. could not parse [{}] field", currentFieldName);
}
} else if (Field.AUTH.match(currentFieldName, parser.getDeprecationHandler())) {
builder.auth(BasicAuth.parse(parser));
} else if (HttpRequest.Field.CONNECTION_TIMEOUT.match(currentFieldName, parser.getDeprecationHandler())) {
builder.connectionTimeout(TimeValue.timeValueMillis(parser.longValue()));
} else if (HttpRequest.Field.CONNECTION_TIMEOUT_HUMAN.match(currentFieldName, parser.getDeprecationHandler())) {
// Users and 2.x specify the timeout this way
try {
builder.connectionTimeout(
WatcherDateTimeUtils.parseTimeValue(parser, HttpRequest.Field.CONNECTION_TIMEOUT.toString())
);
} catch (ElasticsearchParseException pe) {
throw new ElasticsearchParseException(
"could not parse http request template. invalid time value for [{}] field",
pe,
currentFieldName
);
}
} else if (HttpRequest.Field.READ_TIMEOUT.match(currentFieldName, parser.getDeprecationHandler())) {
builder.readTimeout(TimeValue.timeValueMillis(parser.longValue()));
} else if (HttpRequest.Field.READ_TIMEOUT_HUMAN.match(currentFieldName, parser.getDeprecationHandler())) {
// Users and 2.x specify the timeout this way
try {
builder.readTimeout(WatcherDateTimeUtils.parseTimeValue(parser, HttpRequest.Field.READ_TIMEOUT.toString()));
} catch (ElasticsearchParseException pe) {
throw new ElasticsearchParseException(
"could not parse http request template. invalid time value for [{}] field",
pe,
currentFieldName
);
}
} else if (token == XContentParser.Token.START_OBJECT) {
@SuppressWarnings({ "unchecked", "rawtypes" })
final Map<String, String> headers = (Map) WatcherUtils.flattenModel(parser.map());
if (Field.HEADERS.match(currentFieldName, parser.getDeprecationHandler())) {
builder.setHeaders(headers);
} else if (Field.PARAMS.match(currentFieldName, parser.getDeprecationHandler())) {
builder.setParams(headers);
} else if (Field.BODY.match(currentFieldName, parser.getDeprecationHandler())) {
builder.body(parser.text());
} else {
throw new ElasticsearchParseException(
"could not parse http request. unexpected object field [{}]",
currentFieldName
);
}
} else if (token == XContentParser.Token.VALUE_STRING) {
if (Field.SCHEME.match(currentFieldName, parser.getDeprecationHandler())) {
builder.scheme(Scheme.parse(parser.text()));
} else if (Field.METHOD.match(currentFieldName, parser.getDeprecationHandler())) {
builder.method(HttpMethod.parse(parser.text()));
} else if (Field.HOST.match(currentFieldName, parser.getDeprecationHandler())) {
builder.host = parser.text();
} else if (Field.PATH.match(currentFieldName, parser.getDeprecationHandler())) {
builder.path(parser.text());
} else if (Field.BODY.match(currentFieldName, parser.getDeprecationHandler())) {
builder.body(parser.text());
} else if (Field.URL.match(currentFieldName, parser.getDeprecationHandler())) {
builder.fromUrl(parser.text());
} else {
throw new ElasticsearchParseException(
"could not parse http request. unexpected string field [{}]",
currentFieldName
);
}
} else if (token == XContentParser.Token.VALUE_NUMBER) {
if (Field.PORT.match(currentFieldName, parser.getDeprecationHandler())) {
builder.port = parser.intValue();
} else {
throw new ElasticsearchParseException(
"could not parse http request. unexpected numeric field [{}]",
currentFieldName
);
}
} else {
throw new ElasticsearchParseException("could not parse http request. unexpected token [{}]", token);
}
}
if (builder.host == null) {
throw new ElasticsearchParseException(
"could not parse http request. missing required [{}] field",
Field.HOST.getPreferredName()
);
}
if (builder.port < 0) {
throw new ElasticsearchParseException(
"could not parse http request. missing required [{}] field",
Field.PORT.getPreferredName()
);
}
return builder.build();
}
}
public static | Parser |
java | resilience4j__resilience4j | resilience4j-timelimiter/src/main/java/io/github/resilience4j/timelimiter/TimeLimiter.java | {
"start": 866,
"end": 8040
} | interface ____ {
String DEFAULT_NAME = "UNDEFINED";
/**
* Creates a TimeLimiter decorator with a default TimeLimiterConfig configuration.
*
* @return The {@link TimeLimiter}
*/
static TimeLimiter ofDefaults() {
return new TimeLimiterImpl(DEFAULT_NAME, TimeLimiterConfig.ofDefaults());
}
/**
* Creates a TimeLimiter decorator with a default TimeLimiterConfig configuration.
*
* @return The {@link TimeLimiter}
*/
static TimeLimiter ofDefaults(String name) {
return new TimeLimiterImpl(name, TimeLimiterConfig.ofDefaults());
}
/**
* Creates a TimeLimiter decorator with a TimeLimiterConfig configuration.
*
* @param timeLimiterConfig the TimeLimiterConfig
* @return The {@link TimeLimiter}
*/
static TimeLimiter of(TimeLimiterConfig timeLimiterConfig) {
return of(DEFAULT_NAME, timeLimiterConfig);
}
/**
* Creates a TimeLimiter decorator with a TimeLimiterConfig configuration.
*
* @param name the name of the TimeLimiter
* @param timeLimiterConfig the TimeLimiterConfig
* @return The {@link TimeLimiter}
*/
static TimeLimiter of(String name, TimeLimiterConfig timeLimiterConfig) {
return new TimeLimiterImpl(name, timeLimiterConfig);
}
/**
* Creates a TimeLimiter with a custom TimeLimiter configuration.
* <p>
* The {@code tags} passed will be appended to the tags already configured for the registry.
* When tags (keys) of the two collide the tags passed with this method will override the tags
* of the registry.
*
* @param name the name of the TimeLimiter
* @param timeLimiterConfig a custom TimeLimiter configuration
* @param tags tags added to the Retry
* @return a TimeLimiter with a custom TimeLimiter configuration.
*/
static TimeLimiter of(String name, TimeLimiterConfig timeLimiterConfig, Map<String, String> tags) {
return new TimeLimiterImpl(name, timeLimiterConfig, tags);
}
/**
* Creates a TimeLimiter decorator with a timeout Duration.
*
* @param timeoutDuration the timeout Duration
* @return The {@link TimeLimiter}
*/
static TimeLimiter of(Duration timeoutDuration) {
TimeLimiterConfig timeLimiterConfig = TimeLimiterConfig.custom()
.timeoutDuration(timeoutDuration)
.build();
return new TimeLimiterImpl(DEFAULT_NAME, timeLimiterConfig);
}
/**
* Creates a Callback that is restricted by a TimeLimiter.
*
* @param timeLimiter the TimeLimiter
* @param futureSupplier the original future supplier
* @param <T> the type of results supplied by the supplier
* @param <F> the future type supplied
* @return a future supplier which is restricted by a {@link TimeLimiter}.
*/
static <T, F extends Future<T>> Callable<T> decorateFutureSupplier(TimeLimiter timeLimiter,
Supplier<F> futureSupplier) {
return timeLimiter.decorateFutureSupplier(futureSupplier);
}
/**
* Decorate a CompletionStage supplier which is decorated by a TimeLimiter
*
* @param timeLimiter the TimeLimiter
* @param scheduler execution service to use to schedule timeout
* @param supplier the original CompletionStage supplier
* @param <T> the type of the returned CompletionStage's result
* @param <F> the CompletionStage type supplied
* @return a CompletionStage supplier which is decorated by a TimeLimiter
*/
static <T, F extends CompletionStage<T>> Supplier<CompletionStage<T>> decorateCompletionStage(
TimeLimiter timeLimiter, ScheduledExecutorService scheduler, Supplier<F> supplier) {
return timeLimiter.decorateCompletionStage(scheduler, supplier);
}
String getName();
/**
* Returns an unmodifiable map with tags assigned to this TimeLimiter.
*
* @return the tags assigned to this TimeLimiter in an unmodifiable map
*/
Map<String, String> getTags();
/**
* Get the TimeLimiterConfig of this TimeLimiter decorator.
*
* @return the TimeLimiterConfig of this TimeLimiter decorator
*/
TimeLimiterConfig getTimeLimiterConfig();
/**
* Decorates and executes the Future Supplier.
*
* @param futureSupplier the original future supplier
* @param <T> the result type of the future
* @param <F> the type of Future
* @return the result of the Future.
* @throws Exception if unable to compute a result
*/
default <T, F extends Future<T>> T executeFutureSupplier(Supplier<F> futureSupplier)
throws Exception {
return decorateFutureSupplier(this, futureSupplier).call();
}
/**
* Decorates and executes the CompletionStage Supplier
*
* @param scheduler execution service to use to schedule timeout
* @param supplier the original CompletionStage supplier
* @param <T> the type of the returned CompletionStage's result
* @param <F> the CompletionStage type supplied
* @return the decorated CompletionStage
*/
default <T, F extends CompletionStage<T>> CompletionStage<T> executeCompletionStage(
ScheduledExecutorService scheduler, Supplier<F> supplier) {
return decorateCompletionStage(this, scheduler, supplier).get();
}
/**
* Creates a Callback that is restricted by a TimeLimiter.
*
* @param futureSupplier the original future supplier
* @param <T> the type of results supplied by the supplier
* @param <F> the future type supplied
* @return a future supplier which is restricted by a {@link TimeLimiter}.
*/
<T, F extends Future<T>> Callable<T> decorateFutureSupplier(Supplier<F> futureSupplier);
/**
* Decorate a CompletionStage supplier which is decorated by a TimeLimiter
*
* @param scheduler execution service to use to schedule timeout
* @param supplier the original CompletionStage supplier
* @param <T> the type of the returned CompletionStage's result
* @param <F> the CompletionStage type supplied
* @return a CompletionStage supplier which is decorated by a TimeLimiter
*/
<T, F extends CompletionStage<T>> Supplier<CompletionStage<T>> decorateCompletionStage(
ScheduledExecutorService scheduler, Supplier<F> supplier);
/**
* Returns an EventPublisher which can be used to register event consumers.
*
* @return an EventPublisher
*/
EventPublisher getEventPublisher();
/**
* Records a successful call.
* <p>
* This method must be invoked when a call was successful.
*/
void onSuccess();
/**
* Records a failed call. This method must be invoked when a call failed.
*
* @param throwable The throwable which must be recorded
*/
void onError(Throwable throwable);
/**
* An EventPublisher which can be used to register event consumers.
*/
| TimeLimiter |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/reservation/Plan.java | {
"start": 2493,
"end": 2553
} | interface ____ extends PlanContext, PlanView, PlanEdit {
}
| Plan |
java | elastic__elasticsearch | x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/fst/BytesStore.java | {
"start": 1299,
"end": 17549
} | class ____ extends DataOutput implements Accountable {
private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(BytesStore.class) + RamUsageEstimator
.shallowSizeOfInstance(ArrayList.class);
private final List<byte[]> blocks = new ArrayList<>();
private final int blockSize;
private final int blockBits;
private final int blockMask;
private byte[] current;
private int nextWrite;
BytesStore(int blockBits) {
this.blockBits = blockBits;
blockSize = 1 << blockBits;
blockMask = blockSize - 1;
nextWrite = blockSize;
}
/** Pulls bytes from the provided IndexInput. */
BytesStore(DataInput in, long numBytes, int maxBlockSize) throws IOException {
int blockSize = 2;
int blockBits = 1;
while (blockSize < numBytes && blockSize < maxBlockSize) {
blockSize *= 2;
blockBits++;
}
this.blockBits = blockBits;
this.blockSize = blockSize;
this.blockMask = blockSize - 1;
long left = numBytes;
while (left > 0) {
final int chunk = (int) Math.min(blockSize, left);
byte[] block = new byte[chunk];
in.readBytes(block, 0, block.length);
blocks.add(block);
left -= chunk;
}
// So .getPosition still works
nextWrite = blocks.get(blocks.size() - 1).length;
}
/** Absolute write byte; you must ensure dest is < max position written so far. */
public void writeByte(long dest, byte b) {
int blockIndex = (int) (dest >> blockBits);
byte[] block = blocks.get(blockIndex);
block[(int) (dest & blockMask)] = b;
}
    @Override
    public void writeByte(byte b) {
        // Lazily start a fresh block once the current one is full.
        if (nextWrite == blockSize) {
            current = new byte[blockSize];
            blocks.add(current);
            nextWrite = 0;
        }
        current[nextWrite++] = b;
    }
@Override
public void writeBytes(byte[] b, int offset, int len) {
while (len > 0) {
int chunk = blockSize - nextWrite;
if (len <= chunk) {
assert b != null;
assert current != null;
System.arraycopy(b, offset, current, nextWrite, len);
nextWrite += len;
break;
} else {
if (chunk > 0) {
System.arraycopy(b, offset, current, nextWrite, chunk);
offset += chunk;
len -= chunk;
}
current = new byte[blockSize];
blocks.add(current);
nextWrite = 0;
}
}
}
int getBlockBits() {
return blockBits;
}
/**
* Absolute writeBytes without changing the current position. Note: this cannot "grow" the bytes,
* so you must only call it on already written parts.
*/
void writeBytes(long dest, byte[] b, int offset, int len) {
// System.out.println(" BS.writeBytes dest=" + dest + " offset=" + offset + " len=" + len);
assert dest + len <= getPosition() : "dest=" + dest + " pos=" + getPosition() + " len=" + len;
// Note: weird: must go "backwards" because copyBytes
// calls us with overlapping src/dest. If we
// go forwards then we overwrite bytes before we can
// copy them:
/*
int blockIndex = dest >> blockBits;
int upto = dest & blockMask;
byte[] block = blocks.get(blockIndex);
while (len > 0) {
int chunk = blockSize - upto;
System.out.println(" cycle chunk=" + chunk + " len=" + len);
if (len <= chunk) {
System.arraycopy(b, offset, block, upto, len);
break;
} else {
System.arraycopy(b, offset, block, upto, chunk);
offset += chunk;
len -= chunk;
blockIndex++;
block = blocks.get(blockIndex);
upto = 0;
}
}
*/
final long end = dest + len;
int blockIndex = (int) (end >> blockBits);
int downTo = (int) (end & blockMask);
if (downTo == 0) {
blockIndex--;
downTo = blockSize;
}
byte[] block = blocks.get(blockIndex);
while (len > 0) {
// System.out.println(" cycle downTo=" + downTo + " len=" + len);
if (len <= downTo) {
// System.out.println(" final: offset=" + offset + " len=" + len + " dest=" +
// (downTo-len));
System.arraycopy(b, offset, block, downTo - len, len);
break;
} else {
len -= downTo;
// System.out.println(" partial: offset=" + (offset + len) + " len=" + downTo + "
// dest=0");
System.arraycopy(b, offset + len, block, 0, downTo);
blockIndex--;
block = blocks.get(blockIndex);
downTo = blockSize;
}
}
}
/**
* Absolute copy bytes self to self, without changing the position. Note: this cannot "grow" the
* bytes, so must only call it on already written parts.
*/
public void copyBytes(long src, long dest, int len) {
// System.out.println("BS.copyBytes src=" + src + " dest=" + dest + " len=" + len);
assert src < dest;
// Note: weird: must go "backwards" because copyBytes
// calls us with overlapping src/dest. If we
// go forwards then we overwrite bytes before we can
// copy them:
/*
int blockIndex = src >> blockBits;
int upto = src & blockMask;
byte[] block = blocks.get(blockIndex);
while (len > 0) {
int chunk = blockSize - upto;
System.out.println(" cycle: chunk=" + chunk + " len=" + len);
if (len <= chunk) {
writeBytes(dest, block, upto, len);
break;
} else {
writeBytes(dest, block, upto, chunk);
blockIndex++;
block = blocks.get(blockIndex);
upto = 0;
len -= chunk;
dest += chunk;
}
}
*/
long end = src + len;
int blockIndex = (int) (end >> blockBits);
int downTo = (int) (end & blockMask);
if (downTo == 0) {
blockIndex--;
downTo = blockSize;
}
byte[] block = blocks.get(blockIndex);
while (len > 0) {
// System.out.println(" cycle downTo=" + downTo);
if (len <= downTo) {
// System.out.println(" finish");
writeBytes(dest, block, downTo - len, len);
break;
} else {
// System.out.println(" partial");
len -= downTo;
writeBytes(dest + len, block, 0, downTo);
blockIndex--;
block = blocks.get(blockIndex);
downTo = blockSize;
}
}
}
/** Copies bytes from this store to a target byte array. */
public void copyBytes(long src, byte[] dest, int offset, int len) {
int blockIndex = (int) (src >> blockBits);
int upto = (int) (src & blockMask);
byte[] block = blocks.get(blockIndex);
while (len > 0) {
int chunk = blockSize - upto;
if (len <= chunk) {
System.arraycopy(block, upto, dest, offset, len);
break;
} else {
System.arraycopy(block, upto, dest, offset, chunk);
blockIndex++;
block = blocks.get(blockIndex);
upto = 0;
len -= chunk;
offset += chunk;
}
}
}
/** Writes an int at the absolute position without changing the current pointer. */
public void writeInt(long pos, int value) {
int blockIndex = (int) (pos >> blockBits);
int upto = (int) (pos & blockMask);
byte[] block = blocks.get(blockIndex);
int shift = 24;
for (int i = 0; i < 4; i++) {
block[upto++] = (byte) (value >> shift);
shift -= 8;
if (upto == blockSize) {
upto = 0;
blockIndex++;
block = blocks.get(blockIndex);
}
}
}
  /**
   * Reverse from srcPos, inclusive, to destPos, inclusive.
   *
   * <p>Swaps bytes pairwise with two cursors — {@code src} walking forward and {@code dest}
   * walking backward — until they meet in the middle; block boundaries are crossed transparently
   * in both directions.
   */
  public void reverse(long srcPos, long destPos) {
    assert srcPos < destPos;
    assert destPos < getPosition();
    // System.out.println("reverse src=" + srcPos + " dest=" + destPos);
    // Split each absolute position into (block index, offset within block).
    int srcBlockIndex = (int) (srcPos >> blockBits);
    int src = (int) (srcPos & blockMask);
    byte[] srcBlock = blocks.get(srcBlockIndex);
    int destBlockIndex = (int) (destPos >> blockBits);
    int dest = (int) (destPos & blockMask);
    byte[] destBlock = blocks.get(destBlockIndex);
    // System.out.println("  srcBlock=" + srcBlockIndex + " destBlock=" + destBlockIndex);
    // Half the span length: each iteration swaps one pair of bytes.
    int limit = (int) (destPos - srcPos + 1) / 2;
    for (int i = 0; i < limit; i++) {
      // System.out.println("  cycle src=" + src + " dest=" + dest);
      byte b = srcBlock[src];
      srcBlock[src] = destBlock[dest];
      destBlock[dest] = b;
      // Forward cursor: wrap into the next block when we run off the end.
      src++;
      if (src == blockSize) {
        srcBlockIndex++;
        srcBlock = blocks.get(srcBlockIndex);
        // System.out.println("    set destBlock=" + destBlock + " srcBlock=" + srcBlock);
        src = 0;
      }
      // Backward cursor: wrap into the previous block when we run off the front.
      dest--;
      if (dest == -1) {
        destBlockIndex--;
        destBlock = blocks.get(destBlockIndex);
        // System.out.println("    set destBlock=" + destBlock + " srcBlock=" + srcBlock);
        dest = blockSize - 1;
      }
    }
  }
public void skipBytes(int len) {
while (len > 0) {
int chunk = blockSize - nextWrite;
if (len <= chunk) {
nextWrite += len;
break;
} else {
len -= chunk;
current = new byte[blockSize];
blocks.add(current);
nextWrite = 0;
}
}
}
public long getPosition() {
return ((long) blocks.size() - 1) * blockSize + nextWrite;
}
  /**
   * Pos must be less than the max position written so far! Ie, you cannot "grow" the file with
   * this!
   *
   * <p>Shrinks the store to exactly {@code newLen} bytes, dropping any now-unused blocks and
   * repositioning the write pointer.
   */
  public void truncate(long newLen) {
    assert newLen <= getPosition();
    assert newLen >= 0;
    int blockIndex = (int) (newLen >> blockBits);
    nextWrite = (int) (newLen & blockMask);
    if (nextWrite == 0) {
      // Landing exactly on a block boundary: back up so the previous (full) block stays last.
      blockIndex--;
      nextWrite = blockSize;
    }
    // Clearing the subList view removes every block past the new last one from `blocks`.
    blocks.subList(blockIndex + 1, blocks.size()).clear();
    if (newLen == 0) {
      current = null;
    } else {
      current = blocks.get(blockIndex);
    }
    assert newLen == getPosition();
  }
public void finish() {
if (current != null) {
byte[] lastBuffer = new byte[nextWrite];
System.arraycopy(current, 0, lastBuffer, 0, nextWrite);
blocks.set(blocks.size() - 1, lastBuffer);
current = null;
}
}
/** Writes all of our bytes to the target {@link DataOutput}. */
public void writeTo(DataOutput out) throws IOException {
for (byte[] block : blocks) {
out.writeBytes(block, 0, block.length);
}
}
  /**
   * Returns a reader that iterates the stored bytes in forward order.
   *
   * <p>For a single-block store a simple array-backed reader is used; otherwise the returned
   * reader lazily advances across blocks as it reads.
   */
  public FST.BytesReader getForwardReader() {
    if (blocks.size() == 1) {
      // Fast path: the whole store fits in one array.
      return new ForwardBytesReader(blocks.get(0));
    }
    return new FST.BytesReader() {
      // Block currently being read from; fetched lazily on first read.
      private byte[] current;
      // Index of the NEXT block to fetch (current is blocks.get(nextBuffer - 1)).
      private int nextBuffer;
      // Read offset within `current`; starting at blockSize forces a fetch on first readByte().
      private int nextRead = blockSize;
      @Override
      public byte readByte() {
        if (nextRead == blockSize) {
          // Exhausted the current block: advance to the next one.
          current = blocks.get(nextBuffer++);
          nextRead = 0;
        }
        return current[nextRead++];
      }
      @Override
      public void skipBytes(long count) {
        setPosition(getPosition() + count);
      }
      @Override
      public void readBytes(byte[] b, int offset, int len) {
        // Bulk copy, draining each block before moving to the next.
        while (len > 0) {
          int chunkLeft = blockSize - nextRead;
          if (len <= chunkLeft) {
            System.arraycopy(current, nextRead, b, offset, len);
            nextRead += len;
            break;
          } else {
            if (chunkLeft > 0) {
              System.arraycopy(current, nextRead, b, offset, chunkLeft);
              offset += chunkLeft;
              len -= chunkLeft;
            }
            current = blocks.get(nextBuffer++);
            nextRead = 0;
          }
        }
      }
      @Override
      public long getPosition() {
        // nextBuffer - 1 full blocks precede `current`, plus the offset within it.
        return ((long) nextBuffer - 1) * blockSize + nextRead;
      }
      @Override
      public void setPosition(long pos) {
        int bufferIndex = (int) (pos >> blockBits);
        if (nextBuffer != bufferIndex + 1) {
          // Only re-fetch the block if the target lies in a different one.
          nextBuffer = bufferIndex + 1;
          current = blocks.get(bufferIndex);
        }
        nextRead = (int) (pos & blockMask);
        assert getPosition() == pos;
      }
      @Override
      public boolean reversed() {
        return false;
      }
    };
  }
  /** Returns a reader that iterates the stored bytes in reverse order. */
  public FST.BytesReader getReverseReader() {
    return getReverseReader(true);
  }
  /**
   * Returns a reverse-order reader.
   *
   * @param allowSingle if true and the store has exactly one block, a simpler array-backed
   *     reverse reader is returned instead of the block-hopping one below
   */
  FST.BytesReader getReverseReader(boolean allowSingle) {
    if (allowSingle && blocks.size() == 1) {
      return new ReverseBytesReader(blocks.get(0));
    }
    return new FST.BytesReader() {
      // Block currently being read from (starts at block 0; repositioned via setPosition).
      private byte[] current = blocks.size() == 0 ? null : blocks.get(0);
      // Index of the NEXT (i.e. previous-in-file) block to fetch when `current` is exhausted.
      private int nextBuffer = -1;
      // Read offset within `current`, moving downward.
      private int nextRead = 0;
      @Override
      public byte readByte() {
        if (nextRead == -1) {
          // Ran off the front of the current block: step back to the previous one.
          current = blocks.get(nextBuffer--);
          nextRead = blockSize - 1;
        }
        return current[nextRead--];
      }
      @Override
      public void skipBytes(long count) {
        // Skipping moves BACKWARD for a reversed reader.
        setPosition(getPosition() - count);
      }
      @Override
      public void readBytes(byte[] b, int offset, int len) {
        for (int i = 0; i < len; i++) {
          b[offset + i] = readByte();
        }
      }
      @Override
      public long getPosition() {
        // nextBuffer + 1 is the index of `current`; add the offset within it.
        return ((long) nextBuffer + 1) * blockSize + nextRead;
      }
      @Override
      public void setPosition(long pos) {
        // NOTE: a little weird because if you
        // setPosition(0), the next byte you read is
        // bytes[0] ... but I would expect bytes[-1] (ie,
        // EOF)...?
        int bufferIndex = (int) (pos >> blockBits);
        if (nextBuffer != bufferIndex - 1) {
          // Only re-fetch the block if the target lies in a different one.
          nextBuffer = bufferIndex - 1;
          current = blocks.get(bufferIndex);
        }
        nextRead = (int) (pos & blockMask);
        assert getPosition() == pos : "pos=" + pos + " getPos()=" + getPosition();
      }
      @Override
      public boolean reversed() {
        return true;
      }
    };
  }
@Override
public long ramBytesUsed() {
long size = BASE_RAM_BYTES_USED;
for (byte[] block : blocks) {
size += RamUsageEstimator.sizeOf(block);
}
return size;
}
@Override
public String toString() {
return getClass().getSimpleName() + "(numBlocks=" + blocks.size() + ")";
}
}
| BytesStore |
java | alibaba__nacos | api/src/main/java/com/alibaba/nacos/api/ai/model/a2a/AgentExtension.java | {
"start": 771,
"end": 2110
} | class ____ {
private String uri;
private String description;
private Boolean required;
private Map<String, Object> params;
public String getUri() {
return uri;
}
public void setUri(String uri) {
this.uri = uri;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public Boolean getRequired() {
return required;
}
public void setRequired(Boolean required) {
this.required = required;
}
public Map<String, Object> getParams() {
return params;
}
public void setParams(Map<String, Object> params) {
this.params = params;
}
@Override
public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) {
return false;
}
AgentExtension that = (AgentExtension) o;
return Objects.equals(uri, that.uri) && Objects.equals(description, that.description) && Objects.equals(
required, that.required) && Objects.equals(params, that.params);
}
@Override
public int hashCode() {
return Objects.hash(uri, description, required, params);
}
}
| AgentExtension |
java | elastic__elasticsearch | plugins/examples/painless-whitelist/src/main/java/org/elasticsearch/example/painlesswhitelist/ExampleStaticMethodClass.java | {
"start": 530,
"end": 646
} | class ____ {
public static int exampleAddInts(int x, int y) {
return x + y;
}
}
| ExampleStaticMethodClass |
java | apache__flink | flink-state-backends/flink-statebackend-forst/src/main/java/org/apache/flink/state/forst/fs/ForStFileSystemUtils.java | {
"start": 1070,
"end": 2937
} | class ____ {
private static final Logger LOG = LoggerFactory.getLogger(ForStFileSystemUtils.class);
private static final String DUMMY_DIR_NAME = "_dummy_dir_";
public static boolean isParentDir(@Nullable Path path, String dir) {
if (path == null) {
return false;
}
return isParentDir(path.toString(), dir);
}
public static boolean isParentDir(String path, String dir) {
if (dir.isEmpty()) {
return false;
}
if (dir.charAt(dir.length() - 1) == '/') {
return path.startsWith(dir);
} else {
return (path.startsWith(dir + "/"));
}
}
public static ForStFlinkFileSystem tryDecorate(ForStFlinkFileSystem fileSystem) {
try {
return isIncompleteMkdirEnabled(fileSystem)
? new ForStFileSystemTrackingCreatedDirDecorator(fileSystem)
: fileSystem;
} catch (IOException e) {
LOG.info("Cannot decorate ForStFlinkFileSystem", e);
}
return fileSystem;
}
private static boolean isIncompleteMkdirEnabled(ForStFlinkFileSystem fileSystem)
throws IOException {
// check if the underlying FileSystem uses an incomplete mkdir implementation
Path dummyDir = new Path(fileSystem.getRemoteBase(), DUMMY_DIR_NAME + UUID.randomUUID());
if (fileSystem.mkdirs(dummyDir)) {
if (!fileSystem.exists(dummyDir)) {
return true;
}
fileSystem.delete(new Path(DUMMY_DIR_NAME), true);
return false;
} else {
LOG.info(
"Cannot to mkdir for "
+ DUMMY_DIR_NAME
+ ", skip decoration of ForStFlinkFileSystem");
}
return false;
}
}
| ForStFileSystemUtils |
java | apache__maven | impl/maven-core/src/test/java/org/apache/maven/project/harness/Xpp3DomNodePointer.java | {
"start": 1227,
"end": 3387
} | class ____ extends NodePointer {
private XmlNode node;
Xpp3DomNodePointer(XmlNode node) {
super(null);
this.node = node;
}
Xpp3DomNodePointer(NodePointer parent, XmlNode node) {
super(parent);
this.node = node;
}
@Override
public int compareChildNodePointers(NodePointer pointer1, NodePointer pointer2) {
XmlNode node1 = (XmlNode) pointer1.getBaseValue();
XmlNode node2 = (XmlNode) pointer2.getBaseValue();
if (node1 == node2) {
return 0;
}
for (XmlNode child : node.children()) {
if (child == node1) {
return -1;
}
if (child == node2) {
return 1;
}
}
return 0;
}
@Override
public Object getValue() {
return getValue(node);
}
private static Object getValue(XmlNode node) {
if (node.value() != null) {
return node.value();
} else {
List<Object> children = new ArrayList<>();
for (XmlNode child : node.children()) {
children.add(getValue(child));
}
return children;
}
}
@Override
public Object getBaseValue() {
return node;
}
@Override
public Object getImmediateNode() {
return node;
}
@Override
public int getLength() {
return 1;
}
@Override
public QName getName() {
return new QName(null, node.name());
}
@Override
public boolean isCollection() {
return false;
}
@Override
public boolean isLeaf() {
return node.children().isEmpty();
}
@Override
public void setValue(Object value) {
throw new UnsupportedOperationException();
}
@Override
public NodeIterator childIterator(NodeTest test, boolean reverse, NodePointer startWith) {
return new Xpp3DomNodeIterator(this, test, reverse, startWith);
}
@Override
public NodeIterator attributeIterator(QName qname) {
return new Xpp3DomAttributeIterator(this, qname);
}
}
| Xpp3DomNodePointer |
java | spring-projects__spring-boot | module/spring-boot-actuator-autoconfigure/src/test/java/org/springframework/boot/actuate/autoconfigure/info/InfoContributorAutoConfigurationTests.java | {
"start": 9703,
"end": 10049
} | class ____ {
@Bean
BuildProperties buildProperties() {
Properties properties = new Properties();
properties.put("group", "com.example");
properties.put("artifact", "demo");
properties.put("foo", "bar");
return new BuildProperties(properties);
}
}
@Configuration(proxyBeanMethods = false)
static | BuildPropertiesConfiguration |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/basic/LocalDateTimeWithTemporalTimeTest.java | {
"start": 1089,
"end": 1584
} | class ____ {
@Id
private Long id;
//throws org.hibernate.AnnotationException: @Temporal should only be set on a java.util.Date or java.util.Calendar property
//@Temporal(TemporalType.TIME)
@Column(name = "`timestamp`")
private LocalDateTime timestamp;
public DateEvent() {
}
public DateEvent(LocalDateTime timestamp) {
this.timestamp = timestamp;
}
public Long getId() {
return id;
}
public LocalDateTime getTimestamp() {
return timestamp;
}
}
}
| DateEvent |
java | elastic__elasticsearch | x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlMappingsUpgradeIT.java | {
"start": 1384,
"end": 12574
} | class ____ extends AbstractUpgradeTestCase {
private static final String JOB_ID = "ml-mappings-upgrade-job";
@BeforeClass
public static void maybeSkip() {
assumeFalse("Skip ML tests on unsupported glibc versions", SKIP_ML_TESTS);
}
@Override
protected Collection<String> templatesToWaitFor() {
// We shouldn't wait for ML templates during the upgrade - production won't
if (CLUSTER_TYPE != ClusterType.OLD) {
return super.templatesToWaitFor();
}
return Stream.concat(XPackRestTestConstants.ML_POST_V7120_TEMPLATES.stream(), super.templatesToWaitFor().stream())
.collect(Collectors.toSet());
}
/**
* The purpose of this test is to ensure that when a job is open through a rolling upgrade we upgrade the results
* index mappings when it is assigned to an upgraded node even if no other ML endpoint is called after the upgrade
*/
public void testMappingsUpgrade() throws Exception {
switch (CLUSTER_TYPE) {
case OLD:
createAndOpenTestJob();
break;
case MIXED:
// We don't know whether the job is on an old or upgraded node, so cannot assert that the mappings have been upgraded
break;
case UPGRADED:
assertUpgradedResultsMappings();
assertUpgradedAnnotationsMappings();
closeAndReopenTestJob();
assertUpgradedConfigMappings();
assertMlLegacyTemplatesDeleted();
IndexMappingTemplateAsserter.assertMlMappingsMatchTemplates(client());
assertNotificationsIndexAliasCreated();
assertBusy(
() -> IndexMappingTemplateAsserter.assertTemplateVersionAndPattern(
client(),
".ml-anomalies-",
10000005,
List.of(".ml-anomalies-*", ".reindexed-v7-ml-anomalies-*")
)
);
break;
default:
throw new UnsupportedOperationException("Unknown cluster type [" + CLUSTER_TYPE + "]");
}
}
private void createAndOpenTestJob() throws IOException {
// Use a custom index because other rolling upgrade tests meddle with the shared index
String jobConfig = """
{
"results_index_name":"mappings-upgrade-test",
"analysis_config" : {
"bucket_span": "600s",
"detectors" :[{"function":"metric","field_name":"responsetime","by_field_name":"airline"}]
},
"data_description" : {
}
}"
""";
Request putJob = new Request("PUT", "_ml/anomaly_detectors/" + JOB_ID);
putJob.setJsonEntity(jobConfig);
Response response = client().performRequest(putJob);
assertEquals(200, response.getStatusLine().getStatusCode());
Request openJob = new Request("POST", "_ml/anomaly_detectors/" + JOB_ID + "/_open");
response = client().performRequest(openJob);
assertEquals(200, response.getStatusLine().getStatusCode());
}
// Doing this should force the config index mappings to be upgraded,
// when the finished time is cleared on reopening the job
private void closeAndReopenTestJob() throws IOException {
Request closeJob = new Request("POST", "_ml/anomaly_detectors/" + JOB_ID + "/_close");
Response response = client().performRequest(closeJob);
assertEquals(200, response.getStatusLine().getStatusCode());
Request openJob = new Request("POST", "_ml/anomaly_detectors/" + JOB_ID + "/_open");
response = client().performRequest(openJob);
assertEquals(200, response.getStatusLine().getStatusCode());
}
@SuppressWarnings("unchecked")
private void assertUpgradedResultsMappings() throws Exception {
assertBusy(() -> {
Request getMappings = new Request("GET", XPackRestTestHelper.resultsWriteAlias(JOB_ID) + "/_mappings");
Response response = client().performRequest(getMappings);
Map<String, Object> responseLevel = entityAsMap(response);
assertNotNull(responseLevel);
Map<String, Object> indexLevel = null;
// The name of the concrete index underlying the results index alias may or may not have been changed
// by the upgrade process (depending on what other tests are being run and the order they're run in),
// so navigating to the next level of the tree must account for both cases
for (Map.Entry<String, Object> entry : responseLevel.entrySet()) {
if (entry.getKey().startsWith(".ml-anomalies-") && entry.getKey().contains("mappings-upgrade-test")) {
indexLevel = (Map<String, Object>) entry.getValue();
break;
}
}
assertNotNull(indexLevel);
assertEquals(
AnomalyDetectorsIndex.RESULTS_INDEX_MAPPINGS_VERSION,
extractValue("mappings._meta.managed_index_mappings_version", indexLevel)
);
// TODO: as the years go by, the field we assert on here should be changed
// to the most recent field we've added that is NOT of type "keyword"
assertEquals(
"Incorrect type for peak_model_bytes in " + responseLevel,
"long",
extractValue("mappings.properties.model_size_stats.properties.peak_model_bytes.type", indexLevel)
);
});
}
@SuppressWarnings("unchecked")
private void assertUpgradedAnnotationsMappings() throws Exception {
assertBusy(() -> {
Request getMappings = new Request("GET", ".ml-annotations-write/_mappings");
Response response = client().performRequest(getMappings);
Map<String, Object> responseLevel = entityAsMap(response);
assertNotNull(responseLevel);
Map<String, Object> indexLevel = null;
// The name of the concrete index underlying the annotations index write alias may or may not have been
// changed by the upgrade process (depending on what other tests are being run and the order they're run
// in), so navigating to the next level of the tree must account for both cases
for (Map.Entry<String, Object> entry : responseLevel.entrySet()) {
if (entry.getKey().startsWith(".ml-annotations-")) {
indexLevel = (Map<String, Object>) entry.getValue();
break;
}
}
assertNotNull(indexLevel);
assertEquals(
AnnotationIndex.ANNOTATION_INDEX_MAPPINGS_VERSION,
extractValue("mappings._meta.managed_index_mappings_version", indexLevel)
);
// TODO: as the years go by, the field we assert on here should be changed
// to the most recent field we've added that would be incorrectly mapped by dynamic
// mappings, for example a field we want to be "keyword" incorrectly mapped as "text"
assertEquals(
"Incorrect type for event in " + responseLevel,
"keyword",
extractValue("mappings.properties.event.type", indexLevel)
);
});
}
private void assertMlLegacyTemplatesDeleted() throws Exception {
// All the legacy ML templates we created over the years should be deleted now they're no longer needed
assertBusy(() -> {
Request request = new Request("GET", "/_template/.ml*");
try {
Response response = client().performRequest(request);
Map<String, Object> responseLevel = entityAsMap(response);
assertNotNull(responseLevel);
// If we get here the test has failed, but it's critical that we find out which templates
// existed, hence not using expectThrows() above
assertThat(responseLevel.keySet(), empty());
} catch (ResponseException e) {
// Not found is fine
assertThat(
"Unexpected failure getting ML templates: " + e.getResponse().getStatusLine(),
e.getResponse().getStatusLine().getStatusCode(),
is(404)
);
}
});
}
@SuppressWarnings("unchecked")
private void assertUpgradedConfigMappings() throws Exception {
assertBusy(() -> {
Request getMappings = new Request("GET", ".ml-config/_mappings");
getMappings.setOptions(
expectWarnings(
"this request accesses system indices: [.ml-config], but in a future major "
+ "version, direct access to system indices will be prevented by default"
)
);
Response response = client().performRequest(getMappings);
Map<String, Object> responseLevel = entityAsMap(response);
assertNotNull(responseLevel);
Map<String, Object> indexLevel = (Map<String, Object>) responseLevel.get(".ml-config");
assertNotNull(indexLevel);
assertEquals(
MlConfigIndex.CONFIG_INDEX_MAPPINGS_VERSION,
extractValue("mappings._meta.managed_index_mappings_version", indexLevel)
);
// TODO: as the years go by, the field we assert on here should be changed
// to the most recent field we've added that is NOT of type "keyword"
assertEquals(
"Incorrect type for annotations_enabled in " + responseLevel,
"boolean",
extractValue("mappings.properties.model_plot_config.properties.annotations_enabled.type", indexLevel)
);
});
}
@SuppressWarnings("unchecked")
private void assertNotificationsIndexAliasCreated() throws Exception {
assertBusy(() -> {
Request getMappings = new Request("GET", "_alias/.ml-notifications-write");
Response response = client().performRequest(getMappings);
Map<String, Object> responseMap = entityAsMap(response);
assertThat(responseMap.entrySet(), hasSize(1));
var aliases = (Map<String, Object>) responseMap.get(".ml-notifications-000002");
assertThat(aliases.entrySet(), hasSize(1));
var allAliases = (Map<String, Object>) aliases.get("aliases");
var writeAlias = (Map<String, Object>) allAliases.get(".ml-notifications-write");
assertThat(writeAlias, hasEntry("is_hidden", Boolean.TRUE));
var isWriteIndex = (Boolean) writeAlias.get("is_write_index");
assertThat(isWriteIndex, anyOf(is(Boolean.TRUE), nullValue()));
});
}
}
| MlMappingsUpgradeIT |
java | spring-projects__spring-boot | module/spring-boot-r2dbc/src/main/java/org/springframework/boot/r2dbc/docker/compose/ClickHouseR2dbcDockerComposeConnectionDetailsFactory.java | {
"start": 1844,
"end": 2677
} | class ____ extends DockerComposeConnectionDetails
implements R2dbcConnectionDetails {
private static final ConnectionFactoryOptionsBuilder connectionFactoryOptionsBuilder = new ConnectionFactoryOptionsBuilder(
"clickhouse", 8123);
private final ConnectionFactoryOptions connectionFactoryOptions;
ClickhouseDbR2dbcDockerComposeConnectionDetails(RunningService service) {
super(service);
ClickHouseEnvironment environment = new ClickHouseEnvironment(service.env());
this.connectionFactoryOptions = connectionFactoryOptionsBuilder.build(service, environment.getDatabase(),
environment.getUsername(), environment.getPassword());
}
@Override
public ConnectionFactoryOptions getConnectionFactoryOptions() {
return this.connectionFactoryOptions;
}
}
}
| ClickhouseDbR2dbcDockerComposeConnectionDetails |
java | apache__camel | test-infra/camel-test-infra-kafka/src/test/java/org/apache/camel/test/infra/kafka/services/ContainerLocalAuthKafkaService.java | {
"start": 2645,
"end": 4658
} | class ____ extends TransientAuthenticatedKafkaContainer {
public StaticKafkaContainer(String jaasConfigFile) {
super(jaasConfigFile);
addFixedExposedPort(9093, 9093);
}
@Override
public String getBootstrapServers() {
return String.format("PLAINTEXT://%s:9093", this.getHost());
}
}
public ContainerLocalAuthKafkaService(String jaasConfigFile) {
kafka = initContainer(jaasConfigFile);
}
public ContainerLocalAuthKafkaService(KafkaContainer kafka) {
this.kafka = kafka;
}
protected KafkaContainer initContainer(String jaasConfigFile) {
return new TransientAuthenticatedKafkaContainer(jaasConfigFile);
}
public String getBootstrapServers() {
return kafka.getBootstrapServers();
}
@Override
public String brokers() {
return getBootstrapServers();
}
@Override
public void registerProperties() {
System.setProperty(KafkaProperties.KAFKA_BOOTSTRAP_SERVERS, getBootstrapServers());
}
@Override
public void initialize() {
kafka.start();
registerProperties();
LOG.info("Kafka bootstrap server running at address {}", kafka.getBootstrapServers());
}
@Override
public void shutdown() {
kafka.stop();
}
@Override
public KafkaContainer getContainer() {
return kafka;
}
/**
* This method can be used by tests to get a sample 'sasl.jaas.config' configuration for the given user and password
*
* @param username the user to create the config for
* @param password the password for the user
* @return A string with the configuration
*/
public static String generateSimpleSaslJaasConfig(String username, String password) {
return String.format("org.apache.kafka.common.security.plain.PlainLoginModule required username='%s' password='%s';",
username, password);
}
}
| StaticKafkaContainer |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/clientproxy/packageprivate/foo/Producer.java | {
"start": 184,
"end": 441
} | class ____ {
@Produces
@ApplicationScoped
public MyInterface2 myInterface2() {
return new MyInterface2() {
@Override
public String ping() {
return "quarkus";
}
};
}
}
| Producer |
java | spring-cloud__spring-cloud-gateway | spring-cloud-gateway-integration-tests/http2/src/test/java/org/springframework/cloud/gateway/tests/http2/Http2ApplicationTests.java | {
"start": 2072,
"end": 3484
} | class ____ {
@LocalServerPort
int port;
@Test
public void http2Works(CapturedOutput output) {
Hooks.onOperatorDebug();
String uri = "https://localhost:" + port + "/myprefix/hello";
String expected = "Hello";
assertResponse(uri, expected);
Assertions.assertThat(output).contains("Negotiated application-level protocol [h2]", "PRI * HTTP/2.0");
}
public static void assertResponse(String uri, String expected) {
WebClient client = WebClient.builder().clientConnector(new ReactorClientHttpConnector(getHttpClient())).build();
Mono<ResponseEntity<String>> responseEntityMono = client.get().uri(uri).retrieve().toEntity(String.class);
StepVerifier.create(responseEntityMono).assertNext(entity -> {
assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.OK);
assertThat(entity.getBody()).isEqualTo(expected);
}).expectComplete().verify();
}
static HttpClient getHttpClient() {
return HttpClient
.create(ConnectionProvider.builder("test")
.maxConnections(100)
.pendingAcquireTimeout(Duration.ofMillis(0))
.pendingAcquireMaxCount(-1)
.build())
.protocol(HttpProtocol.HTTP11, HttpProtocol.H2)
.secure(sslContextSpec -> {
Http2SslContextSpec clientSslCtxt = Http2SslContextSpec.forClient()
.configure(builder -> builder.trustManager(InsecureTrustManagerFactory.INSTANCE));
sslContextSpec.sslContext(clientSslCtxt);
});
}
}
| Http2ApplicationTests |
java | bumptech__glide | library/src/main/java/com/bumptech/glide/request/target/AppWidgetTarget.java | {
"start": 456,
"end": 759
} | class ____ used in order to display downloaded Bitmap inside an ImageView of an AppWidget
* through RemoteViews.
*
* <p>Note - For cancellation to work correctly, you must pass in the same instance of this class
* for every subsequent load.
*/
// Public API.
@SuppressWarnings("WeakerAccess")
public | is |
java | google__error-prone | check_api/src/main/java/com/google/errorprone/bugpatterns/BugChecker.java | {
"start": 19989,
"end": 20141
} | interface ____ extends Suppressible {
Description matchPrimitiveType(PrimitiveTypeTree tree, VisitorState state);
}
public | PrimitiveTypeTreeMatcher |
java | quarkusio__quarkus | extensions/smallrye-context-propagation/deployment/src/test/java/io/quarkus/smallrye/context/deployment/test/cdi/ConfiguredAndSharedBeansTest.java | {
"start": 12350,
"end": 13299
} | class ____ {
@Produces
@ApplicationScoped
@NamedInstance("userProduced")
ManagedExecutor produceEM() {
//create any non-default
return ManagedExecutor.builder().maxAsync(2).build();
}
@Produces
@ApplicationScoped
@NamedInstance("userProduced")
ThreadContext produceTC() {
//create any non-default
return ThreadContext.builder().propagated().build();
}
@Produces
@ApplicationScoped
@MyQualifier
ManagedExecutor produceQualifiedEM() {
//create any non-default
return ManagedExecutor.builder().maxAsync(2).build();
}
@Produces
@ApplicationScoped
@MyQualifier
ThreadContext produceQualifiedTC() {
//create any non-default
return ThreadContext.builder().propagated().build();
}
}
}
| ProducerBean |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SnapshotRetentionConfiguration.java | {
"start": 1333,
"end": 14782
} | class ____ implements ToXContentObject, Writeable {
public static final SnapshotRetentionConfiguration EMPTY = new SnapshotRetentionConfiguration(null, null, null);
private static final ParseField EXPIRE_AFTER = new ParseField("expire_after");
private static final ParseField MINIMUM_SNAPSHOT_COUNT = new ParseField("min_count");
private static final ParseField MAXIMUM_SNAPSHOT_COUNT = new ParseField("max_count");
private static final Logger logger = LogManager.getLogger(SnapshotRetentionConfiguration.class);
private static final Set<SnapshotState> UNSUCCESSFUL_STATES = EnumSet.of(SnapshotState.FAILED, SnapshotState.PARTIAL);
private static final ConstructingObjectParser<SnapshotRetentionConfiguration, Void> PARSER = new ConstructingObjectParser<>(
"snapshot_retention",
true,
a -> {
TimeValue expireAfter = a[0] == null ? null : TimeValue.parseTimeValue((String) a[0], EXPIRE_AFTER.getPreferredName());
Integer minCount = (Integer) a[1];
Integer maxCount = (Integer) a[2];
return new SnapshotRetentionConfiguration(expireAfter, minCount, maxCount);
}
);
static {
PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), EXPIRE_AFTER);
PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MINIMUM_SNAPSHOT_COUNT);
PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAXIMUM_SNAPSHOT_COUNT);
}
private final LongSupplier nowSupplier;
private final TimeValue expireAfter;
private final Integer minimumSnapshotCount;
private final Integer maximumSnapshotCount;
SnapshotRetentionConfiguration(StreamInput in) throws IOException {
nowSupplier = System::currentTimeMillis;
this.expireAfter = in.readOptionalTimeValue();
this.minimumSnapshotCount = in.readOptionalVInt();
this.maximumSnapshotCount = in.readOptionalVInt();
}
public SnapshotRetentionConfiguration(
@Nullable TimeValue expireAfter,
@Nullable Integer minimumSnapshotCount,
@Nullable Integer maximumSnapshotCount
) {
this(System::currentTimeMillis, expireAfter, minimumSnapshotCount, maximumSnapshotCount);
}
public SnapshotRetentionConfiguration(
LongSupplier nowSupplier,
@Nullable TimeValue expireAfter,
@Nullable Integer minimumSnapshotCount,
@Nullable Integer maximumSnapshotCount
) {
this.nowSupplier = nowSupplier;
this.expireAfter = expireAfter;
this.minimumSnapshotCount = minimumSnapshotCount;
this.maximumSnapshotCount = maximumSnapshotCount;
if (this.minimumSnapshotCount != null && this.minimumSnapshotCount < 1) {
throw new IllegalArgumentException("minimum snapshot count must be at least 1, but was: " + this.minimumSnapshotCount);
}
if (this.maximumSnapshotCount != null && this.maximumSnapshotCount < 1) {
throw new IllegalArgumentException("maximum snapshot count must be at least 1, but was: " + this.maximumSnapshotCount);
}
if ((maximumSnapshotCount != null && minimumSnapshotCount != null) && this.minimumSnapshotCount > this.maximumSnapshotCount) {
throw new IllegalArgumentException(
"minimum snapshot count "
+ this.minimumSnapshotCount
+ " cannot be larger than maximum snapshot count "
+ this.maximumSnapshotCount
);
}
}
public static SnapshotRetentionConfiguration parse(XContentParser parser, String name) {
return PARSER.apply(parser, null);
}
/**
* @return whether a snapshot should be deleted according to this retention policy.
* @param allSnapshots all the snapshot details pertaining to this SLM policy and repository
*/
public boolean isSnapshotEligibleForDeletion(
SnapshotId snapshotId,
RepositoryData.SnapshotDetails snapshotDetails,
Map<SnapshotId, RepositoryData.SnapshotDetails> allSnapshots
) {
assert Strings.hasText(snapshotDetails.getSlmPolicy());
final var snapshotState = snapshotDetails.getSnapshotState();
final var startTimeMillis = snapshotDetails.getStartTimeMillis();
final var snapshotName = snapshotId.getName();
final int totalSnapshotCount = allSnapshots.size();
final var sortedSnapshots = allSnapshots.entrySet()
.stream()
.sorted(Comparator.comparingLong(e -> e.getValue().getStartTimeMillis()))
.toList();
int successCount = 0;
long latestSuccessfulTimestamp = Long.MIN_VALUE;
for (final var snapshot : allSnapshots.values()) {
assert Objects.equals(snapshot.getSlmPolicy(), snapshotDetails.getSlmPolicy());
if (snapshot.getSnapshotState() == SnapshotState.SUCCESS) {
successCount++;
latestSuccessfulTimestamp = Math.max(latestSuccessfulTimestamp, snapshot.getStartTimeMillis());
}
}
final long newestSuccessfulTimestamp = latestSuccessfulTimestamp;
final int successfulSnapshotCount = successCount;
// First, if there's no expire_after and a more recent successful snapshot, we can delete all the failed ones
if (this.expireAfter == null && UNSUCCESSFUL_STATES.contains(snapshotState) && newestSuccessfulTimestamp > startTimeMillis) {
// There's no expire_after and there's a more recent successful snapshot, delete this failed one
logger.trace("[{}]: ELIGIBLE as it is {} and there is a more recent successful snapshot", snapshotName, snapshotState);
return true;
}
// Next, enforce the maximum count, if the size is over the maximum number of
// snapshots, then allow the oldest N (where N is the number over the maximum snapshot
// count) snapshots to be eligible for deletion
if (this.maximumSnapshotCount != null && successfulSnapshotCount > this.maximumSnapshotCount) {
final long successfulSnapsToDelete = successfulSnapshotCount - this.maximumSnapshotCount;
boolean found = false;
int successfulSeen = 0;
for (final var s : sortedSnapshots) {
if (s.getValue().getSnapshotState() == SnapshotState.SUCCESS) {
successfulSeen++;
}
if (successfulSeen > successfulSnapsToDelete) {
break;
}
if (s.getKey().equals(snapshotId)) {
found = true;
break;
}
}
if (found) {
logger.trace(
"[{}]: ELIGIBLE as it is one of the {} oldest snapshots with "
+ "{} non-failed snapshots ({} total), over the limit of {} maximum snapshots",
snapshotName,
successfulSnapsToDelete,
successfulSnapshotCount,
totalSnapshotCount,
this.maximumSnapshotCount
);
return true;
} else {
logger.trace(
"[{}]: SKIPPING as it is not one of the {} oldest snapshots with "
+ "{} non-failed snapshots ({} total), over the limit of {} maximum snapshots",
snapshotName,
successfulSnapsToDelete,
successfulSnapshotCount,
totalSnapshotCount,
this.maximumSnapshotCount
);
}
}
// Next check the minimum count, since that is a blanket requirement regardless of time,
// if we haven't hit the minimum then we need to keep the snapshot regardless of
// expiration time
if (this.minimumSnapshotCount != null && successfulSnapshotCount <= this.minimumSnapshotCount) {
if (UNSUCCESSFUL_STATES.contains(snapshotState) == false) {
logger.trace(
"[{}]: INELIGIBLE as there are {} non-failed snapshots ({} total) and {} minimum snapshots needed",
snapshotName,
successfulSnapshotCount,
totalSnapshotCount,
this.minimumSnapshotCount
);
return false;
} else {
logger.trace(
"[{}]: SKIPPING minimum snapshot count check as this snapshot is {} and not counted "
+ "towards the minimum snapshot count.",
snapshotName,
snapshotState
);
}
}
// Finally, check the expiration time of the snapshot, if it is past, then it is
// eligible for deletion
if (this.expireAfter != null) {
if (this.minimumSnapshotCount != null) {
// Only the oldest N snapshots are actually eligible, since if we went below this we
// would fall below the configured minimum number of snapshots to keep
final boolean maybeEligible;
if (snapshotState == SnapshotState.SUCCESS) {
maybeEligible = sortedSnapshots.stream()
.filter(snap -> SnapshotState.SUCCESS.equals(snap.getValue().getSnapshotState()))
.limit(Math.max(0, successfulSnapshotCount - minimumSnapshotCount))
.anyMatch(s -> s.getKey().equals(snapshotId));
} else if (UNSUCCESSFUL_STATES.contains(snapshotState)) {
maybeEligible = allSnapshots.containsKey(snapshotId);
} else {
logger.trace("[{}] INELIGIBLE because snapshot is in state [{}]", snapshotName, snapshotState);
return false;
}
if (maybeEligible == false) {
// This snapshot is *not* one of the N oldest snapshots, so even if it were
// old enough, the other snapshots would be deleted before it
logger.trace(
"[{}]: INELIGIBLE as snapshot expiration would pass the "
+ "minimum number of configured snapshots ({}) to keep, regardless of age",
snapshotName,
this.minimumSnapshotCount
);
return false;
}
}
final long snapshotAge = nowSupplier.getAsLong() - startTimeMillis;
if (snapshotAge > this.expireAfter.getMillis()) {
logger.trace(
() -> format(
"[%s]: ELIGIBLE as snapshot age of %s is older than %s",
snapshotName,
new TimeValue(snapshotAge).toHumanReadableString(3),
this.expireAfter.toHumanReadableString(3)
)
);
return true;
} else {
logger.trace(
() -> format(
"[%s]: INELIGIBLE as snapshot age of [%sms] is newer than %s",
snapshotName,
new TimeValue(snapshotAge).toHumanReadableString(3),
this.expireAfter.toHumanReadableString(3)
)
);
return false;
}
}
// If nothing matched, the snapshot is not eligible for deletion
logger.trace("[{}]: INELIGIBLE as no retention predicates matched", snapshotName);
return false;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeOptionalTimeValue(this.expireAfter);
out.writeOptionalVInt(this.minimumSnapshotCount);
out.writeOptionalVInt(this.maximumSnapshotCount);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
if (expireAfter != null) {
builder.field(EXPIRE_AFTER.getPreferredName(), expireAfter.getStringRep());
}
if (minimumSnapshotCount != null) {
builder.field(MINIMUM_SNAPSHOT_COUNT.getPreferredName(), minimumSnapshotCount);
}
if (maximumSnapshotCount != null) {
builder.field(MAXIMUM_SNAPSHOT_COUNT.getPreferredName(), maximumSnapshotCount);
}
builder.endObject();
return builder;
}
@Override
public int hashCode() {
return Objects.hash(expireAfter, minimumSnapshotCount, maximumSnapshotCount);
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (obj.getClass() != getClass()) {
return false;
}
SnapshotRetentionConfiguration other = (SnapshotRetentionConfiguration) obj;
return Objects.equals(this.expireAfter, other.expireAfter)
&& Objects.equals(minimumSnapshotCount, other.minimumSnapshotCount)
&& Objects.equals(maximumSnapshotCount, other.maximumSnapshotCount);
}
@Override
public String toString() {
return Strings.toString(this);
}
}
| SnapshotRetentionConfiguration |
java | reactor__reactor-core | reactor-core/src/main/java/reactor/core/publisher/FluxErrorSupplied.java | {
"start": 1165,
"end": 2118
} | class ____<T> extends Flux<T> implements Fuseable.ScalarCallable, SourceProducer<T> {
final Supplier<? extends Throwable> errorSupplier;
FluxErrorSupplied(Supplier<? extends Throwable> errorSupplier) {
this.errorSupplier = Objects.requireNonNull(errorSupplier, "errorSupplier");
}
@Override
public void subscribe(CoreSubscriber<? super T> actual) {
Throwable error = Objects.requireNonNull(errorSupplier.get(), "errorSupplier produced a null Throwable");
Operators.error(actual, error);
}
@Override
public Object call() throws Exception {
Throwable error = Objects.requireNonNull(errorSupplier.get(), "errorSupplier produced a null Throwable");
if(error instanceof Exception){
throw ((Exception)error);
}
throw Exceptions.propagate(error);
}
@Override
public @Nullable Object scanUnsafe(Attr key) {
if (key == Attr.RUN_STYLE) return Attr.RunStyle.SYNC;
return SourceProducer.super.scanUnsafe(key);
}
}
| FluxErrorSupplied |
java | apache__kafka | storage/src/main/java/org/apache/kafka/server/log/remote/metadata/storage/RemoteLogSegmentMetadataSnapshot.java | {
"start": 1807,
"end": 9497
} | class ____ extends RemoteLogMetadata {
/**
* Universally unique remote log segment id.
*/
private final Uuid segmentId;
/**
* Start offset of this segment.
*/
private final long startOffset;
/**
* End offset of this segment.
*/
private final long endOffset;
/**
* Maximum timestamp in milliseconds in the segment
*/
private final long maxTimestampMs;
/**
* LeaderEpoch vs offset for messages within this segment.
*/
private final NavigableMap<Integer, Long> segmentLeaderEpochs;
/**
* Size of the segment in bytes.
*/
private final int segmentSizeInBytes;
/**
* Custom metadata.
*/
private final Optional<CustomMetadata> customMetadata;
/**
* It indicates the state in which the action is executed on this segment.
*/
private final RemoteLogSegmentState state;
/**
* Indicates whether the transaction index is empty for this segment.
*/
private final boolean txnIdxEmpty;
/**
* Creates an instance with the given metadata of remote log segment.
* <p>
* {@code segmentLeaderEpochs} can not be empty. If all the records in this segment belong to the same leader epoch
* then it should have an entry with epoch mapping to start-offset of this segment.
*
* @param segmentId Universally unique remote log segment id.
* @param startOffset Start offset of this segment (inclusive).
* @param endOffset End offset of this segment (inclusive).
* @param maxTimestampMs Maximum timestamp in milliseconds in this segment.
* @param brokerId Broker id from which this event is generated.
* @param eventTimestampMs Epoch time in milliseconds at which the remote log segment is copied to the remote tier storage.
* @param segmentSizeInBytes Size of this segment in bytes.
* @param customMetadata Custom metadata.
* @param state State of the respective segment of remoteLogSegmentId.
* @param segmentLeaderEpochs leader epochs occurred within this segment.
* @param txnIdxEmpty true if the transaction index is empty, false otherwise.
*/
public RemoteLogSegmentMetadataSnapshot(Uuid segmentId,
long startOffset,
long endOffset,
long maxTimestampMs,
int brokerId,
long eventTimestampMs,
int segmentSizeInBytes,
Optional<CustomMetadata> customMetadata,
RemoteLogSegmentState state,
Map<Integer, Long> segmentLeaderEpochs,
boolean txnIdxEmpty) {
super(brokerId, eventTimestampMs);
this.segmentId = Objects.requireNonNull(segmentId, "remoteLogSegmentId can not be null");
this.state = Objects.requireNonNull(state, "state can not be null");
this.startOffset = startOffset;
this.endOffset = endOffset;
this.maxTimestampMs = maxTimestampMs;
this.segmentSizeInBytes = segmentSizeInBytes;
this.customMetadata = Objects.requireNonNull(customMetadata, "customMetadata can not be null");
this.txnIdxEmpty = txnIdxEmpty;
if (segmentLeaderEpochs == null || segmentLeaderEpochs.isEmpty()) {
throw new IllegalArgumentException("segmentLeaderEpochs can not be null or empty");
}
this.segmentLeaderEpochs = Collections.unmodifiableNavigableMap(new TreeMap<>(segmentLeaderEpochs));
}
public static RemoteLogSegmentMetadataSnapshot create(RemoteLogSegmentMetadata metadata) {
return new RemoteLogSegmentMetadataSnapshot(metadata.remoteLogSegmentId().id(), metadata.startOffset(), metadata.endOffset(),
metadata.maxTimestampMs(), metadata.brokerId(), metadata.eventTimestampMs(),
metadata.segmentSizeInBytes(), metadata.customMetadata(), metadata.state(), metadata.segmentLeaderEpochs(), metadata.isTxnIdxEmpty()
);
}
/**
* @return unique id of this segment.
*/
public Uuid segmentId() {
return segmentId;
}
/**
* @return Start offset of this segment (inclusive).
*/
public long startOffset() {
return startOffset;
}
/**
* @return End offset of this segment (inclusive).
*/
public long endOffset() {
return endOffset;
}
/**
* @return Total size of this segment in bytes.
*/
public int segmentSizeInBytes() {
return segmentSizeInBytes;
}
/**
* @return Maximum timestamp in milliseconds of a record within this segment.
*/
public long maxTimestampMs() {
return maxTimestampMs;
}
/**
* @return Map of leader epoch vs offset for the records available in this segment.
*/
public NavigableMap<Integer, Long> segmentLeaderEpochs() {
return segmentLeaderEpochs;
}
/**
* @return Custom metadata.
*/
public Optional<CustomMetadata> customMetadata() {
return customMetadata;
}
/**
* Returns the current state of this remote log segment. It can be any of the below
* <ul>
* {@link RemoteLogSegmentState#COPY_SEGMENT_STARTED}
* {@link RemoteLogSegmentState#COPY_SEGMENT_FINISHED}
* {@link RemoteLogSegmentState#DELETE_SEGMENT_STARTED}
* {@link RemoteLogSegmentState#DELETE_SEGMENT_FINISHED}
* </ul>
*/
public RemoteLogSegmentState state() {
return state;
}
public boolean isTxnIdxEmpty() {
return txnIdxEmpty;
}
@Override
public TopicIdPartition topicIdPartition() {
throw new UnsupportedOperationException("This metadata does not have topic partition with it.");
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof RemoteLogSegmentMetadataSnapshot that)) return false;
return startOffset == that.startOffset
&& endOffset == that.endOffset
&& maxTimestampMs == that.maxTimestampMs
&& segmentSizeInBytes == that.segmentSizeInBytes
&& Objects.equals(customMetadata, that.customMetadata)
&& Objects.equals(segmentId, that.segmentId)
&& Objects.equals(segmentLeaderEpochs, that.segmentLeaderEpochs)
&& state == that.state
&& txnIdxEmpty == that.txnIdxEmpty;
}
@Override
public int hashCode() {
return Objects.hash(segmentId, startOffset, endOffset, maxTimestampMs, segmentLeaderEpochs, segmentSizeInBytes, customMetadata, state, txnIdxEmpty);
}
@Override
public String toString() {
return "RemoteLogSegmentMetadataSnapshot{" +
"segmentId=" + segmentId +
", startOffset=" + startOffset +
", endOffset=" + endOffset +
", maxTimestampMs=" + maxTimestampMs +
", segmentLeaderEpochs=" + segmentLeaderEpochs +
", segmentSizeInBytes=" + segmentSizeInBytes +
", customMetadata=" + customMetadata +
", state=" + state +
", txnIdxEmpty=" + txnIdxEmpty +
'}';
}
}
| RemoteLogSegmentMetadataSnapshot |
java | quarkusio__quarkus | extensions/hibernate-search-orm-elasticsearch/deployment/src/test/java/io/quarkus/hibernate/search/orm/elasticsearch/test/configuration/ConfigEnabledFalseAndActiveTrueTest.java | {
"start": 434,
"end": 1673
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest().setArchiveProducer(
() -> ShrinkWrap.create(JavaArchive.class).addClass(IndexedEntity.class))
.withConfigurationResource("application.properties")
.overrideConfigKey("quarkus.hibernate-search-orm.enabled", "false")
.overrideConfigKey("quarkus.hibernate-search-orm.active", "true")
.assertException(throwable -> assertThat(throwable)
.isInstanceOf(ConfigurationException.class)
.hasMessageContainingAll(
"Hibernate Search activated explicitly for persistence unit '<default>', but the Hibernate Search extension was disabled at build time",
"If you want Hibernate Search to be active for this persistence unit, you must set 'quarkus.hibernate-search-orm.enabled' to 'true' at build time",
"If you don't want Hibernate Search to be active for this persistence unit, you must leave 'quarkus.hibernate-search-orm.active' unset or set it to 'false'"));
@Test
public void test() {
// Startup should fail
}
}
| ConfigEnabledFalseAndActiveTrueTest |
java | redisson__redisson | redisson-spring-data/redisson-spring-data-23/src/main/java/org/redisson/spring/data/connection/RedissonStreamCommands.java | {
"start": 12108,
"end": 18163
} | class ____ implements MultiDecoder<PendingMessages> {
private final String groupName;
private final Range<?> range;
public PendingMessagesReplayDecoder(String groupName, Range<?> range) {
this.groupName = groupName;
this.range = range;
}
@Override
public PendingMessages decode(List<Object> parts, State state) {
List<PendingMessage> pendingMessages = (List<PendingMessage>) (Object) parts;
return new PendingMessages(groupName, range, pendingMessages);
}
}
@Override
public PendingMessages xPending(byte[] key, String groupName, XPendingOptions options) {
Assert.notNull(key, "Key must not be null!");
Assert.notNull(groupName, "Group name must not be null!");
List<Object> params = new ArrayList<>();
params.add(key);
params.add(groupName);
params.add(((Range.Bound<String>)options.getRange().getLowerBound()).getValue().orElse("-"));
params.add(((Range.Bound<String>)options.getRange().getUpperBound()).getValue().orElse("+"));
if (options.getCount() != null) {
params.add(options.getCount());
} else {
params.add(10);
}
if (options.getConsumerName() != null) {
params.add(options.getConsumerName());
}
return connection.write(key, StringCodec.INSTANCE, new RedisCommand<>("XPENDING",
new ListMultiDecoder2<PendingMessages>(
new PendingMessagesReplayDecoder(groupName, options.getRange()),
new PendingMessageReplayDecoder(groupName))),
params.toArray());
}
@Override
public Long xAck(byte[] key, String group, RecordId... recordIds) {
Assert.notNull(key, "Key must not be null!");
Assert.notNull(group, "Group must not be null!");
Assert.notNull(recordIds, "recordIds must not be null!");
List<Object> params = new ArrayList<>();
params.add(key);
params.add(group);
params.addAll(toStringList(recordIds));
return connection.write(key, StringCodec.INSTANCE, RedisCommands.XACK, params.toArray());
}
private static final RedisStrictCommand<RecordId> XADD = new RedisStrictCommand<RecordId>("XADD", obj -> RecordId.of(obj.toString()));
@Override
public RecordId xAdd(MapRecord<byte[], byte[], byte[]> record, XAddOptions options) {
Assert.notNull(record, "record must not be null!");
List<Object> params = new LinkedList<>();
params.add(record.getStream());
if (options.getMaxlen() != null) {
params.add("MAXLEN");
params.add(options.getMaxlen());
}
if (!record.getId().shouldBeAutoGenerated()) {
params.add(record.getId().getValue());
} else {
params.add("*");
}
record.getValue().forEach((key, value) -> {
params.add(key);
params.add(value);
});
return connection.write(record.getStream(), StringCodec.INSTANCE, XADD, params.toArray());
}
@Override
public Long xDel(byte[] key, RecordId... recordIds) {
Assert.notNull(key, "Key must not be null!");
Assert.notNull(recordIds, "recordIds must not be null!");
List<Object> params = new ArrayList<>();
params.add(key);
params.addAll(toStringList(recordIds));
return connection.write(key, StringCodec.INSTANCE, RedisCommands.XDEL, params.toArray());
}
private static final RedisStrictCommand<String> XGROUP_STRING = new RedisStrictCommand<>("XGROUP");
@Override
public String xGroupCreate(byte[] key, String groupName, ReadOffset readOffset) {
return xGroupCreate(key, groupName, readOffset, false);
}
private static final RedisStrictCommand<Boolean> XGROUP_BOOLEAN = new RedisStrictCommand<Boolean>("XGROUP", obj -> ((Long)obj) > 0);
@Override
public Boolean xGroupDelConsumer(byte[] key, Consumer consumer) {
Assert.notNull(key, "Key must not be null!");
Assert.notNull(consumer, "Consumer must not be null!");
Assert.notNull(consumer.getName(), "Consumer name must not be null!");
Assert.notNull(consumer.getGroup(), "Consumer group must not be null!");
return connection.write(key, StringCodec.INSTANCE, XGROUP_BOOLEAN, "DELCONSUMER", key, consumer.getGroup(), consumer.getName());
}
@Override
public Boolean xGroupDestroy(byte[] key, String groupName) {
Assert.notNull(key, "Key must not be null!");
Assert.notNull(groupName, "GroupName must not be null!");
return connection.write(key, StringCodec.INSTANCE, XGROUP_BOOLEAN, "DESTROY", key, groupName);
}
@Override
public Long xLen(byte[] key) {
Assert.notNull(key, "Key must not be null!");
return connection.write(key, StringCodec.INSTANCE, RedisCommands.XLEN, key);
}
private List<ByteRecord> range(RedisCommand<?> rangeCommand, byte[] key, Range<String> range, RedisZSetCommands.Limit limit) {
Assert.notNull(key, "Key must not be null!");
Assert.notNull(range, "Range must not be null!");
Assert.notNull(limit, "Limit must not be null!");
List<Object> params = new LinkedList<>();
params.add(key);
if (rangeCommand.getName().equals(RedisCommands.XRANGE.getName())) {
params.add(range.getLowerBound().getValue().orElse("-"));
params.add(range.getUpperBound().getValue().orElse("+"));
} else {
params.add(range.getUpperBound().getValue().orElse("+"));
params.add(range.getLowerBound().getValue().orElse("-"));
}
if (limit.getCount() > 0) {
params.add("COUNT");
params.add(limit.getCount());
}
return connection.write(key, ByteArrayCodec.INSTANCE, rangeCommand, params.toArray());
}
private static | PendingMessagesReplayDecoder |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/embeddable/StructEmbeddableTest.java | {
"start": 3472,
"end": 21334
} | class ____ implements AdditionalMappingContributor {
@Override
public void contribute(
AdditionalMappingContributions contributions,
InFlightMetadataCollector metadata,
ResourceStreamLocator resourceStreamLocator,
MetadataBuildingContext buildingContext) {
final Namespace namespace = new Namespace(
PhysicalNamingStrategyStandardImpl.INSTANCE,
null,
new Namespace.Name( null, null )
);
//---------------------------------------------------------
// PostgreSQL
//---------------------------------------------------------
contributions.contributeAuxiliaryDatabaseObject(
new NamedAuxiliaryDatabaseObject(
"PostgreSQL structFunction",
namespace,
"create function structFunction() returns structType as $$ declare result structType; begin result.theBinary = bytea '\\x01'; result.theString = 'ABC'; result.theDouble = 0; result.theInt = 0; result.theLocalDateTime = timestamp '2022-12-01 01:00:00'; result.theUuid = '53886a8a-7082-4879-b430-25cb94415be8'::uuid; return result; end $$ language plpgsql",
"drop function structFunction",
Set.of( PostgreSQLDialect.class.getName() )
)
);
contributions.contributeAuxiliaryDatabaseObject(
new NamedAuxiliaryDatabaseObject(
"PostgreSQL structProcedure",
namespace,
"create procedure structProcedure(INOUT result structType) AS $$ declare res structType; begin res.theBinary = bytea '\\x01'; res.theString = 'ABC'; res.theDouble = 0; res.theInt = 0; res.theLocalDateTime = timestamp '2022-12-01 01:00:00'; res.theUuid = '53886a8a-7082-4879-b430-25cb94415be8'::uuid; result = res; end $$ language plpgsql",
"drop procedure structProcedure",
Set.of( PostgreSQLDialect.class.getName() )
)
);
//---------------------------------------------------------
// PostgresPlus
//---------------------------------------------------------
contributions.contributeAuxiliaryDatabaseObject(
new NamedAuxiliaryDatabaseObject(
"PostgrePlus structFunction",
namespace,
"create function structFunction() returns structType as $$ declare result structType; begin result.theBinary = bytea '\\x01'; result.theString = 'ABC'; result.theDouble = 0; result.theInt = 0; result.theLocalDateTime = timestamp '2022-12-01 01:00:00'; result.theUuid = '53886a8a-7082-4879-b430-25cb94415be8'::uuid; return result; end $$ language plpgsql",
"drop function structFunction",
Set.of( PostgresPlusDialect.class.getName() )
)
);
contributions.contributeAuxiliaryDatabaseObject(
new NamedAuxiliaryDatabaseObject(
"PostgrePlus structProcedure",
namespace,
"create procedure structProcedure(result INOUT structType) AS $$ declare res structType; begin res.theBinary = bytea '\\x01'; res.theString = 'ABC'; res.theDouble = 0; res.theInt = 0; res.theLocalDateTime = timestamp '2022-12-01 01:00:00'; res.theUuid = '53886a8a-7082-4879-b430-25cb94415be8'::uuid; result = res; end $$ language plpgsql",
"drop procedure structProcedure",
Set.of( PostgresPlusDialect.class.getName() )
)
);
//---------------------------------------------------------
// DB2
//---------------------------------------------------------
final String binaryType;
final String binaryLiteralPrefix;
if ( metadata.getDatabase().getDialect().getVersion().isBefore( 11 ) ) {
binaryType = "char(16) for bit data";
binaryLiteralPrefix = "x";
}
else {
binaryType = "binary(16)";
binaryLiteralPrefix = "bx";
}
contributions.contributeAuxiliaryDatabaseObject(
new NamedAuxiliaryDatabaseObject(
"DB2 structFunction",
namespace,
"create function structFunction() returns structType language sql RETURN select structType()..theBinary(" + binaryLiteralPrefix + "'01')..theString('ABC')..theDouble(0)..theInt(0)..theLocalDateTime(timestamp '2022-12-01 01:00:00')..theUuid(cast(" + binaryLiteralPrefix + "'" +
// UUID is already in HEX encoding, but we have to remove the dashes
"53886a8a-7082-4879-b430-25cb94415be8".replace( "-", "" )
+ "' as " + binaryType + ")) from (values (1)) t",
"drop function structFunction",
Set.of( DB2Dialect.class.getName() )
)
);
//---------------------------------------------------------
// Oracle
//---------------------------------------------------------
contributions.contributeAuxiliaryDatabaseObject(
new NamedAuxiliaryDatabaseObject(
"Oracle structFunction",
namespace,
"create function structFunction return structType is result structType; begin " +
"result := structType(" +
"theBinary => hextoraw('01')," +
"theString => 'ABC'," +
"theDouble => 0," +
"theInt => 0," +
"theLocalDateTime => timestamp '2022-12-01 01:00:00'," +
"theUuid => hextoraw('53886a8a70824879b43025cb94415be8')," +
"converted_gender => null," +
"gender => null," +
"mutableValue => null," +
"ordinal_gender => null," +
"theBoolean => null," +
"theClob => null," +
"theDate => null," +
"theDuration => null," +
"theInstant => null," +
"theInteger => null," +
"theLocalDate => null," +
"theLocalTime => null," +
"theNumericBoolean => null," +
"theOffsetDateTime => null," +
"theStringBoolean => null," +
"theTime => null," +
"theTimestamp => null," +
"theUrl => null," +
"theZonedDateTime => null" +
"); return result; end;",
"drop function structFunction",
Set.of( OracleDialect.class.getName() )
)
);
contributions.contributeAuxiliaryDatabaseObject(
new NamedAuxiliaryDatabaseObject(
"Oracle structProcedure",
namespace,
"create procedure structProcedure(result OUT structType) AS begin " +
"result := structType(" +
"theBinary => hextoraw('01')," +
"theString => 'ABC'," +
"theDouble => 0," +
"theInt => 0," +
"theLocalDateTime => timestamp '2022-12-01 01:00:00'," +
"theUuid => hextoraw('53886a8a70824879b43025cb94415be8')," +
"converted_gender => null," +
"gender => null," +
"mutableValue => null," +
"ordinal_gender => null," +
"theBoolean => null," +
"theClob => null," +
"theDate => null," +
"theDuration => null," +
"theInstant => null," +
"theInteger => null," +
"theLocalDate => null," +
"theLocalTime => null," +
"theNumericBoolean => null," +
"theOffsetDateTime => null," +
"theStringBoolean => null," +
"theTime => null," +
"theTimestamp => null," +
"theUrl => null," +
"theZonedDateTime => null" +
"); end;",
"drop procedure structProcedure",
Set.of( OracleDialect.class.getName() )
)
);
}
@BeforeEach
public void setUp(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
session.persist( new StructHolder( 1L, EmbeddableAggregate.createAggregate1() ) );
session.persist( new StructHolder( 2L, EmbeddableAggregate.createAggregate2() ) );
}
);
}
@AfterEach
protected void cleanupTest(SessionFactoryScope scope) {
scope.getSessionFactory().getSchemaManager().truncate();
}
@Test
public void testUpdate(SessionFactoryScope scope) {
scope.inTransaction(
entityManager -> {
StructHolder structHolder = entityManager.find( StructHolder.class, 1L );
structHolder.setAggregate( EmbeddableAggregate.createAggregate2() );
entityManager.flush();
entityManager.clear();
assertStructEquals( EmbeddableAggregate.createAggregate2(), entityManager.find( StructHolder.class, 1L ).getAggregate() );
}
);
}
@Test
public void testFetch(SessionFactoryScope scope) {
scope.inSession(
entityManager -> {
List<StructHolder> structHolders = entityManager.createQuery( "from StructHolder b where b.id = 1", StructHolder.class ).getResultList();
assertEquals( 1, structHolders.size() );
assertEquals( 1L, structHolders.get( 0 ).getId() );
assertStructEquals( EmbeddableAggregate.createAggregate1(), structHolders.get( 0 ).getAggregate() );
}
);
}
@Test
public void testFetchNull(SessionFactoryScope scope) {
scope.inSession(
entityManager -> {
List<StructHolder> structHolders = entityManager.createQuery( "from StructHolder b where b.id = 2", StructHolder.class ).getResultList();
assertEquals( 1, structHolders.size() );
assertEquals( 2L, structHolders.get( 0 ).getId() );
assertStructEquals( EmbeddableAggregate.createAggregate2(), structHolders.get( 0 ).getAggregate() );
}
);
}
@Test
public void testDomainResult(SessionFactoryScope scope) {
scope.inSession(
entityManager -> {
List<EmbeddableAggregate> structs = entityManager.createQuery( "select b.aggregate from StructHolder b where b.id = 1", EmbeddableAggregate.class ).getResultList();
assertEquals( 1, structs.size() );
assertStructEquals( EmbeddableAggregate.createAggregate1(), structs.get( 0 ) );
}
);
}
@Test
public void testSelectionItems(SessionFactoryScope scope) {
scope.inSession(
entityManager -> {
List<Tuple> tuples = entityManager.createQuery(
"select " +
"b.aggregate.theInt," +
"b.aggregate.theDouble," +
"b.aggregate.theBoolean," +
"b.aggregate.theNumericBoolean," +
"b.aggregate.theStringBoolean," +
"b.aggregate.theString," +
"b.aggregate.theInteger," +
"b.aggregate.theUrl," +
"b.aggregate.theClob," +
"b.aggregate.theBinary," +
"b.aggregate.theDate," +
"b.aggregate.theTime," +
"b.aggregate.theTimestamp," +
"b.aggregate.theInstant," +
"b.aggregate.theUuid," +
"b.aggregate.gender," +
"b.aggregate.convertedGender," +
"b.aggregate.ordinalGender," +
"b.aggregate.theDuration," +
"b.aggregate.theLocalDateTime," +
"b.aggregate.theLocalDate," +
"b.aggregate.theLocalTime," +
"b.aggregate.theZonedDateTime," +
"b.aggregate.theOffsetDateTime," +
"b.aggregate.mutableValue " +
"from StructHolder b where b.id = 1",
Tuple.class
).getResultList();
assertEquals( 1, tuples.size() );
final Tuple tuple = tuples.get( 0 );
final EmbeddableAggregate struct = new EmbeddableAggregate();
struct.setTheInt( tuple.get( 0, int.class ) );
struct.setTheDouble( tuple.get( 1, Double.class ) );
struct.setTheBoolean( tuple.get( 2, Boolean.class ) );
struct.setTheNumericBoolean( tuple.get( 3, Boolean.class ) );
struct.setTheStringBoolean( tuple.get( 4, Boolean.class ) );
struct.setTheString( tuple.get( 5, String.class ) );
struct.setTheInteger( tuple.get( 6, Integer.class ) );
struct.setTheUrl( tuple.get( 7, URL.class ) );
struct.setTheClob( tuple.get( 8, String.class ) );
struct.setTheBinary( tuple.get( 9, byte[].class ) );
struct.setTheDate( tuple.get( 10, Date.class ) );
struct.setTheTime( tuple.get( 11, Time.class ) );
struct.setTheTimestamp( tuple.get( 12, Timestamp.class ) );
struct.setTheInstant( tuple.get( 13, Instant.class ) );
struct.setTheUuid( tuple.get( 14, UUID.class ) );
struct.setGender( tuple.get( 15, EntityOfBasics.Gender.class ) );
struct.setConvertedGender( tuple.get( 16, EntityOfBasics.Gender.class ) );
struct.setOrdinalGender( tuple.get( 17, EntityOfBasics.Gender.class ) );
struct.setTheDuration( tuple.get( 18, Duration.class ) );
struct.setTheLocalDateTime( tuple.get( 19, LocalDateTime.class ) );
struct.setTheLocalDate( tuple.get( 20, LocalDate.class ) );
struct.setTheLocalTime( tuple.get( 21, LocalTime.class ) );
struct.setTheZonedDateTime( tuple.get( 22, ZonedDateTime.class ) );
struct.setTheOffsetDateTime( tuple.get( 23, OffsetDateTime.class ) );
struct.setMutableValue( tuple.get( 24, MutableValue.class ) );
EmbeddableAggregate.assertEquals( EmbeddableAggregate.createAggregate1(), struct );
}
);
}
@Test
public void testDeleteWhere(SessionFactoryScope scope) {
scope.inTransaction(
entityManager -> {
entityManager.createMutationQuery( "delete StructHolder b where b.aggregate is not null" ).executeUpdate();
assertNull( entityManager.find( StructHolder.class, 1L ) );
}
);
}
@Test
public void testUpdateAggregate(SessionFactoryScope scope) {
scope.inTransaction(
entityManager -> {
entityManager.createMutationQuery( "update StructHolder b set b.aggregate = null" ).executeUpdate();
assertNull( entityManager.find( StructHolder.class, 1L ).getAggregate() );
}
);
}
@Test
public void testUpdateAggregateMember(SessionFactoryScope scope) {
scope.inTransaction(
entityManager -> {
entityManager.createMutationQuery( "update StructHolder b set b.aggregate.theString = null" ).executeUpdate();
EmbeddableAggregate struct = EmbeddableAggregate.createAggregate1();
struct.setTheString( null );
assertStructEquals( struct, entityManager.find( StructHolder.class, 1L ).getAggregate() );
}
);
}
@Test
public void testUpdateMultipleAggregateMembers(SessionFactoryScope scope) {
scope.inTransaction(
entityManager -> {
entityManager.createMutationQuery( "update StructHolder b set b.aggregate.theString = null, b.aggregate.theUuid = null" ).executeUpdate();
EmbeddableAggregate struct = EmbeddableAggregate.createAggregate1();
struct.setTheString( null );
struct.setTheUuid( null );
assertStructEquals( struct, entityManager.find( StructHolder.class, 1L ).getAggregate() );
}
);
}
@Test
public void testUpdateAllAggregateMembers(SessionFactoryScope scope) {
scope.inTransaction(
entityManager -> {
EmbeddableAggregate struct = EmbeddableAggregate.createAggregate1();
entityManager.createMutationQuery(
"update StructHolder b set " +
"b.aggregate.theInt = :theInt," +
"b.aggregate.theDouble = :theDouble," +
"b.aggregate.theBoolean = :theBoolean," +
"b.aggregate.theNumericBoolean = :theNumericBoolean," +
"b.aggregate.theStringBoolean = :theStringBoolean," +
"b.aggregate.theString = :theString," +
"b.aggregate.theInteger = :theInteger," +
"b.aggregate.theUrl = :theUrl," +
"b.aggregate.theClob = :theClob," +
"b.aggregate.theBinary = :theBinary," +
"b.aggregate.theDate = :theDate," +
"b.aggregate.theTime = :theTime," +
"b.aggregate.theTimestamp = :theTimestamp," +
"b.aggregate.theInstant = :theInstant," +
"b.aggregate.theUuid = :theUuid," +
"b.aggregate.gender = :gender," +
"b.aggregate.convertedGender = :convertedGender," +
"b.aggregate.ordinalGender = :ordinalGender," +
"b.aggregate.theDuration = :theDuration," +
"b.aggregate.theLocalDateTime = :theLocalDateTime," +
"b.aggregate.theLocalDate = :theLocalDate," +
"b.aggregate.theLocalTime = :theLocalTime," +
"b.aggregate.theZonedDateTime = :theZonedDateTime," +
"b.aggregate.theOffsetDateTime = :theOffsetDateTime," +
"b.aggregate.mutableValue = :mutableValue " +
"where b.id = 2"
)
.setParameter( "theInt", struct.getTheInt() )
.setParameter( "theDouble", struct.getTheDouble() )
.setParameter( "theBoolean", struct.isTheBoolean() )
.setParameter( "theNumericBoolean", struct.isTheNumericBoolean() )
.setParameter( "theStringBoolean", struct.isTheStringBoolean() )
.setParameter( "theString", struct.getTheString() )
.setParameter( "theInteger", struct.getTheInteger() )
.setParameter( "theUrl", struct.getTheUrl() )
.setParameter( "theClob", struct.getTheClob() )
.setParameter( "theBinary", struct.getTheBinary() )
.setParameter( "theDate", struct.getTheDate() )
.setParameter( "theTime", struct.getTheTime() )
.setParameter( "theTimestamp", struct.getTheTimestamp() )
.setParameter( "theInstant", struct.getTheInstant() )
.setParameter( "theUuid", struct.getTheUuid() )
.setParameter( "gender", struct.getGender() )
.setParameter( "convertedGender", struct.getConvertedGender() )
.setParameter( "ordinalGender", struct.getOrdinalGender() )
.setParameter( "theDuration", struct.getTheDuration() )
.setParameter( "theLocalDateTime", struct.getTheLocalDateTime() )
.setParameter( "theLocalDate", struct.getTheLocalDate() )
.setParameter( "theLocalTime", struct.getTheLocalTime() )
.setParameter( "theZonedDateTime", struct.getTheZonedDateTime() )
.setParameter( "theOffsetDateTime", struct.getTheOffsetDateTime() )
.setParameter( "mutableValue", struct.getMutableValue() )
.executeUpdate();
assertStructEquals( EmbeddableAggregate.createAggregate1(), entityManager.find( StructHolder.class, 2L ).getAggregate() );
}
);
}
@Test
public void testNativeQuery(SessionFactoryScope scope) {
scope.inTransaction(
entityManager -> {
//noinspection unchecked
List<Object> resultList = entityManager.createNativeQuery(
"select b.aggregate from StructHolder b where b.id = 1",
// DB2 does not support structs on the driver level, and we instead do a XML serialization/deserialization
// So in order to receive the correct value, we have to specify the actual type that we expect
scope.getSessionFactory().getJdbcServices().getDialect() instanceof DB2Dialect
? (Class<Object>) (Class<?>) EmbeddableAggregate.class
// Using Object. | StructEmbeddableTest |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/DocWriteRequest.java | {
"start": 1819,
"end": 2008
} | interface ____ group ActionRequest, which perform writes to a single document
* Action requests implementing this can be part of {@link org.elasticsearch.action.bulk.BulkRequest}
*/
public | to |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/context/annotation/ImportBeanDefinitionRegistrar.java | {
"start": 2062,
"end": 2678
} | class ____ provide a single constructor with one or more of
* the following supported parameter types:
* <ul>
* <li>{@link org.springframework.core.env.Environment Environment}</li>
* <li>{@link org.springframework.beans.factory.BeanFactory BeanFactory}</li>
* <li>{@link java.lang.ClassLoader ClassLoader}</li>
* <li>{@link org.springframework.core.io.ResourceLoader ResourceLoader}</li>
* </ul>
*
* <p>See implementations and associated unit tests for usage examples.
*
* @author Chris Beams
* @author Juergen Hoeller
* @since 3.1
* @see Import
* @see ImportSelector
* @see Configuration
*/
public | may |
java | apache__avro | lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroDeserializer.java | {
"start": 1910,
"end": 4731
} | class ____<T extends AvroWrapper<D>, D> implements Deserializer<T> {
/** The Avro writer schema for deserializing. */
private final Schema mWriterSchema;
/** The Avro reader schema for deserializing. */
private final Schema mReaderSchema;
/** The Avro datum reader for deserializing. */
final DatumReader<D> mAvroDatumReader;
/** An Avro binary decoder for deserializing. */
private BinaryDecoder mAvroDecoder;
/**
* Constructor.
*
* @param writerSchema The Avro writer schema for the data to deserialize.
* @param readerSchema The Avro reader schema for the data to deserialize (may
* be null).
*/
protected AvroDeserializer(Schema writerSchema, Schema readerSchema, ClassLoader classLoader) {
mWriterSchema = writerSchema;
mReaderSchema = null != readerSchema ? readerSchema : writerSchema;
mAvroDatumReader = new ReflectDatumReader<>(mWriterSchema, mReaderSchema, new ReflectData(classLoader));
}
/**
* Constructor.
*
* @param writerSchema The Avro writer schema for the data to deserialize.
* @param readerSchema The Avro reader schema for the data to deserialize (may
* be null).
* @param datumReader The Avro datum reader to use for deserialization.
*/
protected AvroDeserializer(Schema writerSchema, Schema readerSchema, DatumReader<D> datumReader) {
mWriterSchema = writerSchema;
mReaderSchema = null != readerSchema ? readerSchema : writerSchema;
mAvroDatumReader = datumReader;
}
/**
* Gets the writer schema used for deserializing.
*
* @return The writer schema;
*/
public Schema getWriterSchema() {
return mWriterSchema;
}
/**
* Gets the reader schema used for deserializing.
*
* @return The reader schema.
*/
public Schema getReaderSchema() {
return mReaderSchema;
}
/** {@inheritDoc} */
@Override
public void open(InputStream inputStream) throws IOException {
mAvroDecoder = DecoderFactory.get().directBinaryDecoder(inputStream, mAvroDecoder);
}
/** {@inheritDoc} */
@Override
public T deserialize(T avroWrapperToReuse) throws IOException {
// Create a new Avro wrapper if there isn't one to reuse.
if (null == avroWrapperToReuse) {
avroWrapperToReuse = createAvroWrapper();
}
// Deserialize the Avro datum from the input stream.
avroWrapperToReuse.datum(mAvroDatumReader.read(avroWrapperToReuse.datum(), mAvroDecoder));
return avroWrapperToReuse;
}
/** {@inheritDoc} */
@Override
public void close() throws IOException {
mAvroDecoder.inputStream().close();
}
/**
* Creates a new empty <code>T</code> (extends AvroWrapper) instance.
*
* @return A new empty <code>T</code> instance.
*/
protected abstract T createAvroWrapper();
}
| AvroDeserializer |
java | apache__maven | impl/maven-cli/src/test/java/org/apache/maven/cling/invoker/mvnup/goals/ApplyTest.java | {
"start": 1978,
"end": 2304
} | class ____ {
@Test
@DisplayName("should save modifications to disk")
void shouldSaveModificationsToDisk() {
assertTrue(applyGoal.shouldSaveModifications(), "Apply goal should save modifications to disk");
}
}
@Nested
@DisplayName("Execution")
| ModificationBehaviorTests |
java | apache__camel | core/camel-core-xml/src/main/java/org/apache/camel/core/xml/AbstractCamelContextFactoryBean.java | {
"start": 65498,
"end": 76539
} | class
____ (profile.isDefaultProfile()) {
LOG.info("Using custom default ThreadPoolProfile with id: {} and implementation: {}", entry.getKey(),
profile);
context.getExecutorServiceManager().setDefaultThreadPoolProfile(profile);
defaultIds.add(entry.getKey());
} else {
context.getExecutorServiceManager().registerThreadPoolProfile(profile);
}
}
}
// use custom profiles defined in the CamelContext
if (getThreadPoolProfiles() != null && !getThreadPoolProfiles().isEmpty()) {
for (ThreadPoolProfileDefinition definition : getThreadPoolProfiles()) {
Boolean defaultProfile = CamelContextHelper.parseBoolean(getContext(), definition.getDefaultProfile());
if (defaultProfile != null && defaultProfile) {
LOG.info("Using custom default ThreadPoolProfile with id: {} and implementation: {}", definition.getId(),
definition);
context.getExecutorServiceManager().setDefaultThreadPoolProfile(asThreadPoolProfile(context, definition));
defaultIds.add(definition.getId());
} else {
context.getExecutorServiceManager().registerThreadPoolProfile(asThreadPoolProfile(context, definition));
}
}
}
// validate at most one is defined
if (defaultIds.size() > 1) {
throw new IllegalArgumentException(
"Only exactly one default ThreadPoolProfile is allowed, was " + defaultIds.size() + " ids: " + defaultIds);
}
}
/**
* Creates a {@link ThreadPoolProfile} instance based on the definition.
*
* @param context the camel context
* @return the profile
*/
private ThreadPoolProfile asThreadPoolProfile(CamelContext context, ThreadPoolProfileDefinition definition) {
ThreadPoolProfile answer = new ThreadPoolProfile();
answer.setId(definition.getId());
answer.setDefaultProfile(CamelContextHelper.parseBoolean(context, definition.getDefaultProfile()));
answer.setPoolSize(CamelContextHelper.parseInteger(context, definition.getPoolSize()));
answer.setMaxPoolSize(CamelContextHelper.parseInteger(context, definition.getMaxPoolSize()));
answer.setKeepAliveTime(CamelContextHelper.parseLong(context, definition.getKeepAliveTime()));
answer.setMaxQueueSize(CamelContextHelper.parseInteger(context, definition.getMaxQueueSize()));
answer.setAllowCoreThreadTimeOut(CamelContextHelper.parseBoolean(context, definition.getAllowCoreThreadTimeOut()));
answer.setRejectedPolicy(
CamelContextHelper.parse(context, ThreadPoolRejectedPolicy.class, definition.getRejectedPolicy()));
answer.setTimeUnit(CamelContextHelper.parse(context, TimeUnit.class, definition.getTimeUnit()));
return answer;
}
protected abstract void initBeanPostProcessor(T context);
/**
* Strategy to install all available routes into the context
*/
protected void installRoutes() throws Exception {
// let's add RoutesBuilder's added from references
if (getBuilderRefs() != null) {
for (RouteBuilderDefinition builderRef : getBuilderRefs()) {
RoutesBuilder routes = builderRef.createRoutes(getContext());
if (routes != null) {
this.builders.add(routes);
} else {
throw new CamelException("Cannot find any routes with this RouteBuilder reference: " + builderRef);
}
}
}
// install already configured routes
for (RoutesBuilder routeBuilder : this.builders) {
getContext().addRoutes(routeBuilder);
}
}
protected abstract void postProcessBeforeInit(RouteBuilder builder);
/**
* Strategy method to try find {@link org.apache.camel.builder.RouteBuilder} instances on the classpath
*/
protected void findRouteBuilders() throws Exception {
// package scan
addPackageElementContentsToScanDefinition();
PackageScanDefinition packageScanDef = getPackageScan();
if (packageScanDef != null && !packageScanDef.getPackages().isEmpty()) {
// use package scan filter
final PatternBasedPackageScanFilter filter
= addPatterns(packageScanDef.getIncludes(), packageScanDef.getExcludes());
String[] normalized = normalizePackages(getContext(), packageScanDef.getPackages());
findRouteBuildersByPackageScan(normalized, filter, builders);
}
// context scan
ContextScanDefinition contextScanDef = getContextScan();
if (contextScanDef != null) {
// use package scan filter
final PatternBasedPackageScanFilter filter
= addPatterns(contextScanDef.getIncludes(), contextScanDef.getExcludes());
// lets be false by default, to skip prototype beans
boolean includeNonSingletons = contextScanDef.getIncludeNonSingletons() != null
&& Boolean.parseBoolean(contextScanDef.getIncludeNonSingletons());
findRouteBuildersByContextScan(filter, includeNonSingletons, builders);
}
}
private PatternBasedPackageScanFilter addPatterns(List<String> contextScanDef, List<String> contextScanDef1) {
PatternBasedPackageScanFilter filter = new PatternBasedPackageScanFilter();
// support property placeholders in include and exclude
for (String include : contextScanDef) {
include = getContext().resolvePropertyPlaceholders(include);
filter.addIncludePattern(include);
}
for (String exclude : contextScanDef1) {
exclude = getContext().resolvePropertyPlaceholders(exclude);
filter.addExcludePattern(exclude);
}
return filter;
}
protected abstract void findRouteBuildersByPackageScan(
String[] packages, PackageScanFilter filter, List<RoutesBuilder> builders)
throws Exception;
protected abstract void findRouteBuildersByContextScan(
PackageScanFilter filter, boolean includeNonSingletons, List<RoutesBuilder> builders)
throws Exception;
private void addPackageElementContentsToScanDefinition() {
PackageScanDefinition packageScanDef = getPackageScan();
if (getPackages() != null && getPackages().length > 0) {
if (packageScanDef == null) {
packageScanDef = new PackageScanDefinition();
setPackageScan(packageScanDef);
}
for (String pkg : getPackages()) {
packageScanDef.getPackages().add(pkg);
}
}
}
private String[] normalizePackages(T context, List<String> unnormalized) {
List<String> packages = new ArrayList<>();
for (String name : unnormalized) {
// it may use property placeholders
name = context.resolvePropertyPlaceholders(name);
name = StringHelper.normalizeClassName(name);
if (org.apache.camel.util.ObjectHelper.isNotEmpty(name)) {
LOG.trace("Using package: {} to scan for RouteBuilder classes", name);
packages.add(name);
}
}
return packages.toArray(new String[0]);
}
private void setupCustomServices() {
ModelJAXBContextFactory modelJAXBContextFactory = getBeanForType(ModelJAXBContextFactory.class);
if (modelJAXBContextFactory != null) {
LOG.info("Using custom ModelJAXBContextFactory: {}", modelJAXBContextFactory);
getContext().getCamelContextExtension().addContextPlugin(ModelJAXBContextFactory.class, modelJAXBContextFactory);
}
ClassResolver classResolver = getBeanForType(ClassResolver.class);
if (classResolver != null) {
LOG.info("Using custom ClassResolver: {}", classResolver);
getContext().setClassResolver(classResolver);
}
FactoryFinderResolver factoryFinderResolver = getBeanForType(FactoryFinderResolver.class);
if (factoryFinderResolver != null) {
LOG.info("Using custom FactoryFinderResolver: {}", factoryFinderResolver);
getContext().getCamelContextExtension().addContextPlugin(FactoryFinderResolver.class, factoryFinderResolver);
}
ExecutorServiceManager executorServiceStrategy = getBeanForType(ExecutorServiceManager.class);
if (executorServiceStrategy != null) {
LOG.info("Using custom ExecutorServiceStrategy: {}", executorServiceStrategy);
getContext().setExecutorServiceManager(executorServiceStrategy);
}
ThreadPoolFactory threadPoolFactory = getBeanForType(ThreadPoolFactory.class);
if (threadPoolFactory != null) {
LOG.info("Using custom ThreadPoolFactory: {}", threadPoolFactory);
getContext().getExecutorServiceManager().setThreadPoolFactory(threadPoolFactory);
}
ProcessorFactory processorFactory = getBeanForType(ProcessorFactory.class);
if (processorFactory != null) {
LOG.info("Using custom ProcessorFactory: {}", processorFactory);
getContext().getCamelContextExtension().addContextPlugin(ProcessorFactory.class, processorFactory);
}
Debugger debugger = getBeanForType(Debugger.class);
if (debugger != null) {
LOG.info("Using custom Debugger: {}", debugger);
getContext().setDebugger(debugger);
}
setupUuidGenerator();
NodeIdFactory nodeIdFactory = getBeanForType(NodeIdFactory.class);
if (nodeIdFactory != null) {
LOG.info("Using custom NodeIdFactory: {}", nodeIdFactory);
getContext().getCamelContextExtension().addContextPlugin(NodeIdFactory.class, nodeIdFactory);
}
StreamCachingStrategy streamCachingStrategy = getBeanForType(StreamCachingStrategy.class);
if (streamCachingStrategy != null) {
LOG.info("Using custom StreamCachingStrategy: {}", streamCachingStrategy);
getContext().setStreamCachingStrategy(streamCachingStrategy);
}
MessageHistoryFactory messageHistoryFactory = getBeanForType(MessageHistoryFactory.class);
if (messageHistoryFactory != null) {
LOG.info("Using custom MessageHistoryFactory: {}", messageHistoryFactory);
getContext().setMessageHistoryFactory(messageHistoryFactory);
}
ReactiveExecutor reactiveExecutor = getBeanForType(ReactiveExecutor.class);
if (reactiveExecutor != null) {
// already logged in CamelContext
getContext().getCamelContextExtension().setReactiveExecutor(reactiveExecutor);
}
}
}
| if |
java | ReactiveX__RxJava | src/jmh/java/io/reactivex/rxjava3/core/RangePerf.java | {
"start": 1079,
"end": 2305
} | class ____ {
@Param({ "1", "1000", "1000000" })
public int times;
Flowable<Integer> range;
Flowable<Integer> rangeAsync;
Flowable<Integer> rangeAsyncPipeline;
@Setup
public void setup() {
range = Flowable.range(1, times);
rangeAsync = range.observeOn(Schedulers.single());
rangeAsyncPipeline = range.subscribeOn(new SingleScheduler()).observeOn(Schedulers.single());
}
@Benchmark
public Object rangeSync(Blackhole bh) {
PerfSubscriber lo = new PerfSubscriber(bh);
range.subscribe(lo);
return lo;
}
// @Benchmark
public void rangeAsync(Blackhole bh) throws Exception {
PerfSubscriber lo = new PerfSubscriber(bh);
rangeAsync.subscribe(lo);
if (times == 1) {
while (lo.latch.getCount() != 0) { }
} else {
lo.latch.await();
}
}
// @Benchmark
public void rangePipeline(Blackhole bh) throws Exception {
PerfSubscriber lo = new PerfSubscriber(bh);
rangeAsyncPipeline.subscribe(lo);
if (times == 1) {
while (lo.latch.getCount() != 0) { }
} else {
lo.latch.await();
}
}
}
| RangePerf |
java | spring-projects__spring-framework | spring-jms/src/main/java/org/springframework/jms/core/JmsMessageOperations.java | {
"start": 7363,
"end": 8058
} | class ____ convert the payload to
* @return the converted payload of the reply message, possibly {@code null} if
* the message could not be received, for example due to a timeout
* @since 7.0
*/
<T> @Nullable T receiveSelectedAndConvert(@Nullable String messageSelector, Class<T> targetClass)
throws MessagingException;
/**
* Receive a message from the given destination and convert its payload to the
* specified target class.
* @param destination the target destination
* @param messageSelector the JMS message selector expression (or {@code null} if none).
* See the JMS specification for a detailed definition of selector expressions.
* @param targetClass the target | to |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/nullness/ReturnMissingNullableTest.java | {
"start": 25508,
"end": 26113
} | class ____ {
public String @Nullable [][] getMessage(boolean b, String[][] s) {
return b ? null : s;
}
}
""")
.doTest();
}
@Test
public void alreadyTypeAnnotatedInnerClassMemberSelect() {
createCompilationTestHelper()
.addSourceLines(
"com/google/errorprone/bugpatterns/nullness/LiteralNullReturnTest.java",
"""
package com.google.errorprone.bugpatterns.nullness;
import org.checkerframework.checker.nullness.qual.Nullable;
public | LiteralNullReturnTest |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-client/runtime/src/main/java/io/quarkus/rest/client/reactive/ClientQueryParams.java | {
"start": 372,
"end": 804
} | interface ____ of a REST client interface, the specified query parameters will be sent
* on each request for all
* methods in the interface.
* When this annotation is placed on a method, the parameters will be sent only for that method. If the same query parameter is
* specified in an annotation
* for both the type and the method, only the header value specified in the annotation on the method will be sent.
* <p>
* This | level |
java | apache__thrift | lib/java/src/main/java/org/apache/thrift/transport/TZlibTransport.java | {
"start": 3454,
"end": 4223
} | class ____ extends OutputStream {
private TTransport transport = null;
public TTransportOutputStream(TTransport transport) {
this.transport = transport;
}
@Override
public void write(final int b) throws IOException {
try {
transport.write(new byte[] {(byte) b});
} catch (TTransportException e) {
throw new IOException(e);
}
}
@Override
public void write(byte b[], int off, int len) throws IOException {
try {
transport.write(b, off, len);
} catch (TTransportException e) {
throw new IOException(e);
}
}
@Override
public void flush() throws IOException {
try {
transport.flush();
} catch (TTransportException e) {
throw new IOException(e);
}
}
}
| TTransportOutputStream |
java | apache__logging-log4j2 | log4j-core-test/src/test/java/org/apache/logging/log4j/core/jackson/LevelMixInJsonTest.java | {
"start": 911,
"end": 1100
} | class ____ extends LevelMixInTest {
@Override
protected ObjectMapper newObjectMapper() {
return new Log4jJsonObjectMapper(false, true, false, false);
}
}
| LevelMixInJsonTest |
java | grpc__grpc-java | xds/src/main/java/io/grpc/xds/client/XdsClient.java | {
"start": 4836,
"end": 5826
} | interface ____<T extends ResourceUpdate> {
/**
* Called when the resource discovery RPC encounters some transient error.
*
* <p>Note that we expect that the implementer to:
* - Comply with the guarantee to not generate certain statuses by the library:
* https://grpc.github.io/grpc/core/md_doc_statuscodes.html. If the code needs to be
* propagated to the channel, override it with {@link io.grpc.Status.Code#UNAVAILABLE}.
* - Keep {@link Status} description in one form or another, as it contains valuable debugging
* information.
*/
void onError(Status error);
/**
* Called when the requested resource is not available.
*
* @param resourceName name of the resource requested in discovery request.
*/
void onResourceDoesNotExist(String resourceName);
void onChanged(T update);
}
/**
* The metadata of the xDS resource; used by the xDS config dump.
*/
public static final | ResourceWatcher |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/filter/FilterChainTest_Clob_2.java | {
"start": 1237,
"end": 3808
} | class ____ extends TestCase {
private DruidDataSource dataSource;
private CallableStatementProxy statement;
private int invokeCount;
protected void setUp() throws Exception {
dataSource = new DruidDataSource();
MockCallableStatement mockStmt = new MockCallableStatement(null, "") {
@Override
public Object getObject(int parameterIndex) throws SQLException {
invokeCount++;
return new MockClob();
}
};
statement = new CallableStatementProxyImpl(new ConnectionProxyImpl(null, null, null, 0), mockStmt, "", 1);
}
protected void tearDown() throws Exception {
JdbcUtils.close(dataSource);
invokeCount = 0;
}
public void test_getClob() throws Exception {
FilterChainImpl chain = new FilterChainImpl(dataSource);
Clob clob = chain.callableStatement_getClob(statement, 1);
assertTrue(clob instanceof ClobProxy);
assertEquals(1, invokeCount);
}
public void test_getClob_1() throws Exception {
FilterChainImpl chain = new FilterChainImpl(dataSource);
Clob clob = chain.callableStatement_getClob(statement, "1");
assertTrue(clob instanceof ClobProxy);
assertEquals(1, invokeCount);
}
public void test_getObject() throws Exception {
FilterChainImpl chain = new FilterChainImpl(dataSource);
Clob clob = (Clob) chain.callableStatement_getObject(statement, 1);
assertTrue(clob instanceof ClobProxy);
assertEquals(1, invokeCount);
}
public void test_getObject_1() throws Exception {
FilterChainImpl chain = new FilterChainImpl(dataSource);
Clob clob = (Clob) chain.callableStatement_getObject(statement, "1");
assertTrue(clob instanceof ClobProxy);
assertEquals(1, invokeCount);
}
public void test_getObject_2() throws Exception {
FilterChainImpl chain = new FilterChainImpl(dataSource);
Clob clob = (Clob) chain.callableStatement_getObject(statement, 1, Collections.<String, Class<?>>emptyMap());
assertTrue(clob instanceof ClobProxy);
assertEquals(1, invokeCount);
}
public void test_getObject_3() throws Exception {
FilterChainImpl chain = new FilterChainImpl(dataSource);
Clob clob = (Clob) chain.callableStatement_getObject(statement, "1", Collections.<String, Class<?>>emptyMap());
assertTrue(clob instanceof ClobProxy);
assertEquals(1, invokeCount);
}
}
| FilterChainTest_Clob_2 |
java | google__guava | android/guava/src/com/google/common/cache/ForwardingLoadingCache.java | {
"start": 2558,
"end": 2934
} | class ____<K, V>
extends ForwardingLoadingCache<K, V> {
private final LoadingCache<K, V> delegate;
protected SimpleForwardingLoadingCache(LoadingCache<K, V> delegate) {
this.delegate = Preconditions.checkNotNull(delegate);
}
@Override
protected final LoadingCache<K, V> delegate() {
return delegate;
}
}
}
| SimpleForwardingLoadingCache |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/BooleanArrayFieldTest.java | {
"start": 1114,
"end": 1334
} | class ____ {
private Boolean[] value;
public Boolean[] getValue() {
return value;
}
public void setValue(Boolean[] value) {
this.value = value;
}
}
}
| V0 |
java | google__gson | gson/src/test/java/com/google/gson/functional/JsonAdapterAnnotationOnClassesTest.java | {
"start": 11867,
"end": 12854
} | class ____ extends TypeAdapter<Foo> {
@Override
public void write(JsonWriter out, Foo value) throws IOException {
out.value(value.name().toLowerCase(Locale.US));
}
@Override
public Foo read(JsonReader in) throws IOException {
return Foo.valueOf(in.nextString().toUpperCase(Locale.US));
}
}
@Test
public void testIncorrectJsonAdapterType() {
Gson gson = new Gson();
WithInvalidAdapterClass obj = new WithInvalidAdapterClass();
var e = assertThrows(IllegalArgumentException.class, () -> gson.toJson(obj));
assertThat(e)
.hasMessageThat()
.isEqualTo(
"Invalid attempt to bind an instance of java.lang.Integer as a @JsonAdapter for "
+ WithInvalidAdapterClass.class.getName()
+ ". @JsonAdapter value must be a TypeAdapter, TypeAdapterFactory, JsonSerializer"
+ " or JsonDeserializer.");
}
@JsonAdapter(Integer.class)
private static final | FooJsonAdapter |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/context/properties/bind/BackCompatibilityBinderIntegrationTests.java | {
"start": 2551,
"end": 2770
} | class ____ {
private @Nullable String password;
@Nullable String getPassword() {
return this.password;
}
void setPassword(@Nullable String password) {
this.password = password;
}
}
}
| PasswordProperties |
java | apache__hadoop | hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/RecordComparator.java | {
"start": 1268,
"end": 1831
} | class ____ extends WritableComparator {
/**
* Construct a raw {@link Record} comparison implementation. */
protected RecordComparator(Class<? extends WritableComparable> recordClass) {
super(recordClass);
}
// inheric JavaDoc
@Override
public abstract int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2);
/**
* Register an optimized comparator for a {@link Record} implementation.
*
* @param c record classs for which a raw comparator is provided
* @param comparator Raw comparator instance for | RecordComparator |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/checkreturnvalue/CanIgnoreReturnValueSuggesterTest.java | {
"start": 13834,
"end": 14479
} | class ____ {
private String name;
@CanIgnoreReturnValue
public Client setName(String name) {
this.name = name;
return getThis();
}
private Client getThis() {
return this;
}
}
""")
.doTest();
}
@Test
public void simpleCaseAlreadyAnnotatedWithCirv() {
helper
.addInputLines(
"Client.java",
"""
package com.google.frobber;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
public final | Client |
java | apache__logging-log4j2 | log4j-core-test/src/test/java/org/apache/logging/log4j/core/appender/TlsSyslogFrameTest.java | {
"start": 1100,
"end": 2114
} | class ____ {
private static final String TEST_MESSAGE = "The quick brown fox jumps over the lazy dog";
@Test
void equals() {
final TlsSyslogFrame first = new TlsSyslogFrame(TEST_MESSAGE);
final TlsSyslogFrame second = new TlsSyslogFrame(TEST_MESSAGE);
assertEquals(first, second);
assertEquals(first.hashCode(), second.hashCode());
}
@Test
void notEquals() {
final TlsSyslogFrame first = new TlsSyslogFrame("A message");
final TlsSyslogFrame second = new TlsSyslogFrame("B message");
assertNotEquals(first, second);
assertNotEquals(first.hashCode(), second.hashCode());
}
@Test
void testToString() {
final TlsSyslogFrame frame = new TlsSyslogFrame(TEST_MESSAGE);
final int length = TEST_MESSAGE.getBytes(StandardCharsets.UTF_8).length;
final String expected = Integer.toString(length) + Chars.SPACE + TEST_MESSAGE;
assertEquals(expected, frame.toString());
}
}
| TlsSyslogFrameTest |
java | apache__kafka | clients/src/test/java/org/apache/kafka/common/requests/OffsetCommitRequestTest.java | {
"start": 2007,
"end": 6571
} | class ____ {
protected static String groupId = "groupId";
protected static String memberId = "consumerId";
protected static String groupInstanceId = "groupInstanceId";
protected static Uuid topicIdOne = Uuid.randomUuid();
protected static Uuid topicIdTwo = Uuid.randomUuid();
protected static String topicOne = "topicOne";
protected static String topicTwo = "topicTwo";
protected static int partitionOne = 1;
protected static int partitionTwo = 2;
protected static long offset = 100L;
protected static short leaderEpoch = 20;
protected static String metadata = "metadata";
protected static int throttleTimeMs = 10;
private static OffsetCommitRequestData data;
@BeforeEach
public void setUp() {
List<OffsetCommitRequestTopic> topics = Arrays.asList(
new OffsetCommitRequestTopic()
.setTopicId(topicIdOne)
.setName(topicOne)
.setPartitions(Collections.singletonList(
new OffsetCommitRequestPartition()
.setPartitionIndex(partitionOne)
.setCommittedOffset(offset)
.setCommittedLeaderEpoch(leaderEpoch)
.setCommittedMetadata(metadata)
)),
new OffsetCommitRequestTopic()
.setTopicId(topicIdTwo)
.setName(topicTwo)
.setPartitions(Collections.singletonList(
new OffsetCommitRequestPartition()
.setPartitionIndex(partitionTwo)
.setCommittedOffset(offset)
.setCommittedLeaderEpoch(leaderEpoch)
.setCommittedMetadata(metadata)
))
);
data = new OffsetCommitRequestData()
.setGroupId(groupId)
.setTopics(topics);
}
@Test
public void testConstructor() {
Map<TopicPartition, Long> expectedOffsets = new HashMap<>();
expectedOffsets.put(new TopicPartition(topicOne, partitionOne), offset);
expectedOffsets.put(new TopicPartition(topicTwo, partitionTwo), offset);
OffsetCommitRequest.Builder builder = OffsetCommitRequest.Builder.forTopicNames(data);
for (short version : ApiKeys.OFFSET_COMMIT.allVersions()) {
OffsetCommitRequest request = builder.build(version);
assertEquals(expectedOffsets, request.offsets());
OffsetCommitResponse response = request.getErrorResponse(throttleTimeMs, Errors.NOT_COORDINATOR.exception());
assertEquals(Collections.singletonMap(Errors.NOT_COORDINATOR, 2), response.errorCounts());
assertEquals(throttleTimeMs, response.throttleTimeMs());
}
}
@Test
public void testVersionSupportForGroupInstanceId() {
OffsetCommitRequest.Builder builder = OffsetCommitRequest.Builder.forTopicNames(
new OffsetCommitRequestData()
.setGroupId(groupId)
.setMemberId(memberId)
.setGroupInstanceId(groupInstanceId)
);
for (short version : ApiKeys.OFFSET_COMMIT.allVersions()) {
if (version >= 7) {
builder.build(version);
} else {
final short finalVersion = version;
assertThrows(UnsupportedVersionException.class, () -> builder.build(finalVersion));
}
}
}
@Test
public void testGetErrorResponse() {
OffsetCommitResponseData expectedResponse = new OffsetCommitResponseData()
.setTopics(Arrays.asList(
new OffsetCommitResponseTopic()
.setTopicId(topicIdOne)
.setName(topicOne)
.setPartitions(Collections.singletonList(
new OffsetCommitResponsePartition()
.setErrorCode(Errors.UNKNOWN_MEMBER_ID.code())
.setPartitionIndex(partitionOne))),
new OffsetCommitResponseTopic()
.setTopicId(topicIdTwo)
.setName(topicTwo)
.setPartitions(Collections.singletonList(
new OffsetCommitResponsePartition()
.setErrorCode(Errors.UNKNOWN_MEMBER_ID.code())
.setPartitionIndex(partitionTwo)))));
assertEquals(expectedResponse, getErrorResponse(data, Errors.UNKNOWN_MEMBER_ID));
}
}
| OffsetCommitRequestTest |
java | micronaut-projects__micronaut-core | inject/src/main/java/io/micronaut/context/annotation/EachProperty.java | {
"start": 1632,
"end": 2592
} | class ____ {
* ExampleConfiguration({@literal @}Parameter String name) {
* ...
* }
* }
* </code></pre>
*
* <p>In the above example for a configuration property of {@code foo.bar.test}, the value of the {@code name} argument
* will be {@code "test"}</p>
*
* <p>The bean is created as a singleton with a Named qualifier matching the configuration entry
* name, thus allowing retrieval with:</p>
* <pre>{@code
* ExampleConfiguration exampleConfiguration = applicationContext.getBean(ExampleConfiguration.class, Qualifiers.byName("test"));
* }</pre>
*
* <p>Or alternatively dependency injection via the Named qualifier.</p>
*
* <p>This annotation is typically used in conjunction with {@link EachBean}. For example, one can drive the
* configuration of other beans with the {@link EachBean} annotation:</p>
* <pre><code>
* {@literal @}EachBean(ExampleConfiguration)
* {@literal @}Singleton
* public | ExampleConfiguration |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/ToDynamicRawTest.java | {
"start": 973,
"end": 1535
} | class ____ extends ContextTestSupport {
@Test
public void testToDynamicRaw() throws Exception {
getMockEndpoint("mock:RAW(se+ret)").expectedBodiesReceived("Hello Camel");
template.sendBody("direct:start", "Hello Camel");
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").toD("mock:RAW(se+ret)");
}
};
}
}
| ToDynamicRawTest |
java | apache__camel | components/camel-oaipmh/src/main/java/org/apache/camel/oaipmh/component/OAIPMHEndpoint.java | {
"start": 1712,
"end": 6852
} | class ____ extends ScheduledPollEndpoint implements EndpointServiceLocation {
private transient URI url;
@UriPath(description = "Base URL of the repository to which the request is made through the OAI-PMH protocol")
@Metadata(required = true)
private String baseUrl;
@UriParam(description = "Specifies a lower bound for datestamp-based selective harvesting. UTC DateTime value")
private String from;
@UriParam(description = "Specifies an upper bound for datestamp-based selective harvesting. UTC DateTime value.")
private String until;
@UriParam(description = "Specifies membership as a criteria for set-based selective harvesting")
private String set;
@UriParam(description = "Request name supported by OAI-PMh protocol", defaultValue = "ListRecords")
private String verb = "ListRecords";
@UriParam(description = "Specifies the metadataPrefix of the format that should be included in the metadata part of the returned records.",
defaultValue = "oai_dc")
private String metadataPrefix = "oai_dc";
@UriParam(label = "security", description = "Causes the defined url to make an https request")
private boolean ssl;
@UriParam(label = "security", description = "Ignore SSL certificate warnings")
private boolean ignoreSSLWarnings;
@UriParam(description = "Identifier of the requested resources. Applicable only with certain verbs")
private String identifier;
@UriParam(label = "producer",
description = "Returns the response of a single request. Otherwise it will make requests until there is no more data to return.")
private boolean onlyFirst;
private Map<String, Object> queryParameters;
public OAIPMHEndpoint(String uri, String remaining, OAIPMHComponent component) {
super(uri, component);
this.baseUrl = remaining;
}
@Override
public String getServiceUrl() {
return baseUrl;
}
@Override
public String getServiceProtocol() {
return "oai-phm";
}
@Override
public boolean isLenientProperties() {
return true;
}
@Override
protected void doInit() throws Exception {
super.doInit();
validateParameters();
// build uri from parameters
String prefix = "";
if (!baseUrl.startsWith("http:") && !baseUrl.startsWith("https:")) {
prefix = isSsl() ? "https://" : "http://";
}
this.url = URI.create(prefix + baseUrl);
// append extra parameters
if (queryParameters != null && !queryParameters.isEmpty()) {
Map<String, Object> parameters = URISupport.parseParameters(url);
parameters.putAll(queryParameters);
this.url = URISupport.createRemainingURI(url, parameters);
}
}
@Override
public Producer createProducer() throws Exception {
return new OAIPMHProducer(this);
}
@Override
public Consumer createConsumer(Processor processor) throws Exception {
OAIPMHConsumer consumer = new OAIPMHConsumer(this, processor);
configureConsumer(consumer);
return consumer;
}
private void validateParameters() {
// From parameter in ISO 8601 format
if (from != null) {
ISODateTimeFormat.dateTimeNoMillis().parseDateTime(from);
}
if (until != null) {
ISODateTimeFormat.dateTimeNoMillis().parseDateTime(until);
}
}
public Map<String, Object> getQueryParameters() {
return queryParameters;
}
public void setQueryParameters(Map<String, Object> queryParameters) {
this.queryParameters = queryParameters;
}
public boolean isIgnoreSSLWarnings() {
return ignoreSSLWarnings;
}
public void setIgnoreSSLWarnings(boolean ignoreSSLWarnings) {
this.ignoreSSLWarnings = ignoreSSLWarnings;
}
public boolean isSsl() {
return ssl;
}
public void setSsl(boolean ssl) {
this.ssl = ssl;
}
public String getFrom() {
return from;
}
public void setFrom(String from) {
this.from = from;
}
public String getUntil() {
return until;
}
public void setUntil(String until) {
this.until = until;
}
public String getSet() {
return set;
}
public void setSet(String set) {
this.set = set;
}
public String getVerb() {
return verb;
}
public void setVerb(String verb) {
this.verb = verb;
}
public String getMetadataPrefix() {
return metadataPrefix;
}
public void setMetadataPrefix(String metadataPrefix) {
this.metadataPrefix = metadataPrefix;
}
public URI getUrl() {
return this.url;
}
public String getIdentifier() {
return identifier;
}
public void setIdentifier(String identifier) {
this.identifier = identifier;
}
public boolean isOnlyFirst() {
return onlyFirst;
}
public void setOnlyFirst(boolean onlyFist) {
this.onlyFirst = onlyFist;
}
}
| OAIPMHEndpoint |
java | apache__camel | components/camel-whatsapp/src/main/java/org/apache/camel/component/whatsapp/WhatsAppProducer.java | {
"start": 1232,
"end": 2453
} | class ____ extends DefaultAsyncProducer {
private static final Logger LOG = LoggerFactory.getLogger(WhatsAppProducer.class);
private WhatsAppEndpoint endpoint;
public WhatsAppProducer(WhatsAppEndpoint endpoint) {
super(endpoint);
this.endpoint = endpoint;
}
@Override
public boolean process(Exchange exchange, AsyncCallback callback) {
if (exchange.getIn().getBody() == null) {
// fail fast
LOG.debug("Received exchange with empty body, skipping");
callback.done(true);
return true;
}
// WhatsAppConfiguration config = endpoint.getConfiguration();
// Tries to get a message in its OutgoingMessage format
// Automatic conversion applies here
BaseMessage message = exchange.getIn().getBody(BaseMessage.class);
ObjectHelper.notNull(message, "message");
final WhatsAppService service = endpoint.getWhatsappService();
LOG.debug("Message being sent is: {}", message);
LOG.debug("Headers of message being sent are: {}", exchange.getIn().getHeaders());
service.sendMessage(exchange, callback, message);
return false;
}
}
| WhatsAppProducer |
java | eclipse-vertx__vert.x | vertx-core/src/test/java/io/vertx/tests/net/HostAndPortTest.java | {
"start": 243,
"end": 5595
} | class ____ {
@Test
public void testParseIPLiteral() {
Assert.assertEquals(-1, HostAndPortImpl.parseIPLiteral("", 0, 0));
assertEquals(-1, HostAndPortImpl.parseIPLiteral("[", 0, 1));
assertEquals(-1, HostAndPortImpl.parseIPLiteral("[]", 0, 2));
assertEquals(3, HostAndPortImpl.parseIPLiteral("[0]", 0, 3));
assertEquals(-1, HostAndPortImpl.parseIPLiteral("[0", 0, 2));
}
@Test
public void testParseDecOctet() {
assertEquals(-1, HostAndPortImpl.parseDecOctet("", 0, 0));
assertEquals(1, HostAndPortImpl.parseDecOctet("0", 0, 1));
assertEquals(1, HostAndPortImpl.parseDecOctet("9", 0, 1));
assertEquals(1, HostAndPortImpl.parseDecOctet("01", 0, 2));
assertEquals(2, HostAndPortImpl.parseDecOctet("19", 0, 2));
assertEquals(3, HostAndPortImpl.parseDecOctet("192", 0, 3));
assertEquals(3, HostAndPortImpl.parseDecOctet("1234", 0, 4));
assertEquals(-1, HostAndPortImpl.parseDecOctet("256", 0, 3));
}
@Test
public void testParseIPV4Address() {
assertEquals(-1, HostAndPortImpl.parseIPv4Address("0.0.0", 0, 5));
assertEquals(-1, HostAndPortImpl.parseIPv4Address("0.0.0#0", 0, 7));
assertEquals(7, HostAndPortImpl.parseIPv4Address("0.0.0.0", 0, 7));
assertEquals(11, HostAndPortImpl.parseIPv4Address("192.168.0.0", 0, 11));
assertEquals(-1, HostAndPortImpl.parseIPv4Address("011.168.0.0", 0, 11));
assertEquals(-1, HostAndPortImpl.parseIPv4Address("10.0.0.1.nip.io", 0, 15));
assertEquals(-1, HostAndPortImpl.parseIPv4Address("10.0.0.1.nip.io", 0, 9));
assertEquals(8, HostAndPortImpl.parseIPv4Address("10.0.0.1.nip.io", 0, 8));
assertEquals(-1, HostAndPortImpl.parseIPv4Address("10.0.0.1:", 0, 9));
assertEquals(8, HostAndPortImpl.parseIPv4Address("10.0.0.1:0", 0, 10));
}
@Test
public void testParseRegName() {
assertEquals(5, HostAndPortImpl.parseRegName("abcdef", 0, 5));
assertEquals(5, HostAndPortImpl.parseRegName("abcdef:1234", 0, 5));
assertEquals(11, HostAndPortImpl.parseRegName("example.com", 0, 11));
assertEquals(14, HostAndPortImpl.parseRegName("example-fr.com", 0, 14));
assertEquals(15, HostAndPortImpl.parseRegName("10.0.0.1.nip.io", 0, 15));
}
@Test
public void testParseHost() {
assertEquals(14, HostAndPortImpl.parseHost("example-fr.com", 0, 14));
assertEquals(5, HostAndPortImpl.parseHost("[0::]", 0, 5));
assertEquals(7, HostAndPortImpl.parseHost("0.0.0.0", 0, 7));
assertEquals(8, HostAndPortImpl.parseHost("10.0.0.1.nip.io", 0, 8));
assertEquals(15, HostAndPortImpl.parseHost("10.0.0.1.nip.io", 0, 15));
assertEquals(8, HostAndPortImpl.parseHost("10.0.0.1:8080", 0, 15));
}
@Test
public void testParseHostAndPort() {
assertHostAndPort("10.0.0.1.nip.io", -1, "10.0.0.1.nip.io");
assertHostAndPort("10.0.0.1.nip.io", 8443, "10.0.0.1.nip.io:8443");
assertHostAndPort("127.0.0.1", 8080, "127.0.0.1:8080");
assertHostAndPort("example.com", 8080, "example.com:8080");
assertHostAndPort("example.com", -1, "example.com");
assertHostAndPort("0.1.2.3", -1, "0.1.2.3");
assertHostAndPort("[0::]", -1, "[0::]");
assertHostAndPort("", -1, "");
assertHostAndPort("", 8080, ":8080");
assertNull(HostAndPortImpl.parseAuthority("/", -1));
assertFalse(HostAndPortImpl.isValidAuthority("/"));
assertNull(HostAndPortImpl.parseAuthority("10.0.0.1:x", -1));
assertFalse(HostAndPortImpl.isValidAuthority("10.0.0.1:x"));
}
@Test
public void testParseInvalid() {
assertHostAndPort("localhost", 65535, "localhost:65535");
assertNull(HostAndPortImpl.parseAuthority("localhost:65536", -1));
assertFalse(HostAndPortImpl.isValidAuthority("localhost:65536"));
assertNull(HostAndPortImpl.parseAuthority("localhost:8080a", -1));
assertFalse(HostAndPortImpl.isValidAuthority("localhost:8080a"));
assertNull(HostAndPortImpl.parseAuthority("http://localhost:8080", -1));
assertFalse(HostAndPortImpl.isValidAuthority("http://localhost:8080"));
assertNull(HostAndPortImpl.parseAuthority("^", -1));
assertFalse(HostAndPortImpl.isValidAuthority("^"));
assertNull(HostAndPortImpl.parseAuthority("bücher.de", -1));
assertFalse(HostAndPortImpl.isValidAuthority("bücher.de"));
}
private void assertHostAndPort(String expectedHost, int expectedPort, String actual) {
HostAndPortImpl hostAndPort = HostAndPortImpl.parseAuthority(actual, -1);
assertNotNull(hostAndPort);
assertTrue(HostAndPortImpl.isValidAuthority(actual));
assertEquals(expectedHost, hostAndPort.host());
assertEquals(expectedPort, hostAndPort.port());
}
@Test
public void testFromJson() {
assertNull(HostAndPort.fromJson(new JsonObject()));
HostAndPort hostAndPort = HostAndPort.fromJson(new JsonObject().put("host", "the-host"));
assertEquals("the-host", hostAndPort.host());
assertEquals(-1, hostAndPort.port());
hostAndPort = HostAndPort.fromJson(new JsonObject().put("host", "the-host").put("port", 4));
assertEquals("the-host", hostAndPort.host());
assertEquals(4, hostAndPort.port());
}
@Test
public void testToJson() {
assertEquals(new JsonObject().put("host", "the-host").put("port", 4), HostAndPort.create("the-host", 4).toJson());
assertEquals(new JsonObject().put("host", "the-host"), HostAndPort.create("the-host", -1).toJson());
}
}
| HostAndPortTest |
java | apache__camel | components/camel-telemetry/src/main/java/org/apache/camel/telemetry/decorators/HttpMethodHelper.java | {
"start": 985,
"end": 2406
} | class ____ {
private static final Pattern HTTP_METHOD_PATTERN = Pattern.compile("(?i)httpMethod=([A-Z]+)");
/**
* This method searches for the httpMethod param on the endpoint and return it.
*
* @param exchange
* @param endpoint
* @return
*/
public static String getHttpMethodFromParameters(Exchange exchange, Endpoint endpoint) {
String queryStringHeader = (String) exchange.getIn().getHeader(Exchange.HTTP_QUERY);
if (queryStringHeader != null) {
String methodFromQuery = getMethodFromQueryString(queryStringHeader);
if (methodFromQuery != null) {
return methodFromQuery;
}
}
// try to get the httpMethod parameter from the the query string in the uri
int queryIndex = endpoint.getEndpointUri().indexOf('?');
if (queryIndex != -1) {
String queryString = endpoint.getEndpointUri().substring(queryIndex + 1);
String methodFromQuery = getMethodFromQueryString(queryString);
if (methodFromQuery != null) {
return methodFromQuery;
}
}
return null;
}
private static String getMethodFromQueryString(String queryString) {
Matcher m = HTTP_METHOD_PATTERN.matcher(queryString);
if (m.find()) {
return m.group(1);
}
return null;
}
}
| HttpMethodHelper |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/throttle/ThrottlerThreadPoolProfileTest.java | {
"start": 1209,
"end": 2148
} | class ____ extends ContextTestSupport {
@Test
public void testThreadPool() throws Exception {
getMockEndpoint("mock:result").expectedMessageCount(2);
template.sendBody("direct:start", "Hello");
template.sendBody("direct:start", "World");
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
// create thread pool profile and register to camel
ThreadPoolProfile profile
= new ThreadPoolProfileBuilder("myPool").poolSize(2).maxPoolSize(5).maxQueueSize(10).build();
context.getExecutorServiceManager().registerThreadPoolProfile(profile);
from("direct:start").throttle(constant(2)).executorService("myPool").to("mock:result");
}
};
}
}
| ThrottlerThreadPoolProfileTest |
java | apache__flink | flink-core-api/src/main/java/org/apache/flink/api/common/watermark/WatermarkHandlingResult.java | {
"start": 1013,
"end": 1348
} | enum ____ {
/**
* Process function only peek the watermark, and it's framework's responsibility to handle this
* watermark.
*/
PEEK,
/**
* This watermark should be sent to downstream by process function itself. The framework does no
* additional processing.
*/
POLL,
}
| WatermarkHandlingResult |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/serializer/stream/StreamWriterTest_writeValueString2.java | {
"start": 261,
"end": 744
} | class ____ extends TestCase {
public void test_0() throws Exception {
StringWriter out = new StringWriter();
SerializeWriter writer = new SerializeWriter(out, 10);
Assert.assertEquals(10, writer.getBufferLength());
writer.writeString("abcde12345678\"\\");
writer.close();
String text = out.toString();
Assert.assertEquals("\"abcde12345678\\\"\\\\\"", text);
}
}
| StreamWriterTest_writeValueString2 |
java | apache__camel | components/camel-spring-parent/camel-spring-cloud-config/src/main/java/org/apache/camel/component/spring/cloud/config/SpringConfigRemaindersCache.java | {
"start": 916,
"end": 1289
} | class ____ {
private SpringConfigRemaindersCache() {
}
private static final Map<String, String> REMAINDERS_CACHE = new HashMap<>();
public static Map<String, String> get() {
return REMAINDERS_CACHE;
}
public static void put(String remainder, String value) {
REMAINDERS_CACHE.put(remainder, value);
}
}
| SpringConfigRemaindersCache |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.