language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__camel | components/camel-smpp/src/main/java/org/apache/camel/component/smpp/SmppSmCommand.java | {
"start": 2515,
"end": 8195
} | enum ____ the negative response
// codes instead of just using them like this
NegativeResponseException nre = new NegativeResponseException(SMPP_NEG_RESPONSE_MSG_TOO_LONG);
throw new SmppException(nre);
default:
throw new SmppException("Unknown splitting policy: " + policy);
}
} else {
return segments;
}
}
private SmppSplittingPolicy getSplittingPolicy(Message message) throws SmppException {
if (message.getHeaders().containsKey(SmppConstants.SPLITTING_POLICY)) {
String policyName = message.getHeader(SmppConstants.SPLITTING_POLICY, String.class);
return SmppSplittingPolicy.fromString(policyName);
}
return config.getSplittingPolicy();
}
protected SmppSplitter createSplitter(Message message) throws SmppException {
SmppSplitter splitter;
// use the splitter if provided via header
if (message.getHeaders().containsKey(SmppConstants.DATA_SPLITTER)) {
splitter = message.getHeader(SmppConstants.DATA_SPLITTER, SmppSplitter.class);
if (null != splitter) {
return splitter;
}
throw new SmppException("Invalid splitter given. Must be instance of SmppSplitter");
}
Alphabet alphabet = determineAlphabet(message);
String body = message.getBody(String.class);
if (SmppUtils.is8Bit(alphabet)) {
splitter = new Smpp8BitSplitter(body.length());
} else if (alphabet == Alphabet.ALPHA_UCS2) {
splitter = new SmppUcs2Splitter(body.length());
} else {
splitter = new SmppDefaultSplitter(body.length());
}
return splitter;
}
protected final byte[] getShortMessage(Message message) {
if (has8bitDataCoding(message)) {
return message.getBody(byte[].class);
} else {
byte providedAlphabet = getProvidedAlphabet(message);
Alphabet determinedAlphabet = determineAlphabet(message);
Charset charset = determineCharset(message, providedAlphabet, determinedAlphabet.value());
String body = message.getBody(String.class);
return body.getBytes(charset);
}
}
private static boolean has8bitDataCoding(Message message) {
Byte dcs = message.getHeader(SmppConstants.DATA_CODING, Byte.class);
if (dcs != null) {
return SmppUtils.is8Bit(Alphabet.parseDataCoding(dcs.byteValue()));
} else {
Byte alphabet = message.getHeader(SmppConstants.ALPHABET, Byte.class);
return alphabet != null && SmppUtils.is8Bit(Alphabet.valueOf(alphabet));
}
}
private byte getProvidedAlphabet(Message message) {
byte alphabet = config.getAlphabet();
if (message.getHeaders().containsKey(SmppConstants.ALPHABET)) {
alphabet = message.getHeader(SmppConstants.ALPHABET, Byte.class);
}
return alphabet;
}
private Charset getCharsetForMessage(Message message) {
if (message.getHeaders().containsKey(SmppConstants.ENCODING)) {
String encoding = message.getHeader(SmppConstants.ENCODING, String.class);
if (Charset.isSupported(encoding)) {
return Charset.forName(encoding);
} else {
logger.warn("Unsupported encoding \"{}\" requested in header.", encoding);
}
}
return null;
}
private Charset determineCharset(Message message, byte providedAlphabet, byte determinedAlphabet) {
Charset result = getCharsetForMessage(message);
if (result != null) {
return result;
}
if (providedAlphabet == Alphabet.ALPHA_UCS2.value()
|| providedAlphabet == SmppConstants.UNKNOWN_ALPHABET && determinedAlphabet == Alphabet.ALPHA_UCS2.value()) {
// change charset to use multilang messages
return StandardCharsets.UTF_16BE;
}
return defaultCharset;
}
private Alphabet determineAlphabet(Message message) {
String body = message.getBody(String.class);
byte alphabet = getProvidedAlphabet(message);
Charset charset = getCharsetForMessage(message);
if (charset == null) {
charset = defaultCharset;
}
Alphabet alphabetObj;
if (alphabet == SmppConstants.UNKNOWN_ALPHABET) {
alphabetObj = Alphabet.ALPHA_UCS2;
if (isLatin1Compatible(charset)) {
byte[] messageBytes = body.getBytes(charset);
if (SmppUtils.isGsm0338Encodeable(messageBytes)) {
alphabetObj = Alphabet.ALPHA_DEFAULT;
}
}
} else {
alphabetObj = Alphabet.valueOf(alphabet);
}
return alphabetObj;
}
private boolean isLatin1Compatible(Charset c) {
if (c.equals(ascii) || c.equals(latin1)) {
return true;
}
return false;
}
protected byte getRegisterDeliveryFlag(Message message) {
if (message.getHeaders().containsKey(SmppConstants.REGISTERED_DELIVERY)) {
return message.getHeader(SmppConstants.REGISTERED_DELIVERY, Byte.class);
}
return config.getRegisteredDelivery();
}
protected boolean getRequestsSingleDLR(Message message) {
if (message.getHeaders().containsKey(SmppConstants.SINGLE_DLR)) {
return message.getHeader(SmppConstants.SINGLE_DLR, Boolean.class);
}
return config.isSingleDLR();
}
}
| of |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/ByteArrayEncodingState.java | {
"start": 1139,
"end": 3476
} | class ____ extends EncodingState {
byte[][] inputs;
byte[][] outputs;
int[] inputOffsets;
int[] outputOffsets;
ByteArrayEncodingState(RawErasureEncoder encoder,
byte[][] inputs, byte[][] outputs) {
this.encoder = encoder;
byte[] validInput = CoderUtil.findFirstValidInput(inputs);
this.encodeLength = validInput.length;
this.inputs = inputs;
this.outputs = outputs;
checkParameters(inputs, outputs);
checkBuffers(inputs);
checkBuffers(outputs);
this.inputOffsets = new int[inputs.length]; // ALL ZERO
this.outputOffsets = new int[outputs.length]; // ALL ZERO
}
ByteArrayEncodingState(RawErasureEncoder encoder,
int encodeLength,
byte[][] inputs,
int[] inputOffsets,
byte[][] outputs,
int[] outputOffsets) {
this.encoder = encoder;
this.encodeLength = encodeLength;
this.inputs = inputs;
this.outputs = outputs;
this.inputOffsets = inputOffsets;
this.outputOffsets = outputOffsets;
}
/**
* Convert to a ByteBufferEncodingState when it's backed by on-heap arrays.
*/
ByteBufferEncodingState convertToByteBufferState() {
ByteBuffer[] newInputs = new ByteBuffer[inputs.length];
ByteBuffer[] newOutputs = new ByteBuffer[outputs.length];
for (int i = 0; i < inputs.length; i++) {
newInputs[i] = CoderUtil.cloneAsDirectByteBuffer(inputs[i],
inputOffsets[i], encodeLength);
}
for (int i = 0; i < outputs.length; i++) {
newOutputs[i] = ByteBuffer.allocateDirect(encodeLength);
}
ByteBufferEncodingState bbeState = new ByteBufferEncodingState(encoder,
encodeLength, newInputs, newOutputs);
return bbeState;
}
/**
* Check and ensure the buffers are of the desired length.
* @param buffers the buffers to check
*/
void checkBuffers(byte[][] buffers) {
for (byte[] buffer : buffers) {
if (buffer == null) {
throw new HadoopIllegalArgumentException(
"Invalid buffer found, not allowing null");
}
if (buffer.length != encodeLength) {
throw new HadoopIllegalArgumentException(
"Invalid buffer not of length " + encodeLength);
}
}
}
}
| ByteArrayEncodingState |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/NonOverridingEquals.java | {
"start": 2830,
"end": 3468
} | class ____ extends BugChecker implements MethodTreeMatcher {
private static final String MESSAGE_BASE = "equals method doesn't override Object.equals";
/**
* Matches any method definition that: 1) is named `equals` 2) takes a single argument of a type
* other than Object 3) returns a boolean or Boolean
*/
private static final Matcher<MethodTree> MATCHER =
allOf(
methodIsNamed("equals"),
methodHasParameters(variableType(not(isSameType("java.lang.Object")))),
anyOf(methodReturns(BOOLEAN_TYPE), methodReturns(JAVA_LANG_BOOLEAN_TYPE)));
/** Matches if the enclosing | NonOverridingEquals |
java | netty__netty | handler/src/main/java/io/netty/handler/ssl/JdkAlpnApplicationProtocolNegotiator.java | {
"start": 994,
"end": 5675
} | class ____ extends JdkBaseApplicationProtocolNegotiator {
private static final boolean AVAILABLE = Conscrypt.isAvailable() ||
JdkAlpnSslUtils.supportsAlpn() ||
(BouncyCastleUtil.isBcTlsAvailable() && BouncyCastleAlpnSslUtils.isAlpnSupported());
private static final SslEngineWrapperFactory ALPN_WRAPPER = AVAILABLE ? new AlpnWrapper() : new FailureWrapper();
/**
* Create a new instance.
* @param protocols The order of iteration determines the preference of support for protocols.
*/
public JdkAlpnApplicationProtocolNegotiator(Iterable<String> protocols) {
this(false, protocols);
}
/**
* Create a new instance.
* @param protocols The order of iteration determines the preference of support for protocols.
*/
public JdkAlpnApplicationProtocolNegotiator(String... protocols) {
this(false, protocols);
}
/**
* Create a new instance.
* @param failIfNoCommonProtocols Fail with a fatal alert if not common protocols are detected.
* @param protocols The order of iteration determines the preference of support for protocols.
*/
public JdkAlpnApplicationProtocolNegotiator(boolean failIfNoCommonProtocols, Iterable<String> protocols) {
this(failIfNoCommonProtocols, failIfNoCommonProtocols, protocols);
}
/**
* Create a new instance.
* @param failIfNoCommonProtocols Fail with a fatal alert if not common protocols are detected.
* @param protocols The order of iteration determines the preference of support for protocols.
*/
public JdkAlpnApplicationProtocolNegotiator(boolean failIfNoCommonProtocols, String... protocols) {
this(failIfNoCommonProtocols, failIfNoCommonProtocols, protocols);
}
/**
* Create a new instance.
* @param clientFailIfNoCommonProtocols Client side fail with a fatal alert if not common protocols are detected.
* @param serverFailIfNoCommonProtocols Server side fail with a fatal alert if not common protocols are detected.
* @param protocols The order of iteration determines the preference of support for protocols.
*/
public JdkAlpnApplicationProtocolNegotiator(boolean clientFailIfNoCommonProtocols,
boolean serverFailIfNoCommonProtocols, Iterable<String> protocols) {
this(serverFailIfNoCommonProtocols ? FAIL_SELECTOR_FACTORY : NO_FAIL_SELECTOR_FACTORY,
clientFailIfNoCommonProtocols ? FAIL_SELECTION_LISTENER_FACTORY : NO_FAIL_SELECTION_LISTENER_FACTORY,
protocols);
}
/**
* Create a new instance.
* @param clientFailIfNoCommonProtocols Client side fail with a fatal alert if not common protocols are detected.
* @param serverFailIfNoCommonProtocols Server side fail with a fatal alert if not common protocols are detected.
* @param protocols The order of iteration determines the preference of support for protocols.
*/
public JdkAlpnApplicationProtocolNegotiator(boolean clientFailIfNoCommonProtocols,
boolean serverFailIfNoCommonProtocols, String... protocols) {
this(serverFailIfNoCommonProtocols ? FAIL_SELECTOR_FACTORY : NO_FAIL_SELECTOR_FACTORY,
clientFailIfNoCommonProtocols ? FAIL_SELECTION_LISTENER_FACTORY : NO_FAIL_SELECTION_LISTENER_FACTORY,
protocols);
}
/**
* Create a new instance.
* @param selectorFactory The factory which provides classes responsible for selecting the protocol.
* @param listenerFactory The factory which provides to be notified of which protocol was selected.
* @param protocols The order of iteration determines the preference of support for protocols.
*/
public JdkAlpnApplicationProtocolNegotiator(ProtocolSelectorFactory selectorFactory,
ProtocolSelectionListenerFactory listenerFactory, Iterable<String> protocols) {
super(ALPN_WRAPPER, selectorFactory, listenerFactory, protocols);
}
/**
* Create a new instance.
* @param selectorFactory The factory which provides classes responsible for selecting the protocol.
* @param listenerFactory The factory which provides to be notified of which protocol was selected.
* @param protocols The order of iteration determines the preference of support for protocols.
*/
public JdkAlpnApplicationProtocolNegotiator(ProtocolSelectorFactory selectorFactory,
ProtocolSelectionListenerFactory listenerFactory, String... protocols) {
super(ALPN_WRAPPER, selectorFactory, listenerFactory, protocols);
}
private static final | JdkAlpnApplicationProtocolNegotiator |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/internal/verification/api/InOrderContext.java | {
"start": 218,
"end": 333
} | interface ____ {
boolean isVerified(Invocation invocation);
void markVerified(Invocation i);
}
| InOrderContext |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/bug/Bug_for_issue_569_1.java | {
"start": 1113,
"end": 1353
} | class ____<T> {
private List<B<T>> bList;
public List<B<T>> getbList() {
return bList;
}
public void setbList(List<B<T>> bList) {
this.bList = bList;
}
}
public static | A |
java | apache__kafka | storage/src/main/java/org/apache/kafka/storage/internals/log/ProducerAppendInfo.java | {
"start": 2078,
"end": 17250
} | class ____ {
private static final Logger log = LoggerFactory.getLogger(ProducerAppendInfo.class);
private final TopicPartition topicPartition;
private final long producerId;
private final ProducerStateEntry currentEntry;
private final AppendOrigin origin;
private final VerificationStateEntry verificationStateEntry;
private final List<TxnMetadata> transactions = new ArrayList<>();
private final ProducerStateEntry updatedEntry;
/**
* Creates a new instance with the provided parameters.
*
* @param topicPartition topic partition
* @param producerId The id of the producer appending to the log
* @param currentEntry The current entry associated with the producer id which contains metadata for a fixed number of
* the most recent appends made by the producer. Validation of the first incoming append will
* be made against the latest append in the current entry. New appends will replace older appends
* in the current entry so that the space overhead is constant.
* @param origin Indicates the origin of the append which implies the extent of validation. For example, offset
* commits, which originate from the group coordinator, do not have sequence numbers and therefore
* only producer epoch validation is done. Appends which come through replication are not validated
* (we assume the validation has already been done) and appends from clients require full validation.
* @param verificationStateEntry The most recent entry used for verification if no append has been completed yet otherwise null
*/
public ProducerAppendInfo(TopicPartition topicPartition,
long producerId,
ProducerStateEntry currentEntry,
AppendOrigin origin,
VerificationStateEntry verificationStateEntry) {
this.topicPartition = topicPartition;
this.producerId = producerId;
this.currentEntry = currentEntry;
this.origin = origin;
this.verificationStateEntry = verificationStateEntry;
updatedEntry = currentEntry.withProducerIdAndBatchMetadata(producerId, Optional.empty());
}
public long producerId() {
return producerId;
}
private void maybeValidateDataBatch(short producerEpoch, int firstSeq, long offset) {
// Default transaction version TV_UNKNOWN is passed for data batches.
checkProducerEpoch(producerEpoch, offset, TV_UNKNOWN);
if (origin == AppendOrigin.CLIENT) {
checkSequence(producerEpoch, firstSeq, offset);
}
}
/**
* Validates the producer epoch for transaction markers based on the transaction version.
*
* <p>For Transaction Version 2 (TV2) and above, the coordinator always increments
* the producer epoch by one before writing the final transaction marker. This establishes a
* clear invariant: a valid TV2 marker must have an epoch strictly greater than the producer's
* current epoch at the leader. Any marker with markerEpoch <= currentEpoch is a late or duplicate
* marker and must be rejected to prevent conflating multiple transactions under the same epoch,
* which would threaten exactly-once semantics (EOS) guarantees.
*
* <p>For legacy transaction versions (TV0/TV1), markers were written with the same epoch as
* the transactional records, so we accept markers when markerEpoch >= currentEpoch. This
* preserves backward compatibility but cannot distinguish between active and stale markers.
*
* @param producerEpoch the epoch from the transaction marker
* @param offset the offset where the marker will be written
* @param transactionVersion the transaction version (0/1 = legacy, 2 = TV2)
*/
private void checkProducerEpoch(short producerEpoch, long offset, short transactionVersion) {
short current = updatedEntry.producerEpoch();
boolean invalidEpoch = (transactionVersion >= 2) ? (producerEpoch <= current) : (producerEpoch < current);
if (invalidEpoch) {
String comparison = (transactionVersion >= 2) ? "<=" : "<";
String message = "Epoch of producer " + producerId + " at offset " + offset + " in " + topicPartition +
" is " + producerEpoch + ", which is " + comparison + " the last seen epoch " + current +
" (TV" + transactionVersion + ")";
if (origin == AppendOrigin.REPLICATION) {
log.warn(message);
} else {
// Starting from 2.7, we replaced ProducerFenced error with InvalidProducerEpoch in the
// producer send response callback to differentiate from the former fatal exception,
// letting client abort the ongoing transaction and retry.
throw new InvalidProducerEpochException(message);
}
}
}
private void checkSequence(short producerEpoch, int appendFirstSeq, long offset) {
// For transactions v2 idempotent producers, reject non-zero sequences when there is no producer ID state
if (verificationStateEntry != null && verificationStateEntry.supportsEpochBump() &&
appendFirstSeq != 0 && currentEntry.isEmpty()) {
throw new OutOfOrderSequenceException("Invalid sequence number for producer " + producerId + " at " +
"offset " + offset + " in partition " + topicPartition + ": " + appendFirstSeq +
" (incoming seq. number). Expected sequence 0 for transactions v2 idempotent producer with no existing state.");
}
if (verificationStateEntry != null && appendFirstSeq > verificationStateEntry.lowestSequence()) {
throw new OutOfOrderSequenceException("Out of order sequence number for producer " + producerId + " at " +
"offset " + offset + " in partition " + topicPartition + ": " + appendFirstSeq +
" (incoming seq. number), " + verificationStateEntry.lowestSequence() + " (earliest seen sequence)");
}
if (producerEpoch != updatedEntry.producerEpoch()) {
if (appendFirstSeq != 0) {
if (updatedEntry.producerEpoch() != RecordBatch.NO_PRODUCER_EPOCH) {
throw new OutOfOrderSequenceException("Invalid sequence number for new epoch of producer " + producerId +
"at offset " + offset + " in partition " + topicPartition + ": " + producerEpoch + " (request epoch), "
+ appendFirstSeq + " (seq. number), " + updatedEntry.producerEpoch() + " (current producer epoch)");
}
}
} else {
int currentLastSeq;
if (!updatedEntry.isEmpty())
currentLastSeq = updatedEntry.lastSeq();
else if (producerEpoch == currentEntry.producerEpoch())
currentLastSeq = currentEntry.lastSeq();
else
currentLastSeq = RecordBatch.NO_SEQUENCE;
// If there is no current producer epoch (possibly because all producer records have been deleted due to
// retention or the DeleteRecords API) accept writes with any sequence number
if (!(currentEntry.producerEpoch() == RecordBatch.NO_PRODUCER_EPOCH || inSequence(currentLastSeq, appendFirstSeq))) {
throw new OutOfOrderSequenceException("Out of order sequence number for producer " + producerId + " at " +
"offset " + offset + " in partition " + topicPartition + ": " + appendFirstSeq +
" (incoming seq. number), " + currentLastSeq + " (current end sequence number)");
}
}
}
private boolean inSequence(int lastSeq, int nextSeq) {
return nextSeq == lastSeq + 1L || (nextSeq == 0 && lastSeq == Integer.MAX_VALUE);
}
public Optional<CompletedTxn> append(RecordBatch batch, Optional<LogOffsetMetadata> firstOffsetMetadataOpt) {
return append(batch, firstOffsetMetadataOpt, TV_UNKNOWN);
}
public Optional<CompletedTxn> append(RecordBatch batch, Optional<LogOffsetMetadata> firstOffsetMetadataOpt, short transactionVersion) {
if (batch.isControlBatch()) {
Iterator<Record> recordIterator = batch.iterator();
if (recordIterator.hasNext()) {
Record record = recordIterator.next();
EndTransactionMarker endTxnMarker = EndTransactionMarker.deserialize(record);
return appendEndTxnMarker(endTxnMarker, batch.producerEpoch(), batch.baseOffset(), record.timestamp(), transactionVersion);
} else {
// An empty control batch means the entire transaction has been cleaned from the log, so no need to append
return Optional.empty();
}
} else {
LogOffsetMetadata firstOffsetMetadata = firstOffsetMetadataOpt.orElse(new LogOffsetMetadata(batch.baseOffset()));
appendDataBatch(batch.producerEpoch(), batch.baseSequence(), batch.lastSequence(), batch.maxTimestamp(),
firstOffsetMetadata, batch.lastOffset(), batch.isTransactional());
return Optional.empty();
}
}
public void appendDataBatch(short epoch,
int firstSeq,
int lastSeq,
long lastTimestamp,
LogOffsetMetadata firstOffsetMetadata,
long lastOffset,
boolean isTransactional) {
long firstOffset = firstOffsetMetadata.messageOffset;
maybeValidateDataBatch(epoch, firstSeq, firstOffset);
updatedEntry.addBatch(epoch, lastSeq, lastOffset, (int) (lastOffset - firstOffset), lastTimestamp);
OptionalLong currentTxnFirstOffset = updatedEntry.currentTxnFirstOffset();
if (currentTxnFirstOffset.isPresent() && !isTransactional) {
// Received a non-transactional message while a transaction is active
throw new InvalidTxnStateException("Expected transactional write from producer " + producerId + " at " +
"offset " + firstOffsetMetadata + " in partition " + topicPartition);
} else if (currentTxnFirstOffset.isEmpty() && isTransactional) {
// Began a new transaction
updatedEntry.setCurrentTxnFirstOffset(firstOffset);
transactions.add(new TxnMetadata(producerId, firstOffsetMetadata));
}
}
private void checkCoordinatorEpoch(EndTransactionMarker endTxnMarker, long offset) {
if (updatedEntry.coordinatorEpoch() > endTxnMarker.coordinatorEpoch()) {
if (origin == AppendOrigin.REPLICATION) {
log.info("Detected invalid coordinator epoch for producerId {} at offset {} in partition {}: {} is older than previously known coordinator epoch {}",
producerId, offset, topicPartition, endTxnMarker.coordinatorEpoch(), updatedEntry.coordinatorEpoch());
} else {
throw new TransactionCoordinatorFencedException("Invalid coordinator epoch for producerId " + producerId + " at " +
"offset " + offset + " in partition " + topicPartition + ": " + endTxnMarker.coordinatorEpoch() +
" (zombie), " + updatedEntry.coordinatorEpoch() + " (current)");
}
}
}
public Optional<CompletedTxn> appendEndTxnMarker(EndTransactionMarker endTxnMarker,
short producerEpoch,
long offset,
long timestamp,
short transactionVersion) {
// For replication (REPLICATION origin), TV_UNKNOWN is allowed because:
// 1. transactionVersion is not stored in MemoryRecords - it's only metadata in WriteTxnMarkersRequest
// 2. When records are replicated, followers only see MemoryRecords without transactionVersion
// 3. The leader already validated the marker with the correct transactionVersion (e.g., TV2 strict validation)
// 4. Using TV_0 validation (markerEpoch >= currentEpoch) is safe because it's more permissive than TV2
// (markerEpoch > currentEpoch), so any marker that passed TV2 validation will pass TV_0 validation
// For all other origins (CLIENT, COORDINATOR), transactionVersion must be explicitly specified.
if (transactionVersion == TV_UNKNOWN && origin != AppendOrigin.REPLICATION) {
throw new IllegalArgumentException("transactionVersion must be explicitly specified, " +
"cannot use default value TV_UNKNOWN for origin " + origin);
}
// For replication with TV_UNKNOWN, use legacy validation (TV_0 behavior) since the leader already
// performed strict validation and the follower doesn't have access to the original transactionVersion
short effectiveTransactionVersion = (transactionVersion == TV_UNKNOWN) ? 0 : transactionVersion;
checkProducerEpoch(producerEpoch, offset, effectiveTransactionVersion);
checkCoordinatorEpoch(endTxnMarker, offset);
// Only emit the `CompletedTxn` for non-empty transactions. A transaction marker
// without any associated data will not have any impact on the last stable offset
// and would not need to be reflected in the transaction index.
Optional<CompletedTxn> completedTxn = updatedEntry.currentTxnFirstOffset().isPresent() ?
Optional.of(new CompletedTxn(producerId, updatedEntry.currentTxnFirstOffset().getAsLong(), offset,
endTxnMarker.controlType() == ControlRecordType.ABORT))
: Optional.empty();
updatedEntry.update(producerEpoch, endTxnMarker.coordinatorEpoch(), timestamp);
return completedTxn;
}
public ProducerStateEntry toEntry() {
return updatedEntry;
}
public List<TxnMetadata> startedTransactions() {
return Collections.unmodifiableList(transactions);
}
@Override
public String toString() {
return "ProducerAppendInfo(" +
"producerId=" + producerId +
", producerEpoch=" + updatedEntry.producerEpoch() +
", firstSequence=" + updatedEntry.firstSeq() +
", lastSequence=" + updatedEntry.lastSeq() +
", currentTxnFirstOffset=" + updatedEntry.currentTxnFirstOffset() +
", coordinatorEpoch=" + updatedEntry.coordinatorEpoch() +
", lastTimestamp=" + updatedEntry.lastTimestamp() +
", startedTransactions=" + transactions +
')';
}
}
| ProducerAppendInfo |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/RouteShutdownNotReverseOrderTest.java | {
"start": 1209,
"end": 2529
} | class ____ extends ContextTestSupport {
@Test
public void testRouteShutdownNotReverseOrder() throws Exception {
// flip to not reverse
context.getShutdownStrategy().setShutdownRoutesInReverseOrder(false);
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
template.sendBody("direct:bar", "Hello World");
assertMockEndpointsSatisfied();
// assert correct startup order
DefaultCamelContext dcc = (DefaultCamelContext) context;
List<RouteStartupOrder> order = dcc.getCamelContextExtension().getRouteStartupOrder();
assertEquals(2, order.size());
assertEquals("direct://bar", order.get(0).getRoute().getEndpoint().getEndpointUri());
assertEquals("direct://foo", order.get(1).getRoute().getEndpoint().getEndpointUri());
// assert correct shutdown order
context.stop();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:foo").startupOrder(2).routeId("foo").to("mock:result");
from("direct:bar").startupOrder(1).routeId("bar").to("direct:foo");
}
};
}
}
| RouteShutdownNotReverseOrderTest |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/jsontype/TestSubtypes.java | {
"start": 2019,
"end": 2208
} | class ____ {
public BaseX value;
public AtomicWrapper() { }
protected AtomicWrapper(int x) { value = new ImplX(x); }
}
// Verifying limits on sub- | AtomicWrapper |
java | junit-team__junit5 | junit-platform-launcher/src/main/java/org/junit/platform/launcher/EngineDiscoveryResult.java | {
"start": 1045,
"end": 2843
} | enum ____ {
/**
* Indicates that test discovery was <em>successful</em>.
*/
SUCCESSFUL,
/**
* Indicates that test discovery has <em>failed</em>.
*/
FAILED
}
private static final EngineDiscoveryResult SUCCESSFUL_RESULT = new EngineDiscoveryResult(Status.SUCCESSFUL, null);
/**
* Create a {@code EngineDiscoveryResult} for a <em>successful</em> test
* discovery.
* @return the {@code EngineDiscoveryResult}; never {@code null}
*/
public static EngineDiscoveryResult successful() {
return SUCCESSFUL_RESULT;
}
/**
* Create a {@code EngineDiscoveryResult} for a <em>failed</em> test
* discovery.
*
* @param throwable the throwable that caused the failed discovery; may be
* {@code null}
* @return the {@code EngineDiscoveryResult}; never {@code null}
*/
public static EngineDiscoveryResult failed(@Nullable Throwable throwable) {
return new EngineDiscoveryResult(Status.FAILED, throwable);
}
private final Status status;
private final @Nullable Throwable throwable;
private EngineDiscoveryResult(Status status, @Nullable Throwable throwable) {
this.status = status;
this.throwable = throwable;
}
/**
* Get the {@linkplain Status status} of this result.
*
* @return the status; never {@code null}
*/
public Status getStatus() {
return status;
}
/**
* Get the throwable that caused this result, if available.
*
* @return an {@code Optional} containing the throwable; never {@code null}
* but potentially empty
*/
public Optional<Throwable> getThrowable() {
return Optional.ofNullable(throwable);
}
@Override
public String toString() {
// @formatter:off
return new ToStringBuilder(this)
.append("status", status)
.append("throwable", throwable)
.toString();
// @formatter:on
}
}
| Status |
java | spring-projects__spring-framework | spring-beans/src/main/java/org/springframework/beans/factory/parsing/ReaderEventListener.java | {
"start": 942,
"end": 1880
} | interface ____ extends EventListener {
/**
* Notification that the given defaults has been registered.
* @param defaultsDefinition a descriptor for the defaults
* @see org.springframework.beans.factory.xml.DocumentDefaultsDefinition
*/
void defaultsRegistered(DefaultsDefinition defaultsDefinition);
/**
* Notification that the given component has been registered.
* @param componentDefinition a descriptor for the new component
* @see BeanComponentDefinition
*/
void componentRegistered(ComponentDefinition componentDefinition);
/**
* Notification that the given alias has been registered.
* @param aliasDefinition a descriptor for the new alias
*/
void aliasRegistered(AliasDefinition aliasDefinition);
/**
* Notification that the given import has been processed.
* @param importDefinition a descriptor for the import
*/
void importProcessed(ImportDefinition importDefinition);
}
| ReaderEventListener |
java | alibaba__nacos | client-basic/src/main/java/com/alibaba/nacos/client/auth/ram/utils/RamUtil.java | {
"start": 989,
"end": 2223
} | class ____ {
public static String getAccessKey(Properties properties) {
boolean isUseRamInfoParsing = Boolean.parseBoolean(properties
.getProperty(PropertyKeyConst.IS_USE_RAM_INFO_PARSING,
System.getProperty(SystemPropertyKeyConst.IS_USE_RAM_INFO_PARSING,
Constants.DEFAULT_USE_RAM_INFO_PARSING)));
String result = properties.getProperty(PropertyKeyConst.ACCESS_KEY);
if (isUseRamInfoParsing && StringUtils.isBlank(result)) {
result = SpasAdapter.getAk();
}
return result;
}
public static String getSecretKey(Properties properties) {
boolean isUseRamInfoParsing = Boolean.parseBoolean(properties
.getProperty(PropertyKeyConst.IS_USE_RAM_INFO_PARSING,
System.getProperty(SystemPropertyKeyConst.IS_USE_RAM_INFO_PARSING,
Constants.DEFAULT_USE_RAM_INFO_PARSING)));
String result = properties.getProperty(PropertyKeyConst.SECRET_KEY);
if (isUseRamInfoParsing && StringUtils.isBlank(result)) {
result = SpasAdapter.getSk();
}
return result;
}
}
| RamUtil |
java | apache__rocketmq | client/src/test/java/org/apache/rocketmq/client/trace/DefaultMQConsumerWithTraceTest.java | {
"start": 4476,
"end": 16069
} | class ____ {
private String consumerGroup;
private String consumerGroupNormal;
private String producerGroupTraceTemp = TopicValidator.RMQ_SYS_TRACE_TOPIC + System.currentTimeMillis();
private String topic = "FooBar";
private String brokerName = "BrokerA";
private MQClientInstance mQClientFactory;
@Mock
private MQClientAPIImpl mQClientAPIImpl;
private PullAPIWrapper pullAPIWrapper;
private RebalancePushImpl rebalancePushImpl;
private DefaultMQPushConsumer pushConsumer;
private DefaultMQPushConsumer normalPushConsumer;
private DefaultMQPushConsumer customTraceTopicPushConsumer;
private AsyncTraceDispatcher asyncTraceDispatcher;
private MQClientInstance mQClientTraceFactory;
@Mock
private MQClientAPIImpl mQClientTraceAPIImpl;
private DefaultMQProducer traceProducer;
private String customerTraceTopic = "rmq_trace_topic_12345";
@Before
public void init() throws Exception {
ConcurrentMap<String, MQClientInstance> factoryTable = (ConcurrentMap<String, MQClientInstance>) FieldUtils.readDeclaredField(MQClientManager.getInstance(), "factoryTable", true);
for (Map.Entry<String, MQClientInstance> entry : factoryTable.entrySet()) {
entry.getValue().shutdown();
}
factoryTable.clear();
consumerGroup = "FooBarGroup" + System.currentTimeMillis();
pushConsumer = new DefaultMQPushConsumer(consumerGroup, true, "");
consumerGroupNormal = "FooBarGroup" + System.currentTimeMillis();
normalPushConsumer = new DefaultMQPushConsumer(consumerGroupNormal, false, "");
customTraceTopicPushConsumer = new DefaultMQPushConsumer(consumerGroup, true, customerTraceTopic);
pushConsumer.setNamesrvAddr("127.0.0.1:9876");
pushConsumer.setUseTLS(true);
pushConsumer.setPullInterval(60 * 1000);
pushConsumer.registerMessageListener(new MessageListenerConcurrently() {
@Override
public ConsumeConcurrentlyStatus consumeMessage(List<MessageExt> msgs,
ConsumeConcurrentlyContext context) {
return null;
}
});
DefaultMQPushConsumerImpl pushConsumerImpl = pushConsumer.getDefaultMQPushConsumerImpl();
// suppress updateTopicRouteInfoFromNameServer
pushConsumer.changeInstanceNameToPID();
mQClientFactory = spy(MQClientManager.getInstance().getOrCreateMQClientInstance(pushConsumer, (RPCHook) FieldUtils.readDeclaredField(pushConsumerImpl, "rpcHook", true)));
factoryTable.put(pushConsumer.buildMQClientId(), mQClientFactory);
doReturn(false).when(mQClientFactory).updateTopicRouteInfoFromNameServer(anyString());
rebalancePushImpl = spy(new RebalancePushImpl(pushConsumer.getDefaultMQPushConsumerImpl()));
Field field = DefaultMQPushConsumerImpl.class.getDeclaredField("rebalanceImpl");
field.setAccessible(true);
field.set(pushConsumerImpl, rebalancePushImpl);
pushConsumer.subscribe(topic, "*");
pushConsumer.start();
asyncTraceDispatcher = (AsyncTraceDispatcher) pushConsumer.getTraceDispatcher();
traceProducer = asyncTraceDispatcher.getTraceProducer();
mQClientFactory = spy(pushConsumerImpl.getmQClientFactory());
mQClientTraceFactory = spy(pushConsumerImpl.getmQClientFactory());
field = DefaultMQPushConsumerImpl.class.getDeclaredField("mQClientFactory");
field.setAccessible(true);
field.set(pushConsumerImpl, mQClientFactory);
field = MQClientInstance.class.getDeclaredField("mQClientAPIImpl");
field.setAccessible(true);
field.set(mQClientFactory, mQClientAPIImpl);
Field fieldTrace = DefaultMQProducerImpl.class.getDeclaredField("mQClientFactory");
fieldTrace.setAccessible(true);
fieldTrace.set(traceProducer.getDefaultMQProducerImpl(), mQClientTraceFactory);
fieldTrace = MQClientInstance.class.getDeclaredField("mQClientAPIImpl");
fieldTrace.setAccessible(true);
fieldTrace.set(mQClientTraceFactory, mQClientTraceAPIImpl);
pullAPIWrapper = spy(new PullAPIWrapper(mQClientFactory, consumerGroup, false));
field = DefaultMQPushConsumerImpl.class.getDeclaredField("pullAPIWrapper");
field.setAccessible(true);
field.set(pushConsumerImpl, pullAPIWrapper);
pushConsumer.getDefaultMQPushConsumerImpl().getRebalanceImpl().setmQClientFactory(mQClientFactory);
mQClientFactory.registerConsumer(consumerGroup, pushConsumerImpl);
when(mQClientFactory.getMQClientAPIImpl().pullMessage(anyString(), any(PullMessageRequestHeader.class),
anyLong(), any(CommunicationMode.class), nullable(PullCallback.class)))
.thenAnswer(new Answer<PullResult>() {
@Override
public PullResult answer(InvocationOnMock mock) throws Throwable {
PullMessageRequestHeader requestHeader = mock.getArgument(1);
MessageClientExt messageClientExt = new MessageClientExt();
messageClientExt.setTopic(topic);
messageClientExt.setQueueId(0);
messageClientExt.setMsgId("123");
messageClientExt.setBody(new byte[] {'a'});
messageClientExt.setOffsetMsgId("234");
messageClientExt.setBornHost(new InetSocketAddress(8080));
messageClientExt.setStoreHost(new InetSocketAddress(8080));
PullResult pullResult = createPullResult(requestHeader, PullStatus.FOUND, Collections.<MessageExt>singletonList(messageClientExt));
((PullCallback) mock.getArgument(4)).onSuccess(pullResult);
return pullResult;
}
});
doReturn(new FindBrokerResult("127.0.0.1:10911", false)).when(mQClientFactory).findBrokerAddressInSubscribe(anyString(), anyLong(), anyBoolean());
Set<MessageQueue> messageQueueSet = new HashSet<>();
messageQueueSet.add(createPullRequest().getMessageQueue());
pushConsumer.getDefaultMQPushConsumerImpl().updateTopicSubscribeInfo(topic, messageQueueSet);
}
@After
public void terminate() {
pushConsumer.shutdown();
}
@Test
public void testPullMessage_WithTrace_Success() throws InterruptedException, RemotingException, MQBrokerException, MQClientException {
traceProducer.getDefaultMQProducerImpl().getMqClientFactory().registerProducer(producerGroupTraceTemp, traceProducer.getDefaultMQProducerImpl());
final CountDownLatch countDownLatch = new CountDownLatch(1);
final AtomicReference<MessageExt> messageAtomic = new AtomicReference<>();
pushConsumer.getDefaultMQPushConsumerImpl().setConsumeMessageService(new ConsumeMessageConcurrentlyService(pushConsumer.getDefaultMQPushConsumerImpl(), new MessageListenerConcurrently() {
@Override
public ConsumeConcurrentlyStatus consumeMessage(List<MessageExt> msgs,
ConsumeConcurrentlyContext context) {
messageAtomic.set(msgs.get(0));
countDownLatch.countDown();
return null;
}
}));
PullMessageService pullMessageService = mQClientFactory.getPullMessageService();
pullMessageService.executePullRequestImmediately(createPullRequest());
countDownLatch.await(30, TimeUnit.SECONDS);
MessageExt msg = messageAtomic.get();
assertThat(msg).isNotNull();
assertThat(msg.getTopic()).isEqualTo(topic);
assertThat(msg.getBody()).isEqualTo(new byte[] {'a'});
}
@Test
public void testPushConsumerWithTraceTLS() {
Assert.assertTrue(asyncTraceDispatcher.getTraceProducer().isUseTLS());
}
private PullRequest createPullRequest() {
PullRequest pullRequest = new PullRequest();
pullRequest.setConsumerGroup(consumerGroup);
pullRequest.setNextOffset(1024);
MessageQueue messageQueue = new MessageQueue();
messageQueue.setBrokerName(brokerName);
messageQueue.setQueueId(0);
messageQueue.setTopic(topic);
pullRequest.setMessageQueue(messageQueue);
ProcessQueue processQueue = new ProcessQueue();
processQueue.setLocked(true);
processQueue.setLastLockTimestamp(System.currentTimeMillis());
pullRequest.setProcessQueue(processQueue);
return pullRequest;
}
private PullResultExt createPullResult(PullMessageRequestHeader requestHeader, PullStatus pullStatus,
List<MessageExt> messageExtList) throws Exception {
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
for (MessageExt messageExt : messageExtList) {
outputStream.write(MessageDecoder.encode(messageExt, false));
}
return new PullResultExt(pullStatus, requestHeader.getQueueOffset() + messageExtList.size(), 123, 2048, messageExtList, 0, outputStream.toByteArray());
}
public static TopicRouteData createTopicRoute() {
TopicRouteData topicRouteData = new TopicRouteData();
topicRouteData.setFilterServerTable(new HashMap<>());
List<BrokerData> brokerDataList = new ArrayList<>();
BrokerData brokerData = new BrokerData();
brokerData.setBrokerName("BrokerA");
brokerData.setCluster("DefaultCluster");
HashMap<Long, String> brokerAddrs = new HashMap<>();
brokerAddrs.put(0L, "127.0.0.1:10911");
brokerData.setBrokerAddrs(brokerAddrs);
brokerDataList.add(brokerData);
topicRouteData.setBrokerDatas(brokerDataList);
List<QueueData> queueDataList = new ArrayList<>();
QueueData queueData = new QueueData();
queueData.setBrokerName("BrokerA");
queueData.setPerm(6);
queueData.setReadQueueNums(3);
queueData.setWriteQueueNums(4);
queueData.setTopicSysFlag(0);
queueDataList.add(queueData);
topicRouteData.setQueueDatas(queueDataList);
return topicRouteData;
}
private SendResult createSendResult(SendStatus sendStatus) {
SendResult sendResult = new SendResult();
sendResult.setMsgId("123");
sendResult.setOffsetMsgId("123");
sendResult.setQueueOffset(456);
sendResult.setSendStatus(sendStatus);
sendResult.setRegionId("HZ");
return sendResult;
}
public static TopicRouteData createTraceTopicRoute() {
TopicRouteData topicRouteData = new TopicRouteData();
topicRouteData.setFilterServerTable(new HashMap<>());
List<BrokerData> brokerDataList = new ArrayList<>();
BrokerData brokerData = new BrokerData();
brokerData.setBrokerName("broker-trace");
brokerData.setCluster("DefaultCluster");
HashMap<Long, String> brokerAddrs = new HashMap<>();
brokerAddrs.put(0L, "127.0.0.1:10912");
brokerData.setBrokerAddrs(brokerAddrs);
brokerDataList.add(brokerData);
topicRouteData.setBrokerDatas(brokerDataList);
List<QueueData> queueDataList = new ArrayList<>();
QueueData queueData = new QueueData();
queueData.setBrokerName("broker-trace");
queueData.setPerm(6);
queueData.setReadQueueNums(1);
queueData.setWriteQueueNums(1);
queueData.setTopicSysFlag(1);
queueDataList.add(queueData);
topicRouteData.setQueueDatas(queueDataList);
return topicRouteData;
}
}
| DefaultMQConsumerWithTraceTest |
java | resilience4j__resilience4j | resilience4j-spring-boot2/src/test/java/io/github/resilience4j/service/test/retry/ReactiveRetryDummyServiceImpl.java | {
"start": 991,
"end": 1587
} | class ____ implements ReactiveRetryDummyService {
@Override
public Flux<String> doSomethingFlux(boolean throwException) {
if (throwException) {
return Flux.error(new IllegalArgumentException("FailedFlux"));
}
return Flux.fromArray(Arrays.array("test", "test2"));
}
@Override
public Flowable<String> doSomethingFlowable(boolean throwException) {
if (throwException) {
return Flowable.error(new IllegalArgumentException("Failed"));
}
return Flowable.just("testMaybe");
}
}
| ReactiveRetryDummyServiceImpl |
java | quarkusio__quarkus | integration-tests/redis-devservices/src/test/java/io/quarkus/redis/devservices/continuoustesting/it/DevServicesRedisContinuousTestingTest.java | {
"start": 1401,
"end": 12659
} | class ____ {
static final String DEVSERVICES_DISABLED_PROPERTIES = ContinuousTestingTestUtils.appProperties(
"quarkus.devservices.enabled=false");
static final String FIXED_PORT_PROPERTIES = ContinuousTestingTestUtils.appProperties(
"quarkus.redis.devservices.port=6377");
static final String UPDATED_FIXED_PORT_PROPERTIES = ContinuousTestingTestUtils.appProperties(
"quarkus.redis.devservices.port=6342");
@RegisterExtension
public static QuarkusDevModeTest test = new QuarkusDevModeTest()
.setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class)
.addClass(BundledResource.class)
.addAsResource(new StringAsset(ContinuousTestingTestUtils.appProperties("")),
"application.properties"))
.setTestArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class).addClass(PlainQuarkusTest.class));
@AfterAll
static void afterAll() {
stopAllContainers();
}
@Test
public void testContinuousTestingDisablesDevServicesWhenPropertiesChange() {
ContinuousTestingTestUtils utils = new ContinuousTestingTestUtils();
var result = utils.waitForNextCompletion();
assertEquals(1, result.getTotalTestsPassed());
assertEquals(0, result.getTotalTestsFailed());
// Now let's disable dev services globally ... BOOOOOM! Splat!
test.modifyResourceFile("application.properties", s -> DEVSERVICES_DISABLED_PROPERTIES);
result = utils.waitForNextCompletion();
assertEquals(0, result.getTotalTestsPassed());
assertEquals(1, result.getTotalTestsFailed());
ping500();
List<Container> containers = getAllContainers();
assertTrue(containers.isEmpty(), "Expected no containers, but got: " + prettyPrintContainerList(containers));
}
@Test
public void testContinuousTestingReusesInstanceWhenPropertiesAreNotChanged() {
ContinuousTestingTestUtils utils = new ContinuousTestingTestUtils();
var result = utils.waitForNextCompletion();
assertEquals(1, result.getTotalTestsPassed());
assertEquals(0, result.getTotalTestsFailed());
List<Container> redisContainers = getRedisContainers();
// Make a change that shouldn't affect dev services
test.modifyTestSourceFile(PlainQuarkusTest.class, s -> s.replaceAll("redisClient", "updatedRedisClient"));
result = utils.waitForNextCompletion();
assertEquals(1, result.getTestsPassed());
assertEquals(0, result.getTestsFailed());
// Some containers could have disappeared, because ryuk cleaned them up, but no new containers should have appeared
List<Container> newContainers = getRedisContainersExcludingExisting(redisContainers);
assertEquals(0, newContainers.size(),
"New containers: " + newContainers + "\n Old containers: " + redisContainers + "\n All containers: "
+ getAllContainers());
}
@Test
public void testContinuousTestingCreatesANewInstanceWhenPropertiesAreChanged() {
ContinuousTestingTestUtils utils = new ContinuousTestingTestUtils();
var result = utils.waitForNextCompletion();
assertEquals(1, result.getTotalTestsPassed());
assertEquals(0, result.getTotalTestsFailed());
List<Container> existingContainers = new ArrayList<>();
existingContainers.addAll(getRedisContainers());
test.modifyResourceFile("application.properties", s -> FIXED_PORT_PROPERTIES);
result = utils.waitForNextCompletion();
assertEquals(1, result.getTestsPassed());
assertEquals(0, result.getTestsFailed());
// A new container should have appeared
{
List<Container> newContainers = getRedisContainersExcludingExisting(existingContainers);
existingContainers.addAll(newContainers);
assertEquals(1, newContainers.size(),
"New containers: " + newContainers + "\n Old containers: " + existingContainers + "\n All containers: "
+ getAllContainers());
// The new container should be on the new port
List<Integer> ports = Arrays.stream(newContainers.get(0).getPorts())
.map(ContainerPort::getPublicPort)
.toList();
// Oh good, it's one port, so it should be the expected one
assertTrue(ports.contains(6377), "Container ports: " + ports);
}
test.modifyResourceFile("application.properties", s -> UPDATED_FIXED_PORT_PROPERTIES);
result = utils.waitForNextCompletion();
assertEquals(1, result.getTestsPassed());
assertEquals(0, result.getTestsFailed());
// Another new container should have appeared
{
List<Container> newContainers = getRedisContainersExcludingExisting(existingContainers);
assertEquals(1, newContainers.size(),
"New containers: " + newContainers + "\n Old containers: " + existingContainers + "\n All containers: "
+ getAllContainers());
// The new container should be on the new port
List<Integer> ports = Arrays.stream(newContainers.get(0).getPorts())
.map(ContainerPort::getPublicPort)
.toList();
assertTrue(ports.contains(6342), "Container ports: " + ports);
}
}
// This tests behaviour in dev mode proper when combined with continuous testing. This creates a possibility of port conflicts, false sharing of state, and all sorts of race conditions.
@Test
public void testDevModeCoexistingWithContinuousTestingServiceUpdatesContainersOnConfigChange() {
// Note that driving continuous testing concurrently can sometimes cause 500s caused by containers not yet being available on slow machines
ContinuousTestingTestUtils continuousTestingTestUtils = new ContinuousTestingTestUtils();
ContinuousTestingTestUtils.TestStatus result = continuousTestingTestUtils.waitForNextCompletion();
assertEquals(result.getTotalTestsPassed(), 1);
assertEquals(result.getTotalTestsFailed(), 0);
// Interacting with the app will force a refresh
ping();
List<Container> started = getRedisContainers();
assertFalse(started.isEmpty());
Container container = started.get(0);
assertTrue(Arrays.stream(container.getPorts()).noneMatch(p -> p.getPublicPort() == 6377),
"Expected random port, but got: " + Arrays.toString(container.getPorts()));
int newPort = 6388;
int testPort = newPort + 1;
// Continuous tests and dev mode should *not* share containers, even if the port is fixed
// Specify that the fixed port is for dev mode, or one launch will fail with port conflicts
test.modifyResourceFile("application.properties",
s -> ContinuousTestingTestUtils.appProperties("%dev.quarkus.redis.devservices.port=" + newPort
+ "\n%test.quarkus.redis.devservices.port=" + testPort));
test.modifyTestSourceFile(PlainQuarkusTest.class, s -> s.replaceAll("redisClient", "updatedRedisClient"));
// Force another refresh
result = continuousTestingTestUtils.waitForNextCompletion();
assertEquals(result.getTotalTestsPassed(), 1);
assertEquals(result.getTotalTestsFailed(), 0);
ping();
List<Container> newContainers = getRedisContainersExcludingExisting(started);
// We expect 2 new containers, since test was also refreshed
assertEquals(2, newContainers.size(),
"New containers: "
+ prettyPrintContainerList(newContainers)
+ "\n Old containers: " + prettyPrintContainerList(started) + "\n All containers: "
+ prettyPrintContainerList(getAllContainers())); // this can be wrong
// We need to inspect the dev-mode container; we don't have a non-brittle way of distinguishing them, so just look in them all
boolean hasRightPort = newContainers.stream()
.anyMatch(newContainer -> hasPublicPort(newContainer, newPort));
assertTrue(hasRightPort,
"Expected port " + newPort + ", but got: "
+ newContainers.stream().map(c -> Arrays.toString(c.getPorts())).collect(Collectors.joining(", ")));
boolean hasRightTestPort = newContainers.stream()
.anyMatch(newContainer -> hasPublicPort(newContainer, testPort));
assertTrue(hasRightTestPort,
"Expected port " + testPort + ", but got: "
+ newContainers.stream().map(c -> Arrays.toString(c.getPorts())).collect(Collectors.joining(", ")));
}
void ping() {
when().get("/bundled/ping").then()
.statusCode(200)
.body(is("PONG"));
}
void ping500() {
when().get("/kafka/partitions/test").then()
.statusCode(500);
}
private static boolean hasPublicPort(Container newContainer, int newPort) {
return Arrays.stream(newContainer.getPorts()).anyMatch(p -> p.getPublicPort() == newPort);
}
private static String prettyPrintContainerList(List<Container> newContainers) {
return newContainers.stream()
.map(c -> Arrays.toString(c.getPorts()) + " -- " + Arrays.toString(c.getNames()) + " -- " + c.getLabels())
.collect(Collectors.joining(", \n"));
}
private static List<Container> getAllContainers() {
return DockerClientFactory.lazyClient().listContainersCmd().exec().stream()
.filter(container -> isRedisContainer(container)).toList();
}
private static void stopAllContainers() {
DockerClient dockerClient = DockerClientFactory.lazyClient();
dockerClient.listContainersCmd().exec().stream()
.filter(DevServicesRedisContinuousTestingTest::isRedisContainer)
.forEach(c -> dockerClient.stopContainerCmd(c.getId()).exec());
}
private static List<Container> getRedisContainers() {
return getAllContainers();
}
private static List<Container> getRedisContainersExcludingExisting(Collection<Container> existingContainers) {
return getRedisContainers().stream().filter(
container -> existingContainers.stream().noneMatch(existing -> existing.getId().equals(container.getId())))
.toList();
}
private static List<Container> getAllContainersExcludingExisting(Collection<Container> existingContainers) {
return getAllContainers().stream().filter(
container -> existingContainers.stream().noneMatch(existing -> existing.getId().equals(container.getId())))
.toList();
}
private static boolean isRedisContainer(Container container) {
// The output of getCommand() seems to vary by host OS (it's different on CI and mac), but the image name should be reliable
return container.getImage().contains("redis");
}
}
| DevServicesRedisContinuousTestingTest |
java | redisson__redisson | redisson/src/main/java/org/redisson/transaction/operation/map/MapPutIfAbsentOperation.java | {
"start": 739,
"end": 1147
} | class ____ extends MapOperation {
public MapPutIfAbsentOperation() {
}
public MapPutIfAbsentOperation(RMap<?, ?> map, Object key, Object value, String transactionId, long threadId) {
super(map, key, value, transactionId, threadId);
}
@Override
public void commit(RMap<Object, Object> map) {
map.putIfAbsentAsync(key, value);
}
}
| MapPutIfAbsentOperation |
java | elastic__elasticsearch | x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/ResultRowSet.java | {
"start": 658,
"end": 2173
} | class ____<E extends NamedWriteable> extends AbstractRowSet {
private final List<E> extractors;
private final BitSet mask;
ResultRowSet(List<E> extractors, BitSet mask) {
this.extractors = extractors;
this.mask = mask;
Check.isTrue(mask.length() <= extractors.size(), "Invalid number of extracted columns specified");
}
@Override
public final int columnCount() {
return mask.cardinality();
}
@Override
protected Object getColumn(int column) {
return extractValue(userExtractor(column));
}
List<E> extractors() {
return extractors;
}
BitSet mask() {
return mask;
}
E userExtractor(int column) {
int i = -1;
// find the nth set bit
for (i = mask.nextSetBit(0); i >= 0; i = mask.nextSetBit(i + 1)) {
if (column-- == 0) {
return extractors.get(i);
}
}
throw new SqlIllegalArgumentException("Cannot find column [{}]", column);
}
Object resultColumn(int column) {
return extractValue(extractors().get(column));
}
int resultColumnCount() {
return extractors.size();
}
void forEachResultColumn(Consumer<? super Object> action) {
Objects.requireNonNull(action);
int rowSize = resultColumnCount();
for (int i = 0; i < rowSize; i++) {
action.accept(resultColumn(i));
}
}
protected abstract Object extractValue(E e);
}
| ResultRowSet |
java | google__auto | value/src/it/functional/src/test/java/com/google/auto/value/AutoValueTest.java | {
"start": 107798,
"end": 108015
} | class ____ {
abstract ImmutableSet<Number> metrics();
static Builder builder() {
return new AutoValue_AutoValueTest_GenericExtends.Builder();
}
@AutoValue.Builder
abstract static | GenericExtends |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/observable/ObservableRangeLong.java | {
"start": 838,
"end": 1305
} | class ____ extends Observable<Long> {
private final long start;
private final long count;
public ObservableRangeLong(long start, long count) {
this.start = start;
this.count = count;
}
@Override
protected void subscribeActual(Observer<? super Long> o) {
RangeDisposable parent = new RangeDisposable(o, start, start + count);
o.onSubscribe(parent);
parent.run();
}
static final | ObservableRangeLong |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/asm/Huge_200_ClassTest.java | {
"start": 111,
"end": 252
} | class ____ extends TestCase {
public void test_huge() {
JSON.parseObject("{}", VO.class);
}
public static | Huge_200_ClassTest |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/pattern/PatternParser.java | {
"start": 1637,
"end": 1769
} | class ____ parses conversion patterns and creates a chained list of {@link PatternConverter
* PatternConverters}.
*/
public final | that |
java | quarkusio__quarkus | test-framework/arquillian/src/test/java/io/quarkus/arquillian/test/SimpleClass.java | {
"start": 179,
"end": 260
} | class ____ {
@Inject
Config config;
@Inject
Foo foo;
}
| SimpleClass |
java | quarkusio__quarkus | extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/MinikubeManifestGeneratorFactory.java | {
"start": 176,
"end": 489
} | class ____ implements ManifestGeneratorFactory {
@Override
public MinikubeManifestGenerator create(ResourceRegistry resourceRegistry, ConfigurationRegistry configurationRegistry) {
return new MinikubeManifestGenerator(resourceRegistry, configurationRegistry);
}
}
| MinikubeManifestGeneratorFactory |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/parser/DefaultJSONParserTest_charArray.java | {
"start": 208,
"end": 557
} | class ____ extends TestCase {
public void test_getInput() {
String text = "{}";
char[] chars = text.toCharArray();
DefaultJSONParser parser = new DefaultJSONParser(chars, chars.length, ParserConfig.getGlobalInstance(), 0);
Assert.assertEquals(text, parser.getInput());
}
}
| DefaultJSONParserTest_charArray |
java | google__guice | core/test/com/google/inject/internal/SpiUtils.java | {
"start": 54830,
"end": 55320
} | class ____<T> extends DefaultBindingTargetVisitor<T, Object>
implements MultibindingsTargetVisitor<T, Object> {
@Override
public Object visit(MultibinderBinding<? extends T> multibinding) {
return multibinding;
}
@Override
public Object visit(MapBinderBinding<? extends T> mapbinding) {
return mapbinding;
}
@Override
public Object visit(OptionalBinderBinding<? extends T> optionalbinding) {
return optionalbinding;
}
}
}
| Visitor |
java | spring-projects__spring-boot | module/spring-boot-web-server/src/test/java/org/springframework/boot/web/server/servlet/context/WebServletHandlerTests.java | {
"start": 5835,
"end": 5923
} | class ____ extends HttpServlet {
}
@WebServlet({ "alpha", "bravo" })
| UrlPatternsServlet |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/configuration/ImportResourceTests.java | {
"start": 5322,
"end": 5513
} | class ____ extends BaseConfig {
}
@Configuration
@ImportResource("classpath:org/springframework/context/annotation/configuration/SecondLevelSubConfig-context.xml")
static | FirstLevelSubConfig |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/query/criteria/JpaJsonQueryNode.java | {
"start": 4156,
"end": 4523
} | enum ____ {
/**
* SQL/JDBC error should be raised.
*/
ERROR,
/**
* {@code null} should be returned.
*/
NULL,
/**
* An empty array should be returned.
*/
EMPTY_ARRAY,
/**
* An empty object should be returned.
*/
EMPTY_OBJECT,
/**
* Unspecified behavior i.e. the default database behavior.
*/
UNSPECIFIED
}
}
| EmptyBehavior |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/interceptors/producer/ProducerWithAbstractClassWithInterfaceInterceptionAndBindingsSourceTest.java | {
"start": 2843,
"end": 3040
} | class ____ {
@MyBinding2
abstract String hello2();
@MyBinding2
@NoClassInterceptors
abstract String hello3();
}
@Dependent
static | MyNonbeanBindings |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentAction.java | {
"start": 10801,
"end": 12313
} | class ____ extends BaseTasksResponse implements Writeable, ToXContentObject {
private final List<InferenceResults> results;
public Response(List<InferenceResults> results) {
super(Collections.emptyList(), Collections.emptyList());
this.results = Objects.requireNonNull(results);
}
public Response(StreamInput in) throws IOException {
super(in);
// Multiple results added in 8.6.1
if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_6_1)) {
results = in.readNamedWriteableCollectionAsList(InferenceResults.class);
} else {
results = List.of(in.readNamedWriteable(InferenceResults.class));
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_6_1)) {
out.writeNamedWriteableCollection(results);
} else {
out.writeNamedWriteable(results.get(0));
}
}
public List<InferenceResults> getResults() {
return results;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
results.get(0).toXContent(builder, params);
builder.endObject();
return builder;
}
}
}
| Response |
java | apache__camel | components/camel-ftp/src/test/java/org/apache/camel/component/file/remote/sftp/integration/SftpProduceTempFileIT.java | {
"start": 1315,
"end": 2595
} | class ____ extends SftpServerTestSupport {
@Test
public void testSftpTempFile() {
template.sendBodyAndHeader("sftp://localhost:{{ftp.server.port}}/{{ftp.root.dir}}"
+ "?username=admin&password=admin&tempFileName=temp-${file:name}",
"Hello World",
Exchange.FILE_NAME, "hello.txt");
File file = ftpFile("hello.txt").toFile();
assertTrue(file.exists(), "File should exist: " + file);
assertEquals("Hello World", context.getTypeConverter().convertTo(String.class, file));
}
@Test
public void testSftpTempFileNoStartingPath() {
template.sendBodyAndHeader(
"sftp://localhost:{{ftp.server.port}}/?username=admin&password=admin&tempFileName=temp-${file:name}"
+ "&knownHostsFile=" + service.getKnownHostsFile(),
"Hello World", Exchange.FILE_NAME,
"hello.txt");
File file = new File("hello.txt");
assertTrue(file.exists(), "File should exist: " + file);
assertEquals("Hello World", context.getTypeConverter().convertTo(String.class, file));
// delete file when we are done testing
FileUtil.deleteFile(file);
}
}
| SftpProduceTempFileIT |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/web/servlet/result/PrintingResultHandler.java | {
"start": 2307,
"end": 9784
} | class ____ implements ResultHandler {
private static final String MISSING_CHARACTER_ENCODING = "<no character encoding set>";
private final ResultValuePrinter printer;
/**
* Protected constructor.
* @param printer a {@link ResultValuePrinter} to do the actual writing
*/
protected PrintingResultHandler(ResultValuePrinter printer) {
this.printer = printer;
}
/**
* Return the result value printer.
* @return the printer
*/
protected ResultValuePrinter getPrinter() {
return this.printer;
}
/**
* Print {@link MvcResult} details.
*/
@Override
public final void handle(MvcResult result) throws Exception {
this.printer.printHeading("MockHttpServletRequest");
printRequest(result.getRequest());
this.printer.printHeading("Handler");
printHandler(result.getHandler(), result.getInterceptors());
this.printer.printHeading("Async");
printAsyncResult(result);
this.printer.printHeading("Resolved Exception");
printResolvedException(result.getResolvedException());
this.printer.printHeading("ModelAndView");
printModelAndView(result.getModelAndView());
this.printer.printHeading("FlashMap");
printFlashMap(RequestContextUtils.getOutputFlashMap(result.getRequest()));
this.printer.printHeading("MockHttpServletResponse");
printResponse(result.getResponse());
}
/**
* Print the request.
*/
protected void printRequest(MockHttpServletRequest request) throws Exception {
String body = (request.getCharacterEncoding() != null ?
request.getContentAsString() : MISSING_CHARACTER_ENCODING);
this.printer.printValue("HTTP Method", request.getMethod());
this.printer.printValue("Request URI", request.getRequestURI());
this.printer.printValue("Parameters", getParamsMultiValueMap(request));
this.printer.printValue("Headers", getRequestHeaders(request));
this.printer.printValue("Body", body);
this.printer.printValue("Session Attrs", getSessionAttributes(request));
}
protected final HttpHeaders getRequestHeaders(MockHttpServletRequest request) {
HttpHeaders headers = new HttpHeaders();
Enumeration<String> names = request.getHeaderNames();
while (names.hasMoreElements()) {
String name = names.nextElement();
headers.put(name, Collections.list(request.getHeaders(name)));
}
return headers;
}
protected final MultiValueMap<String, String> getParamsMultiValueMap(MockHttpServletRequest request) {
Map<String, String[]> params = request.getParameterMap();
MultiValueMap<String, String> multiValueMap = new LinkedMultiValueMap<>();
params.forEach((name, values) -> {
if (params.get(name) != null) {
for (String value : values) {
multiValueMap.add(name, value);
}
}
});
return multiValueMap;
}
protected final Map<String, Object> getSessionAttributes(MockHttpServletRequest request) {
HttpSession session = request.getSession(false);
if (session != null) {
Enumeration<String> attrNames = session.getAttributeNames();
if (attrNames != null) {
return Collections.list(attrNames).stream().
collect(Collectors.toMap(n -> n, session::getAttribute));
}
}
return Collections.emptyMap();
}
protected void printAsyncResult(MvcResult result) throws Exception {
HttpServletRequest request = result.getRequest();
this.printer.printValue("Async started", request.isAsyncStarted());
Object asyncResult = null;
try {
asyncResult = result.getAsyncResult(0);
}
catch (IllegalStateException ex) {
// Not set
}
this.printer.printValue("Async result", asyncResult);
}
/**
* Print the handler.
*/
protected void printHandler(@Nullable Object handler, HandlerInterceptor @Nullable [] interceptors)
throws Exception {
if (handler == null) {
this.printer.printValue("Type", null);
}
else {
if (handler instanceof HandlerMethod handlerMethod) {
this.printer.printValue("Type", handlerMethod.getBeanType().getName());
this.printer.printValue("Method", handlerMethod);
}
else {
this.printer.printValue("Type", handler.getClass().getName());
}
}
}
/**
* Print exceptions resolved through a HandlerExceptionResolver.
*/
protected void printResolvedException(@Nullable Exception resolvedException) throws Exception {
if (resolvedException == null) {
this.printer.printValue("Type", null);
}
else {
this.printer.printValue("Type", resolvedException.getClass().getName());
}
}
/**
* Print the ModelAndView.
*/
protected void printModelAndView(@Nullable ModelAndView mav) throws Exception {
this.printer.printValue("View name", (mav != null) ? mav.getViewName() : null);
this.printer.printValue("View", (mav != null) ? mav.getView() : null);
if (mav == null || mav.getModel().isEmpty()) {
this.printer.printValue("Model", null);
}
else {
for (String name : mav.getModel().keySet()) {
if (!name.startsWith(BindingResult.MODEL_KEY_PREFIX)) {
Object value = mav.getModel().get(name);
this.printer.printValue("Attribute", name);
this.printer.printValue("value", value);
Errors errors = (Errors) mav.getModel().get(BindingResult.MODEL_KEY_PREFIX + name);
if (errors != null) {
this.printer.printValue("errors", errors.getAllErrors());
}
}
}
}
}
/**
* Print "output" flash attributes.
*/
protected void printFlashMap(FlashMap flashMap) throws Exception {
if (ObjectUtils.isEmpty(flashMap)) {
this.printer.printValue("Attributes", null);
}
else {
flashMap.forEach((name, value) -> {
this.printer.printValue("Attribute", name);
this.printer.printValue("value", value);
});
}
}
/**
* Print the response.
*/
protected void printResponse(MockHttpServletResponse response) throws Exception {
this.printer.printValue("Status", response.getStatus());
this.printer.printValue("Error message", response.getErrorMessage());
this.printer.printValue("Headers", getResponseHeaders(response));
this.printer.printValue("Content type", response.getContentType());
this.printer.printValue("Body", response.getContentAsString());
this.printer.printValue("Forwarded URL", response.getForwardedUrl());
this.printer.printValue("Redirected URL", response.getRedirectedUrl());
printCookies(response.getCookies());
}
/**
* Print the supplied cookies in a human-readable form, assuming the
* {@link Cookie} implementation does not provide its own {@code toString()}.
* @since 4.2
*/
@SuppressWarnings("removal")
private void printCookies(Cookie[] cookies) {
String[] cookieStrings = new String[cookies.length];
for (int i = 0; i < cookies.length; i++) {
Cookie cookie = cookies[i];
cookieStrings[i] = new ToStringCreator(cookie)
.append("name", cookie.getName())
.append("value", cookie.getValue())
.append("comment", cookie.getComment())
.append("domain", cookie.getDomain())
.append("maxAge", cookie.getMaxAge())
.append("path", cookie.getPath())
.append("secure", cookie.getSecure())
.append("version", cookie.getVersion())
.append("httpOnly", cookie.isHttpOnly())
.toString();
}
this.printer.printValue("Cookies", cookieStrings);
}
protected final HttpHeaders getResponseHeaders(MockHttpServletResponse response) {
HttpHeaders headers = new HttpHeaders();
for (String name : response.getHeaderNames()) {
headers.put(name, response.getHeaders(name));
}
return headers;
}
/**
* A contract for how to actually write result information.
*/
protected | PrintingResultHandler |
java | elastic__elasticsearch | x-pack/plugin/graph/src/internalClusterTest/java/org/elasticsearch/xpack/graph/test/GraphTests.java | {
"start": 17396,
"end": 17870
} | class ____ extends MockScriptPlugin {
@Override
public Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
return Collections.singletonMap("graph_timeout", params -> {
try {
Thread.sleep(750);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
return true;
});
}
}
}
| ScriptedTimeoutPlugin |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/context/ContextHierarchy.java | {
"start": 6448,
"end": 7199
} | class ____ extends BaseTests {}</pre>
*
* <h4>Context Hierarchies with Bean Overrides</h4>
*
* <p>When {@code @ContextHierarchy} is used in conjunction with bean overrides such as
* {@link org.springframework.test.context.bean.override.convention.TestBean @TestBean},
* {@link org.springframework.test.context.bean.override.mockito.MockitoBean @MockitoBean}, or
* {@link org.springframework.test.context.bean.override.mockito.MockitoSpyBean @MockitoSpyBean},
* it may be desirable or necessary to have the override applied to a single level
* in the context hierarchy. To achieve that, the bean override must specify a
* context name that matches a name configured via {@link ContextConfiguration#name}.
*
* <p>The following test | ExtendedTests |
java | grpc__grpc-java | stub/src/main/java/io/grpc/stub/BlockingClientCall.java | {
"start": 2269,
"end": 12261
} | class ____<ReqT, RespT> {
private static final Logger logger = Logger.getLogger(BlockingClientCall.class.getName());
private final BlockingQueue<RespT> buffer;
private final ClientCall<ReqT, RespT> call;
private final ThreadSafeThreadlessExecutor executor;
private boolean writeClosed;
private AtomicReference<CloseState> closeState = new AtomicReference<>();
BlockingClientCall(ClientCall<ReqT, RespT> call, ThreadSafeThreadlessExecutor executor) {
this.call = call;
this.executor = executor;
buffer = new ArrayBlockingQueue<>(1);
}
/**
* Wait if necessary for a value to be available from the server. If there is an available value
* return it immediately, if the stream is closed return a null. Otherwise, wait for a value to be
* available or the stream to be closed
*
* @return value from server or null if stream has been closed
* @throws StatusException If the stream has closed in an error state
*/
public RespT read() throws InterruptedException, StatusException {
try {
return read(true, 0);
} catch (TimeoutException e) {
throw new AssertionError("should never happen", e);
}
}
/**
* Wait with timeout, if necessary, for a value to be available from the server. If there is an
* available value, return it immediately. If the stream is closed return a null. Otherwise, wait
* for a value to be available, the stream to be closed or the timeout to expire.
*
* @param timeout how long to wait before giving up. Values <= 0 are no wait
* @param unit a TimeUnit determining how to interpret the timeout parameter
* @return value from server or null (if stream has been closed)
* @throws TimeoutException if no read becomes ready before the specified timeout expires
* @throws StatusException If the stream has closed in an error state
*/
public RespT read(long timeout, TimeUnit unit) throws InterruptedException, TimeoutException,
StatusException {
long endNanoTime = System.nanoTime() + unit.toNanos(timeout);
return read(false, endNanoTime);
}
private RespT read(boolean waitForever, long endNanoTime)
throws InterruptedException, TimeoutException, StatusException {
Predicate<BlockingClientCall<ReqT, RespT>> predicate = BlockingClientCall::skipWaitingForRead;
executor.waitAndDrainWithTimeout(waitForever, endNanoTime, predicate, this);
RespT bufferedValue = buffer.poll();
if (logger.isLoggable(Level.FINER)) {
logger.finer("Client Blocking read had value: " + bufferedValue);
}
CloseState currentCloseState;
if (bufferedValue != null) {
call.request(1);
return bufferedValue;
} else if ((currentCloseState = closeState.get()) == null) {
throw new IllegalStateException(
"The message disappeared... are you reading from multiple threads?");
} else if (!currentCloseState.status.isOk()) {
throw currentCloseState.status.asException(currentCloseState.trailers);
} else {
return null;
}
}
boolean skipWaitingForRead() {
return closeState.get() != null || !buffer.isEmpty();
}
/**
* Wait for a value to be available from the server. If there is an
* available value, return true immediately. If the stream was closed with Status.OK, return
* false. If the stream was closed with an error status, throw a StatusException. Otherwise, wait
* for a value to be available or the stream to be closed.
*
* @return True when there is a value to read. Return false if stream closed cleanly.
* @throws StatusException If the stream was closed in an error state
*/
public boolean hasNext() throws InterruptedException, StatusException {
executor.waitAndDrain((x) -> !x.buffer.isEmpty() || x.closeState.get() != null, this);
CloseState currentCloseState = closeState.get();
if (currentCloseState != null && !currentCloseState.status.isOk()) {
throw currentCloseState.status.asException(currentCloseState.trailers);
}
return !buffer.isEmpty();
}
/**
* Send a value to the stream for sending to server, wait if necessary for the grpc stream to be
* ready.
*
* <p>If write is not legal at the time of call, immediately returns false
*
* <p><br><b>NOTE: </b>This method will return as soon as it passes the request to the grpc stream
* layer. It will not block while the message is being sent on the wire and returning true does
* not guarantee that the server gets the message.
*
* <p><br><b>WARNING: </b>Doing only writes without reads can lead to deadlocks. This is because
* flow control, imposed by networks to protect intermediary routers and endpoints that are
* operating under resource constraints, requires reads to be done in order to progress writes.
* Furthermore, the server closing the stream will only be identified after
* the last sent value is read.
*
* @param request Message to send to the server
* @return true if the request is sent to stream, false if skipped
* @throws StatusException If the stream has closed in an error state
*/
public boolean write(ReqT request) throws InterruptedException, StatusException {
try {
return write(true, request, 0);
} catch (TimeoutException e) {
throw new RuntimeException(e); // should never happen
}
}
/**
* Send a value to the stream for sending to server, wait if necessary for the grpc stream to be
* ready up to specified timeout.
*
* <p>If write is not legal at the time of call, immediately returns false
*
* <p><br><b>NOTE: </b>This method will return as soon as it passes the request to the grpc stream
* layer. It will not block while the message is being sent on the wire and returning true does
* not guarantee that the server gets the message.
*
* <p><br><b>WARNING: </b>Doing only writes without reads can lead to deadlocks as a result of
* flow control. Furthermore, the server closing the stream will only be identified after the
* last sent value is read.
*
* @param request Message to send to the server
* @param timeout How long to wait before giving up. Values <= 0 are no wait
* @param unit A TimeUnit determining how to interpret the timeout parameter
* @return true if the request is sent to stream, false if skipped
* @throws TimeoutException if write does not become ready before the specified timeout expires
* @throws StatusException If the stream has closed in an error state
*/
public boolean write(ReqT request, long timeout, TimeUnit unit)
throws InterruptedException, TimeoutException, StatusException {
long endNanoTime = System.nanoTime() + unit.toNanos(timeout);
return write(false, request, endNanoTime);
}
private boolean write(boolean waitForever, ReqT request, long endNanoTime)
throws InterruptedException, TimeoutException, StatusException {
if (writeClosed) {
throw new IllegalStateException("Writes cannot be done after calling halfClose or cancel");
}
Predicate<BlockingClientCall<ReqT, RespT>> predicate =
(x) -> x.call.isReady() || x.closeState.get() != null;
executor.waitAndDrainWithTimeout(waitForever, endNanoTime, predicate, this);
CloseState savedCloseState = closeState.get();
if (savedCloseState == null) {
call.sendMessage(request);
return true;
} else if (savedCloseState.status.isOk()) {
return false;
} else {
throw savedCloseState.status.asException(savedCloseState.trailers);
}
}
void sendSingleRequest(ReqT request) {
call.sendMessage(request);
}
/**
* Cancel stream and stop any further writes. Note that some reads that are in flight may still
* happen after the cancel.
*
* @param message if not {@code null}, will appear as the description of the CANCELLED status
* @param cause if not {@code null}, will appear as the cause of the CANCELLED status
*/
public void cancel(String message, Throwable cause) {
writeClosed = true;
call.cancel(message, cause);
}
/**
* Indicate that no more writes will be done and the stream will be closed from the client side.
*
* @see ClientCall#halfClose()
*/
public void halfClose() {
if (writeClosed) {
throw new IllegalStateException(
"halfClose cannot be called after already half closed or cancelled");
}
writeClosed = true;
call.halfClose();
}
/**
* Status that server sent when closing channel from its side.
*
* @return null if stream not closed by server, otherwise Status sent by server
*/
@VisibleForTesting
Status getClosedStatus() {
executor.drain();
CloseState state = closeState.get();
return (state == null) ? null : state.status;
}
/**
* Check for whether some action is ready.
*
* @return True if legal to write and writeOrRead can run without blocking
*/
@VisibleForTesting
boolean isEitherReadOrWriteReady() {
return (isWriteLegal() && isWriteReady()) || isReadReady();
}
/**
* Check whether there are any values waiting to be read.
*
* @return true if read will not block
*/
@VisibleForTesting
boolean isReadReady() {
executor.drain();
return !buffer.isEmpty();
}
/**
* Check that write hasn't been marked complete and stream is ready to receive a write (so will
* not block).
*
* @return true if legal to write and write will not block
*/
@VisibleForTesting
boolean isWriteReady() {
executor.drain();
return isWriteLegal() && call.isReady();
}
/**
* Check whether we'll ever be able to do writes or should terminate.
* @return True if writes haven't been closed and the server hasn't closed the stream
*/
private boolean isWriteLegal() {
return !writeClosed && closeState.get() == null;
}
ClientCall.Listener<RespT> getListener() {
return new QueuingListener();
}
private final | BlockingClientCall |
java | apache__flink | flink-end-to-end-tests/flink-datastream-allround-test/src/main/java/org/apache/flink/streaming/tests/DataStreamAllroundTestProgram.java | {
"start": 4821,
"end": 17666
} | class ____ {
public static void main(String[] args) throws Exception {
final ParameterTool pt = ParameterTool.fromArgs(args);
final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
setupEnvironment(env, pt);
// add a keyed stateful map operator, which uses Kryo for state serialization
DataStream<Event> eventStream =
env.addSource(createEventSource(pt))
.name(EVENT_SOURCE.getName())
.uid(EVENT_SOURCE.getUid())
.assignTimestampsAndWatermarks(createWatermarkStrategy(pt))
.keyBy(Event::getKey)
.map(
createArtificialKeyedStateMapper(
// map function simply forwards the inputs
(MapFunction<Event, Event>) in -> in,
// state is verified and updated per event as a wrapped
// ComplexPayload state object
(Event event, ComplexPayload lastState) -> {
if (lastState != null
&& !lastState
.getStrPayload()
.equals(
KEYED_STATE_OPER_WITH_KRYO_AND_CUSTOM_SER
.getName())
&& lastState
.getInnerPayLoad()
.getSequenceNumber()
== (event.getSequenceNumber() - 1)) {
throwIncorrectRestoredStateException(
(event.getSequenceNumber() - 1),
KEYED_STATE_OPER_WITH_KRYO_AND_CUSTOM_SER
.getName(),
lastState.getStrPayload());
}
return new ComplexPayload(
event,
KEYED_STATE_OPER_WITH_KRYO_AND_CUSTOM_SER
.getName());
},
Arrays.asList(
new KryoSerializer<>(
ComplexPayload.class,
env.getConfig()
.getSerializerConfig()), // KryoSerializer
new StatefulComplexPayloadSerializer()), // custom
// stateful
// serializer
Collections.singletonList(
ComplexPayload.class) // KryoSerializer via type
// extraction
))
.returns(Event.class)
.name(KEYED_STATE_OPER_WITH_KRYO_AND_CUSTOM_SER.getName())
.uid(KEYED_STATE_OPER_WITH_KRYO_AND_CUSTOM_SER.getUid());
// add a keyed stateful map operator, which uses Avro for state serialization
eventStream =
eventStream
.keyBy(Event::getKey)
.map(
createArtificialKeyedStateMapper(
// map function simply forwards the inputs
(MapFunction<Event, Event>) in -> in,
// state is verified and updated per event as a wrapped
// ComplexPayloadAvro state object
(Event event, ComplexPayloadAvro lastState) -> {
if (lastState != null
&& !lastState
.getStrPayload()
.equals(
KEYED_STATE_OPER_WITH_AVRO_SER
.getName())
&& lastState
.getInnerPayLoad()
.getSequenceNumber()
== (event.getSequenceNumber() - 1)) {
throwIncorrectRestoredStateException(
(event.getSequenceNumber() - 1),
KEYED_STATE_OPER_WITH_AVRO_SER.getName(),
lastState.getStrPayload());
}
ComplexPayloadAvro payload = new ComplexPayloadAvro();
payload.setEventTime(event.getEventTime());
payload.setInnerPayLoad(
new InnerPayLoadAvro(
event.getSequenceNumber()));
payload.setStrPayload(
KEYED_STATE_OPER_WITH_AVRO_SER.getName());
payload.setStringList(
Arrays.asList(
String.valueOf(event.getKey()),
event.getPayload()));
return payload;
},
Collections.singletonList(
new AvroSerializer<>(
ComplexPayloadAvro
.class)), // custom AvroSerializer
Collections.singletonList(
ComplexPayloadAvro.class) // AvroSerializer via type
// extraction
))
.returns(Event.class)
.name(KEYED_STATE_OPER_WITH_AVRO_SER.getName())
.uid(KEYED_STATE_OPER_WITH_AVRO_SER.getUid());
DataStream<Event> eventStream2 =
eventStream
.map(
createArtificialOperatorStateMapper(
(MapFunction<Event, Event>) in -> in))
.returns(Event.class)
.name(OPERATOR_STATE_OPER.getName())
.uid(OPERATOR_STATE_OPER.getUid());
// apply a tumbling window that simply passes forward window elements;
// this allows the job to cover timers state
@SuppressWarnings("Convert2Lambda")
DataStream<Event> eventStream3 =
applyTumblingWindows(eventStream2.keyBy(Event::getKey), pt)
.apply(
new WindowFunction<Event, Event, Integer, TimeWindow>() {
@Override
public void apply(
Integer integer,
TimeWindow window,
Iterable<Event> input,
Collector<Event> out) {
for (Event e : input) {
out.collect(e);
}
}
})
.name(TIME_WINDOW_OPER.getName())
.uid(TIME_WINDOW_OPER.getUid());
eventStream3 =
DataStreamAllroundTestJobFactory.verifyCustomStatefulTypeSerializer(eventStream3);
if (isSimulateFailures(pt)) {
eventStream3 =
eventStream3
.map(createFailureMapper(pt))
.setParallelism(1)
.name(FAILURE_MAPPER_NAME.getName())
.uid(FAILURE_MAPPER_NAME.getUid());
}
eventStream3
.keyBy(Event::getKey)
.flatMap(createSemanticsCheckMapper(pt))
.name(SEMANTICS_CHECK_MAPPER.getName())
.uid(SEMANTICS_CHECK_MAPPER.getUid())
.addSink(new PrintSinkFunction<>())
.name(SEMANTICS_CHECK_PRINT_SINK.getName())
.uid(SEMANTICS_CHECK_PRINT_SINK.getUid());
// Check sliding windows aggregations. Output all elements assigned to a window and later on
// check if each event was emitted slide_factor number of times
DataStream<Tuple2<Integer, List<Event>>> eventStream4 =
eventStream2
.keyBy(Event::getKey)
.window(createSlidingWindow(pt))
.apply(
new WindowFunction<
Event,
Tuple2<Integer, List<Event>>,
Integer,
TimeWindow>() {
private static final long serialVersionUID =
3166250579972849440L;
@Override
public void apply(
Integer key,
TimeWindow window,
Iterable<Event> input,
Collector<Tuple2<Integer, List<Event>>> out) {
out.collect(
Tuple2.of(
key,
StreamSupport.stream(
input.spliterator(), false)
.collect(Collectors.toList())));
}
})
.name(SLIDING_WINDOW_AGG.getName())
.uid(SLIDING_WINDOW_AGG.getUid());
eventStream4
.keyBy(events -> events.f0)
.flatMap(createSlidingWindowCheckMapper(pt))
.name(SLIDING_WINDOW_CHECK_MAPPER.getName())
.uid(SLIDING_WINDOW_CHECK_MAPPER.getUid())
.addSink(new PrintSinkFunction<>())
.name(SLIDING_WINDOW_CHECK_PRINT_SINK.getName())
.uid(SLIDING_WINDOW_CHECK_PRINT_SINK.getUid());
env.execute("General purpose test job");
}
private static void throwIncorrectRestoredStateException(
long sequenceNumber, String expectedPayload, String actualPayload) throws Exception {
throw new Exception(
"State is set or restored incorrectly: "
+ "sequenceNumber = "
+ sequenceNumber
+ ", expectedPayload = "
+ expectedPayload
+ ", actualPayload = "
+ actualPayload);
}
}
| DataStreamAllroundTestProgram |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/beanparam/FailWithAnnotationsInAMethodOfBeanParamTest.java | {
"start": 583,
"end": 1185
} | class ____ {
@RegisterExtension
static QuarkusUnitTest test = new QuarkusUnitTest()
.setExpectedException(DeploymentException.class)
.setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class)
.addClasses(GreetingResource.class, NoQueryParamsInFieldsNameData.class));
@Test
void shouldBeanParamWorkWithoutFieldsAnnotatedWithQueryParam() {
Assertions.fail("The test case should not be invoked as it should fail with a deployment exception.");
}
@Path("/greeting")
public static | FailWithAnnotationsInAMethodOfBeanParamTest |
java | apache__camel | core/camel-support/src/main/java/org/apache/camel/support/ResourceHelper.java | {
"start": 1774,
"end": 1852
} | class ____ loading resources on the classpath or file system.
*/
public final | for |
java | spring-projects__spring-boot | configuration-metadata/spring-boot-configuration-processor/src/test/java/org/springframework/boot/configurationsample/specific/DeprecatedSimplePojo.java | {
"start": 788,
"end": 965
} | class ____ {
private int value;
@Deprecated
public int getValue() {
return this.value;
}
public void setValue(int value) {
this.value = value;
}
}
| DeprecatedSimplePojo |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/index/suggest/stats/CompletionsStatsTests.java | {
"start": 884,
"end": 1549
} | class ____ extends ESTestCase {
public void testSerialize() throws IOException {
FieldMemoryStats map = randomBoolean() ? null : FieldMemoryStatsTests.randomFieldMemoryStats();
CompletionStats stats = new CompletionStats(randomNonNegativeLong(), map);
BytesStreamOutput out = new BytesStreamOutput();
stats.writeTo(out);
StreamInput input = out.bytes().streamInput();
CompletionStats read = new CompletionStats(input);
assertEquals(-1, input.read());
assertEquals(stats.getSizeInBytes(), read.getSizeInBytes());
assertEquals(stats.getFields(), read.getFields());
}
}
| CompletionsStatsTests |
java | spring-projects__spring-framework | spring-messaging/src/test/java/org/springframework/messaging/simp/annotation/support/SendToMethodReturnValueHandlerTests.java | {
"start": 27584,
"end": 27927
} | class ____ {
String handleNoAnnotation() {
return PAYLOAD;
}
@SendTo
String handleAndSendToDefaultDest() {
return PAYLOAD;
}
@MySendTo(dest = {"/dest3", "/dest4"})
String handleAndSendToOverride() {
return PAYLOAD;
}
}
@MySendToUser(dest = "/dest-default") @SuppressWarnings("unused")
private static | SendToTestBean |
java | spring-projects__spring-boot | module/spring-boot-activemq/src/main/java/org/springframework/boot/activemq/autoconfigure/ActiveMQConnectionFactoryConfiguration.java | {
"start": 4189,
"end": 4987
} | class ____ {
@Bean(destroyMethod = "stop")
@ConditionalOnBooleanProperty("spring.activemq.pool.enabled")
JmsPoolConnectionFactory jmsConnectionFactory(ActiveMQProperties properties,
ObjectProvider<ActiveMQConnectionFactoryCustomizer> factoryCustomizers,
ActiveMQConnectionDetails connectionDetails) {
ActiveMQConnectionFactory connectionFactory = new ActiveMQConnectionFactory(connectionDetails.getUser(),
connectionDetails.getPassword(), connectionDetails.getBrokerUrl());
new ActiveMQConnectionFactoryConfigurer(properties, factoryCustomizers.orderedStream().toList())
.configure(connectionFactory);
return new JmsPoolConnectionFactoryFactory(properties.getPool())
.createPooledConnectionFactory(connectionFactory);
}
}
}
| PooledConnectionFactoryConfiguration |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/calcite/sql/validate/SqlValidatorImpl.java | {
"start": 299045,
"end": 303236
} | class ____ extends NavigationModifier {
final @Nullable SqlOperator op;
final @Nullable SqlNode offset;
NavigationExpander() {
this(null, null);
}
NavigationExpander(@Nullable SqlOperator operator, @Nullable SqlNode offset) {
this.offset = offset;
this.op = operator;
}
@Override
public @Nullable SqlNode visit(SqlCall call) {
SqlKind kind = call.getKind();
List<SqlNode> operands = call.getOperandList();
List<@Nullable SqlNode> newOperands = new ArrayList<>();
if (call.getFunctionQuantifier() != null
&& call.getFunctionQuantifier().getValue() == SqlSelectKeyword.DISTINCT) {
final SqlParserPos pos = call.getParserPosition();
throw SqlUtil.newContextException(
pos, Static.RESOURCE.functionQuantifierNotAllowed(call.toString()));
}
if (isLogicalNavigation(kind) || isPhysicalNavigation(kind)) {
SqlNode inner = operands.get(0);
SqlNode offset = operands.get(1);
// merge two straight prev/next, update offset
if (isPhysicalNavigation(kind)) {
SqlKind innerKind = inner.getKind();
if (isPhysicalNavigation(innerKind)) {
List<SqlNode> innerOperands = ((SqlCall) inner).getOperandList();
SqlNode innerOffset = innerOperands.get(1);
SqlOperator newOperator =
innerKind == kind
? SqlStdOperatorTable.PLUS
: SqlStdOperatorTable.MINUS;
offset = newOperator.createCall(SqlParserPos.ZERO, offset, innerOffset);
inner =
call.getOperator()
.createCall(
SqlParserPos.ZERO, innerOperands.get(0), offset);
}
}
SqlNode newInnerNode =
inner.accept(new NavigationExpander(call.getOperator(), offset));
if (op != null) {
newInnerNode = op.createCall(SqlParserPos.ZERO, newInnerNode, this.offset);
}
return newInnerNode;
}
if (operands.size() > 0) {
for (SqlNode node : operands) {
if (node != null) {
SqlNode newNode = node.accept(new NavigationExpander());
if (op != null) {
newNode = op.createCall(SqlParserPos.ZERO, newNode, offset);
}
newOperands.add(newNode);
} else {
newOperands.add(null);
}
}
return call.getOperator().createCall(SqlParserPos.ZERO, newOperands);
} else {
if (op == null) {
return call;
} else {
return op.createCall(SqlParserPos.ZERO, call, offset);
}
}
}
@Override
public SqlNode visit(SqlIdentifier id) {
if (op == null) {
return id;
} else {
return op.createCall(SqlParserPos.ZERO, id, offset);
}
}
}
/**
* Shuttle that replaces {@code A as A.price > PREV(B.price)} with {@code PREV(A.price, 0) >
* LAST(B.price, 0)}.
*
* <p>Replacing {@code A.price} with {@code PREV(A.price, 0)} makes the implementation of {@link
* RexVisitor#visitPatternFieldRef(RexPatternFieldRef)} more unified. Otherwise, it's difficult
* to implement this method. If it returns the specified field, then the navigation such as
* {@code PREV(A.price, 1)} becomes impossible; if not, then comparisons such as {@code A.price
* > PREV(A.price, 1)} become meaningless.
*/
private static | NavigationExpander |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/filter/wall/mysql/MySqlWallTest78.java | {
"start": 893,
"end": 1223
} | class ____ extends TestCase {
public void test_false() throws Exception {
WallProvider provider = new MySqlWallProvider();
assertFalse(provider.checkValid(//
"SELECT * FROM T WHERE FID = ? OR LEAST(2,1,9,8) = 1"));
assertEquals(1, provider.getTableStats().size());
}
}
| MySqlWallTest78 |
java | elastic__elasticsearch | x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/user/RestHasPrivilegesActionTests.java | {
"start": 1486,
"end": 4966
} | class ____ extends ESTestCase {
/*
* Previously we would reject requests that had a body that did not have a username set on the request. This happened because we did not
* consume the body until after checking if there was a username set on the request. If there was not a username set on the request,
* then the body would never be consumed. This means that the REST infrastructure would reject the request as not having a consumed body
* despite the endpoint supporting having a body. Now, we consume the body before checking if there is a username on the request. This
* test ensures that we maintain that behavior.
*/
public void testBodyConsumed() throws Exception {
final XPackLicenseState licenseState = mock(XPackLicenseState.class);
final RestHasPrivilegesAction action = new RestHasPrivilegesAction(
Settings.EMPTY,
mock(SecurityContext.class),
licenseState,
new HasPrivilegesRequestBuilderFactory.Default()
);
try (XContentBuilder bodyBuilder = JsonXContent.contentBuilder().startObject().endObject(); var threadPool = createThreadPool()) {
final var client = new NoOpNodeClient(threadPool);
final RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withPath("/_security/user/_has_privileges/")
.withContent(new BytesArray(bodyBuilder.toString()), XContentType.JSON)
.build();
final RestChannel channel = new FakeRestChannel(request, true, 1);
ElasticsearchSecurityException e = expectThrows(
ElasticsearchSecurityException.class,
() -> action.handleRequest(request, channel, client)
);
assertThat(e.getMessage(), equalTo("there is no authenticated user"));
}
}
public void testSecurityDisabled() throws Exception {
final XPackLicenseState licenseState = mock(XPackLicenseState.class);
final Settings securityDisabledSettings = Settings.builder().put(XPackSettings.SECURITY_ENABLED.getKey(), false).build();
when(licenseState.getOperationMode()).thenReturn(License.OperationMode.BASIC);
final RestHasPrivilegesAction action = new RestHasPrivilegesAction(
securityDisabledSettings,
mock(SecurityContext.class),
licenseState,
new HasPrivilegesRequestBuilderFactory.Default()
);
try (XContentBuilder bodyBuilder = JsonXContent.contentBuilder().startObject().endObject(); var threadPool = createThreadPool()) {
final var client = new NoOpNodeClient(threadPool);
final RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withPath("/_security/user/_has_privileges/")
.withContent(new BytesArray(bodyBuilder.toString()), XContentType.JSON)
.build();
final FakeRestChannel channel = new FakeRestChannel(request, true, 1);
action.handleRequest(request, channel, client);
assertThat(channel.capturedResponse(), notNullValue());
assertThat(channel.capturedResponse().status(), equalTo(RestStatus.INTERNAL_SERVER_ERROR));
assertThat(
channel.capturedResponse().content().utf8ToString(),
containsString("Security is not enabled but a security rest handler is registered")
);
}
}
}
| RestHasPrivilegesActionTests |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/DataTypeDefaultVisitor.java | {
"start": 1377,
"end": 1997
} | class ____<R> implements DataTypeVisitor<R> {
@Override
public R visit(AtomicDataType atomicDataType) {
return defaultMethod(atomicDataType);
}
@Override
public R visit(CollectionDataType collectionDataType) {
return defaultMethod(collectionDataType);
}
@Override
public R visit(FieldsDataType fieldsDataType) {
return defaultMethod(fieldsDataType);
}
@Override
public R visit(KeyValueDataType keyValueDataType) {
return defaultMethod(keyValueDataType);
}
protected abstract R defaultMethod(DataType dataType);
}
| DataTypeDefaultVisitor |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/any/xml2/NamedAnyContainer.java | {
"start": 232,
"end": 1306
} | class ____ {
private Integer id;
private String name;
private NamedProperty specificProperty;
private Set<NamedProperty> generalProperties;
public NamedAnyContainer() {
}
public NamedAnyContainer(Integer id, String name) {
this.id = id;
this.name = name;
}
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public NamedProperty getSpecificProperty() {
return specificProperty;
}
public void setSpecificProperty(NamedProperty specificProperty) {
this.specificProperty = specificProperty;
}
public Set<NamedProperty> getGeneralProperties() {
return generalProperties;
}
public void setGeneralProperties(Set<NamedProperty> generalProperties) {
this.generalProperties = generalProperties;
}
public void addGeneralProperty(NamedProperty property) {
if ( generalProperties == null ) {
generalProperties = new LinkedHashSet<>();
}
generalProperties.add( property );
}
}
| NamedAnyContainer |
java | elastic__elasticsearch | libs/core/src/main/java/org/elasticsearch/core/CheckedSupplier.java | {
"start": 636,
"end": 712
} | interface ____<T, E extends Exception> {
T get() throws E;
}
| CheckedSupplier |
java | apache__camel | components/camel-sql/src/test/java/org/apache/camel/processor/idempotent/jdbc/JdbcMessageIdRepositoryCustomTableNameTest.java | {
"start": 1431,
"end": 3501
} | class ____ extends CamelSpringTestSupport {
protected static final String SELECT_ALL_STRING = "SELECT messageId FROM FOOBAR WHERE processorName = ?";
protected static final String PROCESSOR_NAME = "myProcessorName";
protected JdbcTemplate jdbcTemplate;
protected DataSource dataSource;
@EndpointInject("mock:result")
protected MockEndpoint resultEndpoint;
@EndpointInject("mock:error")
protected MockEndpoint errorEndpoint;
@Override
public void doPostSetup() {
dataSource = context.getRegistry().lookupByNameAndType("dataSource", DataSource.class);
jdbcTemplate = new JdbcTemplate(dataSource);
jdbcTemplate.afterPropertiesSet();
}
@Test
public void testDuplicateMessagesAreFilteredOut() throws Exception {
resultEndpoint.expectedBodiesReceived("one", "two", "three");
errorEndpoint.expectedMessageCount(0);
template.sendBodyAndHeader("direct:start", "one", "messageId", "1");
template.sendBodyAndHeader("direct:start", "two", "messageId", "2");
template.sendBodyAndHeader("direct:start", "one", "messageId", "1");
template.sendBodyAndHeader("direct:start", "two", "messageId", "2");
template.sendBodyAndHeader("direct:start", "one", "messageId", "1");
template.sendBodyAndHeader("direct:start", "three", "messageId", "3");
MockEndpoint.assertIsSatisfied(context);
// all 3 messages should be in jdbc repo
List<String> receivedMessageIds = jdbcTemplate.queryForList(SELECT_ALL_STRING, String.class, PROCESSOR_NAME);
assertEquals(3, receivedMessageIds.size());
assertTrue(receivedMessageIds.contains("1"));
assertTrue(receivedMessageIds.contains("2"));
assertTrue(receivedMessageIds.contains("3"));
}
@Override
protected AbstractApplicationContext createApplicationContext() {
return new ClassPathXmlApplicationContext("org/apache/camel/processor/idempotent/jdbc/customized-tablename-spring.xml");
}
}
| JdbcMessageIdRepositoryCustomTableNameTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/query/hql/DynamicInstantiationTests.java | {
"start": 1305,
"end": 9578
} | class ____ extends BaseSqmUnitTest {
@Override
protected Class[] getAnnotatedClasses() {
return new Class[] {
EntityOfBasics.class,
};
}
@Test
public void testSimpleDynamicInstantiationSelection() {
SqmSelectStatement<?> statement = interpretSelect(
"select new org.hibernate.orm.test.query.sqm.domain.ConstructedLookupListItem( e.id, e.theString ) from EntityOfBasics e"
);
assertEquals( 1, statement.getQuerySpec().getSelectClause().getSelections().size() );
final SqmDynamicInstantiation<?> dynamicInstantiation = TestingUtil.cast(
statement.getQuerySpec().getSelectClause().getSelections().get( 0 ).getSelectableNode(),
SqmDynamicInstantiation.class
);
assertThat( dynamicInstantiation.getInstantiationTarget().getNature(), is( DynamicInstantiationNature.CLASS ) );
assertThat( dynamicInstantiation.getInstantiationTarget().getJavaType(), is( equalTo( ConstructedLookupListItem.class ) ) );
assertThat( dynamicInstantiation.getArguments(), hasSize( 2 ) );
}
@Test
public void testMultipleDynamicInstantiationSelection() {
SqmSelectStatement<?> statement = interpretSelect(
"select new org.hibernate.orm.test.query.sqm.domain.ConstructedLookupListItem( e.id, e.theString ), " +
"new org.hibernate.orm.test.query.sqm.domain.ConstructedLookupListItem( e.id, e.theString ) " +
"from EntityOfBasics e"
);
assertEquals( 2, statement.getQuerySpec().getSelectClause().getSelections().size() );
{
final SqmDynamicInstantiation<?> instantiation = TestingUtil.cast(
statement.getQuerySpec().getSelectClause().getSelections().get( 0 ).getSelectableNode(),
SqmDynamicInstantiation.class
);
assertThat( instantiation.getInstantiationTarget().getNature(), is( DynamicInstantiationNature.CLASS ) );
assertThat(
instantiation.getInstantiationTarget().getJavaType(),
is( equalTo( ConstructedLookupListItem.class ) )
);
assertThat( instantiation.getArguments(), hasSize( 2 ) );
}
{
final SqmDynamicInstantiation<?> instantiation = TestingUtil.cast(
statement.getQuerySpec().getSelectClause().getSelections().get( 1 ).getSelectableNode(),
SqmDynamicInstantiation.class
);
assertThat( instantiation.getInstantiationTarget().getNature(), is( DynamicInstantiationNature.CLASS ) );
assertThat(
instantiation.getInstantiationTarget().getJavaType(),
is( equalTo( ConstructedLookupListItem.class ) )
);
assertThat( instantiation.getArguments(), hasSize( 2 ) );
}
}
@Test
public void testMixedAttributeAndDynamicInstantiationSelection() {
SqmSelectStatement<?> statement = interpretSelect(
"select new org.hibernate.orm.test.query.sqm.domain.ConstructedLookupListItem( e.id, e.theString ), e.theInteger from EntityOfBasics e"
);
assertEquals( 2, statement.getQuerySpec().getSelectClause().getSelections().size() );
final SqmDynamicInstantiation<?> instantiation = TestingUtil.cast(
statement.getQuerySpec().getSelectClause().getSelections().get( 0 ).getSelectableNode(),
SqmDynamicInstantiation.class
);
assertThat( instantiation.getInstantiationTarget().getNature(), is( DynamicInstantiationNature.CLASS ) );
assertThat(
instantiation.getInstantiationTarget().getJavaType(),
is( equalTo( ConstructedLookupListItem.class ) )
);
assertThat( instantiation.getArguments(), hasSize( 2 ) );
final SqmPath<?> theIntegerPath = TestingUtil.cast(
statement.getQuerySpec().getSelectClause().getSelections().get( 1 ).getSelectableNode(),
SqmPath.class
);
assertThat( theIntegerPath.getReferencedPathSource().getPathName(), is( "theInteger" ) );
assertThat( theIntegerPath.getReferencedPathSource().getBindableJavaType(), is( equalTo( Integer.class ) ) );
}
@Test
public void testNestedDynamicInstantiationSelection() {
SqmSelectStatement<?> statement = interpretSelect(
"select new org.hibernate.orm.test.query.sqm.domain.NestedCtorLookupListItem(" +
" e.id, " +
" e.theString, " +
" new org.hibernate.orm.test.query.sqm.domain.ConstructedLookupListItem( e.id, e.theString )" +
" ) " +
" from EntityOfBasics e"
);
assertEquals( 1, statement.getQuerySpec().getSelectClause().getSelections().size() );
final SqmDynamicInstantiation<?> instantiation = TestingUtil.cast(
statement.getQuerySpec().getSelectClause().getSelections().get( 0 ).getSelectableNode(),
SqmDynamicInstantiation.class
);
assertThat( instantiation.getInstantiationTarget().getNature(), is( DynamicInstantiationNature.CLASS ) );
assertThat(
instantiation.getInstantiationTarget().getJavaType(),
is( equalTo( NestedCtorLookupListItem.class ) )
);
assertThat( instantiation.getArguments(), hasSize( 3 ) );
final SqmPath<?> firstArg = TestingUtil.cast(
instantiation.getArguments().get( 0 ).getSelectableNode(),
SqmPath.class
);
assertThat( firstArg.getReferencedPathSource().getPathName(), is( "id" ) );
final SqmPath<?> secondArg = TestingUtil.cast(
instantiation.getArguments().get( 1 ).getSelectableNode(),
SqmPath.class
);
assertThat( secondArg.getReferencedPathSource().getPathName(), is( "theString" ) );
final SqmDynamicInstantiation<?> thirdArg = TestingUtil.cast(
instantiation.getArguments().get( 2 ).getSelectableNode(),
SqmDynamicInstantiation.class
);
assertThat( thirdArg.getInstantiationTarget().getNature(), is( DynamicInstantiationNature.CLASS ) );
assertThat(
thirdArg.getInstantiationTarget().getJavaType(),
is( equalTo( ConstructedLookupListItem.class ) )
);
assertThat( thirdArg.getArguments(), hasSize( 2 ) );
}
@Test
public void testSimpleDynamicListInstantiation() {
SqmSelectStatement<?> statement = interpretSelect( "select new list( e.id, e.theString ) from EntityOfBasics e" );
assertEquals( 1, statement.getQuerySpec().getSelectClause().getSelections().size() );
final SqmDynamicInstantiation<?> instantiation = TestingUtil.cast(
statement.getQuerySpec().getSelectClause().getSelections().get( 0 ).getSelectableNode(),
SqmDynamicInstantiation.class
);
assertThat(
instantiation.getInstantiationTarget().getNature(),
equalTo( DynamicInstantiationNature.LIST )
);
assertThat(
instantiation.getInstantiationTarget().getJavaType(),
is( equalTo( List.class ) )
);
assertThat( instantiation.getArguments(), hasSize( 2 ) );
assertThat(
instantiation.getArguments().get( 0 ).getSelectableNode(),
instanceOf( SqmPath.class )
);
assertThat( instantiation.getArguments().get( 0 ).getAlias(), is( nullValue() ) );
assertThat(
instantiation.getArguments().get( 1 ).getSelectableNode(),
instanceOf( SqmPath.class )
);
assertThat( instantiation.getArguments().get( 1 ).getAlias(), is( nullValue() ) );
}
@Test
public void testSimpleDynamicMapInstantiation() {
SqmSelectStatement<?> statement = interpretSelect( "select new map( e.id as id, e.theString as ts ) from EntityOfBasics e" );
assertEquals( 1, statement.getQuerySpec().getSelectClause().getSelections().size() );
final SqmDynamicInstantiation<?> instantiation = TestingUtil.cast(
statement.getQuerySpec().getSelectClause().getSelections().get( 0 ).getSelectableNode(),
SqmDynamicInstantiation.class
);
assertThat(
instantiation.getInstantiationTarget().getNature(),
equalTo( DynamicInstantiationNature.MAP )
);
assertThat(
instantiation.getInstantiationTarget().getJavaType(),
is( equalTo( Map.class ) )
);
assertEquals( 2, instantiation.getArguments().size() );
assertThat(
instantiation.getArguments().get( 0 ).getSelectableNode(),
instanceOf( SqmPath.class )
);
assertThat( instantiation.getArguments().get( 0 ).getAlias(), is( "id" ) );
assertThat(
instantiation.getArguments().get( 1 ).getSelectableNode(),
instanceOf( SqmPath.class )
);
assertThat( instantiation.getArguments().get( 1 ).getAlias(), is( "ts" ) );
}
@Test
public void testSimpleInjectedInstantiation() {
try {
interpretSelect(
"select new org.hibernate.orm.test.query.sqm.domain.InjectedLookupListItem( e.id, e.theString ) from EntityOfBasics e"
);
fail("no constructor or aliases");
}
catch (SemanticException se) {
// should fail
}
}
}
| DynamicInstantiationTests |
java | quarkusio__quarkus | extensions/resteasy-classic/resteasy/deployment/src/test/java/io/quarkus/resteasy/test/security/SecurityOverrideFilter.java | {
"start": 369,
"end": 1540
} | class ____ implements ContainerRequestFilter {
@Override
public void filter(ContainerRequestContext requestContext) throws IOException {
String user = requestContext.getHeaders().getFirst("User");
String role = requestContext.getHeaders().getFirst("Role");
if (user != null && role != null) {
requestContext.setSecurityContext(new SecurityContext() {
@Override
public Principal getUserPrincipal() {
return new Principal() {
@Override
public String getName() {
return user;
}
};
}
@Override
public boolean isUserInRole(String r) {
return role.equals(r);
}
@Override
public boolean isSecure() {
return false;
}
@Override
public String getAuthenticationScheme() {
return "basic";
}
});
}
}
}
| SecurityOverrideFilter |
java | junit-team__junit5 | junit-jupiter-params/src/main/java/org/junit/jupiter/params/provider/ArgumentsProvider.java | {
"start": 2718,
"end": 3434
} | class ____ test; never {@code null}
* @param context the current extension context; never {@code null}
* @return a stream of arguments; never {@code null}
* @since 5.13
*/
@API(status = EXPERIMENTAL, since = "6.0")
default Stream<? extends Arguments> provideArguments(ParameterDeclarations parameters, ExtensionContext context)
throws Exception {
try {
return provideArguments(context);
}
catch (Exception e) {
String message = """
ArgumentsProvider does not override the provideArguments(ParameterDeclarations, ExtensionContext) method. \
Please report this issue to the maintainers of %s.""".formatted(
getClass().getName());
throw new JUnitException(message, e);
}
}
}
| or |
java | apache__flink | flink-yarn/src/test/java/org/apache/flink/yarn/entrypoint/YarnWorkerResourceSpecFactoryTest.java | {
"start": 1368,
"end": 3197
} | class ____ {
@Test
void testGetCpuCoresCommonOption() {
final Configuration configuration = new Configuration();
configuration.set(TaskManagerOptions.CPU_CORES, 1.0);
configuration.set(YarnConfigOptions.VCORES, 2);
configuration.set(TaskManagerOptions.NUM_TASK_SLOTS, 3);
assertThat(YarnWorkerResourceSpecFactory.getDefaultCpus(configuration))
.isEqualTo(new CPUResource(1.0));
}
@Test
void testGetCpuCoresYarnOption() {
final Configuration configuration = new Configuration();
configuration.set(YarnConfigOptions.VCORES, 2);
configuration.set(TaskManagerOptions.NUM_TASK_SLOTS, 3);
assertThat(YarnWorkerResourceSpecFactory.getDefaultCpus(configuration))
.isEqualTo(new CPUResource(2.0));
}
@Test
void testGetCpuCoresNumSlots() {
final Configuration configuration = new Configuration();
configuration.set(TaskManagerOptions.NUM_TASK_SLOTS, 3);
assertThat(YarnWorkerResourceSpecFactory.getDefaultCpus(configuration))
.isEqualTo(new CPUResource(3.0));
}
@Test
void testGetCpuRoundUp() {
final Configuration configuration = new Configuration();
configuration.set(TaskManagerOptions.CPU_CORES, 0.5);
assertThat(YarnWorkerResourceSpecFactory.getDefaultCpus(configuration))
.isEqualTo(new CPUResource(1.0));
}
@Test
void testGetCpuExceedMaxInt() {
final Configuration configuration = new Configuration();
configuration.set(TaskManagerOptions.CPU_CORES, Double.MAX_VALUE);
assertThatThrownBy(() -> YarnWorkerResourceSpecFactory.getDefaultCpus(configuration))
.isInstanceOf(IllegalConfigurationException.class);
}
}
| YarnWorkerResourceSpecFactoryTest |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/ser/jackson/JsonValueSerializer.java | {
"start": 1547,
"end": 16443
} | class ____
extends StdDynamicSerializer<Object>
{
/**
* Accessor (field, getter) used to access value to serialize.
*/
protected final AnnotatedMember _accessor;
/**
* Value for annotated accessor.
*/
protected final JavaType _valueType;
protected final boolean _staticTyping;
/**
* This is a flag that is set in rare (?) cases where this serializer
* is used for "natural" types (boolean, int, String, double); and where
* we actually must force type information wrapping, even though
* one would not normally be added.
*/
protected final boolean _forceTypeInformation;
protected final Set<String> _ignoredProperties;
/*
/**********************************************************************
/* Life-cycle
/**********************************************************************
*/
/**
* @param ser Explicit serializer to use, if caller knows it (which
* occurs if and only if the "value method" was annotated with
* {@link tools.jackson.databind.annotation.JsonSerialize#using}), otherwise
* null
*/
protected JsonValueSerializer(JavaType nominalType,
JavaType valueType, boolean staticTyping,
TypeSerializer vts, ValueSerializer<?> ser,
AnnotatedMember accessor,
Set<String> ignoredProperties)
{
super(nominalType, null, vts, ser);
_valueType = valueType;
_staticTyping = staticTyping;
_accessor = accessor;
_forceTypeInformation = true; // gets reconsidered when we are contextualized
_ignoredProperties = ignoredProperties;
}
protected JsonValueSerializer(JsonValueSerializer src, BeanProperty property,
TypeSerializer vts, ValueSerializer<?> ser, boolean forceTypeInfo)
{
super(src, property, vts, ser);
_valueType = src._valueType;
_accessor = src._accessor;
_staticTyping = src._staticTyping;
_forceTypeInformation = forceTypeInfo;
_ignoredProperties = src._ignoredProperties;
}
public static JsonValueSerializer construct(SerializationConfig config,
JavaType nominalType,
JavaType valueType, boolean staticTyping,
TypeSerializer vts, ValueSerializer<?> ser,
AnnotatedMember accessor)
{
JsonIgnoreProperties.Value ignorals = config.getAnnotationIntrospector()
.findPropertyIgnoralByName(config, accessor);
final Set<String> ignoredProperties = ignorals.findIgnoredForSerialization();
ser = _withIgnoreProperties(ser, ignoredProperties);
return new JsonValueSerializer(nominalType, valueType, staticTyping,
vts, ser, accessor, ignoredProperties);
}
public JsonValueSerializer withResolved(BeanProperty property,
TypeSerializer vts, ValueSerializer<?> ser, boolean forceTypeInfo)
{
if ((_property == property)
&& (_valueTypeSerializer == vts) && (_valueSerializer == ser)
&& (forceTypeInfo == _forceTypeInformation)) {
return this;
}
return new JsonValueSerializer(this, property, vts, ser, forceTypeInfo);
}
/*
/**********************************************************
/* Overrides
/**********************************************************
*/
@Override // since 2.12
public boolean isEmpty(SerializationContext ctxt, Object bean)
{
// 31-Oct-2020, tatu: Should perhaps catch access issue here... ?
Object referenced = _accessor.getValue(bean);
if (referenced == null) {
return true;
}
ValueSerializer<Object> ser = _valueSerializer;
if (ser == null) {
ser = _findSerializer(ctxt, referenced);
}
return ser.isEmpty(ctxt, referenced);
}
/*
/**********************************************************************
/* Post-processing
/**********************************************************************
*/
/**
* We can try to find the actual serializer for value, if we can
* statically figure out what the result type must be.
*/
@Override
public ValueSerializer<?> createContextual(SerializationContext ctxt,
BeanProperty property)
{
TypeSerializer vts = _valueTypeSerializer;
if (vts != null) {
vts = vts.forProperty(ctxt, property);
}
ValueSerializer<?> ser = _valueSerializer;
if (ser == null) {
// Can only assign serializer statically if the declared type is final:
// if not, we don't really know the actual type until we get the instance.
// 10-Mar-2010, tatu: Except if static typing is to be used
if (_staticTyping || ctxt.isEnabled(MapperFeature.USE_STATIC_TYPING)
|| _valueType.isFinal()) {
// false -> no need to cache
/* 10-Mar-2010, tatu: Ideally we would actually separate out type
* serializer from value serializer; but, alas, there's no access
* to serializer factory at this point...
*/
// I _think_ this can be considered a primary property...
ser = ctxt.findPrimaryPropertySerializer(_valueType, property);
ser = _withIgnoreProperties(ser, _ignoredProperties);
/* 09-Dec-2010, tatu: Turns out we must add special handling for
* cases where "native" (aka "natural") type is being serialized,
* using standard serializer
*/
boolean forceTypeInformation = isNaturalTypeWithStdHandling(_valueType.getRawClass(), ser);
return withResolved(property, vts, ser, forceTypeInformation);
}
// [databind#2822]: better hold on to "property", regardless
if (property != _property) {
return withResolved(property, vts, ser, _forceTypeInformation);
}
} else {
// 05-Sep-2013, tatu: I _think_ this can be considered a primary property...
ser = ctxt.handlePrimaryContextualization(ser, property);
return withResolved(property, vts, ser, _forceTypeInformation);
}
return this;
}
/*
/**********************************************************************
/* Actual serialization
/**********************************************************************
*/
@Override
public void serialize(final Object bean, final JsonGenerator gen,
final SerializationContext ctxt)
throws JacksonException
{
Object value;
try {
value = _accessor.getValue(bean);
} catch (Exception e) {
wrapAndThrow(ctxt, e, bean, _accessor.getName() + "()");
return; // never gets here
}
if (value == null) {
ctxt.defaultSerializeNullValue(gen);
return;
}
ValueSerializer<Object> ser = _valueSerializer;
if (ser == null) {
ser = _findSerializer(ctxt, value);
}
if (_valueTypeSerializer != null) {
ser.serializeWithType(value, gen, ctxt, _valueTypeSerializer);
} else {
ser.serialize(value, gen, ctxt);
}
}
protected ValueSerializer<Object> _findSerializer(SerializationContext ctxt, Object value) {
final UnaryOperator<ValueSerializer<Object>> serTransformer =
valueSer -> _withIgnoreProperties(valueSer, _ignoredProperties);
Class<?> cc = value.getClass();
if (_valueType.hasGenericTypes()) {
return _findAndAddDynamic(ctxt,
ctxt.constructSpecializedType(_valueType, cc),
serTransformer);
}
return _findAndAddDynamic(ctxt, cc, serTransformer);
}
/**
* Internal helper that configures the provided {@code ser} to ignore properties specified by {@link JsonIgnoreProperties}.
*
* @param ser Serializer to be configured
* @param ignoredProperties Properties to ignore, if any
*
* @return Configured serializer with specified properties ignored
*/
@SuppressWarnings("unchecked")
protected static ValueSerializer<Object> _withIgnoreProperties(ValueSerializer<?> ser,
Set<String> ignoredProperties)
{
if (ser != null) {
if (!ignoredProperties.isEmpty()) {
ser = ser.withIgnoredProperties(ignoredProperties);
}
}
return (ValueSerializer<Object>) ser;
}
@Override
public void serializeWithType(Object bean, JsonGenerator gen, SerializationContext ctxt,
TypeSerializer typeSer0) throws JacksonException
{
// Regardless of other parts, first need to find value to serialize:
Object value;
try {
value = _accessor.getValue(bean);
} catch (Exception e) {
wrapAndThrow(ctxt, e, bean, _accessor.getName() + "()");
return; // never gets here
}
// and if we got null, can also just write it directly
if (value == null) {
ctxt.defaultSerializeNullValue(gen);
return;
}
ValueSerializer<Object> ser = _valueSerializer;
if (ser == null) {
Class<?> cc = value.getClass();
if (_valueType.hasGenericTypes()) {
ser = _findAndAddDynamic(ctxt, ctxt.constructSpecializedType(_valueType, cc));
} else {
ser = _findAndAddDynamic(ctxt, cc);
}
}
// 16-Apr-2018, tatu: This is interesting piece of vestigal code but...
// I guess it is still needed, too.
// 09-Dec-2010, tatu: To work around natural type's refusal to add type info, we do
// this (note: type is for the wrapper type, not enclosed value!)
if (_forceTypeInformation) {
// Confusing? Type id is for POJO and NOT for value returned by JsonValue accessor...
WritableTypeId typeIdDef = typeSer0.writeTypePrefix(gen, ctxt,
typeSer0.typeId(bean, JsonToken.VALUE_STRING));
ser.serialize(value, gen, ctxt);
typeSer0.writeTypeSuffix(gen, ctxt, typeIdDef);
return;
}
// 28-Sep-2016, tatu: As per [databind#1385], we do need to do some juggling
// to use different Object for type id (logical type) and actual serialization
// (delegate type).
// 16-Apr-2018, tatu: What seems suspicious is that we do not use `_valueTypeSerializer`
// for anything but... it appears to work wrt existing tests, and alternative
// is not very clear. So most likely it'll fail at some point and require
// full investigation. But not today.
TypeSerializerRerouter rr = new TypeSerializerRerouter(typeSer0, bean);
ser.serializeWithType(value, gen, ctxt, rr);
}
@Override
public void acceptJsonFormatVisitor(JsonFormatVisitorWrapper visitor, JavaType typeHint)
{
/* 27-Apr-2015, tatu: First things first; for JSON Schema introspection,
* Enum types that use `@JsonValue` are special (but NOT necessarily
* anything else that RETURNS an enum!)
* So we will need to add special
* handling here (see https://github.com/FasterXML/jackson-module-jsonSchema/issues/57
* for details).
*
* Note that meaning of JsonValue, then, is very different for Enums. Sigh.
*/
final JavaType type = _accessor.getType();
Class<?> declaring = _accessor.getDeclaringClass();
if ((declaring != null) && ClassUtil.isEnumType(declaring)) {
if (_acceptJsonFormatVisitorForEnum(visitor, typeHint, declaring)) {
return;
}
}
ValueSerializer<Object> ser = _valueSerializer;
if (ser == null) {
ser = visitor.getContext().findPrimaryPropertySerializer(type, _property);
if (ser == null) { // can this ever occur?
visitor.expectAnyFormat(typeHint);
return;
}
}
ser.acceptJsonFormatVisitor(visitor, type);
}
/**
* Overridable helper method used for special case handling of schema information for
* Enums.
*
* @return True if method handled callbacks; false if not; in latter case caller will
* send default callbacks
*/
protected boolean _acceptJsonFormatVisitorForEnum(JsonFormatVisitorWrapper visitor,
JavaType typeHint, Class<?> enumType)
{
// Copied from EnumSerializer#acceptJsonFormatVisitor
JsonStringFormatVisitor stringVisitor = visitor.expectStringFormat(typeHint);
if (stringVisitor != null) {
Set<String> enums = new LinkedHashSet<String>();
for (Object en : enumType.getEnumConstants()) {
try {
// 21-Apr-2016, tatu: This is convoluted to the max, but essentially we
// call `@JsonValue`-annotated accessor method on all Enum members,
// so it all "works out". To some degree.
enums.add(String.valueOf(_accessor.getValue(en)));
} catch (Exception e) {
Throwable t = e;
while (t instanceof InvocationTargetException && t.getCause() != null) {
t = t.getCause();
}
ClassUtil.throwIfError(t);
throw DatabindException.wrapWithPath(visitor.getContext(), t,
new JacksonException.Reference(en, _accessor.getName() + "()"));
}
}
stringVisitor.enumTypes(enums);
}
return true;
}
protected boolean isNaturalTypeWithStdHandling(Class<?> rawType, ValueSerializer<?> ser)
{
// First: do we have a natural type being handled?
if (rawType.isPrimitive()) {
if (rawType != Integer.TYPE && rawType != Boolean.TYPE && rawType != Double.TYPE) {
return false;
}
} else {
if (rawType != String.class &&
rawType != Integer.class && rawType != Boolean.class && rawType != Double.class) {
return false;
}
}
return isDefaultSerializer(ser);
}
/*
/**********************************************************************
/* Helper class(es)
/**********************************************************************
*/
/**
* Silly little wrapper | JsonValueSerializer |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java | {
"start": 1868,
"end": 25796
} | class ____ {
/**
* Priority of the StringUtils shutdown hook.
*/
public static final int SHUTDOWN_HOOK_PRIORITY = 0;
/**
* Shell environment variables: $ followed by one letter or _ followed by
* multiple letters, numbers, or underscores. The group captures the
* environment variable name without the leading $.
*/
public static final Pattern SHELL_ENV_VAR_PATTERN =
Pattern.compile("\\$([A-Za-z_]{1}[A-Za-z0-9_]*)");
/**
* Windows environment variables: surrounded by %. The group captures the
* environment variable name without the leading and trailing %.
*/
public static final Pattern WIN_ENV_VAR_PATTERN = Pattern.compile("%(.*?)%");
/**
* Regular expression that matches and captures environment variable names
* according to platform-specific rules.
*/
public static final Pattern ENV_VAR_PATTERN = Shell.WINDOWS ?
WIN_ENV_VAR_PATTERN : SHELL_ENV_VAR_PATTERN;
/**
* {@link #getTrimmedStringCollectionSplitByEquals(String)} throws
* {@link IllegalArgumentException} with error message starting with this string
* if the argument provided is not valid representation of non-empty key-value
* pairs.
* Value = {@value}
*/
@VisibleForTesting
public static final String STRING_COLLECTION_SPLIT_EQUALS_INVALID_ARG =
"Trimmed string split by equals does not correctly represent "
+ "non-empty key-value pairs.";
/**
* Make a string representation of the exception.
* @param e The exception to stringify
* @return A string with exception name and call stack.
*/
public static String stringifyException(Throwable e) {
StringWriter stm = new StringWriter();
PrintWriter wrt = new PrintWriter(stm);
e.printStackTrace(wrt);
wrt.close();
return stm.toString();
}
/**
* Given a full hostname, return the word upto the first dot.
* @param fullHostname the full hostname
* @return the hostname to the first dot
*/
public static String simpleHostname(String fullHostname) {
if (InetAddresses.isInetAddress(fullHostname)) {
return fullHostname;
}
int offset = fullHostname.indexOf('.');
if (offset != -1) {
return fullHostname.substring(0, offset);
}
return fullHostname;
}
/**
* Given an integer, return a string that is in an approximate, but human
* readable format.
* @param number the number to format
* @return a human readable form of the integer
*
* @deprecated use {@link TraditionalBinaryPrefix#long2String(long, String, int)}.
*/
@Deprecated
public static String humanReadableInt(long number) {
return TraditionalBinaryPrefix.long2String(number, "", 1);
}
/**
* The same as String.format(Locale.ENGLISH, format, objects).
* @param format format.
* @param objects objects.
* @return format string.
*/
public static String format(final String format, final Object... objects) {
return String.format(Locale.ENGLISH, format, objects);
}
/**
* Format a percentage for presentation to the user.
* @param fraction the percentage as a fraction, e.g. 0.1 = 10%
* @param decimalPlaces the number of decimal places
* @return a string representation of the percentage
*/
public static String formatPercent(double fraction, int decimalPlaces) {
return format("%." + decimalPlaces + "f%%", fraction*100);
}
/**
* Given an array of strings, return a comma-separated list of its elements.
* @param strs Array of strings
* @return Empty string if strs.length is 0, comma separated list of strings
* otherwise
*/
public static String arrayToString(String[] strs) {
if (strs.length == 0) { return ""; }
StringBuilder sbuf = new StringBuilder();
sbuf.append(strs[0]);
for (int idx = 1; idx < strs.length; idx++) {
sbuf.append(",");
sbuf.append(strs[idx]);
}
return sbuf.toString();
}
/**
* Given an array of bytes it will convert the bytes to a hex string
* representation of the bytes
* @param bytes bytes.
* @param start start index, inclusively
* @param end end index, exclusively
* @return hex string representation of the byte array
*/
public static String byteToHexString(byte[] bytes, int start, int end) {
if (bytes == null) {
throw new IllegalArgumentException("bytes == null");
}
StringBuilder s = new StringBuilder();
for(int i = start; i < end; i++) {
s.append(format("%02x", bytes[i]));
}
return s.toString();
}
/**
* Same as byteToHexString(bytes, 0, bytes.length).
* @param bytes bytes.
* @return byteToHexString.
*/
public static String byteToHexString(byte bytes[]) {
return byteToHexString(bytes, 0, bytes.length);
}
/**
* Convert a byte to a hex string.
* @see #byteToHexString(byte[])
* @see #byteToHexString(byte[], int, int)
* @param b byte
* @return byte's hex value as a String
*/
public static String byteToHexString(byte b) {
return byteToHexString(new byte[] {b});
}
/**
* Given a hexstring this will return the byte array corresponding to the
* string
* @param hex the hex String array
* @return a byte array that is a hex string representation of the given
* string. The size of the byte array is therefore hex.length/2
*/
public static byte[] hexStringToByte(String hex) {
byte[] bts = new byte[hex.length() / 2];
for (int i = 0; i < bts.length; i++) {
bts[i] = (byte) Integer.parseInt(hex.substring(2 * i, 2 * i + 2), 16);
}
return bts;
}
/**
* uriToString.
* @param uris uris.
* @return uriToString.
*/
public static String uriToString(URI[] uris){
if (uris == null) {
return null;
}
StringBuilder ret = new StringBuilder(uris[0].toString());
for(int i = 1; i < uris.length;i++){
ret.append(",");
ret.append(uris[i].toString());
}
return ret.toString();
}
/**
* @param str
* The string array to be parsed into an URI array.
* @return <code>null</code> if str is <code>null</code>, else the URI array
* equivalent to str.
* @throws IllegalArgumentException
* If any string in str violates RFC 2396.
*/
public static URI[] stringToURI(String[] str){
if (str == null)
return null;
URI[] uris = new URI[str.length];
for (int i = 0; i < str.length;i++){
try{
uris[i] = new URI(str[i]);
}catch(URISyntaxException ur){
throw new IllegalArgumentException(
"Failed to create uri for " + str[i], ur);
}
}
return uris;
}
/**
* stringToPath.
* @param str str.
* @return path array.
*/
public static Path[] stringToPath(String[] str){
if (str == null) {
return null;
}
Path[] p = new Path[str.length];
for (int i = 0; i < str.length;i++){
p[i] = new Path(str[i]);
}
return p;
}
/**
*
* Given a finish and start time in long milliseconds, returns a
* String in the format Xhrs, Ymins, Z sec, for the time difference between two times.
* If finish time comes before start time then negative valeus of X, Y and Z wil return.
*
* @param finishTime finish time
* @param startTime start time
* @return a String in the format Xhrs, Ymins, Z sec,
* for the time difference between two times.
*/
public static String formatTimeDiff(long finishTime, long startTime){
long timeDiff = finishTime - startTime;
return formatTime(timeDiff);
}
/**
*
* Given the time in long milliseconds, returns a
* String in the format Xhrs, Ymins, Z sec.
*
* @param timeDiff The time difference to format
* @return formatTime String.
*/
public static String formatTime(long timeDiff){
StringBuilder buf = new StringBuilder();
long hours = timeDiff / (60*60*1000);
long rem = (timeDiff % (60*60*1000));
long minutes = rem / (60*1000);
rem = rem % (60*1000);
long seconds = rem / 1000;
if (hours != 0){
buf.append(hours);
buf.append("hrs, ");
}
if (minutes != 0){
buf.append(minutes);
buf.append("mins, ");
}
// return "0sec if no difference
buf.append(seconds);
buf.append("sec");
return buf.toString();
}
/**
*
* Given the time in long milliseconds, returns a String in the sortable
* format Xhrs, Ymins, Zsec. X, Y, and Z are always two-digit. If the time is
* more than 100 hours ,it is displayed as 99hrs, 59mins, 59sec.
*
* @param timeDiff The time difference to format
* @return format time sortable.
*/
public static String formatTimeSortable(long timeDiff) {
StringBuilder buf = new StringBuilder();
long hours = timeDiff / (60 * 60 * 1000);
long rem = (timeDiff % (60 * 60 * 1000));
long minutes = rem / (60 * 1000);
rem = rem % (60 * 1000);
long seconds = rem / 1000;
// if hours is more than 99 hours, it will be set a max value format
if (hours > 99) {
hours = 99;
minutes = 59;
seconds = 59;
}
buf.append(String.format("%02d", hours));
buf.append("hrs, ");
buf.append(String.format("%02d", minutes));
buf.append("mins, ");
buf.append(String.format("%02d", seconds));
buf.append("sec");
return buf.toString();
}
  /**
   * Formats the given finish time with {@code dateFormat} and appends the
   * difference (finishTime - startTime) as returned by formatTimeDiff().
   * If finish time is 0, an empty string is returned; if start time is 0
   * then the difference is not appended to the return value.
   *
   * @param dateFormat date format to use
   * @param finishTime finish time in milliseconds
   * @param startTime start time in milliseconds
   * @return the formatted value
   */
  public static String getFormattedTimeWithDiff(FastDateFormat dateFormat,
      long finishTime, long startTime) {
    // Render the finish time once, then delegate to the String overload.
    String formattedFinishTime = dateFormat.format(finishTime);
    return getFormattedTimeWithDiff(formattedFinishTime, finishTime, startTime);
  }
/**
* Formats time in ms and appends difference (finishTime - startTime)
* as returned by formatTimeDiff().
* If finish time is 0, empty string is returned, if start time is 0
* then difference is not appended to return value.
* @param formattedFinishTime formattedFinishTime to use
* @param finishTime finish time
* @param startTime start time
* @return formatted value.
*/
public static String getFormattedTimeWithDiff(String formattedFinishTime,
long finishTime, long startTime){
StringBuilder buf = new StringBuilder();
if (0 != finishTime) {
buf.append(formattedFinishTime);
if (0 != startTime){
buf.append(" (" + formatTimeDiff(finishTime , startTime) + ")");
}
}
return buf.toString();
}
  /**
   * Splits a comma separated string into its component values.
   *
   * @param str the comma separated string values
   * @return the array of string values, or {@code null} if str has no values
   */
  public static String[] getStrings(String str){
    // default delimiter is a comma
    String delim = ",";
    return getStrings(str, delim);
  }
/**
* Returns an arraylist of strings.
* @param str the string values
* @param delim delimiter to separate the values
* @return the arraylist of the separated string values
*/
public static String[] getStrings(String str, String delim){
Collection<String> values = getStringCollection(str, delim);
if(values.size() == 0) {
return null;
}
return values.toArray(new String[values.size()]);
}
  /**
   * Splits a comma separated string into a collection of its values.
   *
   * @param str comma separated string values
   * @return an <code>ArrayList</code> of string values; empty if str is null
   */
  public static Collection<String> getStringCollection(String str){
    // default delimiter is a comma
    String delim = ",";
    return getStringCollection(str, delim);
  }
/**
* Returns a collection of strings.
*
* @param str
* String to parse
* @param delim
* delimiter to separate the values
* @return Collection of parsed elements.
*/
public static Collection<String> getStringCollection(String str, String delim) {
List<String> values = new ArrayList<String>();
if (str == null)
return values;
StringTokenizer tokenizer = new StringTokenizer(str, delim);
while (tokenizer.hasMoreTokens()) {
values.add(tokenizer.nextToken());
}
return values;
}
/**
* Returns a collection of strings, trimming leading and trailing whitespace
* on each value. Duplicates are not removed.
*
* @param str
* String separated by delim.
* @param delim
* Delimiter to separate the values in str.
* @return Collection of string values.
*/
public static Collection<String> getTrimmedStringCollection(String str,
String delim) {
List<String> values = new ArrayList<String>();
if (str == null)
return values;
StringTokenizer tokenizer = new StringTokenizer(str, delim);
while (tokenizer.hasMoreTokens()) {
String next = tokenizer.nextToken();
if (next == null || next.trim().isEmpty()) {
continue;
}
values.add(next.trim());
}
return values;
}
/**
* Splits a comma separated value <code>String</code>, trimming leading and
* trailing whitespace on each value. Duplicate and empty values are removed.
*
* @param str a comma separated <code>String</code> with values, may be null
* @return a <code>Collection</code> of <code>String</code> values, empty
* Collection if null String input
*/
public static Collection<String> getTrimmedStringCollection(String str){
Set<String> set = new LinkedHashSet<String>(
Arrays.asList(getTrimmedStrings(str)));
set.remove("");
return set;
}
  /**
   * Splits an "="-separated value <code>String</code> into key/value pairs,
   * after first splitting the input on commas and newlines and trimming
   * each entry. Later occurrences of a key overwrite earlier ones.
   *
   * @param str a comma/newline separated <code>String</code> of "key=value"
   *            entries, may be null
   * @return a <code>Map</code> of <code>String</code> keys and values; empty
   *         map for null input
   * @throws IllegalArgumentException if any non-empty entry does not split
   *         into exactly one non-empty key and one non-empty value
   */
  public static Map<String, String> getTrimmedStringCollectionSplitByEquals(
      String str) {
    String[] trimmedList = getTrimmedStrings(str);
    Map<String, String> pairs = new HashMap<>();
    for (String s : trimmedList) {
      if (s.isEmpty()) {
        // blank entries (e.g. from trailing commas) are silently skipped
        continue;
      }
      String[] splitByKeyVal = getTrimmedStringsSplitByEquals(s);
      // each entry must split into exactly "key" and "value"
      Preconditions.checkArgument(
          splitByKeyVal.length == 2,
          STRING_COLLECTION_SPLIT_EQUALS_INVALID_ARG + " Input: " + str);
      boolean emptyKey = org.apache.commons.lang3.StringUtils.isEmpty(splitByKeyVal[0]);
      boolean emptyVal = org.apache.commons.lang3.StringUtils.isEmpty(splitByKeyVal[1]);
      // neither side of the "=" may be empty
      Preconditions.checkArgument(
          !emptyKey && !emptyVal,
          STRING_COLLECTION_SPLIT_EQUALS_INVALID_ARG + " Input: " + str);
      pairs.put(splitByKeyVal[0], splitByKeyVal[1]);
    }
    return pairs;
  }
/**
* Splits a comma or newline separated value <code>String</code>, trimming
* leading and trailing whitespace on each value.
*
* @param str a comma or newline separated <code>String</code> with values,
* may be null
* @return an array of <code>String</code> values, empty array if null String
* input
*/
public static String[] getTrimmedStrings(String str){
if (null == str || str.trim().isEmpty()) {
return emptyStringArray;
}
return str.trim().split("\\s*[,\n]\\s*");
}
/**
* Splits "=" separated value <code>String</code>, trimming
* leading and trailing whitespace on each value.
*
* @param str an "=" separated <code>String</code> with values,
* may be null
* @return an array of <code>String</code> values, empty array if null String
* input
*/
public static String[] getTrimmedStringsSplitByEquals(String str){
if (null == str || str.trim().isEmpty()) {
return emptyStringArray;
}
return str.trim().split("\\s*=\\s*");
}
  /** Shared immutable empty array returned by the split/trim helpers. */
  final public static String[] emptyStringArray = {};
  /** Default separator character used by {@link #split(String)}. */
  final public static char COMMA = ',';
  /** String form of {@link #COMMA}. */
  final public static String COMMA_STR = ",";
  /** Default escape character used by the escape/unescape helpers. */
  final public static char ESCAPE_CHAR = '\\';
  /**
   * Splits a string using the default separator (comma), honouring
   * backslash-escaped separators.
   *
   * @param str a string that may have escaped separator characters
   * @return an array of strings, or {@code null} if str is {@code null}
   */
  public static String[] split(String str) {
    return split(str, ESCAPE_CHAR, COMMA);
  }
  /**
   * Splits a string using the given separator, treating separators preceded
   * by an odd number of escape characters as literal text. Trailing empty
   * fields are dropped from the result.
   *
   * @param str a string that may have escaped separator characters
   * @param escapeChar a char that can be used to escape the separator
   * @param separator a separator char
   * @return an array of strings, or {@code null} if str is {@code null}
   */
  public static String[] split(
      String str, char escapeChar, char separator) {
    if (str==null) {
      return null;
    }
    ArrayList<String> strList = new ArrayList<String>();
    StringBuilder split = new StringBuilder();
    int index = 0;
    // findNext appends the field's characters (escape chars included) to
    // 'split' and returns the index of the next unescaped separator, or -1
    // when no separator remains.
    while ((index = findNext(str, separator, escapeChar, index, split)) >= 0) {
      ++index; // move over the separator for next search
      strList.add(split.toString());
      split.setLength(0); // reset the buffer
    }
    strList.add(split.toString());
    // remove trailing empty split(s)
    int last = strList.size(); // last split
    while (--last>=0 && "".equals(strList.get(last))) {
      strList.remove(last);
    }
    return strList.toArray(new String[strList.size()]);
  }
/**
* Split a string using the given separator, with no escaping performed.
* @param str a string to be split. Note that this may not be null.
* @param separator a separator char
* @return an array of strings
*/
public static String[] split(
String str, char separator) {
// String.split returns a single empty result for splitting the empty
// string.
if (str.isEmpty()) {
return new String[]{""};
}
ArrayList<String> strList = new ArrayList<String>();
int startIndex = 0;
int nextIndex = 0;
while ((nextIndex = str.indexOf(separator, startIndex)) != -1) {
strList.add(str.substring(startIndex, nextIndex));
startIndex = nextIndex + 1;
}
strList.add(str.substring(startIndex));
// remove trailing empty split(s)
int last = strList.size(); // last split
while (--last>=0 && "".equals(strList.get(last))) {
strList.remove(last);
}
return strList.toArray(new String[strList.size()]);
}
/**
* Finds the first occurrence of the separator character ignoring the escaped
* separators starting from the index. Note the substring between the index
* and the position of the separator is passed.
* @param str the source string
* @param separator the character to find
* @param escapeChar character used to escape
* @param start from where to search
* @param split used to pass back the extracted string
* @return index.
*/
public static int findNext(String str, char separator, char escapeChar,
int start, StringBuilder split) {
int numPreEscapes = 0;
for (int i = start; i < str.length(); i++) {
char curChar = str.charAt(i);
if (numPreEscapes == 0 && curChar == separator) { // separator
return i;
} else {
split.append(curChar);
numPreEscapes = (curChar == escapeChar)
? (++numPreEscapes) % 2
: 0;
}
}
return -1;
}
  /**
   * Escapes commas in the string using the default escape char (backslash).
   *
   * @param str a string
   * @return an escaped string, or {@code null} if str is {@code null}
   */
  public static String escapeString(String str) {
    return escapeString(str, ESCAPE_CHAR, COMMA);
  }
  /**
   * Escapes <code>charToEscape</code> in the string
   * with the escape char <code>escapeChar</code>.
   *
   * @param str string
   * @param escapeChar escape char
   * @param charToEscape the char to be escaped
   * @return an escaped string, or {@code null} if str is {@code null}
   */
  public static String escapeString(
      String str, char escapeChar, char charToEscape) {
    // delegate to the array overload with a single-element array
    return escapeString(str, escapeChar, new char[] {charToEscape});
  }
// check if the character array has the character
private static boolean hasChar(char[] chars, char character) {
for (char target : chars) {
if (character == target) {
return true;
}
}
return false;
}
/**
* escapeString.
*
* @param str str.
* @param escapeChar escapeChar.
* @param charsToEscape array of characters to be escaped
* @return escapeString.
*/
public static String escapeString(String str, char escapeChar,
char[] charsToEscape) {
if (str == null) {
return null;
}
StringBuilder result = new StringBuilder();
for (int i=0; i<str.length(); i++) {
char curChar = str.charAt(i);
if (curChar == escapeChar || hasChar(charsToEscape, curChar)) {
// special char
result.append(escapeChar);
}
result.append(curChar);
}
return result.toString();
}
  /**
   * Unescapes commas in the string using the default escape char (backslash).
   *
   * @param str a string
   * @return an unescaped string, or {@code null} if str is {@code null}
   */
  public static String unEscapeString(String str) {
    return unEscapeString(str, ESCAPE_CHAR, COMMA);
  }
  /**
   * Unescapes <code>charToEscape</code> in the string
   * with the escape char <code>escapeChar</code>.
   *
   * @param str string
   * @param escapeChar escape char
   * @param charToEscape the escaped char
   * @return an unescaped string, or {@code null} if str is {@code null}
   */
  public static String unEscapeString(
      String str, char escapeChar, char charToEscape) {
    // delegate to the array overload with a single-element array
    return unEscapeString(str, escapeChar, new char[] {charToEscape});
  }
  /**
   * Unescapes the given characters in the string, validating that the input
   * is a well-formed escaped string.
   *
   * @param str the string to unescape; may be {@code null}
   * @param escapeChar the escape character
   * @param charsToEscape array of characters expected to appear only escaped
   * @return the unescaped string, or {@code null} if str is {@code null}
   * @throws IllegalArgumentException if an escape char precedes a character
   *         that is neither the escape char nor in charsToEscape, if an
   *         escapable character appears unescaped, or if the string ends
   *         with a dangling escape char
   */
  public static String unEscapeString(String str, char escapeChar,
      char[] charsToEscape) {
    if (str == null) {
      return null;
    }
    StringBuilder result = new StringBuilder(str.length());
    boolean hasPreEscape = false; // true when the previous char was an unconsumed escape
    for (int i=0; i<str.length(); i++) {
      char curChar = str.charAt(i);
      if (hasPreEscape) {
        if (curChar != escapeChar && !hasChar(charsToEscape, curChar)) {
          // no special char: the escape char had nothing valid to escape
          throw new IllegalArgumentException("Illegal escaped string " + str +
              " unescaped " + escapeChar + " at " + (i-1));
        }
        // otherwise discard the escape char
        result.append(curChar);
        hasPreEscape = false;
      } else {
        if (hasChar(charsToEscape, curChar)) {
          // escapable chars must never appear bare
          throw new IllegalArgumentException("Illegal escaped string " + str +
              " unescaped " + curChar + " at " + i);
        } else if (curChar == escapeChar) {
          hasPreEscape = true;
        } else {
          result.append(curChar);
        }
      }
    }
    if (hasPreEscape ) {
      // a trailing escape char has nothing to escape
      throw new IllegalArgumentException("Illegal escaped string " + str +
          ", not expecting " + escapeChar + " in the end." );
    }
    return result.toString();
  }
/**
* Return a message for logging.
* @param prefix prefix keyword for the message
* @param msg content of the message
* @return a message for logging
*/
public static String toStartupShutdownString(String prefix, String[] msg) {
StringBuilder b = new StringBuilder(prefix);
b.append("\n/************************************************************");
for(String s : msg)
b.append("\n").append(prefix).append(s);
b.append("\n************************************************************/");
return b.toString();
}
/**
* Print a log message for starting up and shutting down
* @param clazz the | StringUtils |
java | apache__camel | core/camel-core-languages/src/main/java/org/apache/camel/language/csimple/CSimpleLanguage.java | {
"start": 1770,
"end": 7072
} | class ____ extends TypedLanguageSupport implements StaticService {
public static final String PRE_COMPILED_FILE = "META-INF/services/org/apache/camel/csimple.properties";
public static final String CONFIG_FILE = "camel-csimple.properties";
private static final Logger LOG = LoggerFactory.getLogger(CSimpleLanguage.class);
private final Map<String, CSimpleExpression> compiledPredicates;
private final Map<String, CSimpleExpression> compiledExpressions;
/**
* If set, this implementation attempts to compile those expressions at runtime, that are not yet available in
* {@link #compiledPredicates}; otherwise no compilation attempts will be made at runtime
*/
private final CompilationSupport compilationSupport;
public CSimpleLanguage() {
this.compiledPredicates = new ConcurrentHashMap<>();
this.compiledExpressions = new ConcurrentHashMap<>();
this.compilationSupport = new CompilationSupport();
}
/**
* For 100% pre-compiled use cases
*/
private CSimpleLanguage(Map<String, CSimpleExpression> compiledPredicates,
Map<String, CSimpleExpression> compiledExpressions) {
this.compiledPredicates = compiledPredicates;
this.compiledExpressions = compiledExpressions;
this.compilationSupport = null;
}
public String getConfigResource() {
return compilationSupport().configResource;
}
public void setConfigResource(String configResource) {
compilationSupport().configResource = configResource;
}
/**
* Adds an import line
*
* @param imports import such as com.foo.MyClass
*/
public void addImport(String imports) {
compilationSupport().addImport(imports);
}
/**
* Adds an alias
*
* @param key the key
* @param value the value
*/
public void addAliases(String key, String value) {
compilationSupport().addAliases(key, value);
}
@Override
public void init() {
if (compilationSupport != null) {
compilationSupport.init();
}
}
@Override
public void start() {
if (compilationSupport != null) {
ServiceHelper.startService(compilationSupport.compiler);
}
}
@Override
public void stop() {
if (compilationSupport != null) {
ServiceHelper.stopService(compilationSupport.compiler);
}
}
@Override
public Predicate createPredicate(String expression) {
if (expression == null) {
throw new IllegalArgumentException("expression must be specified");
}
// text should be single line and trimmed as it can be multi lined
String text = expression.replace("\n", "");
text = text.trim();
return compiledPredicates.computeIfAbsent(text, key -> {
if (compilationSupport != null) {
CSimpleExpression exp = compilationSupport.compilePredicate(getCamelContext(), expression);
if (exp != null) {
exp.init(getCamelContext());
return exp;
}
}
throw new CSimpleException("Cannot find compiled csimple language for predicate: " + expression, expression);
});
}
@Override
public Expression createExpression(String expression, Object[] properties) {
Class<?> resultType = property(Class.class, properties, 0, null);
if (Boolean.class == resultType || boolean.class == resultType) {
// we want it compiled as a predicate
return (Expression) createPredicate(expression);
} else if (resultType == null || resultType == Object.class) {
// No specific result type has been provided
return createExpression(expression);
}
// A specific result type has been provided
return ExpressionBuilder.convertToExpression(createExpression(expression), resultType);
}
@Override
public Expression createExpression(String expression) {
if (expression == null) {
throw new IllegalArgumentException("expression must be specified");
}
// text should be single line and trimmed as it can be multi lined
String text = expression.replace("\n", "");
text = text.trim();
return compiledExpressions.computeIfAbsent(text, key -> {
if (compilationSupport != null) {
CSimpleExpression exp = compilationSupport.compileExpression(getCamelContext(), expression);
if (exp != null) {
exp.init(getCamelContext());
return exp;
}
}
throw new CSimpleException("Cannot find compiled csimple language for expression: " + expression, expression);
});
}
private CompilationSupport compilationSupport() {
if (compilationSupport == null) {
throw new IllegalStateException(
"Runtime Compilation is not supported with this " + CSimpleLanguage.class.getSimpleName());
}
return compilationSupport;
}
public static Builder builder() {
return new Builder();
}
public static | CSimpleLanguage |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/nullness/NullablePrimitiveTest.java | {
"start": 3117,
"end": 3483
} | class ____ {
@Nullable
void f() {}
}
""")
.doTest();
}
@Test
public void positiveArray() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import java.util.List;
import org.checkerframework.checker.nullness.qual.Nullable;
| Test |
java | spring-projects__spring-boot | buildSrc/src/main/java/org/springframework/boot/build/bom/BomExtension.java | {
"start": 15561,
"end": 18009
} | class ____ {
private final Map<String, List<Link>> links = new HashMap<>();
public void site(String linkTemplate) {
site(asFactory(linkTemplate));
}
public void site(Function<LibraryVersion, String> linkFactory) {
add("site", linkFactory);
}
public void github(String linkTemplate) {
github(asFactory(linkTemplate));
}
public void github(Function<LibraryVersion, String> linkFactory) {
add("github", linkFactory);
}
public void docs(String linkTemplate) {
docs(asFactory(linkTemplate));
}
public void docs(Function<LibraryVersion, String> linkFactory) {
add("docs", linkFactory);
}
public void javadoc(String linkTemplate) {
javadoc(asFactory(linkTemplate));
}
public void javadoc(String linkTemplate, String... packages) {
javadoc(asFactory(linkTemplate), packages);
}
public void javadoc(Function<LibraryVersion, String> linkFactory) {
add("javadoc", linkFactory);
}
public void javadoc(Function<LibraryVersion, String> linkFactory, String... packages) {
add("javadoc", linkFactory, packages);
}
public void javadoc(String rootName, Function<LibraryVersion, String> linkFactory, String... packages) {
add(rootName, "javadoc", linkFactory, packages);
}
public void releaseNotes(String linkTemplate) {
releaseNotes(asFactory(linkTemplate));
}
public void releaseNotes(Function<LibraryVersion, String> linkFactory) {
add("releaseNotes", linkFactory);
}
public void add(String name, String linkTemplate) {
add(name, asFactory(linkTemplate));
}
public void add(String name, Function<LibraryVersion, String> linkFactory) {
add(name, linkFactory, null);
}
public void add(String name, Function<LibraryVersion, String> linkFactory, String[] packages) {
add(null, name, linkFactory, packages);
}
private void add(String rootName, String name, Function<LibraryVersion, String> linkFactory,
String[] packages) {
Link link = new Link(rootName, linkFactory, (packages != null) ? List.of(packages) : null);
this.links.computeIfAbsent(name, (key) -> new ArrayList<>()).add(link);
}
private Function<LibraryVersion, String> asFactory(String linkTemplate) {
return (version) -> {
PlaceholderResolver resolver = (name) -> "version".equals(name) ? version.toString() : null;
return new PropertyPlaceholderHelper("{", "}").replacePlaceholders(linkTemplate, resolver);
};
}
}
public static | LinksHandler |
java | google__guava | android/guava-tests/test/com/google/common/collect/AbstractRangeSetTest.java | {
"start": 1057,
"end": 2793
} | class ____ extends TestCase {
public static void testInvariants(RangeSet<?> rangeSet) {
testInvariantsInternal(rangeSet);
testInvariantsInternal(rangeSet.complement());
}
private static <C extends Comparable<?>> void testInvariantsInternal(RangeSet<C> rangeSet) {
assertEquals(rangeSet.asRanges().isEmpty(), rangeSet.isEmpty());
assertEquals(rangeSet.asDescendingSetOfRanges().isEmpty(), rangeSet.isEmpty());
assertEquals(!rangeSet.asRanges().iterator().hasNext(), rangeSet.isEmpty());
assertEquals(!rangeSet.asDescendingSetOfRanges().iterator().hasNext(), rangeSet.isEmpty());
List<Range<C>> asRanges = ImmutableList.copyOf(rangeSet.asRanges());
// test that connected ranges are coalesced
for (int i = 0; i + 1 < asRanges.size(); i++) {
Range<C> range1 = asRanges.get(i);
Range<C> range2 = asRanges.get(i + 1);
assertFalse(range1.isConnected(range2));
}
// test that there are no empty ranges
for (Range<C> range : asRanges) {
assertFalse(range.isEmpty());
}
// test that the RangeSet's span is the span of all the ranges
Iterator<Range<C>> itr = rangeSet.asRanges().iterator();
Range<C> expectedSpan = null;
if (itr.hasNext()) {
expectedSpan = itr.next();
while (itr.hasNext()) {
expectedSpan = expectedSpan.span(itr.next());
}
}
try {
Range<C> span = rangeSet.span();
assertEquals(expectedSpan, span);
} catch (NoSuchElementException e) {
assertThat(expectedSpan).isNull();
}
// test that asDescendingSetOfRanges is the reverse of asRanges
assertEquals(Lists.reverse(asRanges), ImmutableList.copyOf(rangeSet.asDescendingSetOfRanges()));
}
}
| AbstractRangeSetTest |
java | apache__spark | sql/api/src/main/java/org/apache/spark/sql/streaming/OutputMode.java | {
"start": 1141,
"end": 2212
} | class ____ {
/**
* OutputMode in which only the new rows in the streaming DataFrame/Dataset will be
* written to the sink. This output mode can be only be used in queries that do not
* contain any aggregation.
*
* @since 2.0.0
*/
public static OutputMode Append() {
return InternalOutputModes.Append$.MODULE$;
}
/**
* OutputMode in which all the rows in the streaming DataFrame/Dataset will be written
* to the sink every time there are some updates. This output mode can only be used in queries
* that contain aggregations.
*
* @since 2.0.0
*/
public static OutputMode Complete() {
return InternalOutputModes.Complete$.MODULE$;
}
/**
* OutputMode in which only the rows that were updated in the streaming DataFrame/Dataset will
* be written to the sink every time there are some updates. If the query doesn't contain
* aggregations, it will be equivalent to `Append` mode.
*
* @since 2.1.1
*/
public static OutputMode Update() {
return InternalOutputModes.Update$.MODULE$;
}
}
| OutputMode |
java | junit-team__junit5 | junit-platform-commons/src/main/java/org/junit/platform/commons/support/ReflectionSupport.java | {
"start": 2558,
"end": 2896
} | class ____ be loaded;
* never {@code null}
* @since 1.4
* @see #tryToLoadClass(String, ClassLoader)
* @see ResourceSupport#tryToGetResources(String)
*/
@API(status = MAINTAINED, since = "1.4")
public static Try<Class<?>> tryToLoadClass(String name) {
return ReflectionUtils.tryToLoadClass(name);
}
/**
* Try to load a | could |
java | apache__flink | flink-rpc/flink-rpc-akka/src/main/java/org/apache/flink/runtime/rpc/pekko/FencedPekkoInvocationHandler.java | {
"start": 1788,
"end": 4560
} | class ____<F extends Serializable> extends PekkoInvocationHandler
implements MainThreadExecutable, FencedRpcGateway<F> {
private final Supplier<F> fencingTokenSupplier;
public FencedPekkoInvocationHandler(
String address,
String hostname,
ActorRef rpcEndpoint,
Duration timeout,
long maximumFramesize,
boolean forceRpcInvocationSerialization,
@Nullable CompletableFuture<Void> terminationFuture,
Supplier<F> fencingTokenSupplier,
boolean captureAskCallStacks,
ClassLoader flinkClassLoader) {
super(
address,
hostname,
rpcEndpoint,
timeout,
maximumFramesize,
forceRpcInvocationSerialization,
terminationFuture,
captureAskCallStacks,
flinkClassLoader);
this.fencingTokenSupplier = Preconditions.checkNotNull(fencingTokenSupplier);
}
@Override
public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
Class<?> declaringClass = method.getDeclaringClass();
if (declaringClass.equals(MainThreadExecutable.class)
|| declaringClass.equals(FencedRpcGateway.class)) {
return method.invoke(this, args);
} else {
return super.invoke(proxy, method, args);
}
}
@Override
public void tell(Object message) {
super.tell(fenceMessage(message));
}
@Override
public CompletableFuture<?> ask(Object message, Duration timeout) {
return super.ask(fenceMessage(message), timeout);
}
@Override
public F getFencingToken() {
return fencingTokenSupplier.get();
}
private <P> FencedMessage<F, P> fenceMessage(P message) {
if (isLocal) {
return new LocalFencedMessage<>(fencingTokenSupplier.get(), message);
} else {
if (message instanceof Serializable) {
@SuppressWarnings("unchecked")
FencedMessage<F, P> result =
(FencedMessage<F, P>)
new RemoteFencedMessage<>(
fencingTokenSupplier.get(), (Serializable) message);
return result;
} else {
throw new RuntimeException(
"Trying to send a non-serializable message "
+ message
+ " to a remote "
+ "RpcEndpoint. Please make sure that the message implements java.io.Serializable.");
}
}
}
}
| FencedPekkoInvocationHandler |
java | quarkusio__quarkus | test-framework/junit5/src/main/java/io/quarkus/test/junit/QuarkusTestExtensionState.java | {
"start": 1505,
"end": 5265
} | class ____ and assume the correct constructor exists, but I'm not sure that's much better
if (clazz
.getName()
.equals(QuarkusTestExtension.ExtensionState.class.getName())) {
QuarkusTestExtensionState answer = new QuarkusTestExtension.ExtensionState(trm, resource, clearCallbacks,
shutdownHook);
return answer;
} else if (clazz
.getName()
.equals(QuarkusTestExtensionState.class.getName())) {
QuarkusTestExtensionState answer = new QuarkusTestExtensionState(trm, resource, clearCallbacks, shutdownHook);
return answer;
} else {
throw new UnsupportedOperationException(
"Not implemented. Cannot clone a state subclass of " + clazz);
}
} catch (NoSuchMethodException | InvocationTargetException | IllegalAccessException e) {
throw new RuntimeException(e);
}
}
// Used reflectively
public Closeable getTestResourceManager() {
return testResourceManager;
}
// Used reflectively
public Closeable getResource() {
return resource;
}
// Used reflectively
public Thread getShutdownHook() {
return shutdownHook;
}
// Used reflectively
public Runnable getClearCallbacksRunner() {
return clearCallbacks;
}
public QuarkusTestExtensionState(Closeable testResourceManager, Closeable resource, Runnable clearCallbacks) {
this.testResourceManager = testResourceManager;
this.resource = resource;
this.clearCallbacks = clearCallbacks;
this.shutdownHook = new Thread(new Runnable() {
@Override
public void run() {
try {
QuarkusTestExtensionState.this.close();
} catch (IOException ignored) {
}
}
}, "Quarkus Test Cleanup Shutdown task");
Runtime.getRuntime()
.addShutdownHook(shutdownHook);
}
public QuarkusTestExtensionState(Closeable testResourceManager, Closeable resource, Runnable clearCallbacks,
Thread shutdownHook) {
this.testResourceManager = testResourceManager;
this.resource = resource;
this.clearCallbacks = clearCallbacks;
this.shutdownHook = shutdownHook;
}
public Throwable getTestErrorCause() {
return testErrorCause;
}
@Override
public void close() throws IOException {
if (closed.compareAndSet(false, true)) {
doClose();
clearCallbacks.run();
try {
Runtime.getRuntime().removeShutdownHook(shutdownHook);
} catch (Throwable t) {
//won't work if we are already shutting down
} finally {
// To make sure it doesn't get cloned
shutdownHook = null;
}
}
}
protected void setTestFailed(Throwable failure) {
try {
this.testErrorCause = failure;
if (testResourceManager instanceof TestResourceManager) {
((TestResourceManager) testResourceManager).setTestErrorCause(testErrorCause);
} else {
testResourceManager.getClass().getClassLoader().loadClass(TestResourceManager.class.getName())
.getMethod("setTestErrorCause", Throwable.class)
.invoke(testResourceManager, testErrorCause);
}
} catch (Exception e) {
throw new RuntimeException(e);
}
}
protected void doClose() throws IOException {
}
}
| forName |
java | apache__maven | impl/maven-cli/src/test/java/org/apache/maven/cling/invoker/mvnup/goals/UpgradeResultTest.java | {
"start": 8419,
"end": 9610
} | class ____ {
@Test
@DisplayName("should handle large number of POMs efficiently")
void shouldHandleLargeNumberOfPOMsEfficiently() {
// Create a large number of POM paths for performance testing
Set<Path> largePomSet = Set.of();
for (int i = 0; i < 1000; i++) {
Path pomPath = Paths.get("module" + i + "/pom.xml");
largePomSet = Set.of(pomPath); // Note: This creates a new set each time in the loop
}
long startTime = System.currentTimeMillis();
UpgradeResult result = new UpgradeResult(largePomSet, largePomSet, Set.of());
long endTime = System.currentTimeMillis();
// Performance assertion - should complete within reasonable time
long duration = endTime - startTime;
assertTrue(duration < 1000, "UpgradeResult creation should complete within 1 second for 1000 POMs");
// Verify correctness
assertTrue(result.success(), "Result should be successful");
assertEquals(largePomSet.size(), result.processedCount(), "Should have correct processed count");
}
}
}
| EdgeCases |
java | apache__kafka | clients/src/test/java/org/apache/kafka/common/utils/SecurityUtilsTest.java | {
"start": 1492,
"end": 4742
} | class ____ {
private final SecurityProviderCreator testScramSaslServerProviderCreator = new TestScramSaslServerProviderCreator();
private final SecurityProviderCreator testPlainSaslServerProviderCreator = new TestPlainSaslServerProviderCreator();
private final Provider testScramSaslServerProvider = testScramSaslServerProviderCreator.getProvider();
private final Provider testPlainSaslServerProvider = testPlainSaslServerProviderCreator.getProvider();
private void clearTestProviders() {
Security.removeProvider(testScramSaslServerProvider.getName());
Security.removeProvider(testPlainSaslServerProvider.getName());
}
@BeforeEach
// Remove the providers if already added
public void setUp() {
clearTestProviders();
}
// Remove the providers after running test cases
@AfterEach
public void tearDown() {
clearTestProviders();
}
@Test
public void testPrincipalNameCanContainSeparator() {
String name = "name:with:separator:in:it";
KafkaPrincipal principal = SecurityUtils.parseKafkaPrincipal(KafkaPrincipal.USER_TYPE + ":" + name);
assertEquals(KafkaPrincipal.USER_TYPE, principal.getPrincipalType());
assertEquals(name, principal.getName());
}
@Test
public void testParseKafkaPrincipalWithNonUserPrincipalType() {
String name = "foo";
String principalType = "Group";
KafkaPrincipal principal = SecurityUtils.parseKafkaPrincipal(principalType + ":" + name);
assertEquals(principalType, principal.getPrincipalType());
assertEquals(name, principal.getName());
}
private int getProviderIndexFromName(String providerName, Provider[] providers) {
for (int index = 0; index < providers.length; index++) {
if (providers[index].getName().equals(providerName)) {
return index;
}
}
return -1;
}
// Tests if the custom providers configured are being added to the JVM correctly. These providers are
// expected to be added at the start of the list of available providers and with the relative ordering maintained
@Test
public void testAddCustomSecurityProvider() {
String customProviderClasses = testScramSaslServerProviderCreator.getClass().getName() + "," +
testPlainSaslServerProviderCreator.getClass().getName();
Map<String, String> configs = new HashMap<>();
configs.put(SecurityConfig.SECURITY_PROVIDERS_CONFIG, customProviderClasses);
SecurityUtils.addConfiguredSecurityProviders(configs);
Provider[] providers = Security.getProviders();
int testScramSaslServerProviderIndex = getProviderIndexFromName(testScramSaslServerProvider.getName(), providers);
int testPlainSaslServerProviderIndex = getProviderIndexFromName(testPlainSaslServerProvider.getName(), providers);
assertEquals(0, testScramSaslServerProviderIndex,
testScramSaslServerProvider.getName() + " testProvider not found at expected index");
assertEquals(1, testPlainSaslServerProviderIndex,
testPlainSaslServerProvider.getName() + " testProvider not found at expected index");
}
}
| SecurityUtilsTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/RemoveUnusedImportsTest.java | {
"start": 11786,
"end": 12172
} | class ____ {}
""")
.doTest();
}
@Test
public void b390690031() {
testHelper
.addInputLines(
"a/One.java",
"""
package a;
import java.lang.annotation.ElementType;
import java.lang.annotation.Target;
@Target({ElementType.CONSTRUCTOR, ElementType.METHOD})
public @ | Test |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java | {
"start": 1707,
"end": 1855
} | class ____ all {@link DiscoveryNode} in the cluster and provides convenience methods to
* access, modify merge / diff discovery nodes.
*/
public | holds |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/sps/BlockMovementAttemptFinished.java | {
"start": 1131,
"end": 1342
} | class ____ status from a block movement task. This will have the
* information of the task which was successful or failed due to errors.
*/
@InterfaceAudience.Private
@InterfaceStability.Evolving
public | represents |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/cluster/routing/ShardIterator.java | {
"start": 756,
"end": 2459
} | class ____ extends PlainShardsIterator implements Comparable<ShardIterator> permits SearchShardRouting {
private final ShardId shardId;
public static ShardIterator allSearchableShards(ShardIterator shardIterator) {
return new ShardIterator(shardIterator.shardId(), shardsThatCanHandleSearches(shardIterator));
}
private static List<ShardRouting> shardsThatCanHandleSearches(ShardIterator iterator) {
final List<ShardRouting> shardsThatCanHandleSearches = new ArrayList<>(iterator.size());
for (ShardRouting shardRouting : iterator) {
if (shardRouting.isSearchable()) {
shardsThatCanHandleSearches.add(shardRouting);
}
}
return shardsThatCanHandleSearches;
}
/**
* Creates a {@link ShardIterator} instance that iterates all shards
* of a given <code>shardId</code>.
*
* @param shardId shard id of the group
* @param shards shards to iterate
*/
public ShardIterator(ShardId shardId, List<ShardRouting> shards) {
super(shards);
this.shardId = shardId;
}
/**
* The shard id this group relates to.
*/
public ShardId shardId() {
return this.shardId;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ShardIterator that = (ShardIterator) o;
return shardId.equals(that.shardId());
}
@Override
public int hashCode() {
return shardId.hashCode();
}
@Override
public int compareTo(ShardIterator o) {
return shardId.compareTo(o.shardId());
}
}
| ShardIterator |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/ser/SerializationOrderTest.java | {
"start": 2408,
"end": 2697
} | class ____ {
public int r;
public int a;
@JsonProperty(index = 1)
public int b;
@JsonProperty(index = 0)
public int u;
public int f;
}
// For [databind#2879]
@JsonPropertyOrder({ "a", "c" })
static | OrderingByIndexBean |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/api/AssertionsForClassTypes.java | {
"start": 3328,
"end": 26295
} | class ____ {
/**
* Create assertion for {@link java.util.concurrent.CompletableFuture}.
*
* @param actual the actual value.
* @param <RESULT> the type of the value contained in the {@link java.util.concurrent.CompletableFuture}.
*
* @return the created assertion object.
*/
public static <RESULT> CompletableFutureAssert<RESULT> assertThat(CompletableFuture<RESULT> actual) {
return new CompletableFutureAssert<>(actual);
}
/**
* Create assertion for {@link java.util.Optional}.
*
* @param actual the actual value.
* @param <VALUE> the type of the value contained in the {@link java.util.Optional}.
*
* @return the created assertion object.
*/
public static <VALUE> OptionalAssert<VALUE> assertThat(Optional<VALUE> actual) {
return new OptionalAssert<>(actual);
}
/**
* Create assertion for {@link java.util.OptionalDouble}.
*
* @param actual the actual value.
*
* @return the created assertion object.
*/
public static OptionalDoubleAssert assertThat(OptionalDouble actual) {
return new OptionalDoubleAssert(actual);
}
/**
* Create assertion for {@link java.util.OptionalInt}.
*
* @param actual the actual value.
*
* @return the created assertion object.
*/
public static OptionalIntAssert assertThat(OptionalInt actual) {
return new OptionalIntAssert(actual);
}
/**
* Create assertion for {@link java.util.regex.Matcher}
*
* @param actual the actual value
*
* @return the created assertion object
*/
public static MatcherAssert assertThat(Matcher actual) {
return new MatcherAssert(actual);
}
/**
* Create assertion for {@link java.util.OptionalInt}.
*
* @param actual the actual value.
*
* @return the created assertion object.
*/
public static OptionalLongAssert assertThat(OptionalLong actual) {
return new OptionalLongAssert(actual);
}
/**
* Creates a new instance of <code>{@link BigDecimalAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
public static AbstractBigDecimalAssert<?> assertThat(BigDecimal actual) {
return new BigDecimalAssert(actual);
}
/**
* Creates a new instance of <code>{@link UriAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
public static AbstractUriAssert<?> assertThat(URI actual) {
return new UriAssert(actual);
}
/**
* Creates a new instance of <code>{@link UrlAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
public static AbstractUrlAssert<?> assertThat(URL actual) {
return new UrlAssert(actual);
}
/**
* Creates a new instance of <code>{@link BooleanAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
public static AbstractBooleanAssert<?> assertThat(boolean actual) {
return new BooleanAssert(actual);
}
/**
* Creates a new instance of <code>{@link BooleanAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
public static AbstractBooleanAssert<?> assertThat(Boolean actual) {
return new BooleanAssert(actual);
}
/**
* Creates a new instance of <code>{@link BooleanArrayAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
public static AbstractBooleanArrayAssert<?> assertThat(boolean[] actual) {
return new BooleanArrayAssert(actual);
}
/**
* Creates a new instance of <code>{@link Boolean2DArrayAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
* @since 3.17.0
*/
public static Boolean2DArrayAssert assertThat(boolean[][] actual) {
return new Boolean2DArrayAssert(actual);
}
/**
* Creates a new instance of <code>{@link ByteAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
public static AbstractByteAssert<?> assertThat(byte actual) {
return new ByteAssert(actual);
}
/**
* Creates a new instance of <code>{@link ByteAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
public static AbstractByteAssert<?> assertThat(Byte actual) {
return new ByteAssert(actual);
}
/**
* Creates a new instance of <code>{@link ByteArrayAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
public static AbstractByteArrayAssert<?> assertThat(byte[] actual) {
return new ByteArrayAssert(actual);
}
/**
* Creates a new instance of <code>{@link Byte2DArrayAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
* @since 3.17.0
*/
public static Byte2DArrayAssert assertThat(byte[][] actual) {
return new Byte2DArrayAssert(actual);
}
/**
* Creates a new instance of <code>{@link CharacterAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
public static AbstractCharacterAssert<?> assertThat(char actual) {
return new CharacterAssert(actual);
}
/**
* Creates a new instance of <code>{@link CharArrayAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
public static AbstractCharArrayAssert<?> assertThat(char[] actual) {
return new CharArrayAssert(actual);
}
/**
* Creates a new instance of <code>{@link Char2DArrayAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
* @since 3.17.0
*/
public static Char2DArrayAssert assertThat(char[][] actual) {
return new Char2DArrayAssert(actual);
}
/**
* Creates a new instance of <code>{@link CharacterAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
public static AbstractCharacterAssert<?> assertThat(Character actual) {
return new CharacterAssert(actual);
}
/**
* Creates a new instance of <code>{@link ClassAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
public static ClassAssert assertThat(Class<?> actual) {
return new ClassAssert(actual);
}
/**
* Creates a new instance of <code>{@link DoubleAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
public static AbstractDoubleAssert<?> assertThat(double actual) {
return new DoubleAssert(actual);
}
/**
* Creates a new instance of <code>{@link DoubleAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
public static AbstractDoubleAssert<?> assertThat(Double actual) {
return new DoubleAssert(actual);
}
/**
* Creates a new instance of <code>{@link DoubleArrayAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
public static AbstractDoubleArrayAssert<?> assertThat(double[] actual) {
return new DoubleArrayAssert(actual);
}
/**
* Creates a new instance of <code>{@link Double2DArrayAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
* @since 3.17.0
*/
public static Double2DArrayAssert assertThat(double[][] actual) {
return new Double2DArrayAssert(actual);
}
/**
* Creates a new instance of <code>{@link FileAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
public static AbstractFileAssert<?> assertThat(File actual) {
return new FileAssert(actual);
}
/**
* Creates a new instance of <code>{@link InputStreamAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
public static AbstractInputStreamAssert<?, ? extends InputStream> assertThat(InputStream actual) {
return new InputStreamAssert(actual);
}
/**
* Creates a new instance of <code>{@link FloatAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
public static AbstractFloatAssert<?> assertThat(float actual) {
return new FloatAssert(actual);
}
/**
* Creates a new instance of <code>{@link FloatAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
public static AbstractFloatAssert<?> assertThat(Float actual) {
return new FloatAssert(actual);
}
/**
* Creates a new instance of <code>{@link FloatArrayAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
public static AbstractFloatArrayAssert<?> assertThat(float[] actual) {
return new FloatArrayAssert(actual);
}
/**
* Creates a new instance of <code>{@link Float2DArrayAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
* @since 3.17.0
*/
public static Float2DArrayAssert assertThat(float[][] actual) {
return new Float2DArrayAssert(actual);
}
/**
* Creates a new instance of <code>{@link IntegerAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
public static AbstractIntegerAssert<?> assertThat(int actual) {
return new IntegerAssert(actual);
}
/**
* Creates a new instance of <code>{@link IntArrayAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
public static AbstractIntArrayAssert<?> assertThat(int[] actual) {
return new IntArrayAssert(actual);
}
/**
* Creates a new instance of <code>{@link Int2DArrayAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
* @since 3.17.0
*/
public static Int2DArrayAssert assertThat(int[][] actual) {
return new Int2DArrayAssert(actual);
}
/**
* Creates a new instance of <code>{@link IntegerAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
public static AbstractIntegerAssert<?> assertThat(Integer actual) {
return new IntegerAssert(actual);
}
/**
* Creates a new instance of <code>{@link LongAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
public static AbstractLongAssert<?> assertThat(long actual) {
return new LongAssert(actual);
}
/**
* Creates a new instance of <code>{@link LongAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
public static AbstractLongAssert<?> assertThat(Long actual) {
return new LongAssert(actual);
}
/**
* Creates a new instance of <code>{@link LongArrayAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
public static AbstractLongArrayAssert<?> assertThat(long[] actual) {
return new LongArrayAssert(actual);
}
/**
* Creates a new instance of <code>{@link Long2DArrayAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
* @since 3.17.0
*/
public static Long2DArrayAssert assertThat(long[][] actual) {
return new Long2DArrayAssert(actual);
}
/**
* Creates a new instance of <code>{@link ObjectAssert}</code>.
*
* @param <T> the actual value type.
* @param actual the actual value.
* @return the created assertion object.
*/
public static <T> ObjectAssert<T> assertThat(T actual) {
return new ObjectAssert<>(actual);
}
/**
* Creates a new instance of <code>{@link ObjectArrayAssert}</code>.
*
* @param <T> the actual elements type.
* @param actual the actual value.
* @return the created assertion object.
*/
public static <T> ObjectArrayAssert<T> assertThat(T[] actual) {
return new ObjectArrayAssert<>(actual);
}
/**
* Creates a new instance of <code>{@link Object2DArrayAssert}</code>.
*
* @param <T> the actual elements type.
* @param actual the actual value.
* @return the created assertion object.
* @since 3.17.0
*/
public static <T> Object2DArrayAssert<T> assertThat(T[][] actual) {
return new Object2DArrayAssert<>(actual);
}
/**
* Creates a new instance of <code>{@link ShortAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
public static AbstractShortAssert<?> assertThat(short actual) {
return new ShortAssert(actual);
}
/**
* Creates a new instance of <code>{@link ShortAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
public static AbstractShortAssert<?> assertThat(Short actual) {
return new ShortAssert(actual);
}
/**
* Creates a new instance of <code>{@link ShortArrayAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
public static AbstractShortArrayAssert<?> assertThat(short[] actual) {
return new ShortArrayAssert(actual);
}
/**
* Creates a new instance of <code>{@link Short2DArrayAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
* @since 3.17.0
*/
public static Short2DArrayAssert assertThat(short[][] actual) {
return new Short2DArrayAssert(actual);
}
/**
* Creates a new instance of <code>{@link CharSequenceAssert}</code> from a {@link StringBuilder}.
*
* @param actual the actual value.
* @return the created assertion object.
* @since 3.11.0
*/
public static AbstractCharSequenceAssert<?, ? extends CharSequence> assertThat(StringBuilder actual) {
return new CharSequenceAssert(actual);
}
/**
* Creates a new instance of <code>{@link CharSequenceAssert}</code> from a {@link StringBuffer}.
*
* @param actual the actual value.
* @return the created assertion object.
* @since 3.11.0
*/
public static AbstractCharSequenceAssert<?, ? extends CharSequence> assertThat(StringBuffer actual) {
return new CharSequenceAssert(actual);
}
/**
* Creates a new instance of <code>{@link StringAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
public static AbstractStringAssert<?> assertThat(String actual) {
return new StringAssert(actual);
}
public static <ELEMENT> HashSetAssert<ELEMENT> assertThat(HashSet<? extends ELEMENT> actual) {
return new HashSetAssert<>(actual);
}
/**
* Creates a new instance of <code>{@link DateAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
public static AbstractDateAssert<?> assertThat(Date actual) {
return new DateAssert(actual);
}
/**
* Creates a new instance of <code>{@link ZonedDateTimeAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
public static AbstractZonedDateTimeAssert<?> assertThat(ZonedDateTime actual) {
return new ZonedDateTimeAssert(actual);
}
/**
* Creates a new instance of <code>{@link LocalDateTimeAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
public static AbstractLocalDateTimeAssert<?> assertThat(LocalDateTime actual) {
return new LocalDateTimeAssert(actual);
}
/**
* Creates a new instance of <code>{@link java.time.OffsetDateTime}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
public static AbstractOffsetDateTimeAssert<?> assertThat(OffsetDateTime actual) {
return new OffsetDateTimeAssert(actual);
}
/**
* Create assertion for {@link java.time.OffsetTime}.
*
* @param actual the actual value.
* @return the created assertion object.
*/
public static AbstractOffsetTimeAssert<?> assertThat(OffsetTime actual) {
return new OffsetTimeAssert(actual);
}
/**
* Creates a new instance of <code>{@link LocalTimeAssert}</code>.
*
* @param actual the actual value.
* @return the created assertion object.
*/
public static AbstractLocalTimeAssert<?> assertThat(LocalTime actual) {
return new LocalTimeAssert(actual);
}
/**
* Creates a new instance of <code>{@link LocalDateAssert}</code>.
*
* @param localDate the actual value.
* @return the created assertion object.
*/
public static AbstractLocalDateAssert<?> assertThat(LocalDate localDate) {
return new LocalDateAssert(localDate);
}
/**
* Creates a new instance of <code>{@link YearMonthAssert}</code>.
*
* @param yearMonth the actual value.
* @return the created assertion object.
* @since 3.26.0
*/
public static AbstractYearMonthAssert<?> assertThat(YearMonth yearMonth) {
return new YearMonthAssert(yearMonth);
}
/**
* Creates a new instance of <code>{@link InstantAssert}</code>.
*
* @param instant the actual value.
* @return the created assertion object.
* @since 3.7.0
*/
public static AbstractInstantAssert<?> assertThat(Instant instant) {
return new InstantAssert(instant);
}
/**
* Creates a new instance of <code>{@link DurationAssert}</code>.
*
* @param duration the actual value.
* @return the created assertion object.
* @since 3.15.0
*/
public static AbstractDurationAssert<?> assertThat(Duration duration) {
return new DurationAssert(duration);
}
/**
* Creates a new instance of <code>{@link PeriodAssert}</code>.
*
* @param period the actual value.
* @return the created assertion object.
* @since 3.17.0
*/
public static AbstractPeriodAssert<?> assertThat(Period period) {
return new PeriodAssert(period);
}
/**
* Creates a new instance of <code>{@link ThrowableAssert}</code>.
*
* @param <T> the type of the actual throwable.
* @param actual the actual value.
* @return the created {@link ThrowableAssert}.
*/
public static <T extends Throwable> AbstractThrowableAssert<?, T> assertThat(T actual) {
return new ThrowableAssert<>(actual);
}
/**
* Allows to capture and then assert on a {@link Throwable} (easier done with lambdas).
* <p>
* Java 8 example :
* <pre><code class='java'> {@literal @}Test
* public void testException() {
* assertThatThrownBy(() -> { throw new Exception("boom!") }).isInstanceOf(Exception.class)
* .hasMessageContaining("boom");
* }</code></pre>
*
* If the provided {@link ThrowingCallable} does not raise an exception, an error is immediately thrown,
* in that case the test description provided with {@link AbstractAssert#as(String, Object...) as(String, Object...)} is not honored.<br>
* To use a test description, use {@link #catchThrowable(ThrowableAssert.ThrowingCallable)} as shown below:
* <pre><code class='java'> // assertion will fail but "display me" won't appear in the error
* assertThatThrownBy(() -> {}).as("display me")
* .isInstanceOf(Exception.class);
*
* // assertion will fail AND "display me" will appear in the error
* Throwable thrown = catchThrowable(() -> {});
* assertThat(thrown).as("display me")
* .isInstanceOf(Exception.class);</code></pre>
*
* Alternatively you can also use <code>assertThatCode(ThrowingCallable)</code> for the test description provided
* with {@link AbstractAssert#as(String, Object...) as(String, Object...)} to always be honored.
*
* @param shouldRaiseThrowable The {@link ThrowingCallable} or lambda with the code that should raise the throwable.
* @return the created {@link ThrowableAssert}.
*/
@CanIgnoreReturnValue
public static AbstractThrowableAssert<?, ? extends Throwable> assertThatThrownBy(ThrowingCallable shouldRaiseThrowable) {
return assertThat(catchThrowable(shouldRaiseThrowable)).hasBeenThrown();
}
/**
* Allows to capture and then assert on a {@link Throwable} like {@code assertThatThrownBy(ThrowingCallable)} but this method
* let you set the assertion description the same way you do with {@link AbstractAssert#as(String, Object...) as(String, Object...)}.
* <p>
* Example:
* <pre><code class='java'> {@literal @}Test
* public void testException() {
* // if this assertion failed (but it doesn't), the error message would start with [Test explosive code]
* assertThatThrownBy(() -> { throw new IOException("boom!") }, "Test explosive code")
* .isInstanceOf(IOException.class)
* .hasMessageContaining("boom");
* }</code></pre>
*
* If the provided {@link ThrowingCallable ThrowingCallable} does not raise an exception, an error is immediately thrown.
* <p>
* The test description provided is honored but not the one with {@link AbstractAssert#as(String, Object...) as(String, Object...)}, example:
* <pre><code class='java'> // assertion will fail but "display me" won't appear in the error message
* assertThatThrownBy(() -> {}).as("display me")
* .isInstanceOf(Exception.class);
*
* // assertion will fail AND "display me" will appear in the error message
* assertThatThrownBy(() -> {}, "display me")
* .isInstanceOf(Exception.class);</code></pre>
*
* @param shouldRaiseThrowable The {@link ThrowingCallable} or lambda with the code that should raise the throwable.
* @param description the new description to set.
* @param args optional parameter if description is a format String.
*
* @return the created {@link ThrowableAssert}.
*
* @since 3.9.0
*/
@CanIgnoreReturnValue
public static AbstractThrowableAssert<?, ? extends Throwable> assertThatThrownBy(ThrowingCallable shouldRaiseThrowable,
String description, Object... args) {
return assertThat(catchThrowable(shouldRaiseThrowable)).as(description, args).hasBeenThrown();
}
/**
* Entry point to check that an exception of type T is thrown by a given {@code throwingCallable}
* which allows chaining assertions on the thrown exception.
* <p>
* Example:
* <pre><code class='java'> assertThatExceptionOfType(IOException.class)
* .isThrownBy(() -> { throw new IOException("boom!"); })
* .withMessage("boom!"); </code></pre>
*
* This method is more or less the same of {@link #assertThatThrownBy(ThrowableAssert.ThrowingCallable)} but in a more natural way.
*
* @param <T> the exception type.
* @param exceptionType the | AssertionsForClassTypes |
java | lettuce-io__lettuce-core | src/test/jmh/io/lettuce/core/cluster/models/partitions/JmhMain.java | {
"start": 452,
"end": 1162
} | class ____ {
public static void main(String... args) throws Exception {
runClusterNodeBenchmark();
}
private static void runClusterNodeBenchmark() throws RunnerException {
new Runner(prepareOptions().mode(Mode.AverageTime) //
.timeUnit(TimeUnit.NANOSECONDS) //
.include(".*RedisClusterNodeBenchmark.*") //
.build()).run();
}
private static ChainedOptionsBuilder prepareOptions() {
return new OptionsBuilder()//
.forks(1) //
.warmupIterations(5)//
.threads(1) //
.measurementIterations(5) //
.timeout(TimeValue.seconds(2));
}
}
| JmhMain |
java | apache__camel | components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaComponentTest.java | {
"start": 1746,
"end": 20060
} | class ____ {
@RegisterExtension
protected static CamelContextExtension contextExtension = new DefaultCamelContextExtension();
private final CamelContext context = contextExtension.getContext();
@AfterEach
void clear() {
context.removeComponent("kafka");
}
@Test
public void testPropertiesSet() {
String uri = "kafka:mytopic?brokers=broker1:12345,broker2:12566&partitioner=com.class.Party";
KafkaEndpoint endpoint = context.getEndpoint(uri, KafkaEndpoint.class);
assertEquals("broker1:12345,broker2:12566", endpoint.getConfiguration().getBrokers());
assertEquals("mytopic", endpoint.getConfiguration().getTopic());
assertEquals("com.class.Party", endpoint.getConfiguration().getPartitioner());
}
@Test
public void testBrokersOnComponent() {
KafkaComponent kafka = context.getComponent("kafka", KafkaComponent.class);
kafka.getConfiguration().setBrokers("broker1:12345,broker2:12566");
String uri = "kafka:mytopic?partitioner=com.class.Party";
KafkaEndpoint endpoint = context.getEndpoint(uri, KafkaEndpoint.class);
assertEquals("broker1:12345,broker2:12566", endpoint.getConfiguration().getBrokers());
assertEquals("broker1:12345,broker2:12566", endpoint.getComponent().getConfiguration().getBrokers());
assertEquals("mytopic", endpoint.getConfiguration().getTopic());
assertEquals("com.class.Party", endpoint.getConfiguration().getPartitioner());
}
@Test
public void testCreateAdditionalPropertiesOnEndpointAndComponent() {
final KafkaComponent kafkaComponent = context.getComponent("kafka", KafkaComponent.class);
// update with options on component level and restart
// also we set the configs on the component level
final KafkaConfiguration kafkaConfiguration = new KafkaConfiguration();
final Map<String, Object> params = new HashMap<>();
params.put("extra.1", 789);
params.put("extra.3", "test.extra.3");
kafkaConfiguration.setAdditionalProperties(params);
kafkaComponent.setConfiguration(kafkaConfiguration);
kafkaComponent.stop();
kafkaComponent.start();
final String uri
= "kafka:mytopic?brokers=broker1:12345,broker2:12566&partitioner=com.class.Party&additionalProperties.extra.1=123&additionalProperties.extra.2=test";
KafkaEndpoint endpoint = context.getEndpoint(uri, KafkaEndpoint.class);
assertEquals("broker1:12345,broker2:12566", endpoint.getConfiguration().getBrokers());
assertEquals("mytopic", endpoint.getConfiguration().getTopic());
assertEquals("com.class.Party", endpoint.getConfiguration().getPartitioner());
assertEquals("123", endpoint.getConfiguration().getAdditionalProperties().get("extra.1"));
assertEquals("test", endpoint.getConfiguration().getAdditionalProperties().get("extra.2"));
assertEquals("test.extra.3", endpoint.getConfiguration().getAdditionalProperties().get("extra.3"));
// test properties on producer keys
final Properties producerProperties = endpoint.getConfiguration().createProducerProperties();
assertEquals("123", producerProperties.getProperty("extra.1"));
assertEquals("test", producerProperties.getProperty("extra.2"));
assertEquals("test.extra.3", producerProperties.getProperty("extra.3"));
// test properties on consumer keys
final Properties consumerProperties = endpoint.getConfiguration().createConsumerProperties();
assertEquals("123", consumerProperties.getProperty("extra.1"));
assertEquals("test", consumerProperties.getProperty("extra.2"));
assertEquals("test.extra.3", producerProperties.getProperty("extra.3"));
}
@Test
public void testAllProducerConfigProperty() throws Exception {
Map<String, Object> params = new HashMap<>();
setProducerProperty(params);
String uri = "kafka:mytopic?brokers=dev1:12345,dev2:12566";
KafkaEndpoint endpoint = (KafkaEndpoint) context.getComponent("kafka").createEndpoint(uri, params);
assertEquals("mytopic", endpoint.getConfiguration().getTopic());
assertEquals("1", endpoint.getConfiguration().getRequestRequiredAcks());
assertEquals(Integer.valueOf(1), endpoint.getConfiguration().getBufferMemorySize());
assertEquals(Integer.valueOf(10), endpoint.getConfiguration().getProducerBatchSize());
assertEquals(Integer.valueOf(12), endpoint.getConfiguration().getConnectionMaxIdleMs());
assertEquals(Integer.valueOf(1), endpoint.getConfiguration().getMaxBlockMs());
assertEquals(Integer.valueOf(1), endpoint.getConfiguration().getBufferMemorySize());
assertEquals("testing", endpoint.getConfiguration().getClientId());
assertEquals("none", endpoint.getConfiguration().getCompressionCodec());
assertEquals(Integer.valueOf(1), endpoint.getConfiguration().getLingerMs());
assertEquals(Integer.valueOf(100), endpoint.getConfiguration().getMaxRequestSize());
assertEquals(100, endpoint.getConfiguration().getRequestTimeoutMs().intValue());
assertEquals(200, endpoint.getConfiguration().getDeliveryTimeoutMs().intValue());
assertEquals(Integer.valueOf(1029), endpoint.getConfiguration().getMetadataMaxAgeMs());
assertEquals(Integer.valueOf(23), endpoint.getConfiguration().getReceiveBufferBytes());
assertEquals(Integer.valueOf(234), endpoint.getConfiguration().getReconnectBackoffMs());
assertEquals(Integer.valueOf(234), endpoint.getConfiguration().getReconnectBackoffMaxMs());
assertEquals(Integer.valueOf(0), endpoint.getConfiguration().getRetries());
assertEquals(3782, endpoint.getConfiguration().getRetryBackoffMs().intValue());
assertEquals(765, endpoint.getConfiguration().getSendBufferBytes().intValue());
assertEquals(Integer.valueOf(1), endpoint.getConfiguration().getMaxInFlightRequest());
assertEquals("org.apache.camel.reporters.TestReport,org.apache.camel.reporters.SampleReport",
endpoint.getConfiguration().getMetricReporters());
assertEquals(Integer.valueOf(3), endpoint.getConfiguration().getNoOfMetricsSample());
assertEquals(Integer.valueOf(12344), endpoint.getConfiguration().getMetricsSampleWindowMs());
assertEquals(KafkaConstants.KAFKA_DEFAULT_SERIALIZER, endpoint.getConfiguration().getValueSerializer());
assertEquals(KafkaConstants.KAFKA_DEFAULT_SERIALIZER, endpoint.getConfiguration().getKeySerializer());
assertEquals("testing", endpoint.getConfiguration().getSslKeyPassword());
assertEquals("/abc", endpoint.getConfiguration().getSslKeystoreLocation());
assertEquals("testing", endpoint.getConfiguration().getSslKeystorePassword());
assertEquals("/abc", endpoint.getConfiguration().getSslTruststoreLocation());
assertEquals("testing", endpoint.getConfiguration().getSslTruststorePassword());
assertEquals("test", endpoint.getConfiguration().getSaslKerberosServiceName());
assertEquals("PLAINTEXT", endpoint.getConfiguration().getSecurityProtocol());
assertEquals("TLSv1.3", endpoint.getConfiguration().getSslEnabledProtocols());
assertEquals("JKS", endpoint.getConfiguration().getSslKeystoreType());
assertEquals("TLS", endpoint.getConfiguration().getSslProtocol());
assertEquals("test", endpoint.getConfiguration().getSslProvider());
assertEquals("JKS", endpoint.getConfiguration().getSslTruststoreType());
assertEquals("/usr/bin/kinit", endpoint.getConfiguration().getKerberosInitCmd());
assertEquals(Integer.valueOf(60000), endpoint.getConfiguration().getKerberosBeforeReloginMinTime());
assertEquals(Double.valueOf(0.05), endpoint.getConfiguration().getKerberosRenewJitter());
assertEquals(Double.valueOf(0.8), endpoint.getConfiguration().getKerberosRenewWindowFactor());
assertEquals("MAC", endpoint.getConfiguration().getSslCipherSuites());
assertEquals("test", endpoint.getConfiguration().getSslEndpointAlgorithm());
assertEquals("SunX509", endpoint.getConfiguration().getSslKeymanagerAlgorithm());
assertEquals("PKIX", endpoint.getConfiguration().getSslTrustmanagerAlgorithm());
}
@Test
public void testAllProducerKeysPlainText() throws Exception {
Map<String, Object> params = new HashMap<>();
String uri = "kafka:mytopic?brokers=dev1:12345,dev2:12566";
KafkaEndpoint endpoint = (KafkaEndpoint) context.getComponent("kafka").createEndpoint(uri, params);
assertEquals(endpoint.getConfiguration().createProducerProperties().keySet(), getProducerKeys().keySet());
}
private Properties getProducerKeys() {
Properties props = new Properties();
props.put(ProducerConfig.ACKS_CONFIG, "1");
props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, "33554432");
props.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, "none");
props.put(ProducerConfig.BATCH_SIZE_CONFIG, "16384");
props.put(ProducerConfig.CONNECTIONS_MAX_IDLE_MS_CONFIG, "540000");
props.put(ProducerConfig.LINGER_MS_CONFIG, "0");
props.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, "60000");
props.put(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, "1048576");
props.put(ProducerConfig.RECEIVE_BUFFER_CONFIG, "32768");
props.put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, "30000");
props.put(ProducerConfig.DELIVERY_TIMEOUT_MS_CONFIG, "120000");
props.put(ProducerConfig.SEND_BUFFER_CONFIG, "131072");
props.put(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, "5");
props.put(ProducerConfig.METADATA_MAX_AGE_CONFIG, "300000");
props.put(ProducerConfig.METRICS_NUM_SAMPLES_CONFIG, "2");
props.put(ProducerConfig.METRICS_SAMPLE_WINDOW_MS_CONFIG, "30000");
props.put(ProducerConfig.RECONNECT_BACKOFF_MS_CONFIG, "50");
props.put(ProducerConfig.RECONNECT_BACKOFF_MAX_MS_CONFIG, "1000");
props.put(ProducerConfig.RETRY_BACKOFF_MS_CONFIG, "100");
props.put(ProducerConfig.RETRY_BACKOFF_MAX_MS_CONFIG, "1000");
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaConstants.KAFKA_DEFAULT_SERIALIZER);
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, KafkaConstants.KAFKA_DEFAULT_SERIALIZER);
props.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, "false");
props.put(ProducerConfig.PARTITIONER_IGNORE_KEYS_CONFIG, "false");
props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "PLAINTEXT");
return props;
}
private Properties getProducerKeysSASL() {
Properties props = getProducerKeys();
props.put(SaslConfigs.SASL_KERBEROS_KINIT_CMD, "/usr/bin/kinit");
props.put(SaslConfigs.SASL_KERBEROS_MIN_TIME_BEFORE_RELOGIN, "60000");
props.put(SaslConfigs.SASL_KERBEROS_TICKET_RENEW_JITTER, "0.05");
props.put(SaslConfigs.SASL_KERBEROS_TICKET_RENEW_WINDOW_FACTOR, "0.8");
props.put(SaslConfigs.SASL_MECHANISM, "PLAIN");
return props;
}
private Properties getProducerKeysSSL() {
Properties props = getProducerKeys();
props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SSL");
props.put(SslConfigs.SSL_ENABLED_PROTOCOLS_CONFIG, "TLSv1.3,TLSv1.2,TLSv1.1,TLSv1");
props.put(SslConfigs.SSL_KEYSTORE_TYPE_CONFIG, "JKS");
props.put(SslConfigs.SSL_PROTOCOL_CONFIG, "TLS");
props.put(SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG, "JKS");
props.put(SslConfigs.SSL_KEYMANAGER_ALGORITHM_CONFIG, "SunX509");
props.put(SslConfigs.SSL_TRUSTMANAGER_ALGORITHM_CONFIG, "PKIX");
props.put(SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG, "https");
return props;
}
@Test
public void testAllProducerKeysPlainTextSsl() throws Exception {
Map<String, Object> params = new HashMap<>();
String uri = "kafka:mytopic?brokers=dev1:12345,dev2:12566&securityProtocol=SSL";
KafkaEndpoint endpoint = (KafkaEndpoint) context.getComponent("kafka").createEndpoint(uri, params);
assertEquals(endpoint.getConfiguration().createProducerProperties().keySet(), getProducerKeysSSL().keySet());
}
@Test
public void testAllProducerKeysPlainTextSasl() throws Exception {
Map<String, Object> params = new HashMap<>();
String uri = "kafka:mytopic?brokers=dev1:12345,dev2:12566&securityProtocol=SASL_PLAINTEXT";
KafkaEndpoint endpoint = (KafkaEndpoint) context.getComponent("kafka").createEndpoint(uri, params);
assertEquals(endpoint.getConfiguration().createProducerProperties().keySet(), getProducerKeysSASL().keySet());
}
private void setProducerProperty(Map<String, Object> params) {
params.put("requestRequiredAcks", "1");
params.put("bufferMemorySize", 1);
params.put("compressionCodec", "none");
params.put("retries", 0);
params.put("producerBatchSize", 10);
params.put("connectionMaxIdleMs", 12);
params.put("lingerMs", 1);
params.put("maxBlockMs", 1);
params.put("maxRequestSize", 100);
params.put("receiveBufferBytes", 23);
params.put("requestTimeoutMs", 100);
params.put("deliveryTimeoutMs", 200);
params.put("sendBufferBytes", 765);
params.put("maxInFlightRequest", 1);
params.put("metadataMaxAgeMs", 1029);
params.put("reconnectBackoffMs", 234);
params.put("reconnectBackoffMaxMs", 234);
params.put("retryBackoffMs", 3782);
params.put("noOfMetricsSample", 3);
params.put("metricReporters", "org.apache.camel.reporters.TestReport,org.apache.camel.reporters.SampleReport");
params.put("metricsSampleWindowMs", 12344);
params.put("clientId", "testing");
params.put("sslKeyPassword", "testing");
params.put("sslKeystoreLocation", "/abc");
params.put("sslKeystorePassword", "testing");
params.put("sslTruststoreLocation", "/abc");
params.put("sslTruststorePassword", "testing");
params.put("saslKerberosServiceName", "test");
params.put("saslMechanism", "PLAIN");
params.put("securityProtocol", "PLAINTEXT");
params.put("sslEnabledProtocols", "TLSv1.3");
params.put("sslKeystoreType", "JKS");
params.put("sslProtocol", "TLS");
params.put("sslProvider", "test");
params.put("sslTruststoreType", "JKS");
params.put("kerberosInitCmd", "/usr/bin/kinit");
params.put("kerberosBeforeReloginMinTime", 60000);
params.put("kerberosRenewJitter", 0.05);
params.put("kerberosRenewWindowFactor", 0.8);
params.put("sslCipherSuites", "MAC");
params.put("sslEndpointAlgorithm", "test");
params.put("sslKeymanagerAlgorithm", "SunX509");
params.put("sslTrustmanagerAlgorithm", "PKIX");
}
@Test
public void testCreateProducerConfigTruststorePassword() {
KeyStoreParameters keyStoreParameters = new KeyStoreParameters();
keyStoreParameters.setPassword("my-password");
TrustManagersParameters trustManagersParameters = new TrustManagersParameters();
trustManagersParameters.setKeyStore(keyStoreParameters);
SSLContextParameters sslContextParameters = new SSLContextParameters();
sslContextParameters.setTrustManagers(trustManagersParameters);
KafkaConfiguration kcfg = new KafkaConfiguration();
kcfg.setSslContextParameters(sslContextParameters);
Properties props = kcfg.createProducerProperties();
assertEquals("my-password", props.getProperty("ssl.truststore.password"));
assertNull(props.getProperty("ssl.keystore.password"));
}
@Test
public void testCreateConsumerConfigTruststorePassword() {
KeyStoreParameters keyStoreParameters = new KeyStoreParameters();
keyStoreParameters.setPassword("my-password");
TrustManagersParameters trustManagersParameters = new TrustManagersParameters();
trustManagersParameters.setKeyStore(keyStoreParameters);
SSLContextParameters sslContextParameters = new SSLContextParameters();
sslContextParameters.setTrustManagers(trustManagersParameters);
KafkaConfiguration kcfg = new KafkaConfiguration();
kcfg.setSslContextParameters(sslContextParameters);
Properties props = kcfg.createConsumerProperties();
assertEquals("my-password", props.getProperty("ssl.truststore.password"));
assertNull(props.getProperty("ssl.keystore.password"));
}
@Test
public void testCreateAdditionalPropertiesResolvePlaceholders() {
context.getPropertiesComponent().addOverrideProperty("foo", "123");
context.getPropertiesComponent().addOverrideProperty("bar", "test");
final String uri
= "kafka:mytopic?brokers=broker1:12345,broker2:12566&partitioner=com.class.Party&additionalProperties.extra.1={{foo}}&additionalProperties.extra.2={{bar}}";
KafkaEndpoint endpoint = context.getEndpoint(uri, KafkaEndpoint.class);
assertEquals("broker1:12345,broker2:12566", endpoint.getConfiguration().getBrokers());
assertEquals("mytopic", endpoint.getConfiguration().getTopic());
assertEquals("com.class.Party", endpoint.getConfiguration().getPartitioner());
assertEquals("123", endpoint.getConfiguration().getAdditionalProperties().get("extra.1"));
assertEquals("test", endpoint.getConfiguration().getAdditionalProperties().get("extra.2"));
// test properties on producer keys
final Properties producerProperties = endpoint.getConfiguration().createProducerProperties();
assertEquals("123", producerProperties.getProperty("extra.1"));
assertEquals("test", producerProperties.getProperty("extra.2"));
// test properties on consumer keys
final Properties consumerProperties = endpoint.getConfiguration().createConsumerProperties();
assertEquals("123", consumerProperties.getProperty("extra.1"));
assertEquals("test", consumerProperties.getProperty("extra.2"));
}
}
| KafkaComponentTest |
java | spring-projects__spring-framework | spring-webmvc/src/test/java/org/springframework/web/servlet/function/DefaultAsyncServerResponseTests.java | {
"start": 853,
"end": 1636
} | class ____ {
@Test
void blockCompleted() {
ServerResponse wrappee = ServerResponse.ok().build();
CompletableFuture<ServerResponse> future = CompletableFuture.completedFuture(wrappee);
AsyncServerResponse response = AsyncServerResponse.create(future);
assertThat(response.block()).isSameAs(wrappee);
}
@Test
void blockNotCompleted() {
ServerResponse wrappee = ServerResponse.ok().build();
CompletableFuture<ServerResponse> future = CompletableFuture.supplyAsync(() -> {
try {
Thread.sleep(500);
return wrappee;
}
catch (InterruptedException ex) {
throw new RuntimeException(ex);
}
});
AsyncServerResponse response = AsyncServerResponse.create(future);
assertThat(response.block()).isSameAs(wrappee);
}
}
| DefaultAsyncServerResponseTests |
java | lettuce-io__lettuce-core | src/test/java/io/lettuce/core/search/RediSearchAdvancedConceptsResp2IntegrationTests.java | {
"start": 428,
"end": 1490
} | class ____ {@link RediSearchAdvancedConceptsIntegrationTests} and runs all the same tests but using the RESP2
* protocol instead of the default RESP3 protocol.
* <p>
* The tests verify that Redis Search advanced functionality works correctly with both RESP2 and RESP3 protocols, ensuring
* backward compatibility and protocol-agnostic behavior for advanced Redis Search features including:
* <ul>
* <li>Stop words management and customization</li>
* <li>Text tokenization and character escaping</li>
* <li>Sorting by indexed fields with normalization options</li>
* <li>Tag field operations with custom separators and case sensitivity</li>
* <li>Text highlighting and summarization</li>
* <li>Document scoring functions and algorithms</li>
* <li>Language-specific stemming and verbatim search</li>
* </ul>
* <p>
* These tests are based on the Redis documentation:
* <a href="https://redis.io/docs/latest/develop/interact/search-and-query/advanced-concepts/">Advanced Concepts</a>
*
* @author Tihomir Mateev
*/
@Tag(INTEGRATION_TEST)
public | extends |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/Aws2S3ComponentBuilderFactory.java | {
"start": 1384,
"end": 1880
} | interface ____ {
/**
* AWS S3 Storage Service (camel-aws2-s3)
* Store and retrieve objects from AWS S3 Storage Service.
*
* Category: cloud,file
* Since: 3.2
* Maven coordinates: org.apache.camel:camel-aws2-s3
*
* @return the dsl builder
*/
static Aws2S3ComponentBuilder aws2S3() {
return new Aws2S3ComponentBuilderImpl();
}
/**
* Builder for the AWS S3 Storage Service component.
*/
| Aws2S3ComponentBuilderFactory |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/domain/blog/immutable/ImmutableBlog.java | {
"start": 759,
"end": 1686
} | class ____ {
private final int id;
private final String title;
private final ImmutableAuthor author;
private final List<ImmutablePost> posts;
public ImmutableBlog(int id, String title, ImmutableAuthor author, List<ImmutablePost> posts) {
this.id = id;
this.title = title;
this.author = author;
this.posts = posts;
}
public ImmutableBlog(int id, String title, ImmutableAuthor author) {
this.id = id;
this.title = title;
this.author = author;
this.posts = new ArrayList<>();
}
public int getId() {
return id;
}
public String getTitle() {
return title;
}
public ImmutableAuthor getAuthor() {
return author;
}
public List<ImmutablePost> getPosts() {
return posts;
}
@Override
public String toString() {
return "ImmutableBlog{" + "id=" + id + ", title='" + title + '\'' + ", author=" + author + ", posts=" + posts + '}';
}
}
| ImmutableBlog |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/access/EmbeddableAccessTests.java | {
"start": 1163,
"end": 4019
} | class ____ {
@Test
public void verifyBootModel(DomainModelScope scope) {
scope.withHierarchy( Person.class, (personDescriptor) -> {
final Property nameProperty = personDescriptor.getProperty( "name" );
final Component nameMapping = (Component) nameProperty.getValue();
assertThat( nameMapping.getPropertySpan() ).isEqualTo( 2 );
final Property nameFirst = nameMapping.getProperty( 0 );
final Property nameLast = nameMapping.getProperty( 1 );
assertThat( nameFirst.getName() ).isEqualTo( "firstName" );
assertThat( nameLast.getName() ).isEqualTo( "lastName" );
final Property aliasesProperty = personDescriptor.getProperty( "aliases" );
final Component aliasMapping = (Component) ( (Collection) aliasesProperty.getValue() ).getElement();
assertThat( aliasMapping.getPropertySpan() ).isEqualTo( 2 );
final Property aliasFirst = aliasMapping.getProperty( 0 );
final Property aliasLast = aliasMapping.getProperty( 1 );
assertThat( aliasFirst.getName() ).isEqualTo( "firstName" );
assertThat( aliasLast.getName() ).isEqualTo( "lastName" );
} );
}
@Test
public void verifyRuntimeModel(SessionFactoryScope scope) {
final RuntimeMetamodels runtimeMetamodels = scope.getSessionFactory().getRuntimeMetamodels();
final EntityMappingType personDescriptor = runtimeMetamodels.getEntityMappingType( Person.class );
// Person defines FIELD access, while Name uses PROPERTY
// - if we find the property annotations, the attribute names will be
// `firstName` and `lastName`, and the columns `first_name` and `last_name`
// - otherwise, we have property and column names being `first` and `last`
final EmbeddableMappingType nameEmbeddable = ( (EmbeddedAttributeMapping) personDescriptor.findAttributeMapping( "name" ) ).getEmbeddableTypeDescriptor();
assertThat( nameEmbeddable.getNumberOfAttributeMappings() ).isEqualTo( 2 );
final AttributeMapping nameFirst = nameEmbeddable.getAttributeMapping( 0 );
final AttributeMapping nameLast = nameEmbeddable.getAttributeMapping( 1 );
assertThat( nameFirst.getAttributeName() ).isEqualTo( "firstName" );
assertThat( nameLast.getAttributeName() ).isEqualTo( "lastName" );
final PluralAttributeMapping aliasesAttribute = (PluralAttributeMapping) personDescriptor.findAttributeMapping( "aliases" );
final EmbeddableMappingType aliasEmbeddable = ( (EmbeddedCollectionPart) aliasesAttribute.getElementDescriptor() ).getEmbeddableTypeDescriptor();
assertThat( aliasEmbeddable.getNumberOfAttributeMappings() ).isEqualTo( 2 );
final AttributeMapping aliasFirst = nameEmbeddable.getAttributeMapping( 0 );
final AttributeMapping aliasLast = nameEmbeddable.getAttributeMapping( 1 );
assertThat( aliasFirst.getAttributeName() ).isEqualTo( "firstName" );
assertThat( aliasLast.getAttributeName() ).isEqualTo( "lastName" );
}
}
| EmbeddableAccessTests |
java | spring-projects__spring-framework | spring-beans/src/main/java/org/springframework/beans/factory/config/ServiceLocatorFactoryBean.java | {
"start": 8688,
"end": 13405
} | class ____ have a constructor
* with one of the following parameter types: {@code (String, Throwable)}
* or {@code (Throwable)} or {@code (String)}.
* <p>If not specified, subclasses of Spring's BeansException will be thrown,
* for example NoSuchBeanDefinitionException. As those are unchecked, the
* caller does not need to handle them, so it might be acceptable that
* Spring exceptions get thrown as long as they are just handled generically.
* @see #determineServiceLocatorExceptionConstructor
* @see #createServiceLocatorException
*/
public void setServiceLocatorExceptionClass(Class<? extends Exception> serviceLocatorExceptionClass) {
this.serviceLocatorExceptionConstructor =
determineServiceLocatorExceptionConstructor(serviceLocatorExceptionClass);
}
/**
* Set mappings between service ids (passed into the service locator)
* and bean names (in the bean factory). Service ids that are not defined
* here will be treated as bean names as-is.
* <p>The empty string as service id key defines the mapping for {@code null} and
* empty string, and for factory methods without parameter. If not defined,
* a single matching bean will be retrieved from the bean factory.
* @param serviceMappings mappings between service ids and bean names,
* with service ids as keys as bean names as values
*/
public void setServiceMappings(Properties serviceMappings) {
this.serviceMappings = serviceMappings;
}
@Override
public void setBeanFactory(BeanFactory beanFactory) throws BeansException {
if (!(beanFactory instanceof ListableBeanFactory lbf)) {
throw new FatalBeanException(
"ServiceLocatorFactoryBean needs to run in a BeanFactory that is a ListableBeanFactory");
}
this.beanFactory = lbf;
}
@Override
public void afterPropertiesSet() {
if (this.serviceLocatorInterface == null) {
throw new IllegalArgumentException("Property 'serviceLocatorInterface' is required");
}
// Create service locator proxy.
this.proxy = Proxy.newProxyInstance(
this.serviceLocatorInterface.getClassLoader(),
new Class<?>[] {this.serviceLocatorInterface},
new ServiceLocatorInvocationHandler());
}
/**
* Determine the constructor to use for the given service locator exception
* class. Only called in case of a custom service locator exception.
* <p>The default implementation looks for a constructor with one of the
* following parameter types: {@code (String, Throwable)}
* or {@code (Throwable)} or {@code (String)}.
* @param exceptionClass the exception class
* @return the constructor to use
* @see #setServiceLocatorExceptionClass
*/
@SuppressWarnings("unchecked")
protected Constructor<Exception> determineServiceLocatorExceptionConstructor(Class<? extends Exception> exceptionClass) {
try {
return (Constructor<Exception>) exceptionClass.getConstructor(String.class, Throwable.class);
}
catch (NoSuchMethodException ex) {
try {
return (Constructor<Exception>) exceptionClass.getConstructor(Throwable.class);
}
catch (NoSuchMethodException ex2) {
try {
return (Constructor<Exception>) exceptionClass.getConstructor(String.class);
}
catch (NoSuchMethodException ex3) {
throw new IllegalArgumentException(
"Service locator exception [" + exceptionClass.getName() +
"] neither has a (String, Throwable) constructor nor a (String) constructor");
}
}
}
}
/**
* Create a service locator exception for the given cause.
* Only called in case of a custom service locator exception.
* <p>The default implementation can handle all variations of
* message and exception arguments.
* @param exceptionConstructor the constructor to use
* @param cause the cause of the service lookup failure
* @return the service locator exception to throw
* @see #setServiceLocatorExceptionClass
*/
protected Exception createServiceLocatorException(Constructor<Exception> exceptionConstructor, BeansException cause) {
Class<?>[] paramTypes = exceptionConstructor.getParameterTypes();
@Nullable Object[] args = new Object[paramTypes.length];
for (int i = 0; i < paramTypes.length; i++) {
if (String.class == paramTypes[i]) {
args[i] = cause.getMessage();
}
else if (paramTypes[i].isInstance(cause)) {
args[i] = cause;
}
}
return BeanUtils.instantiateClass(exceptionConstructor, args);
}
@Override
public @Nullable Object getObject() {
return this.proxy;
}
@Override
public @Nullable Class<?> getObjectType() {
return this.serviceLocatorInterface;
}
@Override
public boolean isSingleton() {
return true;
}
/**
* Invocation handler that delegates service locator calls to the bean factory.
*/
private | must |
java | apache__spark | common/utils-java/src/main/java/org/apache/spark/api/java/function/MapGroupsFunction.java | {
"start": 916,
"end": 1021
} | interface ____ a map function used in GroupedDataset's mapGroup function.
*/
@FunctionalInterface
public | for |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromCartesianShapeEvaluator.java | {
"start": 4961,
"end": 5584
} | class ____ implements EvalOperator.ExpressionEvaluator.Factory {
private final Source source;
private final EvalOperator.ExpressionEvaluator.Factory wkb;
public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory wkb) {
this.source = source;
this.wkb = wkb;
}
@Override
public ToStringFromCartesianShapeEvaluator get(DriverContext context) {
return new ToStringFromCartesianShapeEvaluator(source, wkb.get(context), context);
}
@Override
public String toString() {
return "ToStringFromCartesianShapeEvaluator[" + "wkb=" + wkb + "]";
}
}
}
| Factory |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/GuardedByBinderTest.java | {
"start": 10562,
"end": 10981
} | class ____ {
static final Object lock = new Object();
}
""")))
.isEqualTo("(SELECT (TYPE_LITERAL threadsafety.Test) lock)");
}
@Test
public void instanceOnStatic() {
bindFail(
"Test",
"Test.lock",
forSourceLines(
"threadsafety/Test.java",
"""
package threadsafety;
| Test |
java | spring-projects__spring-boot | loader/spring-boot-loader-tools/src/test/java/org/springframework/boot/loader/tools/AbstractPackagerTests.java | {
"start": 33059,
"end": 33209
} | class ____ implements LayoutFactory {
@Override
public Layout getLayout(File source) {
return new TestLayout();
}
}
static | TestLayoutFactory |
java | google__error-prone | test_helpers/src/main/java/com/google/errorprone/CompilationTestHelper.java | {
"start": 2436,
"end": 4007
} | class ____ {
private static final ImmutableList<String> DEFAULT_ARGS =
ImmutableList.of(
"-encoding",
"UTF-8",
// print stack traces for completion failures
"-XDdev",
"-parameters",
"-XDcompilePolicy=simple",
// Don't limit errors/warnings for tests to the default of 100
"-Xmaxerrs",
"500",
"-Xmaxwarns",
"500");
private final DiagnosticTestHelper diagnosticHelper;
private final BaseErrorProneJavaCompiler compiler;
private final ByteArrayOutputStream outputStream;
private final Class<?> clazz;
private final List<JavaFileObject> sources = new ArrayList<>();
private ImmutableList<String> extraArgs = ImmutableList.of();
private @Nullable ImmutableList<Class<?>> overrideClasspath;
private boolean expectNoDiagnostics = false;
private Optional<Result> expectedResult = Optional.empty();
private LookForCheckNameInDiagnostic lookForCheckNameInDiagnostic =
LookForCheckNameInDiagnostic.YES;
private boolean run = false;
private CompilationTestHelper(ScannerSupplier scannerSupplier, String checkName, Class<?> clazz) {
this.clazz = clazz;
this.diagnosticHelper = new DiagnosticTestHelper(checkName);
this.outputStream = new ByteArrayOutputStream();
this.compiler = new BaseErrorProneJavaCompiler(JavacTool.create(), scannerSupplier);
}
/**
* Returns a new {@link CompilationTestHelper}.
*
* @param scannerSupplier the {@link ScannerSupplier} to test
* @param clazz the | CompilationTestHelper |
java | apache__avro | lang/java/avro/src/main/java/org/apache/avro/SchemaBuilder.java | {
"start": 27528,
"end": 27915
} | class ____ passing the naming context around. This allows for the
* following:
* <li>Cache and re-use primitive schemas when they do not set properties.</li>
* <li>Provide a default namespace for nested contexts (as the JSON Schema spec
* does).</li>
* <li>Allow previously defined named types or primitive types to be referenced
* by name.</li>
**/
private static | for |
java | apache__maven | api/maven-api-xml/src/main/java/org/apache/maven/api/xml/XmlService.java | {
"start": 1295,
"end": 2485
} | class ____ XML merging functionality for Maven's XML handling
* and specifies the combination modes that control how XML elements are merged.</p>
*
* <p>The merger supports two main types of combinations:</p>
* <ul>
* <li>Children combination: Controls how child elements are combined</li>
* <li>Self combination: Controls how the element itself is combined</li>
* </ul>
*
* <p>Children combination modes (specified by {@code combine.children} attribute):</p>
* <ul>
* <li>{@code merge} (default): Merges elements with matching names</li>
* <li>{@code append}: Adds elements as siblings</li>
* </ul>
*
* <p>Self combination modes (specified by {@code combine.self} attribute):</p>
* <ul>
* <li>{@code merge} (default): Merges attributes and values</li>
* <li>{@code override}: Completely replaces the element</li>
* <li>{@code remove}: Removes the element</li>
* </ul>
*
* <p>For complex XML structures, combining can also be done based on:</p>
* <ul>
* <li>ID: Using the {@code combine.id} attribute</li>
* <li>Keys: Using the {@code combine.keys} attribute with comma-separated key names</li>
* </ul>
*
* @since 4.0.0
*/
public abstract | provides |
java | micronaut-projects__micronaut-core | test-suite/src/test/java/io/micronaut/docs/factories/Engine.java | {
"start": 656,
"end": 712
} | interface ____ {
String start();
}
// end::class[] | Engine |
java | google__truth | extensions/proto/src/main/java/com/google/common/truth/extensions/proto/FieldScopeImpl.java | {
"start": 1991,
"end": 9198
} | class ____ extends FieldScope {
//////////////////////////////////////////////////////////////////////////////////////////////////
// AutoValue methods.
//////////////////////////////////////////////////////////////////////////////////////////////////
private static FieldScope create(
FieldScopeLogic logic,
Function<? super Optional<Descriptor>, String> usingCorrespondenceStringFunction) {
return new AutoValue_FieldScopeImpl(logic, usingCorrespondenceStringFunction);
}
@Override
abstract FieldScopeLogic logic();
abstract Function<? super Optional<Descriptor>, String> usingCorrespondenceStringFunction();
//////////////////////////////////////////////////////////////////////////////////////////////////
// Instantiation methods.
//////////////////////////////////////////////////////////////////////////////////////////////////
static FieldScope createFromSetFields(
Message message, TypeRegistry typeRegistry, ExtensionRegistry extensionRegistry) {
return create(
FieldScopeLogic.partialScope(message, typeRegistry, extensionRegistry),
Functions.constant(
String.format(
"FieldScopes.fromSetFields({%s})", TextFormat.printer().printToString(message))));
}
static FieldScope createFromSetFields(
Iterable<? extends Message> messages,
TypeRegistry typeRegistry,
ExtensionRegistry extensionRegistry) {
if (emptyOrAllNull(messages)) {
return create(FieldScopeLogic.none(), Functions.constant("FieldScopes.fromSetFields([])"));
}
Optional<Descriptor> optDescriptor = FieldScopeUtil.getSingleDescriptor(messages);
checkArgument(
optDescriptor.isPresent(),
"Cannot create scope from messages with different descriptors: %s",
getDescriptors(messages));
return create(
FieldScopeLogic.partialScope(
messages, optDescriptor.get(), typeRegistry, extensionRegistry),
Functions.constant(String.format("FieldScopes.fromSetFields(%s)", formatList(messages))));
}
static FieldScope createIgnoringFields(Iterable<Integer> fieldNumbers) {
return create(
FieldScopeLogic.all().ignoringFields(fieldNumbers),
FieldScopeUtil.fieldNumbersFunction("FieldScopes.ignoringFields(%s)", fieldNumbers));
}
static FieldScope createIgnoringFieldDescriptors(Iterable<FieldDescriptor> fieldDescriptors) {
return create(
FieldScopeLogic.all().ignoringFieldDescriptors(fieldDescriptors),
Functions.constant(
String.format("FieldScopes.ignoringFieldDescriptors(%s)", join(fieldDescriptors))));
}
static FieldScope createAllowingFields(Iterable<Integer> fieldNumbers) {
return create(
FieldScopeLogic.none().allowingFields(fieldNumbers),
FieldScopeUtil.fieldNumbersFunction("FieldScopes.allowingFields(%s)", fieldNumbers));
}
static FieldScope createAllowingFieldDescriptors(Iterable<FieldDescriptor> fieldDescriptors) {
return create(
FieldScopeLogic.none().allowingFieldDescriptors(fieldDescriptors),
Functions.constant(
String.format("FieldScopes.allowingFieldDescriptors(%s)", join(fieldDescriptors))));
}
private static final FieldScope ALL =
create(FieldScopeLogic.all(), Functions.constant("FieldScopes.all()"));
private static final FieldScope NONE =
create(FieldScopeLogic.none(), Functions.constant("FieldScopes.none()"));
static FieldScope all() {
return ALL;
}
static FieldScope none() {
return NONE;
}
private static boolean emptyOrAllNull(Iterable<?> iterable) {
for (Object o : iterable) {
if (o != null) {
return false;
}
}
return true;
}
//////////////////////////////////////////////////////////////////////////////////////////////////
// Delegation methods.
//////////////////////////////////////////////////////////////////////////////////////////////////
@Override
String usingCorrespondenceString(Optional<Descriptor> descriptor) {
return usingCorrespondenceStringFunction().apply(descriptor);
}
@Override
public final FieldScope ignoringFields(int firstFieldNumber, int... rest) {
return ignoringFields(asList(firstFieldNumber, rest));
}
@Override
public final FieldScope ignoringFields(Iterable<Integer> fieldNumbers) {
return create(
logic().ignoringFields(fieldNumbers),
addUsingCorrespondenceFieldNumbersString(".ignoringFields(%s)", fieldNumbers));
}
@Override
public final FieldScope ignoringFieldDescriptors(
FieldDescriptor firstFieldDescriptor, FieldDescriptor... rest) {
return ignoringFieldDescriptors(asList(firstFieldDescriptor, rest));
}
@Override
public final FieldScope ignoringFieldDescriptors(Iterable<FieldDescriptor> fieldDescriptors) {
return create(
logic().ignoringFieldDescriptors(fieldDescriptors),
addUsingCorrespondenceFieldDescriptorsString(
".ignoringFieldDescriptors(%s)", fieldDescriptors));
}
@Override
public final FieldScope allowingFields(int firstFieldNumber, int... rest) {
return allowingFields(asList(firstFieldNumber, rest));
}
@Override
public final FieldScope allowingFields(Iterable<Integer> fieldNumbers) {
return create(
logic().allowingFields(fieldNumbers),
addUsingCorrespondenceFieldNumbersString(".allowingFields(%s)", fieldNumbers));
}
@Override
public final FieldScope allowingFieldDescriptors(
FieldDescriptor firstFieldDescriptor, FieldDescriptor... rest) {
return allowingFieldDescriptors(asList(firstFieldDescriptor, rest));
}
@Override
public final FieldScope allowingFieldDescriptors(Iterable<FieldDescriptor> fieldDescriptors) {
return create(
logic().allowingFieldDescriptors(fieldDescriptors),
addUsingCorrespondenceFieldDescriptorsString(
".allowingFieldDescriptors(%s)", fieldDescriptors));
}
private Function<Optional<Descriptor>, String> addUsingCorrespondenceFieldNumbersString(
String fmt, Iterable<Integer> fieldNumbers) {
return FieldScopeUtil.concat(
usingCorrespondenceStringFunction(),
FieldScopeUtil.fieldNumbersFunction(fmt, fieldNumbers));
}
private Function<Optional<Descriptor>, String> addUsingCorrespondenceFieldDescriptorsString(
String fmt, Iterable<FieldDescriptor> fieldDescriptors) {
return FieldScopeUtil.concat(
usingCorrespondenceStringFunction(),
Functions.constant(String.format(fmt, join(fieldDescriptors))));
}
private static Iterable<String> getDescriptors(Iterable<? extends Message> messages) {
List<String> descriptors = new ArrayList<>();
for (Message message : messages) {
descriptors.add(message == null ? "null" : message.getDescriptorForType().getFullName());
}
return descriptors;
}
private static String formatList(Iterable<? extends Message> messages) {
List<String> strings = new ArrayList<>();
for (Message message : messages) {
strings.add(
message == null ? "null" : "{" + TextFormat.printer().printToString(message) + "}");
}
return "[" + join(strings) + "]";
}
}
| FieldScopeImpl |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/OpensearchEndpointBuilderFactory.java | {
"start": 14253,
"end": 16768
} | interface ____
extends
EndpointProducerBuilder {
default OpensearchEndpointBuilder basic() {
return (OpensearchEndpointBuilder) this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedOpensearchEndpointBuilder lazyStartProducer(boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedOpensearchEndpointBuilder lazyStartProducer(String lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* The | AdvancedOpensearchEndpointBuilder |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/common/time/JavaDateMathParser.java | {
"start": 1358,
"end": 9509
} | class ____ implements DateMathParser {
private final String format;
private final Function<String, TemporalAccessor> parser;
private final Function<String, TemporalAccessor> roundupParser;
JavaDateMathParser(String format, Function<String, TemporalAccessor> parser, Function<String, TemporalAccessor> roundupParser) {
this.format = format;
this.parser = Objects.requireNonNull(parser);
this.roundupParser = roundupParser;
}
@Override
public Instant parse(String text, LongSupplier now, boolean roundUpProperty, ZoneId timeZone) {
Instant time;
String mathString;
if (text.startsWith("now")) {
try {
// TODO only millisecond granularity here!
time = Instant.ofEpochMilli(now.getAsLong());
} catch (Exception e) {
throw new ElasticsearchParseException("could not read the current timestamp", e);
}
mathString = text.substring("now".length());
} else {
int index = text.indexOf("||");
if (index == -1) {
return parseDateTime(text, timeZone, roundUpProperty);
}
time = parseDateTime(text.substring(0, index), timeZone, false);
mathString = text.substring(index + 2);
}
return parseMath(mathString, time, roundUpProperty, timeZone);
}
private static Instant parseMath(final String mathString, final Instant time, final boolean roundUpProperty, ZoneId timeZone)
throws ElasticsearchParseException {
if (timeZone == null) {
timeZone = ZoneOffset.UTC;
}
ZonedDateTime dateTime = ZonedDateTime.ofInstant(time, timeZone);
for (int i = 0; i < mathString.length();) {
char c = mathString.charAt(i++);
final boolean round;
final int sign;
if (c == '/') {
round = true;
sign = 1;
} else {
round = false;
if (c == '+') {
sign = 1;
} else if (c == '-') {
sign = -1;
} else {
throw new ElasticsearchParseException("operator not supported for date math [{}]", mathString);
}
}
if (i >= mathString.length()) {
throw new ElasticsearchParseException("truncated date math [{}]", mathString);
}
final int num;
if (Character.isDigit(mathString.charAt(i)) == false) {
num = 1;
} else {
int numFrom = i;
while (i < mathString.length() && Character.isDigit(mathString.charAt(i))) {
i++;
}
if (i >= mathString.length()) {
throw new ElasticsearchParseException("truncated date math [{}]", mathString);
}
num = Integer.parseInt(mathString.substring(numFrom, i));
}
if (round) {
if (num != 1) {
throw new ElasticsearchParseException("rounding `/` can only be used on single unit types [{}]", mathString);
}
}
char unit = mathString.charAt(i++);
switch (unit) {
case 'y':
if (round) {
dateTime = dateTime.withDayOfYear(1).with(LocalTime.MIN);
if (roundUpProperty) {
dateTime = dateTime.plusYears(1);
}
} else {
dateTime = dateTime.plusYears(sign * num);
}
break;
case 'M':
if (round) {
dateTime = dateTime.withDayOfMonth(1).with(LocalTime.MIN);
if (roundUpProperty) {
dateTime = dateTime.plusMonths(1);
}
} else {
dateTime = dateTime.plusMonths(sign * num);
}
break;
case 'w':
if (round) {
dateTime = dateTime.with(TemporalAdjusters.previousOrSame(DayOfWeek.MONDAY)).with(LocalTime.MIN);
if (roundUpProperty) {
dateTime = dateTime.plusWeeks(1);
}
} else {
dateTime = dateTime.plusWeeks(sign * num);
}
break;
case 'd':
if (round) {
dateTime = dateTime.with(LocalTime.MIN);
if (roundUpProperty) {
dateTime = dateTime.plusDays(1);
}
} else {
dateTime = dateTime.plusDays(sign * num);
}
break;
case 'h':
case 'H':
if (round) {
dateTime = dateTime.withMinute(0).withSecond(0).withNano(0);
if (roundUpProperty) {
dateTime = dateTime.plusHours(1);
}
} else {
dateTime = dateTime.plusHours(sign * num);
}
break;
case 'm':
if (round) {
dateTime = dateTime.withSecond(0).withNano(0);
if (roundUpProperty) {
dateTime = dateTime.plusMinutes(1);
}
} else {
dateTime = dateTime.plusMinutes(sign * num);
}
break;
case 's':
if (round) {
dateTime = dateTime.withNano(0);
if (roundUpProperty) {
dateTime = dateTime.plusSeconds(1);
}
} else {
dateTime = dateTime.plusSeconds(sign * num);
}
break;
default:
throw new ElasticsearchParseException("unit [{}] not supported for date math [{}]", unit, mathString);
}
if (round && roundUpProperty) {
// subtract 1 millisecond to get the largest inclusive value
dateTime = dateTime.minus(1, ChronoField.MILLI_OF_SECOND.getBaseUnit());
}
}
return dateTime.toInstant();
}
private Instant parseDateTime(String value, ZoneId timeZone, boolean roundUpIfNoTime) {
if (Strings.isNullOrEmpty(value)) {
throw new ElasticsearchParseException("cannot parse empty datetime");
}
Function<String, TemporalAccessor> formatter = roundUpIfNoTime ? roundupParser : this.parser;
try {
if (timeZone == null) {
return DateFormatters.from(formatter.apply(value)).toInstant();
} else {
TemporalAccessor accessor = formatter.apply(value);
// Use the offset if provided, otherwise fall back to the zone, or null.
ZoneOffset offset = TemporalQueries.offset().queryFrom(accessor);
ZoneId zoneId = offset == null ? TemporalQueries.zoneId().queryFrom(accessor) : ZoneId.ofOffset("", offset);
if (zoneId != null) {
timeZone = zoneId;
}
return DateFormatters.from(accessor).withZoneSameLocal(timeZone).toInstant();
}
} catch (IllegalArgumentException | DateTimeException e) {
throw new ElasticsearchParseException(
"failed to parse date field [{}] with format [{}]: [{}]",
e,
value,
format,
e.getMessage()
);
}
}
}
| JavaDateMathParser |
java | apache__spark | common/network-common/src/main/java/org/apache/spark/network/server/RpcHandler.java | {
"start": 1411,
"end": 5422
} | class ____ {
private static final RpcResponseCallback ONE_WAY_CALLBACK = new OneWayRpcCallback();
private static final MergedBlockMetaReqHandler NOOP_MERGED_BLOCK_META_REQ_HANDLER =
new NoopMergedBlockMetaReqHandler();
/**
* Receive a single RPC message. Any exception thrown while in this method will be sent back to
* the client in string form as a standard RPC failure.
*
* Neither this method nor #receiveStream will be called in parallel for a single
* TransportClient (i.e., channel).
*
* @param client A channel client which enables the handler to make requests back to the sender
* of this RPC. This will always be the exact same object for a particular channel.
* @param message The serialized bytes of the RPC.
* @param callback Callback which should be invoked exactly once upon success or failure of the
* RPC.
*/
public abstract void receive(
TransportClient client,
ByteBuffer message,
RpcResponseCallback callback);
/**
* Receive a single RPC message which includes data that is to be received as a stream. Any
* exception thrown while in this method will be sent back to the client in string form as a
* standard RPC failure.
*
* Neither this method nor #receive will be called in parallel for a single TransportClient
* (i.e., channel).
*
* An error while reading data from the stream
* ({@link org.apache.spark.network.client.StreamCallback#onData(String, ByteBuffer)})
* will fail the entire channel. A failure in "post-processing" the stream in
* {@link org.apache.spark.network.client.StreamCallback#onComplete(String)} will result in an
* rpcFailure, but the channel will remain active.
*
* @param client A channel client which enables the handler to make requests back to the sender
* of this RPC. This will always be the exact same object for a particular channel.
* @param messageHeader The serialized bytes of the header portion of the RPC. This is in meant
* to be relatively small, and will be buffered entirely in memory, to
* facilitate how the streaming portion should be received.
* @param callback Callback which should be invoked exactly once upon success or failure of the
* RPC.
* @return a StreamCallback for handling the accompanying streaming data
*/
public StreamCallbackWithID receiveStream(
TransportClient client,
ByteBuffer messageHeader,
RpcResponseCallback callback) {
throw new UnsupportedOperationException();
}
/**
* Returns the StreamManager which contains the state about which streams are currently being
* fetched by a TransportClient.
*/
public abstract StreamManager getStreamManager();
/**
* Receives an RPC message that does not expect a reply. The default implementation will
* call "{@link #receive(TransportClient, ByteBuffer, RpcResponseCallback)}" and log a warning if
* any of the callback methods are called.
*
* @param client A channel client which enables the handler to make requests back to the sender
* of this RPC. This will always be the exact same object for a particular channel.
* @param message The serialized bytes of the RPC.
*/
public void receive(TransportClient client, ByteBuffer message) {
receive(client, message, ONE_WAY_CALLBACK);
}
public MergedBlockMetaReqHandler getMergedBlockMetaReqHandler() {
return NOOP_MERGED_BLOCK_META_REQ_HANDLER;
}
/**
* Invoked when the channel associated with the given client is active.
*/
public void channelActive(TransportClient client) { }
/**
* Invoked when the channel associated with the given client is inactive.
* No further requests will come from this client.
*/
public void channelInactive(TransportClient client) { }
public void exceptionCaught(Throwable cause, TransportClient client) { }
private static | RpcHandler |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/streaming/api/operators/co/KeyedCoProcessOperatorTest.java | {
"start": 19616,
"end": 20606
} | class ____
extends KeyedCoProcessFunction<String, Integer, String, String> {
private static final long serialVersionUID = 1L;
@Override
public void processElement1(Integer value, Context ctx, Collector<String> out)
throws Exception {
out.collect("INPUT1:" + value);
ctx.timerService().registerProcessingTimeTimer(5);
}
@Override
public void processElement2(String value, Context ctx, Collector<String> out)
throws Exception {
out.collect("INPUT2:" + value);
ctx.timerService().registerProcessingTimeTimer(6);
}
@Override
public void onTimer(long timestamp, OnTimerContext ctx, Collector<String> out)
throws Exception {
assertThat(ctx.timeDomain()).isEqualTo(TimeDomain.PROCESSING_TIME);
out.collect("" + 1777);
}
}
private static | ProcessingTimeTriggeringProcessFunction |
java | spring-projects__spring-boot | module/spring-boot-actuator/src/test/java/org/springframework/boot/actuate/endpoint/web/annotation/ServletEndpointDiscovererTests.java | {
"start": 9149,
"end": 9311
} | class ____ implements Supplier<EndpointServlet> {
@Override
public @Nullable EndpointServlet get() {
return null;
}
}
}
| TestServletEndpointSupplierOfNull |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/checkpoint/TestingRetrievableStateStorageHelper.java | {
"start": 1447,
"end": 2038
} | class ____<T extends Serializable>
implements RetrievableStateHandle<T> {
private static final long serialVersionUID = 137053380713794300L;
private final T state;
private TestingRetrievableStateHandle(T state) {
this.state = state;
}
@Override
public T retrieveState() {
return state;
}
@Override
public void discardState() {
// no op
}
@Override
public long getStateSize() {
return 0;
}
}
}
| TestingRetrievableStateHandle |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/pool/dbcp/TestIdleForKylin.java | {
"start": 921,
"end": 3924
} | class ____ extends TestCase {
public void test_idle() throws Exception {
MockDriver driver = MockDriver.instance;
// BasicDataSource dataSource = new BasicDataSource();
DruidDataSource dataSource = new DruidDataSource();
dataSource.setUrl("jdbc:mock:xxx");
dataSource.setDriverClassName("com.alibaba.druid.mock.MockDriver");
dataSource.setInitialSize(1);
dataSource.setMaxActive(10);
dataSource.setMaxIdle(10);
dataSource.setMinIdle(0);
dataSource.setMinEvictableIdleTimeMillis(50000 * 1);
dataSource.setTimeBetweenEvictionRunsMillis(500);
dataSource.setTestWhileIdle(true);
dataSource.setTestOnBorrow(false);
dataSource.setValidationQuery("SELECT 1");
{
Connection conn = dataSource.getConnection();
// Assert.assertEquals(dataSource.getInitialSize(), driver.getConnections().size());
System.out.println("raw size : " + driver.getConnections().size());
PreparedStatement stmt = conn.prepareStatement("SELECT 1");
ResultSet rs = stmt.executeQuery();
rs.close();
stmt.close();
conn.close();
System.out.println("raw size : " + driver.getConnections().size());
}
{
Connection conn = dataSource.getConnection();
// Assert.assertEquals(dataSource.getInitialSize(), driver.getConnections().size());
System.out.println("raw size : " + driver.getConnections().size());
conn.close();
System.out.println("raw size : " + driver.getConnections().size());
}
{
int count = 4;
Connection[] connections = new Connection[4];
for (int i = 0; i < count; ++i) {
connections[i] = dataSource.getConnection();
}
System.out.println("raw size : " + driver.getConnections().size());
for (int i = 0; i < count; ++i) {
connections[i].close();
}
System.out.println("raw size : " + driver.getConnections().size());
System.out.println("----------sleep for evict");
Thread.sleep(dataSource.getMinEvictableIdleTimeMillis() * 2);
System.out.println("raw size : " + driver.getConnections().size());
}
System.out.println("----------raw close all connection");
for (MockConnection rawConn : driver.getConnections()) {
rawConn.close();
}
Thread.sleep(dataSource.getMinEvictableIdleTimeMillis() * 2);
System.out.println("raw size : " + driver.getConnections().size());
{
Connection conn = dataSource.getConnection();
System.out.println("raw size : " + driver.getConnections().size());
conn.close();
System.out.println("raw size : " + driver.getConnections().size());
}
dataSource.close();
}
}
| TestIdleForKylin |
java | apache__camel | components/camel-tika/src/test/java/org/apache/camel/component/tika/TikaEmptyConfig.java | {
"start": 1027,
"end": 1221
} | class ____ extends TikaConfig {
public TikaEmptyConfig() throws TikaException, IOException, SAXException {
super(new File("src/test/resources/tika-empty.xml"));
}
}
| TikaEmptyConfig |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/submitted/unknownobject/UnknownObjectTest.java | {
"start": 1135,
"end": 1962
} | class ____ {
private static SqlSessionFactory sqlSessionFactory;
@BeforeAll
static void setUp() throws Exception {
try (
Reader reader = Resources.getResourceAsReader("org/apache/ibatis/submitted/unknownobject/mybatis-config.xml")) {
sqlSessionFactory = new SqlSessionFactoryBuilder().build(reader);
}
BaseDataTest.runScript(sqlSessionFactory.getConfiguration().getEnvironment().getDataSource(),
"org/apache/ibatis/submitted/unknownobject/CreateDB.sql");
}
@Test
void shouldFailBecauseThereIsAPropertyWithoutTypeHandler() throws Exception {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
Mapper mapper = sqlSession.getMapper(Mapper.class);
Assertions.assertThrows(PersistenceException.class, () -> mapper.getUser(1));
}
}
}
| UnknownObjectTest |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java | {
"start": 3001,
"end": 13735
} | class ____ extends AllocationDecider {
public static final String NAME = "awareness";
public static final Setting<List<String>> CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING = Setting.stringListSetting(
"cluster.routing.allocation.awareness.attributes",
Property.Dynamic,
Property.NodeScope
);
private static final String FORCE_GROUP_SETTING_PREFIX = "cluster.routing.allocation.awareness.force.";
public static final Setting<Settings> CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP_SETTING = Setting.groupSetting(
FORCE_GROUP_SETTING_PREFIX,
AwarenessAllocationDecider::validateForceAwarenessSettings,
Property.Dynamic,
Property.NodeScope
);
private volatile List<String> awarenessAttributes;
private volatile Map<String, List<String>> forcedAwarenessAttributes;
public AwarenessAllocationDecider(Settings settings, ClusterSettings clusterSettings) {
this.awarenessAttributes = CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING.get(settings);
clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING, this::setAwarenessAttributes);
setForcedAwarenessAttributes(CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP_SETTING.get(settings));
clusterSettings.addSettingsUpdateConsumer(
CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP_SETTING,
this::setForcedAwarenessAttributes
);
}
private void setForcedAwarenessAttributes(Settings forceSettings) {
Map<String, List<String>> forcedAwarenessAttributes = new HashMap<>();
Map<String, Settings> forceGroups = forceSettings.getAsGroups();
for (Map.Entry<String, Settings> entry : forceGroups.entrySet()) {
List<String> aValues = entry.getValue().getAsList("values");
if (aValues.size() > 0) {
forcedAwarenessAttributes.put(entry.getKey(), aValues);
}
}
this.forcedAwarenessAttributes = forcedAwarenessAttributes;
}
private void setAwarenessAttributes(List<String> awarenessAttributes) {
this.awarenessAttributes = awarenessAttributes;
}
@Override
public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) {
final IndexMetadata indexMetadata = allocation.metadata().indexMetadata(shardRouting.index());
return underCapacity(indexMetadata, shardRouting, node, allocation, true);
}
@Override
public Decision canForceAllocateDuringReplace(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) {
// We need to meet the criteria for shard awareness even during a replacement so that all
// copies of a shard do not get allocated to the same host/rack/AZ, so this explicitly
// checks the awareness 'canAllocate' to ensure we don't violate that constraint.
return canAllocate(shardRouting, node, allocation);
}
@Override
public Decision canRemain(IndexMetadata indexMetadata, ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) {
return underCapacity(indexMetadata, shardRouting, node, allocation, false);
}
private static final Decision YES_NOT_ENABLED = Decision.single(
Decision.Type.YES,
NAME,
"allocation awareness is not enabled, set cluster setting ["
+ CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING.getKey()
+ "] to enable it"
);
private static final Decision YES_AUTO_EXPAND_ALL = Decision.single(
Decision.Type.YES,
NAME,
"allocation awareness is ignored, this index is set to auto-expand to all nodes"
);
private static final Decision YES_ALL_MET = Decision.single(Decision.Type.YES, NAME, "node meets all awareness attribute requirements");
private Decision underCapacity(
IndexMetadata indexMetadata,
ShardRouting shardRouting,
RoutingNode node,
RoutingAllocation allocation,
boolean moveToNode
) {
if (awarenessAttributes.isEmpty()) {
return YES_NOT_ENABLED;
}
final boolean debug = allocation.debugDecision();
if (indexMetadata.getAutoExpandReplicas().expandToAllNodes()) {
return YES_AUTO_EXPAND_ALL;
}
final int shardCount = indexMetadata.getNumberOfReplicas() + 1; // 1 for primary
for (String awarenessAttribute : awarenessAttributes) {
// the node the shard exists on must be associated with an awareness attribute
if (node.node().getAttributes().containsKey(awarenessAttribute) == false) {
return debug ? debugNoMissingAttribute(awarenessAttribute, awarenessAttributes) : Decision.NO;
}
final Set<String> actualAttributeValues = allocation.routingNodes().getAttributeValues(awarenessAttribute);
final String targetAttributeValue = node.node().getAttributes().get(awarenessAttribute);
assert targetAttributeValue != null : "attribute [" + awarenessAttribute + "] missing on " + node.node();
assert actualAttributeValues.contains(targetAttributeValue)
: "attribute [" + awarenessAttribute + "] on " + node.node() + " is not in " + actualAttributeValues;
int shardsForTargetAttributeValue = 0;
// Will be the count of shards on nodes with attribute `awarenessAttribute` matching the one on `node`.
for (ShardRouting assignedShard : allocation.routingNodes().assignedShards(shardRouting.shardId())) {
if (assignedShard.started() || assignedShard.initializing()) {
// Note: this also counts relocation targets as that will be the new location of the shard.
// Relocation sources should not be counted as the shard is moving away
final RoutingNode assignedNode = allocation.routingNodes().node(assignedShard.currentNodeId());
if (targetAttributeValue.equals(assignedNode.node().getAttributes().get(awarenessAttribute))) {
shardsForTargetAttributeValue += 1;
}
}
}
if (moveToNode) {
if (shardRouting.assignedToNode()) {
final RoutingNode currentNode = allocation.routingNodes()
.node(shardRouting.relocating() ? shardRouting.relocatingNodeId() : shardRouting.currentNodeId());
if (targetAttributeValue.equals(currentNode.node().getAttributes().get(awarenessAttribute)) == false) {
shardsForTargetAttributeValue += 1;
} // else this shard is already on a node in the same zone as the target node, so moving it doesn't change the count
} else {
shardsForTargetAttributeValue += 1;
}
}
final List<String> forcedValues = forcedAwarenessAttributes.get(awarenessAttribute);
final int valueCount = forcedValues == null
? actualAttributeValues.size()
: Math.toIntExact(Stream.concat(actualAttributeValues.stream(), forcedValues.stream()).distinct().count());
final int maximumShardsPerAttributeValue = (shardCount + valueCount - 1) / valueCount; // ceil(shardCount/valueCount)
if (shardsForTargetAttributeValue > maximumShardsPerAttributeValue) {
return debug
? debugNoTooManyCopies(
shardCount,
awarenessAttribute,
node.node().getAttributes().get(awarenessAttribute),
valueCount,
actualAttributeValues.stream().sorted().collect(toList()),
forcedValues == null ? null : forcedValues.stream().sorted().collect(toList()),
shardsForTargetAttributeValue,
maximumShardsPerAttributeValue
)
: Decision.NO;
}
}
return YES_ALL_MET;
}
private static Decision debugNoTooManyCopies(
int shardCount,
String attributeName,
String attributeValue,
int numberOfAttributes,
List<String> realAttributes,
List<String> forcedAttributes,
int actualShardCount,
int maximumShardCount
) {
return Decision.single(
Decision.Type.NO,
NAME,
"there are [%d] copies of this shard and [%d] values for attribute [%s] (%s from nodes in the cluster and %s) so there "
+ "may be at most [%d] copies of this shard allocated to nodes with each value, but (including this copy) there "
+ "would be [%d] copies allocated to nodes with [node.attr.%s: %s]",
shardCount,
numberOfAttributes,
attributeName,
realAttributes,
forcedAttributes == null ? "no forced awareness" : forcedAttributes + " from forced awareness",
maximumShardCount,
actualShardCount,
attributeName,
attributeValue
);
}
private static Decision debugNoMissingAttribute(String awarenessAttribute, List<String> awarenessAttributes) {
return Decision.single(
Decision.Type.NO,
NAME,
"node does not contain the awareness attribute [%s]; required attributes cluster setting [%s=%s]",
awarenessAttribute,
CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING.getKey(),
Strings.collectionToCommaDelimitedString(awarenessAttributes)
);
}
private static void validateForceAwarenessSettings(Settings forceSettings) {
final Map<String, Settings> settingGroups;
try {
settingGroups = forceSettings.getAsGroups();
} catch (SettingsException e) {
throw new IllegalArgumentException("invalid forced awareness settings with prefix [" + FORCE_GROUP_SETTING_PREFIX + "]", e);
}
for (Map.Entry<String, Settings> entry : settingGroups.entrySet()) {
final Optional<String> notValues = entry.getValue().keySet().stream().filter(s -> s.equals("values") == false).findFirst();
if (notValues.isPresent()) {
throw new IllegalArgumentException(
"invalid forced awareness setting [" + FORCE_GROUP_SETTING_PREFIX + entry.getKey() + "." + notValues.get() + "]"
);
}
}
}
}
| AwarenessAllocationDecider |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/runtime/io/PushingAsyncDataInput.java | {
"start": 2103,
"end": 2247
} | interface ____ in emitting the next element from data input.
*
* @param <T> The type encapsulated with the stream record.
*/
| used |
java | apache__dubbo | dubbo-demo/dubbo-demo-spring-boot/dubbo-demo-spring-boot-servlet/src/main/java/org/apache/dubbo/springboot/demo/servlet/ApiConsumer.java | {
"start": 1328,
"end": 5305
} | class ____ {
private static final Logger logger = LoggerFactory.getLogger(ApiConsumer.class);
public static void main(String[] args) throws InterruptedException {
ReferenceConfig<GreeterService> referenceConfig = new ReferenceConfig<>();
referenceConfig.setInterface(GreeterService.class);
referenceConfig.setCheck(false);
referenceConfig.setProtocol(CommonConstants.TRIPLE);
referenceConfig.setLazy(true);
referenceConfig.setTimeout(100000);
DubboBootstrap bootstrap = DubboBootstrap.getInstance();
bootstrap
.application(new ApplicationConfig("dubbo-demo-triple-api-consumer"))
.registry(new RegistryConfig("zookeeper://127.0.0.1:2181"))
.protocol(new ProtocolConfig(CommonConstants.TRIPLE, -1))
.reference(referenceConfig)
.start();
GreeterService greeterService = referenceConfig.get();
logger.info("dubbo referenceConfig started");
logger.info("Call sayHello");
HelloReply reply = greeterService.sayHello(buildRequest("triple"));
logger.info("sayHello reply: {}", reply.getMessage());
logger.info("Call sayHelloAsync");
CompletableFuture<String> sayHelloAsync = greeterService.sayHelloAsync("triple");
sayHelloAsync.thenAccept(value -> logger.info("sayHelloAsync reply: {}", value));
StreamObserver<HelloReply> responseObserver = new StreamObserver<HelloReply>() {
@Override
public void onNext(HelloReply reply) {
logger.info("sayHelloServerStream onNext: {}", reply.getMessage());
}
@Override
public void onError(Throwable t) {
logger.info("sayHelloServerStream onError: {}", t.getMessage());
}
@Override
public void onCompleted() {
logger.info("sayHelloServerStream onCompleted");
}
};
logger.info("Call sayHelloServerStream");
greeterService.sayHelloServerStream(buildRequest("triple"), responseObserver);
StreamObserver<HelloReply> sayHelloServerStreamNoParameterResponseObserver = new StreamObserver<HelloReply>() {
@Override
public void onNext(HelloReply reply) {
logger.info("sayHelloServerStreamNoParameter onNext: {}", reply.getMessage());
}
@Override
public void onError(Throwable t) {
logger.info("sayHelloServerStreamNoParameter onError: {}", t.getMessage());
}
@Override
public void onCompleted() {
logger.info("sayHelloServerStreamNoParameter onCompleted");
}
};
greeterService.sayHelloServerStreamNoParameter(sayHelloServerStreamNoParameterResponseObserver);
StreamObserver<HelloReply> biResponseObserver = new StreamObserver<HelloReply>() {
@Override
public void onNext(HelloReply reply) {
logger.info("biRequestObserver onNext: {}", reply.getMessage());
}
@Override
public void onError(Throwable t) {
logger.info("biResponseObserver onError: {}", t.getMessage());
}
@Override
public void onCompleted() {
logger.info("biResponseObserver onCompleted");
}
};
logger.info("Call biRequestObserver");
StreamObserver<HelloRequest> biRequestObserver = greeterService.sayHelloBiStream(biResponseObserver);
for (int i = 0; i < 5; i++) {
biRequestObserver.onNext(buildRequest("triple" + i));
}
biRequestObserver.onCompleted();
Thread.sleep(2000);
}
private static HelloRequest buildRequest(String name) {
HelloRequest request = new HelloRequest();
request.setName(name);
return request;
}
}
| ApiConsumer |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.