language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/clients/consumer/internals/ShareConsumeRequestManager.java
|
{
"start": 60693,
"end": 72771
}
|
/**
 * Per-node state for one ShareAcknowledge request: the acknowledgements still to be sent,
 * those currently in flight, and those queued for retry, plus retry/backoff and deadline
 * behaviour inherited from {@link TimedRequestState}.
 * <p>
 * NOTE(review): this is an inner class; {@code groupId}, {@code shareFetchConfig},
 * {@code metadata}, {@code nodesWithPendingRequests}, {@code time} and {@code log} are
 * members of the enclosing request manager and are not visible in this chunk.
 */
class ____ extends TimedRequestState {
/**
 * The share session handler.
 */
private final ShareSessionHandler sessionHandler;
/**
 * The node to send the request to.
 */
private final int nodeId;
/**
 * The map of acknowledgements to send
 */
private final Map<TopicIdPartition, Acknowledgements> acknowledgementsToSend;
/**
 * The map of acknowledgements to be retried in the next attempt.
 */
private final Map<TopicIdPartition, Acknowledgements> incompleteAcknowledgements;
/**
 * The in-flight acknowledgements
 */
private final Map<TopicIdPartition, Acknowledgements> inFlightAcknowledgements;
/**
 * This handles completing a future when all results are known.
 */
private final ResultHandler resultHandler;
/**
 * Indicates whether this was part of commitAsync, commitSync or close operation.
 */
private final AcknowledgeRequestType requestType;
/**
 * Boolean to indicate if the request has been processed.
 * <p>
 * Set to true once we process the response and do not retry the request.
 * <p>
 * Initialized to false every time we build a request.
 */
private boolean isProcessed;
/**
 * Timeout in milliseconds indicating how long the request would be retried if it fails with a retriable exception.
 */
private final long timeoutMs;
/**
 * Creates the request state for a single target node.
 *
 * @param acknowledgementsMap the acknowledgements this request should deliver; stored by
 *                            reference and cleared once moved in-flight by {@link #buildRequest()}
 */
AcknowledgeRequestState(LogContext logContext,
String owner,
long deadlineMs,
long retryBackoffMs,
long retryBackoffMaxMs,
ShareSessionHandler sessionHandler,
int nodeId,
Map<TopicIdPartition, Acknowledgements> acknowledgementsMap,
ResultHandler resultHandler,
AcknowledgeRequestType acknowledgeRequestType) {
super(logContext, owner, retryBackoffMs, retryBackoffMaxMs, deadlineTimer(time, deadlineMs));
this.sessionHandler = sessionHandler;
this.nodeId = nodeId;
this.acknowledgementsToSend = acknowledgementsMap;
this.resultHandler = resultHandler;
this.inFlightAcknowledgements = new HashMap<>();
this.incompleteAcknowledgements = new HashMap<>();
this.requestType = acknowledgeRequestType;
this.isProcessed = false;
// Capture the remaining time at construction so the timer can be restarted with the
// same budget when this state is reused (see maybeResetTimerAndRequestState()).
this.timeoutMs = remainingMs();
}
/**
 * Builds the next ShareAcknowledge request for this node and moves the chosen
 * acknowledgements into the in-flight map.
 *
 * @return the unsent request, or {@code null} when the share session was not found
 *         (the handler returns no builder) or the target node is unknown in metadata
 */
UnsentRequest buildRequest() {
// If this is the closing request, close the share session by setting the final epoch
if (isCloseRequest()) {
sessionHandler.notifyClose();
}
// Retries take priority: send the incomplete acknowledgements if any exist,
// otherwise send the fresh ones.
Map<TopicIdPartition, Acknowledgements> finalAcknowledgementsToSend = new HashMap<>(
incompleteAcknowledgements.isEmpty() ? acknowledgementsToSend : incompleteAcknowledgements);
for (Map.Entry<TopicIdPartition, Acknowledgements> entry : finalAcknowledgementsToSend.entrySet()) {
sessionHandler.addPartitionToFetch(entry.getKey(), entry.getValue());
}
ShareAcknowledgeRequest.Builder requestBuilder = sessionHandler.newShareAcknowledgeBuilder(groupId, shareFetchConfig);
// Each (re)build starts a new attempt whose response has not been processed yet.
isProcessed = false;
Node nodeToSend = metadata.fetch().nodeById(nodeId);
if (requestBuilder == null) {
// No builder means the share session is gone; fail all pending acknowledgements.
handleAcknowledgeShareSessionNotFound();
return null;
} else if (nodeToSend != null) {
nodesWithPendingRequests.add(nodeId);
log.trace("Building acknowledgements to send : {}", finalAcknowledgementsToSend);
// Transfer ownership of the acknowledgements to the in-flight map and clear the
// map they came from so they cannot be sent twice.
inFlightAcknowledgements.putAll(finalAcknowledgementsToSend);
if (incompleteAcknowledgements.isEmpty()) {
acknowledgementsToSend.clear();
} else {
incompleteAcknowledgements.clear();
}
UnsentRequest unsentRequest = new UnsentRequest(requestBuilder, Optional.of(nodeToSend));
BiConsumer<ClientResponse, Throwable> responseHandler = (clientResponse, error) -> {
if (error != null) {
handleShareAcknowledgeFailure(nodeToSend, requestBuilder.data(), this, error, unsentRequest.handler().completionTimeMs());
} else {
handleShareAcknowledgeSuccess(nodeToSend, requestBuilder.data(), this, clientResponse, unsentRequest.handler().completionTimeMs());
}
};
return unsentRequest.whenComplete(responseHandler);
}
// Target node not present in the current metadata; nothing was sent or moved in-flight.
return null;
}
/**
 * Returns the number of in-flight acknowledgements for the partition, or 0 if none.
 */
int getInFlightAcknowledgementsCount(TopicIdPartition tip) {
Acknowledgements acks = inFlightAcknowledgements.get(tip);
if (acks == null) {
return 0;
} else {
return acks.size();
}
}
/**
 * Returns the number of acknowledgements queued for retry for the partition, or 0 if none.
 */
int getIncompleteAcknowledgementsCount(TopicIdPartition tip) {
Acknowledgements acks = incompleteAcknowledgements.get(tip);
if (acks == null) {
return 0;
} else {
return acks.size();
}
}
/**
 * Returns the number of acknowledgements not yet sent for the partition, or 0 if none.
 */
int getAcknowledgementsToSendCount(TopicIdPartition tip) {
Acknowledgements acks = acknowledgementsToSend.get(tip);
if (acks == null) {
return 0;
} else {
return acks.size();
}
}
/**
 * True when no acknowledgements remain in any of the three maps.
 */
boolean isEmpty() {
return acknowledgementsToSend.isEmpty() &&
incompleteAcknowledgements.isEmpty() &&
inFlightAcknowledgements.isEmpty();
}
/**
 * Resets the timer with the configured timeout and resets the RequestState.
 * This is only applicable for commitAsync() requests as these states could be re-used.
 */
void maybeResetTimerAndRequestState() {
if (requestType == AcknowledgeRequestType.COMMIT_ASYNC) {
resetTimeout(timeoutMs);
reset();
}
}
/**
 * Sets the error code in the acknowledgements and sends the response
 * through a background event.
 */
void handleAcknowledgeErrorCode(TopicIdPartition tip, Errors acknowledgeErrorCode, boolean isRenewAck, Optional<Integer> acquisitionLockTimeoutMs) {
Acknowledgements acks = inFlightAcknowledgements.remove(tip);
if (acks != null) {
acks.complete(acknowledgeErrorCode.exception());
resultHandler.complete(tip, acks, requestType, isRenewAck, acquisitionLockTimeoutMs);
} else {
// The broker responded for a partition we never sent in this request.
log.error("Invalid partition {} received in ShareAcknowledge response", tip);
}
}
/**
 * Sets the error code for the acknowledgements which were timed out
 * after some retries.
 */
void handleAcknowledgeTimedOut(TopicIdPartition tip) {
Acknowledgements acks = incompleteAcknowledgements.get(tip);
if (acks != null) {
acks.complete(Errors.REQUEST_TIMED_OUT.exception());
// We do not know whether this is a renew ack, but handling the error as if it were, will ensure
// that we do not leave dangling acknowledgements
resultHandler.complete(tip, acks, requestType, true, Optional.empty());
}
}
/**
 * Set the error code for all remaining acknowledgements in the event
 * of a share session not found error which prevents the remaining acknowledgements from
 * being sent.
 */
void handleAcknowledgeShareSessionNotFound() {
Map<TopicIdPartition, Acknowledgements> acknowledgementsMapToClear =
incompleteAcknowledgements.isEmpty() ? acknowledgementsToSend : incompleteAcknowledgements;
acknowledgementsMapToClear.forEach((tip, acks) -> {
if (acks != null) {
acks.complete(Errors.SHARE_SESSION_NOT_FOUND.exception());
}
// We do not know whether this is a renew ack, but handling the error as if it were, will ensure
// that we do not leave dangling acknowledgements
resultHandler.complete(tip, acks, requestType, true, Optional.empty());
});
acknowledgementsMapToClear.clear();
processingComplete();
}
/**
 * Returns the share session handler for this node.
 */
ShareSessionHandler sessionHandler() {
return sessionHandler;
}
/**
 * Finalizes processing of a response: fails any leftover in-flight acknowledgements,
 * completes the overall result if nothing remains, marks this state processed, and
 * (for commitAsync only) resets the timer and request state for reuse.
 */
void processingComplete() {
// If there are any pending inFlightAcknowledgements after processing the response, we fail them with an InvalidRecordStateException.
processPendingInFlightAcknowledgements(new InvalidRecordStateException(INVALID_RESPONSE));
resultHandler.completeIfEmpty();
isProcessed = true;
maybeResetTimerAndRequestState();
}
/**
 * Fail any existing in-flight acknowledgements with the given exception and clear the map.
 * We also send a background event to update {@link org.apache.kafka.clients.consumer.AcknowledgementCommitCallback }
 */
private void processPendingInFlightAcknowledgements(KafkaException exception) {
if (!inFlightAcknowledgements.isEmpty()) {
inFlightAcknowledgements.forEach((partition, acknowledgements) -> {
acknowledgements.complete(exception);
// We do not know whether this is a renew ack, but handling the error as if it were, will ensure
// that we do not leave dangling acknowledgements
resultHandler.complete(partition, acknowledgements, requestType, true, Optional.empty());
});
inFlightAcknowledgements.clear();
}
}
/**
 * Moves all the in-flight acknowledgements to incomplete acknowledgements to retry
 * in the next request.
 */
void moveAllToIncompleteAcks() {
incompleteAcknowledgements.putAll(inFlightAcknowledgements);
inFlightAcknowledgements.clear();
}
/**
 * The request only counts as expired once at least one attempt has actually been made.
 */
boolean maybeExpire() {
return numAttempts > 0 && isExpired();
}
/**
 * Moves the in-flight acknowledgements for a given partition to incomplete acknowledgements to retry
 * in the next request.
 *
 * @param tip The TopicIdPartition for which we move the acknowledgements.
 * @return True if the partition was sent in the request.
 * <p> False if the partition was not part of the request, we log an error and ignore such partitions. </p>
 */
public boolean moveToIncompleteAcks(TopicIdPartition tip) {
Acknowledgements acks = inFlightAcknowledgements.remove(tip);
if (acks != null) {
incompleteAcknowledgements.put(tip, acks);
return true;
} else {
log.error("Invalid partition {} received in ShareAcknowledge response", tip);
return false;
}
}
/**
 * True if this request state was created for the close operation.
 */
public boolean isCloseRequest() {
return requestType == AcknowledgeRequestType.CLOSE;
}
}
/**
* Sends a ShareAcknowledgementEvent event to the application when it is done
* processing all the remaining acknowledgement request states.
* Also manages completing the future for synchronous acknowledgement commit by counting
* down the results as they are known and completing the future at the end.
*/
|
AcknowledgeRequestState
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/DuplicateBranchesTest.java
|
{
"start": 1857,
"end": 2608
}
|
// NOTE(review): this class appears to be embedded test input for an Error Prone
// DuplicateBranches refactoring test (see the .doTest() call below the chunk).
// The exact syntactic shape of each method (ternary vs. if/else) is the test payload;
// do not "simplify" these bodies.
class ____ {
// Ternary form: selects b when a is true, otherwise c.
String f(boolean a, String b, String c) {
return a ? b : c;
}
// Equivalent if/else form of f.
String g(boolean a, String b, String c) {
if (a) {
return b;
} else {
return c;
}
}
// Early-return form: b when a is true, empty string otherwise.
String h(boolean a, String b, String c) {
if (a) {
return b;
}
return "";
}
}
""")
.doTest();
}
@Test
public void statementRefactoring() {
BugCheckerRefactoringTestHelper.newInstance(DuplicateBranches.class, getClass())
.addInputLines(
"Test.java",
"""
|
Test
|
java
|
grpc__grpc-java
|
okhttp/third_party/okhttp/main/java/io/grpc/okhttp/internal/framed/FrameReader.java
|
{
"start": 1040,
"end": 6246
}
|
/**
 * Callback interface invoked for each frame read from the peer. Method names mirror
 * the SPDY/3 and HTTP/2 frame types they correspond to.
 */
interface ____ {
/**
 * A data frame was received for {@code streamId}; {@code length} payload bytes
 * (of {@code paddedLength} on the wire) are readable from {@code source}.
 *
 * @param inFinished true if the sender will not send further frames on this stream.
 */
void data(boolean inFinished, int streamId, BufferedSource source, int length, int paddedLength)
throws IOException;
/**
 * Create or update incoming headers, creating the corresponding streams
 * if necessary. Frames that trigger this are SPDY SYN_STREAM, HEADERS, and
 * SYN_REPLY, and HTTP/2 HEADERS and PUSH_PROMISE.
 *
 * @param outFinished true if the receiver should not send further frames.
 * @param inFinished true if the sender will not send further frames.
 * @param streamId the stream owning these headers.
 * @param associatedStreamId the stream that triggered the sender to create
 * this stream.
 */
void headers(boolean outFinished, boolean inFinished, int streamId, int associatedStreamId,
List<io.grpc.okhttp.internal.framed.Header> headerBlock, HeadersMode headersMode);
/**
 * The peer abnormally terminated {@code streamId} with the given error code.
 */
void rstStream(int streamId, io.grpc.okhttp.internal.framed.ErrorCode errorCode);
/**
 * The peer sent its connection settings.
 *
 * @param clearPrevious true if previously-received settings should be discarded
 * before applying {@code settings}.
 */
void settings(boolean clearPrevious, io.grpc.okhttp.internal.framed.Settings settings);
/** HTTP/2 only. */
void ackSettings();
/**
 * Read a connection-level ping from the peer. {@code ack} indicates this
 * is a reply. Payload parameters are different between SPDY/3 and HTTP/2.
 * <p>
 * In SPDY/3, only the first {@code payload1} parameter is set. If the
 * reader is a client, it is an unsigned even number. Likewise, a server
 * will receive an odd number.
 * <p>
 * In HTTP/2, both {@code payload1} and {@code payload2} parameters are
 * set. The data is opaque binary, and there are no rules on the content.
 */
void ping(boolean ack, int payload1, int payload2);
/**
 * The peer tells us to stop creating streams. It is safe to replay
 * streams with {@code ID > lastGoodStreamId} on a new connection. In-
 * flight streams with {@code ID <= lastGoodStreamId} can only be replayed
 * on a new connection if they are idempotent.
 *
 * @param lastGoodStreamId the last stream ID the peer processed before
 * sending this message. If {@code lastGoodStreamId} is zero, the peer
 * processed no frames.
 * @param errorCode reason for closing the connection.
 * @param debugData only valid for HTTP/2; opaque debug data to send.
 */
void goAway(int lastGoodStreamId, io.grpc.okhttp.internal.framed.ErrorCode errorCode, ByteString debugData);
/**
 * Notifies that an additional {@code windowSizeIncrement} bytes can be
 * sent on {@code streamId}, or the connection if {@code streamId} is zero.
 */
void windowUpdate(int streamId, long windowSizeIncrement);
/**
 * Called when reading a headers or priority frame. This may be used to
 * change the stream's weight from the default (16) to a new value.
 *
 * @param streamId stream which has a priority change.
 * @param streamDependency the stream ID this stream is dependent on.
 * @param weight relative proportion of priority in [1..256].
 * @param exclusive inserts this stream ID as the sole child of
 * {@code streamDependency}.
 */
void priority(int streamId, int streamDependency, int weight, boolean exclusive);
/**
 * HTTP/2 only. Receive a push promise header block.
 * <p>
 * A push promise contains all the headers that pertain to a server-initiated
 * request, and a {@code promisedStreamId} to which response frames will be
 * delivered. Push promise frames are sent as a part of the response to
 * {@code streamId}.
 *
 * @param streamId client-initiated stream ID. Must be an odd number.
 * @param promisedStreamId server-initiated stream ID. Must be an even
 * number.
 * @param requestHeaders minimally includes {@code :method}, {@code :scheme},
 * {@code :authority}, and {@code :path}.
 */
void pushPromise(int streamId, int promisedStreamId, List<io.grpc.okhttp.internal.framed.Header> requestHeaders)
throws IOException;
/**
 * HTTP/2 only. Expresses that resources for the connection or a client-
 * initiated stream are available from a different network location or
 * protocol configuration.
 *
 * <p>See <a href="http://tools.ietf.org/html/draft-ietf-httpbis-alt-svc-01">alt-svc</a>
 *
 * @param streamId when a client-initiated stream ID (odd number), the
 * origin of this alternate service is the origin of the stream. When
 * zero, the origin is specified in the {@code origin} parameter.
 * @param origin when present, the
 * <a href="http://tools.ietf.org/html/rfc6454">origin</a> is typically
 * represented as a combination of scheme, host and port. When empty,
 * the origin is that of the {@code streamId}.
 * @param protocol an ALPN protocol, such as {@code h2}.
 * @param host an IP address or hostname.
 * @param port the IP port associated with the service.
 * @param maxAge time in seconds that this alternative is considered fresh.
 */
void alternateService(int streamId, String origin, ByteString protocol, String host, int port,
long maxAge);
}
}
|
Handler
|
java
|
spring-projects__spring-framework
|
spring-orm/src/main/java/org/springframework/orm/jpa/hibernate/SharedSessionCreator.java
|
{
"start": 4045,
"end": 6349
}
|
/**
 * Invocation handler for a shared Session/EntityManager proxy: identity-style methods
 * and factory accessors are answered locally without touching a real session; every
 * other call is delegated to the current session obtained from the supplier.
 */
class ____ implements InvocationHandler {
private final SessionFactory sessionFactory;
private final Supplier<Object> currentSessionSupplier;
public SharedSessionInvocationHandler(SessionFactory sessionFactory, Supplier<Object> currentSessionSupplier) {
this.sessionFactory = sessionFactory;
this.currentSessionSupplier = currentSessionSupplier;
}
@Override
public @Nullable Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
// Arrow cases that return handle the call entirely on the proxy. Cases that fall
// out of the switch without returning ("unwrap" with a non-matching target) drop
// through to the delegation code below.
switch (method.getName()) {
case "equals" -> {
// Only consider equal when proxies are identical.
return (proxy == args[0]);
}
case "hashCode" -> {
// Use hashCode of EntityManager proxy.
return hashCode();
}
case "toString" -> {
// Deliver toString without touching a target EntityManager.
return "Shared Session proxy for target factory [" + this.sessionFactory + "]";
}
case "getSessionFactory", "getEntityManagerFactory" -> {
// JPA 2.0: return EntityManagerFactory without creating an EntityManager.
return this.sessionFactory;
}
case "getCriteriaBuilder", "getMetamodel" -> {
// JPA 2.0: return EntityManagerFactory's CriteriaBuilder/Metamodel (avoid creation of EntityManager)
try {
return SessionFactory.class.getMethod(method.getName()).invoke(this.sessionFactory);
}
catch (InvocationTargetException ex) {
// Unwrap the reflection wrapper so callers see the original exception.
throw ex.getTargetException();
}
}
case "unwrap" -> {
// JPA 2.0: handle unwrap method - could be a proxy match.
// If the target class does not match, fall through and delegate to the session.
Class<?> targetClass = (Class<?>) args[0];
if (targetClass != null && targetClass.isInstance(proxy)) {
return proxy;
}
}
case "isOpen" -> {
// Handle isOpen method: always return true.
return true;
}
case "close" -> {
// Handle close method: suppress, not valid.
return null;
}
case "getTransaction" -> {
throw new IllegalStateException(
"Not allowed to create transaction on shared EntityManager - " +
"use Spring transactions or EJB CMT instead");
}
}
// Regular method on the current session: obtain it lazily and delegate.
Object target = this.currentSessionSupplier.get();
try {
return method.invoke(target, args);
}
catch (InvocationTargetException ex) {
// Unwrap the reflection wrapper so callers see the original exception.
throw ex.getTargetException();
}
}
}
}
|
SharedSessionInvocationHandler
|
java
|
spring-projects__spring-framework
|
spring-core/src/main/java/org/springframework/core/io/support/ResourcePropertySource.java
|
{
"start": 6737,
"end": 7117
}
|
class ____ of the resource plus its identity hash code.
* @see org.springframework.core.io.Resource#getDescription()
*/
/**
 * Derives a property-source name from the given resource: the resource's description
 * when it has text, otherwise "SimpleClassName@identityHashCode".
 */
private static String getNameForResource(Resource resource) {
    final String description = resource.getDescription();
    return StringUtils.hasText(description)
            ? description
            : resource.getClass().getSimpleName() + "@" + System.identityHashCode(resource);
}
}
|
name
|
java
|
hibernate__hibernate-orm
|
tooling/metamodel-generator/src/main/java/org/hibernate/processor/annotation/NameMetaAttribute.java
|
{
"start": 403,
"end": 1944
}
|
/**
 * A meta-attribute that exposes only a string-valued name constant: it declares a
 * {@code public static final String} (or a bare field in Jakarta Data style) whose
 * value is the attribute name. Typed-attribute operations are unsupported.
 */
class ____ implements MetaAttribute {
    private final Metamodel annotationMetaEntity;
    private final String name;
    private final String prefix;

    public NameMetaAttribute(Metamodel annotationMetaEntity, String name, String prefix) {
        this.annotationMetaEntity = annotationMetaEntity;
        this.name = name;
        this.prefix = prefix;
    }

    @Override
    public boolean hasTypedAttribute() {
        // Only the name constant is generated; no typed attribute exists.
        return false;
    }

    @Override
    public boolean hasStringAttribute() {
        return true;
    }

    @Override
    public String getAttributeDeclarationString() {
        throw new UnsupportedOperationException("operation not supported");
    }

    @Override
    public String getAttributeNameDeclarationString() {
        // Jakarta Data style omits the modifiers; classic style declares a constant.
        final String modifiers =
                annotationMetaEntity.isJakartaDataStyle() ? "" : "public static final ";
        // importType() may register an import on the entity, so it must still be called.
        return modifiers
                + annotationMetaEntity.importType(STRING)
                + ' '
                + prefix
                + fieldName()
                + " = \""
                + name
                + "\";";
    }

    String fieldName() {
        // A leading '#' marks the name; strip it before converting to a field name.
        final String bareName = name.charAt(0) == '#' ? name.substring(1) : name;
        return nameToFieldName(bareName);
    }

    @Override
    public String getMetaType() {
        throw new UnsupportedOperationException("operation not supported");
    }

    @Override
    public String getPropertyName() {
        return name;
    }

    @Override
    public String getTypeDeclaration() {
        return "java.lang.String";
    }

    @Override
    public Metamodel getHostingEntity() {
        return annotationMetaEntity;
    }
}
|
NameMetaAttribute
|
java
|
google__auto
|
value/src/test/java/com/google/auto/value/processor/AutoValueCompilationTest.java
|
{
"start": 75718,
"end": 76002
}
|
enum ____ {}",
"}");
Compilation compilation =
javac()
.withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor())
.compile(javaFileObject);
assertThat(compilation)
.hadErrorContaining("can only apply to a
|
Builder
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfig.java
|
{
"start": 1119,
"end": 9073
}
|
/**
 * Inference configuration for named entity recognition (NER). Classification labels,
 * when supplied, must use IOB-style tags and include the outside label "O"; windowing
 * (a tokenization span) is rejected.
 */
class ____ implements NlpConfig {

    /**
     * Returns true when the label is a valid IOB tag: it starts (case-insensitively)
     * with "I-", "B-", "I_", "B_", or "O".
     */
    public static boolean validIOBTag(String label) {
        // Normalize once instead of recomputing the upper-case form for every prefix check.
        final String upper = label.toUpperCase(Locale.ROOT);
        return upper.startsWith("I-")
            || upper.startsWith("B-")
            || upper.startsWith("I_")
            || upper.startsWith("B_")
            || upper.startsWith("O");
    }

    public static final String NAME = "ner";

    public static NerConfig fromXContentStrict(XContentParser parser) {
        return STRICT_PARSER.apply(parser, null);
    }

    public static NerConfig fromXContentLenient(XContentParser parser) {
        return LENIENT_PARSER.apply(parser, null);
    }

    private static final ConstructingObjectParser<NerConfig, Void> STRICT_PARSER = createParser(false);
    private static final ConstructingObjectParser<NerConfig, Void> LENIENT_PARSER = createParser(true);

    @SuppressWarnings({ "unchecked" })
    private static ConstructingObjectParser<NerConfig, Void> createParser(boolean ignoreUnknownFields) {
        ConstructingObjectParser<NerConfig, Void> parser = new ConstructingObjectParser<>(
            NAME,
            ignoreUnknownFields,
            a -> new NerConfig((VocabularyConfig) a[0], (Tokenization) a[1], (List<String>) a[2], (String) a[3])
        );
        parser.declareObject(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> {
            // The vocabulary may only appear in the lenient (internal) form of the config.
            if (ignoreUnknownFields == false) {
                throw ExceptionsHelper.badRequestException(
                    "illegal setting [{}] on inference model creation",
                    VOCABULARY.getPreferredName()
                );
            }
            return VocabularyConfig.fromXContentLenient(p);
        }, VOCABULARY);
        parser.declareNamedObject(
            ConstructingObjectParser.optionalConstructorArg(),
            (p, c, n) -> p.namedObject(Tokenization.class, n, ignoreUnknownFields),
            TOKENIZATION
        );
        parser.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), CLASSIFICATION_LABELS);
        parser.declareString(ConstructingObjectParser.optionalConstructorArg(), RESULTS_FIELD);
        return parser;
    }

    private final VocabularyConfig vocabularyConfig;
    private final Tokenization tokenization;
    private final List<String> classificationLabels;
    private final String resultsField;

    /**
     * Builds a validated NER config. Null arguments fall back to defaults
     * (native-definition-store vocabulary, default tokenization, empty labels).
     *
     * @throws org.elasticsearch.ElasticsearchStatusException (bad request) when labels
     *         are not IOB tags, when the outside label "O" is missing, or when the
     *         tokenization configures a span
     */
    public NerConfig(
        @Nullable VocabularyConfig vocabularyConfig,
        @Nullable Tokenization tokenization,
        @Nullable List<String> classificationLabels,
        @Nullable String resultsField
    ) {
        this.vocabularyConfig = Optional.ofNullable(vocabularyConfig)
            .orElse(new VocabularyConfig(InferenceIndexConstants.nativeDefinitionStore()));
        this.tokenization = tokenization == null ? Tokenization.createDefault() : tokenization;
        this.classificationLabels = classificationLabels == null ? Collections.emptyList() : classificationLabels;
        if (this.classificationLabels.isEmpty() == false) {
            List<String> badLabels = this.classificationLabels.stream().filter(l -> validIOBTag(l) == false).toList();
            if (badLabels.isEmpty() == false) {
                throw ExceptionsHelper.badRequestException(
                    "[{}] only allows IOB tokenization tagging for classification labels; provided {}",
                    NAME,
                    badLabels
                );
            }
            // The outside label "O" is mandatory whenever labels are supplied.
            if (this.classificationLabels.stream().noneMatch(l -> l.toUpperCase(Locale.ROOT).equals("O"))) {
                throw ExceptionsHelper.badRequestException(
                    "[{}] only allows IOB tokenization tagging for classification labels; missing outside label [O]",
                    NAME
                );
            }
        }
        this.resultsField = resultsField;
        // NER does not support windowing long sequences; -1 means "no span configured".
        if (this.tokenization.span != -1) {
            throw ExceptionsHelper.badRequestException(
                "[{}] does not support windowing long text sequences; configured span [{}]",
                NAME,
                this.tokenization.span
            );
        }
    }

    /** Stream (wire) constructor; mirrors {@link #writeTo(StreamOutput)}. */
    public NerConfig(StreamInput in) throws IOException {
        vocabularyConfig = new VocabularyConfig(in);
        tokenization = in.readNamedWriteable(Tokenization.class);
        classificationLabels = in.readStringCollectionAsList();
        resultsField = in.readOptionalString();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        vocabularyConfig.writeTo(out);
        out.writeNamedWriteable(tokenization);
        out.writeStringCollection(classificationLabels);
        out.writeOptionalString(resultsField);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(VOCABULARY.getPreferredName(), vocabularyConfig, params);
        NamedXContentObjectHelper.writeNamedObject(builder, params, TOKENIZATION.getPreferredName(), tokenization);
        // Optional fields are only emitted when present, keeping the output minimal.
        if (classificationLabels.isEmpty() == false) {
            builder.field(CLASSIFICATION_LABELS.getPreferredName(), classificationLabels);
        }
        if (resultsField != null) {
            builder.field(RESULTS_FIELD.getPreferredName(), resultsField);
        }
        builder.endObject();
        return builder;
    }

    @Override
    public String getWriteableName() {
        return NAME;
    }

    @Override
    public boolean isTargetTypeSupported(TargetType targetType) {
        return false;
    }

    /**
     * Applies a config update, producing a new NerConfig; only NER-specific and
     * tokenization-window updates are accepted.
     */
    @Override
    public InferenceConfig apply(InferenceConfigUpdate update) {
        if (update instanceof NerConfigUpdate configUpdate) {
            return new NerConfig(
                vocabularyConfig,
                (configUpdate.getTokenizationUpdate() == null) ? tokenization : configUpdate.getTokenizationUpdate().apply(tokenization),
                classificationLabels,
                Optional.ofNullable(update.getResultsField()).orElse(resultsField)
            );
        } else if (update instanceof TokenizationConfigUpdate tokenizationUpdate) {
            var updatedTokenization = getTokenization().updateWindowSettings(tokenizationUpdate.getSpanSettings());
            return new NerConfig(this.vocabularyConfig, updatedTokenization, this.classificationLabels, this.resultsField);
        } else {
            throw incompatibleUpdateException(update.getName());
        }
    }

    @Override
    public MlConfigVersion getMinimalSupportedMlConfigVersion() {
        return MlConfigVersion.V_8_0_0;
    }

    @Override
    public TransportVersion getMinimalSupportedTransportVersion() {
        return TransportVersion.minimumCompatible();
    }

    @Override
    public String getName() {
        return NAME;
    }

    @Override
    public boolean equals(Object o) {
        if (o == this) return true;
        if (o == null || getClass() != o.getClass()) return false;
        NerConfig that = (NerConfig) o;
        return Objects.equals(vocabularyConfig, that.vocabularyConfig)
            && Objects.equals(tokenization, that.tokenization)
            && Objects.equals(classificationLabels, that.classificationLabels)
            && Objects.equals(resultsField, that.resultsField);
    }

    @Override
    public int hashCode() {
        return Objects.hash(vocabularyConfig, tokenization, classificationLabels, resultsField);
    }

    @Override
    public VocabularyConfig getVocabularyConfig() {
        return vocabularyConfig;
    }

    @Override
    public Tokenization getTokenization() {
        return tokenization;
    }

    public List<String> getClassificationLabels() {
        return classificationLabels;
    }

    @Override
    public String getResultsField() {
        return resultsField;
    }

    @Override
    public boolean isAllocateOnly() {
        return true;
    }
}
|
NerConfig
|
java
|
spring-projects__spring-framework
|
spring-core/src/main/java/org/springframework/core/io/support/PathMatchingResourcePatternResolver.java
|
{
"start": 12549,
"end": 12873
}
|
class ____.
* @see DefaultResourceLoader
*/
public PathMatchingResourcePatternResolver() {
// Default construction: resolve resources through a plain DefaultResourceLoader.
this.resourceLoader = new DefaultResourceLoader();
}
/**
* Create a {@code PathMatchingResourcePatternResolver} with the supplied
* {@link ResourceLoader}.
* <p>ClassLoader access will happen via the thread context
|
loader
|
java
|
alibaba__nacos
|
config/src/main/java/com/alibaba/nacos/config/server/aspect/RequestLogAspect.java
|
{
"start": 1917,
"end": 8233
}
|
/**
 * AOP aspect that wraps config publish/get/delete and change-listen operations to
 * record request metrics and write a structured client-request log line for each call.
 */
class ____ {
private static final String PUBLISH_CONFIG = "execution(* com.alibaba.nacos.config.server.service.ConfigOperationService.publishConfig(..))";
private static final String GET_CONFIG = "execution(* com.alibaba.nacos.config.server.service.query.ConfigQueryChainService.handle(..))";
private static final String DELETE_CONFIG = "execution(* com.alibaba.nacos.config.server.service.ConfigOperationService.deleteConfig(..))";
private static final String CONFIG_CHANGE_LISTEN_RPC =
"execution(* com.alibaba.nacos.core.remote.RequestHandler.handleRequest(..)) "
+ " && target(com.alibaba.nacos.config.server.remote.ConfigChangeBatchListenRequestHandler) && args(request,meta)";
/**
 * Intercepts configuration publishing operations, records metrics, and logs client requests.
 */
@Around(PUBLISH_CONFIG)
public Object interfacePublishConfig(ProceedingJoinPoint pjp) throws Throwable {
// Pointcut guarantees args[0] is the ConfigForm and args[1] the ConfigRequestInfo.
Object[] args = pjp.getArgs();
ConfigForm configForm = (ConfigForm) args[0];
ConfigRequestInfo configRequestInfo = (ConfigRequestInfo) args[1];
String dataId = configForm.getDataId();
String group = configForm.getGroup();
String namespaceId = configForm.getNamespaceId();
String content = configForm.getContent();
String requestIp = configRequestInfo.getSrcIp();
String md5 = content == null ? null : MD5Utils.md5Hex(content, Constants.ENCODE);
MetricsMonitor.getPublishMonitor().incrementAndGet();
// rtHolder receives the measured round-trip time from logClientRequest.
AtomicLong rtHolder = new AtomicLong();
Object retVal = logClientRequest("publish", pjp, dataId, group, namespaceId, requestIp, md5, rtHolder);
MetricsMonitor.getWriteConfigRtTimer().record(rtHolder.get(), TimeUnit.MILLISECONDS);
return retVal;
}
/**
 * Intercepts configuration get operations, records metrics, and logs client requests.
 */
@Around(GET_CONFIG)
public Object interfaceGetConfig(ProceedingJoinPoint pjp) throws Throwable {
Object[] args = pjp.getArgs();
ConfigQueryChainRequest chainRequest = (ConfigQueryChainRequest) args[0];
String dataId = chainRequest.getDataId();
String group = chainRequest.getGroup();
String tenant = chainRequest.getTenant();
String requestIp = null;
// The client IP is only available when app labels were attached to the request.
if (chainRequest.getAppLabels() != null) {
requestIp = chainRequest.getAppLabels().getOrDefault(BetaGrayRule.CLIENT_IP_LABEL, null);
}
String groupKey = GroupKey2.getKey(dataId, group, tenant);
String md5 = ConfigCacheService.getContentMd5(groupKey);
MetricsMonitor.getConfigMonitor().incrementAndGet();
AtomicLong rtHolder = new AtomicLong();
Object retVal = logClientRequest("get", pjp, dataId, group, tenant, requestIp, md5, rtHolder);
MetricsMonitor.getReadConfigRtTimer().record(rtHolder.get(), TimeUnit.MILLISECONDS);
return retVal;
}
/**
 * Deletes a configuration entry and logs the operation.
 */
@Around(DELETE_CONFIG)
public Object interfaceRemoveConfig(ProceedingJoinPoint pjp) throws Throwable {
Object[] args = pjp.getArgs();
String dataId = (String) args[0];
String group = (String) args[1];
String tenant = (String) args[2];
String clientIp = (String) args[4];
String groupKey = GroupKey2.getKey(dataId, group, tenant);
String md5 = ConfigCacheService.getContentMd5(groupKey);
MetricsMonitor.getConfigMonitor().incrementAndGet();
AtomicLong rtHolder = new AtomicLong();
Object retVal = logClientRequest("delete", pjp, dataId, group, tenant, clientIp, md5, rtHolder);
// NOTE(review): the delete path records into getReadConfigRtTimer(); the publish path
// uses getWriteConfigRtTimer(). Verify whether delete should use the write timer.
MetricsMonitor.getReadConfigRtTimer().record(rtHolder.get(), TimeUnit.MILLISECONDS);
return retVal;
}
/**
 * Client api request log rt | status | requestIp | opType | dataId | group | datumId | md5.
 */
private Object logClientRequest(String requestType, ProceedingJoinPoint pjp, String dataId, String group,
String tenant, String requestIp, String md5, AtomicLong rtHolder) throws Throwable {
long startTime = System.currentTimeMillis();
try {
Object retVal = pjp.proceed();
long rt = System.currentTimeMillis() - startTime;
if (rtHolder != null) {
rtHolder.set(rt);
}
LogUtil.CLIENT_LOG.info(
"opType: {} | rt: {}ms | status: success | requestIp: {} | dataId: {} | group: {} | tenant: {} | md5: {}",
requestType, rt, requestIp, dataId, group, tenant, md5);
return retVal;
} catch (Throwable e) {
// Log the failure with its round-trip time, then rethrow so callers still see the error.
long rt = System.currentTimeMillis() - startTime;
if (rtHolder != null) {
rtHolder.set(rt);
}
LogUtil.CLIENT_LOG.error(
"opType: {} | rt: {}ms | status: failure | requestIp: {} | dataId: {} | group: {} | tenant: {} | md5: {}",
requestType, rt, requestIp, dataId, group, tenant, md5);
throw e;
}
}
/**
 * Handles configuration change listening requests.
 */
@Around(CONFIG_CHANGE_LISTEN_RPC)
public Object interfaceListenConfigRpc(ProceedingJoinPoint pjp, ConfigBatchListenRequest request, RequestMeta meta)
throws Throwable {
MetricsMonitor.getConfigMonitor().incrementAndGet();
final String requestIp = meta.getClientIp();
String appName = request.getHeader(RequestUtil.CLIENT_APPNAME_HEADER);
final long st = System.currentTimeMillis();
// No try/catch here: if proceed() throws, no log line is written for this request.
Response retVal = (Response) pjp.proceed();
final long rt = System.currentTimeMillis() - st;
LogUtil.CLIENT_LOG.info(
"opType: {} | rt: {}ms | status: {} | requestIp: {} | listenSize: {} | listenOrCancel: {} | appName: {}",
"listen", rt, retVal.isSuccess() ? retVal.getResultCode() : retVal.getErrorCode(), requestIp,
request.getConfigListenContexts().size(), request.isListen(), appName);
return retVal;
}
}
|
RequestLogAspect
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/value/spi/CustomErroneousEnumMappingStrategy.java
|
{
"start": 482,
"end": 1711
}
|
class ____ extends DefaultEnumMappingStrategy implements EnumMappingStrategy {
@Override
public String getDefaultNullEnumConstant(TypeElement enumType) {
if ( isCustomEnum( enumType ) ) {
return "INCORRECT";
}
return super.getDefaultNullEnumConstant( enumType );
}
@Override
public String getEnumConstant(TypeElement enumType, String enumConstant) {
if ( isCustomEnum( enumType ) ) {
return getCustomEnumConstant( enumConstant );
}
return super.getEnumConstant( enumType, enumConstant );
}
protected String getCustomEnumConstant(String enumConstant) {
if ( "UNRECOGNIZED".equals( enumConstant ) || "UNSPECIFIED".equals( enumConstant ) ) {
return MappingConstantsGem.NULL;
}
return enumConstant.replace( "CUSTOM_", "" );
}
protected boolean isCustomEnum(TypeElement enumType) {
for ( TypeMirror enumTypeInterface : enumType.getInterfaces() ) {
if ( typeUtils.asElement( enumTypeInterface ).getSimpleName().contentEquals( "CustomEnumMarker" ) ) {
return true;
}
}
return false;
}
}
|
CustomErroneousEnumMappingStrategy
|
java
|
apache__avro
|
lang/java/ipc/src/test/java/org/apache/avro/SimpleException.java
|
{
"start": 993,
"end": 1124
}
|
class ____ extends Exception {
SimpleException() {
}
SimpleException(String message) {
super(message);
}
}
|
SimpleException
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/processor/FileRollbackOnCompletionTest.java
|
{
"start": 2292,
"end": 4859
}
|
class ____ {
public String createMail(String order) {
return "Order confirmed: " + order;
}
public void sendMail(String body, @Header("to") String to) {
// simulate fatal error if we refer to a special no
if (to.equals("FATAL")) {
throw new IllegalArgumentException("Simulated fatal error");
}
}
}
@Test
public void testOk() throws Exception {
template.sendBodyAndHeader("direct:confirm", "bumper", "to", "someone@somewhere.org");
try (Stream<Path> list = Files.list(testDirectory())) {
long files = list.count();
assertEquals(1, files, "There should be one file");
}
}
@Test
public void testRollback() throws Exception {
try {
template.sendBodyAndHeader("direct:confirm", "bumper", "to", "FATAL");
fail("Should have thrown an exception");
} catch (CamelExecutionException e) {
assertIsInstanceOf(IllegalArgumentException.class, e.getCause());
assertEquals("Simulated fatal error", e.getCause().getMessage());
}
oneExchangeDone.matchesWaitTime();
// onCompletion is async so we gotta wait a bit for the file to be
// deleted
assertTrue(LATCH.await(5, TimeUnit.SECONDS), "Should countdown the latch");
try (Stream<Path> list = Files.list(testDirectory())) {
long files = list.count();
assertEquals(0, files, "There should be no files");
}
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:confirm")
// use a route scoped onCompletion to be executed when the
// Exchange failed
.onCompletion().onFailureOnly()
// and call the onFailure method on this bean
.bean(FileRollback.class, "onFailure")
// must use end to denote the end of the onCompletion route
.end()
// here starts the regular route
.bean(OrderService.class, "createMail").log("Saving mail backup file")
.to(fileUri()).log("Trying to send mail to ${header.to}")
.bean(OrderService.class, "sendMail").log("Mail send to ${header.to}");
}
};
}
}
|
OrderService
|
java
|
apache__camel
|
components/camel-vertx/camel-vertx/src/main/java/org/apache/camel/component/vertx/VertxProducer.java
|
{
"start": 1332,
"end": 3110
}
|
class ____ extends DefaultAsyncProducer {
private static final Logger LOG = LoggerFactory.getLogger(VertxProducer.class);
public VertxProducer(VertxEndpoint endpoint) {
super(endpoint);
}
@Override
public VertxEndpoint getEndpoint() {
return (VertxEndpoint) super.getEndpoint();
}
@Override
public boolean process(Exchange exchange, AsyncCallback callback) {
EventBus eventBus = getEndpoint().getEventBus();
if (eventBus == null) {
exchange.setException(new IllegalStateException("EventBus is not started or not configured"));
callback.done(true);
return true;
}
String address = getEndpoint().getAddress();
boolean reply = ExchangeHelper.isOutCapable(exchange);
boolean pubSub = getEndpoint().isPubSub();
Object body = exchange.getMessage().getBody();
if (body != null) {
if (reply) {
LOG.debug("Sending to: {} with body: {}", address, body);
eventBus.request(address, body, new CamelReplyHandler(exchange, callback));
return false;
} else {
if (pubSub) {
LOG.debug("Publishing to: {} with body: {}", address, body);
eventBus.publish(address, body);
} else {
LOG.debug("Sending to: {} with body: {}", address, body);
eventBus.send(address, body);
}
callback.done(true);
return true;
}
}
exchange.setException(new InvalidPayloadRuntimeException(exchange, String.class));
callback.done(true);
return true;
}
private static final
|
VertxProducer
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/tofix/ObjectIdWithBuilder1496Test.java
|
{
"start": 587,
"end": 804
}
|
class ____ extends DatabindTestUtil {
@JsonIdentityInfo(generator = ObjectIdGenerators.PropertyGenerator.class, property = "id")
@JsonDeserialize(builder = POJOBuilder.class)
static
|
ObjectIdWithBuilder1496Test
|
java
|
apache__kafka
|
storage/src/test/java/org/apache/kafka/storage/internals/log/LogSegmentsTest.java
|
{
"start": 1801,
"end": 12111
}
|
class ____ {
private final TopicPartition topicPartition = new TopicPartition("topic", 0);
private static File logDir = null;
/* create a segment with the given base offset */
private static LogSegment createSegment(Long offset) throws IOException {
return spy(LogTestUtils.createSegment(offset, logDir, 10, Time.SYSTEM));
}
@BeforeEach
public void setup() {
logDir = TestUtils.tempDirectory();
}
@AfterEach
public void teardown() throws IOException {
Utils.delete(logDir);
}
private void assertEntry(LogSegment segment, Map.Entry<Long, LogSegment> tested) {
assertEquals(segment.baseOffset(), tested.getKey());
assertEquals(segment, tested.getValue());
}
@Test
public void testBasicOperations() throws IOException {
long offset1 = 40;
long offset2 = 80;
try (LogSegments segments = new LogSegments(topicPartition);
LogSegment seg1 = createSegment(offset1);
LogSegment seg2 = createSegment(offset2);
LogSegment seg3 = createSegment(offset1)) {
assertTrue(segments.isEmpty());
assertFalse(segments.nonEmpty());
// Add seg1
segments.add(seg1);
assertFalse(segments.isEmpty());
assertTrue(segments.nonEmpty());
assertEquals(1, segments.numberOfSegments());
assertTrue(segments.contains(offset1));
assertEquals(Optional.of(seg1), segments.get(offset1));
// Add seg2
segments.add(seg2);
assertFalse(segments.isEmpty());
assertTrue(segments.nonEmpty());
assertEquals(2, segments.numberOfSegments());
assertTrue(segments.contains(offset2));
assertEquals(Optional.of(seg2), segments.get(offset2));
// Replace seg1 with seg3
segments.add(seg3);
assertFalse(segments.isEmpty());
assertTrue(segments.nonEmpty());
assertEquals(2, segments.numberOfSegments());
assertTrue(segments.contains(offset1));
assertEquals(Optional.of(seg3), segments.get(offset1));
// Remove seg2
segments.remove(offset2);
assertFalse(segments.isEmpty());
assertTrue(segments.nonEmpty());
assertEquals(1, segments.numberOfSegments());
assertFalse(segments.contains(offset2));
// Clear all segments including seg3
segments.clear();
assertTrue(segments.isEmpty());
assertFalse(segments.nonEmpty());
assertEquals(0, segments.numberOfSegments());
assertFalse(segments.contains(offset1));
}
}
@Test
public void testSegmentAccess() throws IOException {
try (LogSegments segments = new LogSegments(topicPartition)) {
long offset1 = 1;
LogSegment seg1 = createSegment(offset1);
long offset2 = 2;
LogSegment seg2 = createSegment(offset2);
long offset3 = 3;
LogSegment seg3 = createSegment(offset3);
long offset4 = 4;
LogSegment seg4 = createSegment(offset4);
// Test firstEntry, lastEntry
List<LogSegment> segmentList = List.of(seg1, seg2, seg3, seg4);
for (LogSegment seg : segmentList) {
segments.add(seg);
assertEntry(seg1, segments.firstEntry().get());
assertEquals(Optional.of(seg1), segments.firstSegment());
assertEquals(OptionalLong.of(1), segments.firstSegmentBaseOffset());
assertEntry(seg, segments.lastEntry().get());
assertEquals(Optional.of(seg), segments.lastSegment());
}
// Test baseOffsets
assertEquals(List.of(offset1, offset2, offset3, offset4), segments.baseOffsets());
// Test values
assertEquals(List.of(seg1, seg2, seg3, seg4), new ArrayList<>(segments.values()));
// Test values(to, from)
assertThrows(IllegalArgumentException.class, () -> segments.values(2, 1));
assertEquals(List.of(), segments.values(1, 1));
assertEquals(List.of(seg1), new ArrayList<>(segments.values(1, 2)));
assertEquals(List.of(seg1, seg2), new ArrayList<>(segments.values(1, 3)));
assertEquals(List.of(seg1, seg2, seg3), new ArrayList<>(segments.values(1, 4)));
assertEquals(List.of(seg2, seg3), new ArrayList<>(segments.values(2, 4)));
assertEquals(List.of(seg3), new ArrayList<>(segments.values(3, 4)));
assertEquals(List.of(), new ArrayList<>(segments.values(4, 4)));
assertEquals(List.of(seg4), new ArrayList<>(segments.values(4, 5)));
// Test activeSegment
assertEquals(seg4, segments.activeSegment());
// Test nonActiveLogSegmentsFrom
assertEquals(List.of(seg2, seg3), new ArrayList<>(segments.nonActiveLogSegmentsFrom(2)));
assertEquals(List.of(), new ArrayList<>(segments.nonActiveLogSegmentsFrom(4)));
}
}
@Test
public void testClosestMatchOperations() throws IOException {
try (LogSegments segments = new LogSegments(topicPartition)) {
LogSegment seg1 = createSegment(1L);
LogSegment seg2 = createSegment(3L);
LogSegment seg3 = createSegment(5L);
LogSegment seg4 = createSegment(7L);
List.of(seg1, seg2, seg3, seg4).forEach(segments::add);
// Test floorSegment
assertEquals(Optional.of(seg1), segments.floorSegment(2));
assertEquals(Optional.of(seg2), segments.floorSegment(3));
// Test lowerSegment
assertEquals(Optional.of(seg1), segments.lowerSegment(3));
assertEquals(Optional.of(seg2), segments.lowerSegment(4));
// Test higherSegment, higherEntry
assertEquals(Optional.of(seg3), segments.higherSegment(4));
assertEntry(seg3, segments.higherEntry(4).get());
assertEquals(Optional.of(seg4), segments.higherSegment(5));
assertEntry(seg4, segments.higherEntry(5).get());
}
}
@Test
public void testHigherSegments() throws IOException {
try (LogSegments segments = new LogSegments(topicPartition)) {
LogSegment seg1 = createSegment(1L);
LogSegment seg2 = createSegment(3L);
LogSegment seg3 = createSegment(5L);
LogSegment seg4 = createSegment(7L);
LogSegment seg5 = createSegment(9L);
List.of(seg1, seg2, seg3, seg4, seg5).forEach(segments::add);
// higherSegments(0) should return all segments in order
{
final Iterator<LogSegment> iterator = segments.higherSegments(0).iterator();
List.of(seg1, seg2, seg3, seg4, seg5).forEach(segment -> {
assertTrue(iterator.hasNext());
assertEquals(segment, iterator.next());
});
assertFalse(iterator.hasNext());
}
// higherSegments(1) should return all segments in order except seg1
{
final Iterator<LogSegment> iterator = segments.higherSegments(1).iterator();
List.of(seg2, seg3, seg4, seg5).forEach(segment -> {
assertTrue(iterator.hasNext());
assertEquals(segment, iterator.next());
});
assertFalse(iterator.hasNext());
}
// higherSegments(8) should return only seg5
{
final Iterator<LogSegment> iterator = segments.higherSegments(8).iterator();
assertTrue(iterator.hasNext());
assertEquals(seg5, iterator.next());
assertFalse(iterator.hasNext());
}
// higherSegments(9) should return no segments
{
final Iterator<LogSegment> iterator = segments.higherSegments(9).iterator();
assertFalse(iterator.hasNext());
}
}
}
@Test
public void testSizeForLargeLogs() throws IOException {
try (LogSegment logSegment = mock(LogSegment.class)) {
long largeSize = (long) Integer.MAX_VALUE * 2;
when(logSegment.size()).thenReturn(Integer.MAX_VALUE);
assertEquals(Integer.MAX_VALUE, LogSegments.sizeInBytes(List.of(logSegment)));
assertEquals(largeSize, LogSegments.sizeInBytes(List.of(logSegment, logSegment)));
assertTrue(LogSegments.sizeInBytes(List.of(logSegment, logSegment)) > Integer.MAX_VALUE);
try (LogSegments logSegments = new LogSegments(topicPartition)) {
logSegments.add(logSegment);
assertEquals(Integer.MAX_VALUE, logSegments.sizeInBytes());
}
}
}
@Test
public void testUpdateDir() throws IOException {
try (LogSegment seg1 = createSegment(1L);
LogSegments segments = new LogSegments(topicPartition)) {
segments.add(seg1);
File newDir = TestUtils.tempDirectory();
segments.updateParentDir(newDir);
assertEquals(newDir, seg1.log().file().getParentFile());
assertEquals(newDir, seg1.timeIndexFile().getParentFile());
assertEquals(newDir, seg1.offsetIndexFile().getParentFile());
assertEquals(newDir, seg1.txnIndex().file().getParentFile());
Utils.delete(newDir);
}
}
@Test
public void testCloseClosesAllLogSegmentsOnExceptionWhileClosingOne() throws IOException {
LogSegment seg1 = createSegment(0L);
LogSegment seg2 = createSegment(100L);
LogSegment seg3 = createSegment(200L);
LogSegments segments = new LogSegments(topicPartition);
segments.add(seg1);
segments.add(seg2);
segments.add(seg3);
doThrow(new IOException("Failure")).when(seg2).close();
assertThrows(IOException.class, segments::close, "Expected IOException to be thrown");
verify(seg1).close();
verify(seg2).close();
verify(seg3).close();
}
}
|
LogSegmentsTest
|
java
|
apache__camel
|
components/camel-aws/camel-aws2-s3/src/test/java/org/apache/camel/component/aws2/s3/integration/S3ConsumerIncludeBodyIT.java
|
{
"start": 1336,
"end": 3367
}
|
class ____ extends Aws2S3Base {
@EndpointInject
private ProducerTemplate template;
@EndpointInject("mock:result")
private MockEndpoint result;
@Test
public void sendIn() throws Exception {
result.expectedMessageCount(3);
template.send("direct:putObject", new Processor() {
@Override
public void process(Exchange exchange) {
exchange.getIn().setHeader(AWS2S3Constants.KEY, "test.txt");
exchange.getIn().setBody("Test");
}
});
template.send("direct:putObject", new Processor() {
@Override
public void process(Exchange exchange) {
exchange.getIn().setHeader(AWS2S3Constants.KEY, "test1.txt");
exchange.getIn().setBody("Test1");
}
});
template.send("direct:putObject", new Processor() {
@Override
public void process(Exchange exchange) {
exchange.getIn().setHeader(AWS2S3Constants.KEY, "test2.txt");
exchange.getIn().setBody("Test2");
}
});
MockEndpoint.assertIsSatisfied(context);
assertEquals(3, result.getExchanges().size());
assertNotNull(result.getExchanges().get(0).getMessage().getBody());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
String awsEndpoint = "aws2-s3://" + name.get() + "?autoCreateBucket=true";
from("direct:putObject").startupOrder(1).to(awsEndpoint).to("mock:result");
from("aws2-s3://" + name.get()
+ "?moveAfterRead=true&destinationBucket=camel-kafka-connector&autoCreateBucket=true&destinationBucketPrefix=RAW(movedPrefix)&destinationBucketSuffix=RAW(movedSuffix)&includeBody=false")
.startupOrder(2).to("mock:result");
}
};
}
}
|
S3ConsumerIncludeBodyIT
|
java
|
elastic__elasticsearch
|
x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/ConnectionBuilderTests.java
|
{
"start": 867,
"end": 7240
}
|
class ____ extends SqlCliTestCase {
public void testDefaultConnection() throws Exception {
CliTerminal testTerminal = mock(CliTerminal.class);
ConnectionBuilder connectionBuilder = new ConnectionBuilder(testTerminal);
boolean binaryCommunication = random().nextBoolean();
ConnectionConfiguration con = connectionBuilder.buildConnection(null, null, binaryCommunication);
assertNull(con.authUser());
assertNull(con.authPass());
assertEquals("http://localhost:9200/", con.connectionString());
assertEquals(URI.create("http://localhost:9200/"), con.baseUri());
assertEquals(30000, con.connectTimeout());
assertEquals(60000, con.networkTimeout());
assertEquals(45000, con.pageTimeout());
assertEquals(90000, con.queryTimeout());
assertEquals(1000, con.pageSize());
assertEquals(binaryCommunication, con.binaryCommunication());
verifyNoMoreInteractions(testTerminal);
}
public void testBasicConnection() throws Exception {
CliTerminal testTerminal = mock(CliTerminal.class);
ConnectionBuilder connectionBuilder = new ConnectionBuilder(testTerminal);
ConnectionConfiguration con = buildConnection(connectionBuilder, "http://foobar:9242/", null);
assertNull(con.authUser());
assertNull(con.authPass());
assertEquals("http://foobar:9242/", con.connectionString());
assertEquals(URI.create("http://foobar:9242/"), con.baseUri());
verifyNoMoreInteractions(testTerminal);
}
public void testUserAndPasswordConnection() throws Exception {
CliTerminal testTerminal = mock(CliTerminal.class);
ConnectionBuilder connectionBuilder = new ConnectionBuilder(testTerminal);
ConnectionConfiguration con = buildConnection(connectionBuilder, "http://user:pass@foobar:9242/", null);
assertEquals("user", con.authUser());
assertEquals("pass", con.authPass());
assertEquals("http://user:pass@foobar:9242/", con.connectionString());
assertEquals(URI.create("http://foobar:9242/"), con.baseUri());
verifyNoMoreInteractions(testTerminal);
}
public void testAskUserForPassword() throws Exception {
CliTerminal testTerminal = mock(CliTerminal.class);
when(testTerminal.readPassword("password: ")).thenReturn("password");
ConnectionBuilder connectionBuilder = new ConnectionBuilder(testTerminal);
ConnectionConfiguration con = buildConnection(connectionBuilder, "http://user@foobar:9242/", null);
assertEquals("user", con.authUser());
assertEquals("password", con.authPass());
assertEquals("http://user@foobar:9242/", con.connectionString());
assertEquals(URI.create("http://foobar:9242/"), con.baseUri());
verify(testTerminal, times(1)).readPassword(any());
verifyNoMoreInteractions(testTerminal);
}
public void testAskUserForPasswordAndKeystorePassword() throws Exception {
CliTerminal testTerminal = mock(CliTerminal.class);
when(testTerminal.readPassword("keystore password: ")).thenReturn("keystore password");
when(testTerminal.readPassword("password: ")).thenReturn("password");
AtomicBoolean called = new AtomicBoolean(false);
ConnectionBuilder connectionBuilder = new ConnectionBuilder(testTerminal) {
@Override
protected void checkIfExists(String name, Path p) {
// Stubbed so we don't need permission to read the file
}
@Override
protected ConnectionConfiguration newConnectionConfiguration(URI uri, String connectionString, Properties properties) {
// Stub building the actual configuration because we don't have permission to read the keystore.
assertEquals("true", properties.get(SslConfig.SSL));
assertEquals("keystore_location", properties.get(SslConfig.SSL_KEYSTORE_LOCATION));
assertEquals("keystore password", properties.get(SslConfig.SSL_KEYSTORE_PASS));
assertEquals("keystore_location", properties.get(SslConfig.SSL_TRUSTSTORE_LOCATION));
assertEquals("keystore password", properties.get(SslConfig.SSL_TRUSTSTORE_PASS));
called.set(true);
return null;
}
};
assertNull(buildConnection(connectionBuilder, "https://user@foobar:9242/", "keystore_location"));
assertTrue(called.get());
verify(testTerminal, times(2)).readPassword(any());
verifyNoMoreInteractions(testTerminal);
}
public void testUserGaveUpOnPassword() throws Exception {
CliTerminal testTerminal = mock(CliTerminal.class);
UserException ue = new UserException(random().nextInt(), randomAlphaOfLength(5));
when(testTerminal.readPassword("password: ")).thenThrow(ue);
ConnectionBuilder connectionBuilder = new ConnectionBuilder(testTerminal);
UserException actual = expectThrows(
UserException.class,
() -> buildConnection(connectionBuilder, "http://user@foobar:9242/", null)
);
assertSame(actual, ue);
}
public void testUserGaveUpOnKeystorePassword() throws Exception {
CliTerminal testTerminal = mock(CliTerminal.class);
UserException ue = new UserException(random().nextInt(), randomAlphaOfLength(5));
when(testTerminal.readPassword("keystore password: ")).thenThrow(ue);
when(testTerminal.readPassword("password: ")).thenReturn("password");
ConnectionBuilder connectionBuilder = new ConnectionBuilder(testTerminal) {
@Override
protected void checkIfExists(String name, Path p) {
// Stubbed so we don't need permission to read the file
}
};
UserException actual = expectThrows(
UserException.class,
() -> buildConnection(connectionBuilder, "https://user@foobar:9242/", "keystore_location")
);
assertSame(actual, ue);
}
private ConnectionConfiguration buildConnection(ConnectionBuilder builder, String connectionStringArg, String keystoreLocation)
throws UserException {
return builder.buildConnection(connectionStringArg, keystoreLocation, randomBoolean());
}
}
|
ConnectionBuilderTests
|
java
|
apache__camel
|
core/camel-management/src/test/java/org/apache/camel/management/ManagedTransformerRegistryTest.java
|
{
"start": 5427,
"end": 5603
}
|
class ____ extends Transformer {
@Override
public void transform(Message message, DataType from, DataType to) {
// empty
}
}
}
|
MyTransformer
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/persistenceunit/TwoPersistenceUnits2LCDisabledEnabledTest.java
|
{
"start": 2843,
"end": 2890
}
|
class ____ {
@Id
private Long id;
}
}
|
AnEntity
|
java
|
grpc__grpc-java
|
core/src/main/java/io/grpc/internal/RetriableStream.java
|
{
"start": 50731,
"end": 51001
}
|
class ____ {
private final AtomicLong bufferUsed = new AtomicLong();
@VisibleForTesting
long addAndGet(long newBytesUsed) {
return bufferUsed.addAndGet(newBytesUsed);
}
}
/**
* Used for retry throttling.
*/
static final
|
ChannelBufferMeter
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/net/PeerServer.java
|
{
"start": 1032,
"end": 2402
}
|
interface ____ extends Closeable {
/**
* Set the receive buffer size of the PeerServer.
*
* @param size The receive buffer size.
*/
public void setReceiveBufferSize(int size) throws IOException;
/**
* Get the receive buffer size of the PeerServer.
*
* @return The receive buffer size.
*/
int getReceiveBufferSize() throws IOException;
/**
* Listens for a connection to be made to this server and accepts
* it. The method blocks until a connection is made.
*
* @exception IOException if an I/O error occurs when waiting for a
* connection.
* @exception SecurityException if a security manager exists and its
* <code>checkAccept</code> method doesn't allow the operation.
* @exception SocketTimeoutException if a timeout was previously set and
* the timeout has been reached.
*/
public Peer accept() throws IOException, SocketTimeoutException;
/**
* @return A string representation of the address we're
* listening on.
*/
public String getListeningString();
/**
* Free the resources associated with this peer server.
* This normally includes sockets, etc.
*
* @throws IOException If there is an error closing the PeerServer
*/
public void close() throws IOException;
}
|
PeerServer
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohashFromFieldDocValuesAndLiteralEvaluator.java
|
{
"start": 3472,
"end": 4279
}
|
class ____ implements EvalOperator.ExpressionEvaluator.Factory {
private final Source source;
private final EvalOperator.ExpressionEvaluator.Factory encoded;
private final int precision;
public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory encoded, int precision) {
this.source = source;
this.encoded = encoded;
this.precision = precision;
}
@Override
public StGeohashFromFieldDocValuesAndLiteralEvaluator get(DriverContext context) {
return new StGeohashFromFieldDocValuesAndLiteralEvaluator(source, encoded.get(context), precision, context);
}
@Override
public String toString() {
return "StGeohashFromFieldDocValuesAndLiteralEvaluator[" + "encoded=" + encoded + ", precision=" + precision + "]";
}
}
}
|
Factory
|
java
|
elastic__elasticsearch
|
x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/metrics/HistoBackedMaxAggregator.java
|
{
"start": 1383,
"end": 4090
}
|
class ____ extends NumericMetricsAggregator.SingleValue {
private final HistogramValuesSource.Histogram valuesSource;
final DocValueFormat formatter;
DoubleArray maxes;
public HistoBackedMaxAggregator(
String name,
ValuesSourceConfig config,
AggregationContext context,
Aggregator parent,
Map<String, Object> metadata
) throws IOException {
super(name, context, parent, metadata);
assert config.hasValues();
this.valuesSource = (HistogramValuesSource.Histogram) config.getValuesSource();
maxes = bigArrays().newDoubleArray(1, false);
maxes.fill(0, maxes.size(), Double.NEGATIVE_INFINITY);
this.formatter = config.format();
}
@Override
public ScoreMode scoreMode() {
return valuesSource.needsScores() ? ScoreMode.COMPLETE : ScoreMode.COMPLETE_NO_SCORES;
}
@Override
public LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCtx, final LeafBucketCollector sub) throws IOException {
final HistogramValues values = valuesSource.getHistogramValues(aggCtx.getLeafReaderContext());
return new LeafBucketCollectorBase(sub, values) {
@Override
public void collect(int doc, long bucket) throws IOException {
if (bucket >= maxes.size()) {
long from = maxes.size();
maxes = bigArrays().grow(maxes, bucket + 1);
maxes.fill(from, maxes.size(), Double.NEGATIVE_INFINITY);
}
if (values.advanceExact(doc)) {
final HistogramValue sketch = values.histogram();
while (sketch.next()) {
double value = sketch.value();
double max = maxes.get(bucket);
max = Math.max(max, value);
maxes.set(bucket, max);
}
}
}
};
}
@Override
public double metric(long owningBucketOrd) {
if (owningBucketOrd >= maxes.size()) {
return Double.NEGATIVE_INFINITY;
}
return maxes.get(owningBucketOrd);
}
@Override
public InternalAggregation buildAggregation(long bucket) {
if (bucket >= maxes.size()) {
return buildEmptyAggregation();
}
return new Max(name, maxes.get(bucket), formatter, metadata());
}
@Override
public InternalAggregation buildEmptyAggregation() {
return Max.createEmptyMax(name, formatter, metadata());
}
@Override
public void doClose() {
Releasables.close(maxes);
}
}
|
HistoBackedMaxAggregator
|
java
|
square__javapoet
|
src/test/java/com/squareup/javapoet/JavaFileTest.java
|
{
"start": 6897,
"end": 7343
}
|
class ____ {\n"
+ " void main() {\n"
+ " out.println(\"hello\");\n"
+ " }\n"
+ "}\n");
}
@Test public void importStaticNone() {
assertThat(JavaFile.builder("readme", importStaticTypeSpec("Util"))
.build().toString()).isEqualTo(""
+ "package readme;\n"
+ "\n"
+ "import java.lang.System;\n"
+ "import java.util.concurrent.TimeUnit;\n"
+ "\n"
+ "
|
Taco
|
java
|
spring-projects__spring-security
|
core/src/test/java/org/springframework/security/authentication/dao/MockUserCache.java
|
{
"start": 871,
"end": 1288
}
|
class ____ implements UserCache {
private Map<String, UserDetails> cache = new HashMap<>();
@Override
public UserDetails getUserFromCache(String username) {
return this.cache.get(username);
}
@Override
public void putUserInCache(UserDetails user) {
this.cache.put(user.getUsername(), user);
}
@Override
public void removeUserFromCache(String username) {
this.cache.remove(username);
}
}
|
MockUserCache
|
java
|
mybatis__mybatis-3
|
src/test/java/org/apache/ibatis/submitted/typebasedtypehandlerresolution/CsvTypeHandler.java
|
{
"start": 1145,
"end": 2938
}
|
class ____ extends BaseTypeHandler<Object> {
private final Type type;
public CsvTypeHandler(Type type) {
super();
this.type = type;
}
@SuppressWarnings({ "unchecked", "rawtypes" })
@Override
public void setNonNullParameter(PreparedStatement ps, int i, Object parameter, JdbcType jdbcType)
throws SQLException {
// test if the parameter matches 'type'
if (parameter instanceof List) {
Class<?> elementClass = (Class<?>) ((ParameterizedType) type).getActualTypeArguments()[0];
if (String.class.equals(elementClass)) {
ps.setString(i, String.join(",", (List) parameter));
} else if (Integer.class.equals(elementClass)) {
ps.setString(i, (String) ((List) parameter).stream().map(String::valueOf).collect(Collectors.joining(",")));
}
}
}
@Override
public Object getNullableResult(ResultSet rs, String columnName) throws SQLException {
String str = rs.getString(columnName);
if (str == null) {
return null;
}
if (type instanceof ParameterizedType) {
Type argType = ((ParameterizedType) type).getActualTypeArguments()[0];
if (argType instanceof Class) {
if (String.class.equals(argType)) {
return Arrays.asList(str.split(","));
} else if (Integer.class.equals(argType)) {
return Stream.of(str.split(",")).map(Integer::valueOf).collect(Collectors.toList());
}
}
}
return null;
}
@Override
public Object getNullableResult(ResultSet rs, int columnIndex) throws SQLException {
// TODO Auto-generated method stub
return null;
}
@Override
public Object getNullableResult(CallableStatement cs, int columnIndex) throws SQLException {
// TODO Auto-generated method stub
return null;
}
}
|
CsvTypeHandler
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/bootstrap/binding/annotations/override/InheritedAttributeOverridingTest.java
|
{
"start": 2570,
"end": 2887
}
|
class ____ {
private Integer id;
private String name;
@Id
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
@Entity( name = "D" )
public static
|
C
|
java
|
quarkusio__quarkus
|
test-framework/junit5/src/main/java/io/quarkus/test/junit/AbstractJvmQuarkusTestExtension.java
|
{
"start": 10344,
"end": 10513
}
|
class ____ use for mapping is " + TestConfig.class.getClassLoader());
String message = isVSCode || isMaybeVSCode
? "Could not execute test
|
we
|
java
|
assertj__assertj-core
|
assertj-core/src/main/java/org/assertj/core/error/ShouldNotStartWithIgnoringCase.java
|
{
"start": 901,
"end": 1940
}
|
class ____ extends BasicErrorMessageFactory {
/**
* Creates a new <code>{@link ShouldNotStartWithIgnoringCase}</code>.
*
* @param actual the actual value in the failed assertion.
* @param expected the value or sequence of values that {@code actual} is expected not to start with, ignoring case.
* @param comparisonStrategy the {@link ComparisonStrategy} used to evaluate assertion.
* @return the created {@code ErrorMessageFactory}.
*/
public static ErrorMessageFactory shouldNotStartWithIgnoringCase(CharSequence actual, CharSequence expected,
ComparisonStrategy comparisonStrategy) {
return new ShouldNotStartWithIgnoringCase(actual, expected, comparisonStrategy);
}
private ShouldNotStartWithIgnoringCase(Object actual, Object expected, ComparisonStrategy comparisonStrategy) {
super("%nExpecting actual:%n %s%nnot to start with (ignoring case):%n %s%n%s", actual, expected, comparisonStrategy);
}
}
|
ShouldNotStartWithIgnoringCase
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/nestedbeans/unmappable/erroneous/UnmappableSourceDeepNestingMapper.java
|
{
"start": 606,
"end": 680
}
|
class ____ extends BaseDeepNestingMapper {
}
|
UnmappableSourceDeepNestingMapper
|
java
|
spring-projects__spring-framework
|
spring-aop/src/test/java/org/springframework/aop/support/ControlFlowPointcutTests.java
|
{
"start": 8576,
"end": 8824
}
|
class ____ {
int getAge(ITestBean proxy) {
return proxy.getAge();
}
int nomatch(ITestBean proxy) {
return proxy.getAge();
}
void set(ITestBean proxy) {
proxy.setAge(5);
}
}
@SuppressWarnings("serial")
private static
|
MyComponent
|
java
|
spring-projects__spring-boot
|
module/spring-boot-elasticsearch/src/main/java/org/springframework/boot/elasticsearch/autoconfigure/ElasticsearchClientConfigurations.java
|
{
"start": 2256,
"end": 2429
}
|
class ____ {
}
@Configuration(proxyBeanMethods = false)
@ConditionalOnMissingBean(JsonpMapper.class)
@ConditionalOnClass(JsonMapper.class)
static
|
JsonpMapperConfiguration
|
java
|
spring-projects__spring-framework
|
spring-aop/src/test/java/org/springframework/aop/aspectj/annotation/AbstractAspectJAdvisorFactoryTests.java
|
{
"start": 24102,
"end": 24474
}
|
class ____ {
@SuppressWarnings("unused")
private final ITestBean fieldThatShouldBeIgnoredBySpringAtAspectJProcessing = new TestBean();
@Around("org.springframework.aop.aspectj.annotation.AbstractAspectJAdvisorFactoryTests.CommonPointcuts.getAge()()")
int changeReturnValue(ProceedingJoinPoint pjp) {
return -1;
}
}
@Aspect
static
|
NamedPointcutAspectWithFQN
|
java
|
spring-projects__spring-security
|
oauth2/oauth2-authorization-server/src/main/java/org/springframework/security/oauth2/server/authorization/client/JdbcRegisteredClientRepository.java
|
{
"start": 22709,
"end": 22954
}
|
class ____ {
private static JsonMapper createJsonMapper() {
List<JacksonModule> modules = SecurityJacksonModules.getModules(Jackson3.class.getClassLoader());
return JsonMapper.builder().addModules(modules).build();
}
}
static
|
Jackson3
|
java
|
apache__flink
|
flink-test-utils-parent/flink-test-utils-junit/src/main/java/org/apache/flink/testutils/junit/extensions/ContextClassLoaderExtension.java
|
{
"start": 2857,
"end": 4828
}
|
class ____ {
private final Collection<ServiceEntry> serviceEntries = new ArrayList<>();
public ContextClassLoaderExtensionBuilder withServiceEntry(
Class<?> serviceClass, String... serviceImplementations) {
serviceEntries.add(new ServiceEntry(serviceClass, serviceImplementations));
return this;
}
public ContextClassLoaderExtension build() {
final Function<TemporaryFolder, URLClassLoader> factory =
temporaryFolder -> setupClassLoader(temporaryFolder, serviceEntries);
return new ContextClassLoaderExtension(factory);
}
private ContextClassLoaderExtensionBuilder() {}
}
private static URLClassLoader setupClassLoader(
TemporaryFolder temporaryFolder, Collection<ServiceEntry> serviceEntries) {
final Path root = temporaryFolder.getRoot().toPath();
try {
writeServiceEntries(root, serviceEntries);
final URL url = temporaryFolder.getRoot().toURI().toURL();
return new URLClassLoader(
new URL[] {url}, ContextClassLoaderExtension.class.getClassLoader());
} catch (IOException e) {
throw new RuntimeException(e);
}
}
private static void writeServiceEntries(Path tmpDir, Collection<ServiceEntry> serviceEntries)
throws IOException {
for (ServiceEntry serviceEntry : serviceEntries) {
final Path path =
tmpDir.resolve(
Paths.get(
"META-INF",
"services",
serviceEntry.serviceClass.getCanonicalName()));
Files.createDirectories(path.getParent());
Files.write(path, serviceEntry.serviceImplementations, StandardOpenOption.CREATE);
}
}
private static
|
ContextClassLoaderExtensionBuilder
|
java
|
redisson__redisson
|
redisson/src/main/java/org/redisson/transaction/BaseTransactionalMapCache.java
|
{
"start": 1280,
"end": 3147
}
|
class ____<K, V> extends BaseTransactionalMap<K, V> {
public BaseTransactionalMapCache(CommandAsyncExecutor commandExecutor, long timeout, List<TransactionalOperation> operations, RMap<K, V> map, String transactionId) {
super(commandExecutor, timeout, operations, map, transactionId);
}
public RFuture<V> putIfAbsentAsync(K key, V value, long ttl, TimeUnit ttlUnit, long maxIdleTime, TimeUnit maxIdleUnit) {
long threadId = Thread.currentThread().getId();
return putIfAbsentOperationAsync(key, value, new MapCachePutIfAbsentOperation(map, key, value, ttl, ttlUnit, maxIdleTime, maxIdleUnit, transactionId, threadId));
}
public RFuture<Boolean> fastPutOperationAsync(K key, V value, long ttl, TimeUnit ttlUnit, long maxIdleTime, TimeUnit maxIdleUnit) {
long threadId = Thread.currentThread().getId();
return fastPutOperationAsync(key, value, new MapCacheFastPutOperation(map, key, value, ttl, ttlUnit, maxIdleTime, maxIdleUnit, transactionId, threadId));
}
public RFuture<V> putOperationAsync(K key, V value, long ttlTimeout, long maxIdleTimeout, long maxIdleDelta, long ttlTimeoutDelta) {
long threadId = Thread.currentThread().getId();
return putOperationAsync(key, value, new MapCachePutOperation(map, key, value,
ttlTimeoutDelta, TimeUnit.MILLISECONDS, maxIdleDelta, TimeUnit.MILLISECONDS, transactionId, threadId));
}
public RFuture<Boolean> fastPutIfAbsentAsync(K key, V value, long ttl, TimeUnit ttlUnit, long maxIdleTime, TimeUnit maxIdleUnit) {
long threadId = Thread.currentThread().getId();
return fastPutIfAbsentOperationAsync(key, value, new MapCacheFastPutIfAbsentOperation(map, key, value,
ttl, ttlUnit, maxIdleTime, maxIdleUnit, transactionId, threadId));
}
}
|
BaseTransactionalMapCache
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/processor/IdempotentConsumerConcurrentTest.java
|
{
"start": 1453,
"end": 7537
}
|
class ____ extends ContextTestSupport {
private static final Logger LOG = LoggerFactory.getLogger(IdempotentConsumerConcurrentTest.class);
protected Endpoint startEndpoint;
protected MockEndpoint resultEndpoint;
@Override
public boolean isUseRouteBuilder() {
return false;
}
@Test
public void testDuplicateMessagesAreFilteredOut() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("direct:start")
.idempotentConsumer(header("messageId"), MemoryIdempotentRepository.memoryIdempotentRepository(200))
.to("mock:result");
}
});
context.start();
resultEndpoint.expectedBodiesReceived("one", "two", "three");
sendMessage("1", "one");
sendMessage("2", "two");
sendMessage("1", "one");
sendMessage("2", "two");
sendMessage("1", "one");
sendMessage("3", "three");
assertMockEndpointsSatisfied();
}
@Test
public void testFailedExchangesNotAddedDeadLetterChannel() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
errorHandler(deadLetterChannel("mock:error").maximumRedeliveries(2).redeliveryDelay(0).logStackTrace(false));
from("direct:start")
.idempotentConsumer(header("messageId"), MemoryIdempotentRepository.memoryIdempotentRepository(200))
.process(new Processor() {
public void process(Exchange exchange) {
String id = exchange.getIn().getHeader("messageId", String.class);
if (id.equals("2")) {
throw new IllegalArgumentException("Damm I cannot handle id 2");
}
}
}).to("mock:result");
}
});
context.start();
// we send in 2 messages with id 2 that fails
getMockEndpoint("mock:error").expectedMessageCount(2);
resultEndpoint.expectedBodiesReceived("one", "three");
sendMessage("1", "one");
sendMessage("2", "two");
sendMessage("1", "one");
sendMessage("2", "two");
sendMessage("1", "one");
sendMessage("3", "three");
assertMockEndpointsSatisfied();
}
@Test
public void testFailedExchangesNotAdded() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("direct:start")
.idempotentConsumer(header("messageId"), MemoryIdempotentRepository.memoryIdempotentRepository(200))
.process(new Processor() {
public void process(Exchange exchange) {
String id = exchange.getIn().getHeader("messageId", String.class);
if (id.equals("2")) {
throw new IllegalArgumentException("Damm I cannot handle id 2");
}
}
}).to("mock:result");
}
});
context.start();
resultEndpoint.expectedBodiesReceived("one", "three");
sendMessage("1", "one");
sendMessage("2", "two");
sendMessage("1", "one");
sendMessage("2", "two");
sendMessage("1", "one");
sendMessage("3", "three");
assertMockEndpointsSatisfied();
}
/**
* A multithreaded test for IdempotentConsumer filter
*/
@Test
public void testThreadedIdempotentConsumer() throws Exception {
final int loopCount = 100;
final int threadCount = 10;
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("direct:start")
.idempotentConsumer(header("messageId"), MemoryIdempotentRepository.memoryIdempotentRepository(200))
.delay(1).to("mock:result");
}
});
context.start();
resultEndpoint.reset();
resultEndpoint.expectedMessageCount(loopCount);
final boolean[] failedFlag = new boolean[1];
failedFlag[0] = false;
Thread[] threads = new Thread[threadCount];
for (int i = 0; i < threadCount; i++) {
final int threadIndex = i;
threads[threadIndex] = new Thread() {
@Override
public void run() {
try {
for (int j = 0; j < loopCount; j++) {
sendMessage(String.valueOf(j), "multithreadedTest" + j);
}
} catch (Exception e) {
LOG.error("Failed to send message: {}", e.getMessage(), e);
failedFlag[0] = true;
}
}
};
threads[i].start();
}
for (int i = 0; i < threadCount; i++) {
threads[i].join();
}
assertFalse(failedFlag[0], "At least one thread threw an exception");
assertMockEndpointsSatisfied();
}
protected void sendMessage(final Object messageId, final Object body) {
template.send(startEndpoint, new Processor() {
public void process(Exchange exchange) {
// now lets fire in a message
Message in = exchange.getIn();
in.setBody(body);
in.setHeader("messageId", messageId);
}
});
}
@Override
@BeforeEach
public void setUp() throws Exception {
super.setUp();
startEndpoint = resolveMandatoryEndpoint("direct:start");
resultEndpoint = getMockEndpoint("mock:result");
}
}
|
IdempotentConsumerConcurrentTest
|
java
|
spring-projects__spring-framework
|
spring-test/src/main/java/org/springframework/test/context/MergedContextConfiguration.java
|
{
"start": 5089,
"end": 5976
}
|
class ____ which the configuration was merged
* @param locations the merged context resource locations
* @param classes the merged annotated classes
* @param contextInitializerClasses the merged context initializer classes
* @param activeProfiles the merged active bean definition profiles
* @param contextLoader the resolved {@code ContextLoader}
*/
public MergedContextConfiguration(Class<?> testClass, String @Nullable [] locations, Class<?> @Nullable [] classes,
@Nullable Set<Class<? extends ApplicationContextInitializer<?>>> contextInitializerClasses,
String @Nullable [] activeProfiles, ContextLoader contextLoader) {
this(testClass, locations, classes, contextInitializerClasses, activeProfiles, contextLoader, null, null);
}
/**
* Create a new {@code MergedContextConfiguration} instance for the
* supplied parameters.
* @param testClass the test
|
for
|
java
|
apache__logging-log4j2
|
log4j-core-test/src/test/java/foo/TestFriendlyException.java
|
{
"start": 6257,
"end": 6778
}
|
class ____ is excluded
final String className = stackTraceElement.getClassName();
for (final String excludedClassNamePrefix : EXCLUDED_CLASS_NAME_PREFIXES) {
if (className.startsWith(excludedClassNamePrefix)) {
seenExcludedStackTraceElement[0] = true;
return Stream.empty();
}
}
// Replace `org.apache`-originating entries with a constant one.
// Without this, `INSTANCE` might yield different origin depending on the first
|
name
|
java
|
apache__commons-lang
|
src/test/java/org/apache/commons/lang3/reflect/TypeUtilsTest.java
|
{
"start": 6208,
"end": 6287
}
|
class ____<T> implements This<String, T> {
// empty
}
public
|
Other
|
java
|
FasterXML__jackson-databind
|
src/main/java/tools/jackson/databind/ser/jdk/InetAddressSerializer.java
|
{
"start": 892,
"end": 3012
}
|
class ____
extends StdScalarSerializer<InetAddress>
{
protected final boolean _asNumeric;
public InetAddressSerializer() {
this(false);
}
public InetAddressSerializer(boolean asNumeric) {
super(InetAddress.class);
_asNumeric = asNumeric;
}
@Override
public ValueSerializer<?> createContextual(SerializationContext ctxt,
BeanProperty property)
{
JsonFormat.Value format = findFormatOverrides(ctxt, property, handledType());
boolean asNumeric = false;
if (format != null) {
JsonFormat.Shape shape = format.getShape();
if (shape.isNumeric() || shape == JsonFormat.Shape.ARRAY) {
asNumeric = true;
}
}
if (asNumeric != _asNumeric) {
return new InetAddressSerializer(asNumeric);
}
return this;
}
@Override
public void serialize(InetAddress value, JsonGenerator g, SerializationContext provider)
throws JacksonException
{
String str;
if (_asNumeric) { // since 2.9
str = value.getHostAddress();
} else {
// Ok: get textual description; choose "more specific" part
str = value.toString().trim();
int ix = str.indexOf('/');
if (ix >= 0) {
if (ix == 0) { // missing host name; use address
str = str.substring(1);
} else { // otherwise use name
str = str.substring(0, ix);
}
}
}
g.writeString(str);
}
@Override
public void serializeWithType(InetAddress value, JsonGenerator g,
SerializationContext ctxt, TypeSerializer typeSer) throws JacksonException
{
// Better ensure we don't use specific sub-classes...
WritableTypeId typeIdDef = typeSer.writeTypePrefix(g, ctxt,
typeSer.typeId(value, InetAddress.class, JsonToken.VALUE_STRING));
serialize(value, g, ctxt);
typeSer.writeTypeSuffix(g, ctxt, typeIdDef);
}
}
|
InetAddressSerializer
|
java
|
elastic__elasticsearch
|
build-tools/src/testFixtures/java/org/elasticsearch/gradle/internal/test/InMemoryJavaCompiler.java
|
{
"start": 4509,
"end": 5167
}
|
class ____
*/
public static Map<String, byte[]> compile(Map<String, CharSequence> sources, String... options) {
var files = sources.entrySet().stream().map(e -> new InMemoryJavaFileObject(e.getKey(), e.getValue())).toList();
CompilationTask task = getCompilationTask(files, options);
boolean result = task.call();
if (result == false) {
throw new RuntimeException("Could not compile " + sources.entrySet().stream().toList());
}
return files.stream().collect(Collectors.toMap(InMemoryJavaFileObject::getClassName, InMemoryJavaFileObject::getByteCode));
}
/**
* Compiles the
|
name
|
java
|
quarkusio__quarkus
|
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/event/injection/invalid/ProducerMethodEventRawTypeTest.java
|
{
"start": 952,
"end": 1143
}
|
class ____ {
@Produces
public Foo produceFoo(Event event) { // rawtype event injection point
return new Foo();
}
}
static
|
ProducerMethodInjectionBean
|
java
|
apache__dubbo
|
dubbo-plugin/dubbo-rest-openapi/src/main/java/org/apache/dubbo/rpc/protocol/tri/rest/support/swagger/JavadocOpenAPIDefinitionResolver.java
|
{
"start": 2939,
"end": 10038
}
|
class ____ implements OpenAPIDefinitionResolver, OpenAPISchemaResolver {
private final LRUCache<Class<?>, WeakReference<ClassJavadocWrapper>> cache = new LRUCache<>(128);
private final CommentFormatter formatter = new CommentFormatter();
@Override
public OpenAPI resolve(OpenAPI openAPI, ServiceMeta serviceMeta, OpenAPIChain chain) {
openAPI = chain.resolve(openAPI, serviceMeta);
if (openAPI == null) {
return null;
}
Info info = openAPI.getInfo();
if (info == null) {
openAPI.setInfo(info = new Info());
}
if (info.getSummary() != null || info.getDescription() != null) {
return openAPI;
}
ClassJavadoc javadoc = getClassJavadoc(serviceMeta.getType()).javadoc;
if (javadoc.isEmpty()) {
return openAPI;
}
populateComment(javadoc.getComment(), info::setSummary, info::setDescription);
return openAPI;
}
@Override
public Collection<HttpMethods> resolve(PathItem pathItem, MethodMeta methodMeta, OperationContext context) {
return null;
}
@Override
public Operation resolve(Operation operation, MethodMeta methodMeta, OperationContext ctx, OperationChain chain) {
operation = chain.resolve(operation, methodMeta, ctx);
if (operation == null) {
return null;
}
Method method = methodMeta.getMethod();
ClassJavadocWrapper javadoc = getClassJavadoc(method.getDeclaringClass());
if (javadoc.isEmpty()) {
return operation;
}
if (operation.getSummary() == null && operation.getDescription() == null) {
MethodJavadoc methodJavadoc = javadoc.getMethod(method);
if (methodJavadoc != null) {
populateComment(methodJavadoc.getComment(), operation::setSummary, operation::setDescription);
}
}
List<Parameter> parameters = operation.getParameters();
if (parameters != null) {
for (Parameter parameter : parameters) {
if (parameter.getDescription() != null) {
continue;
}
ParameterMeta meta = parameter.getMeta();
if (!(meta instanceof MethodParameterMeta)) {
continue;
}
populateComment(javadoc.getParameter(method, parameter.getName()), null, parameter::setDescription);
}
}
return operation;
}
@Override
public Schema resolve(ParameterMeta parameter, SchemaContext context, SchemaChain chain) {
Schema schema = chain.resolve(parameter, context);
if (schema == null) {
return null;
}
if (schema.getTitle() != null || schema.getDescription() != null) {
return schema;
}
Comment comment = null;
if (parameter instanceof MethodParameterMeta) {
MethodParameterMeta meta = (MethodParameterMeta) parameter;
Method method = meta.getMethod();
comment = getClassJavadoc(method.getDeclaringClass()).getParameter(method, parameter.getName());
} else if (parameter instanceof ReturnParameterMeta) {
ReturnParameterMeta meta = (ReturnParameterMeta) parameter;
Method method = meta.getMethod();
MethodJavadoc methodJavadoc =
getClassJavadoc(method.getDeclaringClass()).getMethod(method);
if (methodJavadoc != null) {
comment = methodJavadoc.getReturns();
}
} else {
for (AnnotatedElement element : parameter.getAnnotatedElements()) {
if (element instanceof Class) {
comment = getClassJavadoc((Class<?>) element).getClassComment();
} else if (element instanceof Field) {
Field field = (Field) element;
ClassJavadocWrapper javadoc = getClassJavadoc(field.getDeclaringClass());
FieldJavadoc fieldJavadoc = javadoc.getField(field);
if (fieldJavadoc != null) {
comment = fieldJavadoc.getComment();
break;
}
ParamJavadoc paramJavadoc = javadoc.getRecordComponent(field.getName());
if (paramJavadoc != null) {
comment = paramJavadoc.getComment();
break;
}
} else if (element instanceof Method) {
Method method = (Method) element;
ClassJavadocWrapper javadoc = getClassJavadoc(method.getDeclaringClass());
MethodJavadoc methodJavadoc = javadoc.getMethod(method);
if (methodJavadoc != null) {
comment = methodJavadoc.getReturns();
break;
}
}
}
}
populateComment(comment, schema::setTitle, schema::setDescription);
return schema;
}
private ClassJavadocWrapper getClassJavadoc(Class<?> clazz) {
WeakReference<ClassJavadocWrapper> ref = cache.get(clazz);
ClassJavadocWrapper javadoc = ref == null ? null : ref.get();
if (javadoc == null) {
javadoc = new ClassJavadocWrapper(RuntimeJavadoc.getJavadoc(clazz));
cache.put(clazz, new WeakReference<>(javadoc));
}
return javadoc;
}
private void populateComment(Comment comment, Consumer<String> sConsumer, Consumer<String> dConsumer) {
if (comment == null) {
return;
}
String description = formatter.format(comment);
if (sConsumer == null) {
dConsumer.accept(description);
return;
}
String summary = getFirstSentence(description);
sConsumer.accept(summary);
if (description.equals(summary)) {
return;
}
dConsumer.accept(description);
}
private static String getFirstSentence(String text) {
if (StringUtils.isEmpty(text)) {
return text;
}
int pOpenIndex = text.indexOf("<p>");
int pCloseIndex = text.indexOf("</p>");
int dotIndex = text.indexOf(".");
if (pOpenIndex != -1) {
if (pOpenIndex == 0 && pCloseIndex != -1) {
if (dotIndex != -1) {
return text.substring(3, Math.min(pCloseIndex, dotIndex));
}
return text.substring(3, pCloseIndex);
}
if (dotIndex != -1) {
return text.substring(0, Math.min(pOpenIndex, dotIndex));
}
return text.substring(0, pOpenIndex);
}
if (dotIndex != -1 && text.length() != dotIndex + 1 && Character.isWhitespace(text.charAt(dotIndex + 1))) {
return text.substring(0, dotIndex + 1);
}
return text;
}
private static final
|
JavadocOpenAPIDefinitionResolver
|
java
|
spring-projects__spring-framework
|
spring-core/src/main/java/org/springframework/cglib/transform/impl/AddDelegateTransformer.java
|
{
"start": 1244,
"end": 4872
}
|
class ____ extends ClassEmitterTransformer {
private static final String DELEGATE = "$CGLIB_DELEGATE";
private static final Signature CSTRUCT_OBJECT =
TypeUtils.parseSignature("void <init>(Object)");
private Class[] delegateIf;
private Class delegateImpl;
private Type delegateType;
/** Creates a new instance of AddDelegateTransformer */
public AddDelegateTransformer(Class delegateIf[], Class delegateImpl) {
try {
delegateImpl.getConstructor(new Class[]{ Object.class });
this.delegateIf = delegateIf;
this.delegateImpl = delegateImpl;
delegateType = Type.getType(delegateImpl);
} catch (NoSuchMethodException e) {
throw new CodeGenerationException(e);
}
}
@Override
public void begin_class(int version, int access, String className, Type superType, Type[] interfaces, String sourceFile) {
if(!TypeUtils.isInterface(access)){
Type[] all = TypeUtils.add(interfaces, TypeUtils.getTypes(delegateIf));
super.begin_class(version, access, className, superType, all, sourceFile);
declare_field(Constants.ACC_PRIVATE | Constants.ACC_TRANSIENT,
DELEGATE,
delegateType,
null);
for (Class element : delegateIf) {
Method[] methods = element.getMethods();
for (Method method : methods) {
if (Modifier.isAbstract(method.getModifiers())) {
addDelegate(method);
}
}
}
}else{
super.begin_class(version, access, className, superType, interfaces, sourceFile);
}
}
@Override
public CodeEmitter begin_method(int access, Signature sig, Type[] exceptions) {
final CodeEmitter e = super.begin_method(access, sig, exceptions);
if (sig.getName().equals(Constants.CONSTRUCTOR_NAME)) {
return new CodeEmitter(e) {
private boolean transformInit = true;
@Override
public void visitMethodInsn(int opcode, String owner, String name, String desc, boolean itf) {
super.visitMethodInsn(opcode, owner, name, desc, itf);
if (transformInit && opcode == Constants.INVOKESPECIAL) {
load_this();
new_instance(delegateType);
dup();
load_this();
invoke_constructor(delegateType, CSTRUCT_OBJECT);
putfield(DELEGATE);
transformInit = false;
}
}
};
}
return e;
}
private void addDelegate(Method m) {
Method delegate;
try {
delegate = delegateImpl.getMethod(m.getName(), m.getParameterTypes());
if (!delegate.getReturnType().getName().equals(m.getReturnType().getName())){
throw new IllegalArgumentException("Invalid delegate signature " + delegate);
}
} catch (NoSuchMethodException e) {
throw new CodeGenerationException(e);
}
final Signature sig = ReflectUtils.getSignature(m);
Type[] exceptions = TypeUtils.getTypes(m.getExceptionTypes());
CodeEmitter e = super.begin_method(Constants.ACC_PUBLIC, sig, exceptions);
e.load_this();
e.getfield(DELEGATE);
e.load_args();
e.invoke_virtual(delegateType, sig);
e.return_value();
e.end_method();
}
}
|
AddDelegateTransformer
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/PreconditionsInvalidPlaceholderTest.java
|
{
"start": 882,
"end": 1516
}
|
class ____ {
private final CompilationTestHelper compilationHelper =
CompilationTestHelper.newInstance(PreconditionsInvalidPlaceholder.class, getClass());
@Test
public void positiveCase1() {
compilationHelper
.addSourceLines(
"PreconditionsInvalidPlaceholderPositiveCase1.java",
"""
package com.google.errorprone.bugpatterns.testdata;
import static com.google.common.base.Preconditions.checkArgument;
import com.google.common.base.Preconditions;
import com.google.common.base.Verify;
public
|
PreconditionsInvalidPlaceholderTest
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest-client/deployment/src/main/java/io/quarkus/rest/client/reactive/deployment/MicroProfileRestClientEnricher.java
|
{
"start": 23742,
"end": 29559
}
|
interface ____
String mockName = mockInterface(declaringClass, generatedClasses, index);
ResultHandle interfaceMock = methodCallCreator.newInstance(MethodDescriptor.ofConstructor(mockName));
paramValueMethod = findMethod(declaringClass, declaringClass, methodName, clientParamAnnotation.toString());
if (paramValueMethod == null) {
throw new RestClientDefinitionException(
annotationName + " method " + methodName + " not found on " + declaringClass);
}
if (paramValueMethod.parametersCount() == 0) {
paramValue = methodCallCreator.invokeInterfaceMethod(paramValueMethod, interfaceMock);
} else if (paramValueMethod.parametersCount() == 1 && isString(paramValueMethod.parameterType(0))) {
paramValue = methodCallCreator.invokeInterfaceMethod(paramValueMethod, interfaceMock,
methodCallCreator.load(paramName));
} else {
throw new RestClientDefinitionException(
annotationName + " method " + declaringClass + "#" + methodName
+ " has too many parameters, at most one parameter, param name, expected");
}
}
Type returnType = paramValueMethod.returnType();
ResultHandle valuesList;
if (isStringArray(returnType)) {
// repack array to list
valuesList = methodCallCreator.invokeStaticMethod(ARRAYS_AS_LIST, paramValue);
} else if (isString(returnType)) {
valuesList = methodCallCreator.newInstance(MethodDescriptor.ofConstructor(ArrayList.class));
methodCallCreator.invokeInterfaceMethod(LIST_ADD_METHOD, valuesList, paramValue);
} else {
throw new RestClientDefinitionException("Method " + declaringClass.toString() + "#" + methodName
+ " has an unsupported return type for " + annotationName + ". " +
"Only String and String[] return types are supported");
}
paramAdder.accept(methodCallCreator, valuesList);
if (!required) {
CatchBlockCreator catchBlock = tryBlock.addCatch(Exception.class);
ResultHandle log = catchBlock.invokeStaticMethod(
MethodDescriptor.ofMethod(Logger.class, "getLogger", Logger.class, String.class),
catchBlock.load(declaringClass.name().toString()));
String errorMessage = String.format(
"Invoking param generation method '%s' for '%s' on method '%s#%s' failed",
methodName, paramName, declaringClass.name(), declaringMethod.name());
catchBlock.invokeVirtualMethod(
MethodDescriptor.ofMethod(Logger.class, "warn", void.class, Object.class, Throwable.class),
log,
catchBlock.load(errorMessage), catchBlock.getCaughtException());
}
}
}
@Override
public void forSubResourceMethod(ClassCreator subClassCreator, MethodCreator subConstructor,
MethodCreator subClinit, MethodCreator subMethodCreator, ClassInfo rootInterfaceClass,
ClassInfo subInterfaceClass, MethodInfo subMethod, MethodInfo rootMethod,
AssignableResultHandle invocationBuilder, // sub-level
IndexView index, BuildProducer<GeneratedClassBuildItem> generatedClasses,
int methodIndex, int subMethodIndex, FieldDescriptor javaMethodField) {
addJavaMethodToContext(javaMethodField, subMethodCreator, invocationBuilder);
Map<String, ParamData> headerFillersByName = new HashMap<>();
collectHeaderFillers(rootInterfaceClass, rootMethod, headerFillersByName);
collectHeaderFillers(subInterfaceClass, subMethod, headerFillersByName);
String subHeaderFillerName = subInterfaceClass.name().toString() + sha1(rootInterfaceClass.name().toString()) +
"$$" + methodIndex + "$$" + subMethodIndex;
createAndReturnHeaderFiller(subClassCreator, subClinit, subMethodCreator, subMethod,
invocationBuilder, index, generatedClasses, subMethodIndex, subHeaderFillerName, headerFillersByName,
Collections.emptyList());
}
@Override
public void forMethod(ClassCreator classCreator, MethodCreator constructor,
MethodCreator clinit, MethodCreator methodCreator, ClassInfo interfaceClass,
MethodInfo method, AssignableResultHandle invocationBuilder, IndexView index,
BuildProducer<GeneratedClassBuildItem> generatedClasses, int methodIndex, FieldDescriptor javaMethodField) {
addJavaMethodToContext(javaMethodField, methodCreator, invocationBuilder);
// header filler
Map<String, ParamData> headerFillersByName = new HashMap<>();
collectHeaderFillers(interfaceClass, method, headerFillersByName);
AnnotationInstance clientBasicAuth = interfaceClass.declaredAnnotation(CLIENT_BASIC_AUTH);
List<AddHeadersEnhancer> enhancers = new ArrayList<>();
if (clientBasicAuth != null) {
enhancers.add(new BasicAuthAddHeadersEnhancer(clientBasicAuth.value("username").asString(),
clientBasicAuth.value("password").asString()));
}
createAndReturnHeaderFiller(classCreator, clinit, methodCreator, method,
invocationBuilder, index, generatedClasses, methodIndex,
interfaceClass + "$$" + method.name() + "$$" + methodIndex, headerFillersByName, enhancers);
}
private
|
method
|
java
|
elastic__elasticsearch
|
x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/AbstractBuilder.java
|
{
"start": 603,
"end": 1131
}
|
class ____ utility methods.
*
* Implementation note: ANTLR 4 generates sources with a parameterized signature that isn't really useful for SQL.
* That is mainly because it forces <i>each</i> visitor method to return a node inside the generated AST which
* might be or not the case.
* Since the parser generates two types of trees ({@code LogicalPlan} and {@code Expression}) plus string handling,
* the generic signature does not fit and does give any advantage hence why it is <i>erased</i>, each subsequent
* child
|
offering
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/sql/mysql/param/MySqlParameterizedOutputVisitorTest_40.java
|
{
"start": 629,
"end": 2390
}
|
class ____ extends TestCase {
public void test_for_parameterize() throws Exception {
final DbType dbType = JdbcConstants.MYSQL;
String sql = "select * from t where 1 <> 1 or id = 3";
SQLStatementParser parser = SQLParserUtils.createSQLStatementParser(sql, dbType);
List<SQLStatement> stmtList = parser.parseStatementList();
SQLStatement statement = stmtList.get(0);
StringBuilder out = new StringBuilder();
SQLASTOutputVisitor visitor = SQLUtils.createOutputVisitor(out, JdbcConstants.MYSQL);
List<Object> parameters = new ArrayList<Object>();
visitor.setParameterized(true);
visitor.setParameterizedMergeInList(true);
visitor.setParameters(parameters);
/*visitor.setPrettyFormat(false);*/
statement.accept(visitor);
/* JSONArray array = new JSONArray();
for(String table : visitor.getTables()){
array.add(table.replaceAll("`",""));
}*/
String psql = out.toString();
assertEquals("SELECT *\n" +
"FROM t\n" +
"WHERE 1 <> 1\n" +
"\tOR id = ?", psql);
String params_json = JSON.toJSONString(parameters, JSONWriter.Feature.WriteClassName);
System.out.println(params_json);
JSONArray jsonArray = JSON.parseArray(params_json);
String json = JSON.toJSONString(jsonArray, JSONWriter.Feature.WriteClassName);
assertEquals("[3]", json);
String rsql = SQLUtils.toSQLString(SQLUtils.parseStatements(psql, dbType), dbType, jsonArray);
assertEquals("SELECT *\n" +
"FROM t\n" +
"WHERE 1 <> 1\n" +
"\tOR id = 3", rsql);
}
}
|
MySqlParameterizedOutputVisitorTest_40
|
java
|
elastic__elasticsearch
|
x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DatePartProcessor.java
|
{
"start": 779,
"end": 2586
}
|
class ____ extends BinaryDateTimeProcessor {
public static final String NAME = "dtpart";
public DatePartProcessor(Processor source1, Processor source2, ZoneId zoneId) {
super(source1, source2, zoneId);
}
public DatePartProcessor(StreamInput in) throws IOException {
super(in);
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
protected Object doProcess(Object part, Object timestamp) {
return process(part, timestamp, zoneId());
}
/**
* Used in Painless scripting
*/
public static Object process(Object part, Object timestamp, ZoneId zoneId) {
if (part == null || timestamp == null) {
return null;
}
if (part instanceof String == false) {
throw new SqlIllegalArgumentException("A string is required; received [{}]", part);
}
Part datePartField = Part.resolve((String) part);
if (datePartField == null) {
List<String> similar = Part.findSimilar((String) part);
if (similar.isEmpty()) {
throw new InvalidArgumentException("A value of {} or their aliases is required; received [{}]", Part.values(), part);
} else {
throw new InvalidArgumentException(
"Received value [{}] is not valid date part for extraction; " + "did you mean {}?",
part,
similar
);
}
}
if (timestamp instanceof ZonedDateTime == false) {
throw new SqlIllegalArgumentException("A date/datetime is required; received [{}]", timestamp);
}
return datePartField.extract(((ZonedDateTime) timestamp).withZoneSameInstant(zoneId));
}
}
|
DatePartProcessor
|
java
|
quarkusio__quarkus
|
integration-tests/infinispan-cache-jpa/src/main/java/io/quarkus/it/infinispan/cache/jpa/Person.java
|
{
"start": 413,
"end": 1071
}
|
class ____ {
private long id;
private String name;
public Person() {
}
public Person(String name) {
this.name = name;
}
@Id
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "personSeq")
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public void describeFully(StringBuilder sb) {
sb.append("Person with id=").append(id).append(", name='").append(name).append("'");
}
static final
|
Person
|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableTakeUntilPredicate.java
|
{
"start": 1411,
"end": 3259
}
|
class ____<T> implements FlowableSubscriber<T>, Subscription {
final Subscriber<? super T> downstream;
final Predicate<? super T> predicate;
Subscription upstream;
boolean done;
InnerSubscriber(Subscriber<? super T> actual, Predicate<? super T> predicate) {
this.downstream = actual;
this.predicate = predicate;
}
@Override
public void onSubscribe(Subscription s) {
if (SubscriptionHelper.validate(this.upstream, s)) {
this.upstream = s;
downstream.onSubscribe(this);
}
}
@Override
public void onNext(T t) {
if (!done) {
downstream.onNext(t);
boolean b;
try {
b = predicate.test(t);
} catch (Throwable e) {
Exceptions.throwIfFatal(e);
upstream.cancel();
onError(e);
return;
}
if (b) {
done = true;
upstream.cancel();
downstream.onComplete();
}
}
}
@Override
public void onError(Throwable t) {
if (!done) {
done = true;
downstream.onError(t);
} else {
RxJavaPlugins.onError(t);
}
}
@Override
public void onComplete() {
if (!done) {
done = true;
downstream.onComplete();
}
}
@Override
public void request(long n) {
upstream.request(n);
}
@Override
public void cancel() {
upstream.cancel();
}
}
}
|
InnerSubscriber
|
java
|
apache__flink
|
flink-rpc/flink-rpc-akka/src/test/java/org/apache/flink/runtime/rpc/pekko/PekkoRpcActorTest.java
|
{
"start": 33759,
"end": 34748
}
|
class ____ extends RpcEndpoint {
private final CountDownLatch countDownLatch;
@Nullable private final Exception exception;
OnStartEndpoint(RpcService rpcService, @Nullable Exception exception) {
super(rpcService);
this.countDownLatch = new CountDownLatch(1);
this.exception = exception;
// remove this endpoint from the rpc service once it terminates (normally or
// exceptionally)
getTerminationFuture().whenComplete((aVoid, throwable) -> closeAsync());
}
@Override
public void onStart() throws Exception {
countDownLatch.countDown();
ExceptionUtils.tryRethrowException(exception);
}
public void awaitUntilOnStartCalled() throws InterruptedException {
countDownLatch.await();
}
}
// ------------------------------------------------------------------------
private static final
|
OnStartEndpoint
|
java
|
spring-cloud__spring-cloud-gateway
|
spring-cloud-gateway-server-webflux/src/test/java/org/springframework/cloud/gateway/filter/factory/CacheRequestBodyGatewayFilterFactoryTests.java
|
{
"start": 5782,
"end": 6426
}
|
class ____ implements GatewayFilter {
private boolean exceptNullBody;
private String bodyExcepted;
AssertCachedRequestBodyGatewayFilter(String body) {
this.exceptNullBody = !StringUtils.hasText(body);
this.bodyExcepted = body;
}
@Override
public Mono<Void> filter(ServerWebExchange exchange, GatewayFilterChain chain) {
String body = exchange.getAttribute(ServerWebExchangeUtils.CACHED_REQUEST_BODY_ATTR);
if (exceptNullBody) {
assertThat(body).isNull();
}
else {
assertThat(body).isEqualTo(bodyExcepted);
}
return chain.filter(exchange);
}
}
private static
|
AssertCachedRequestBodyGatewayFilter
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/http/FormLoginConfigTests.java
|
{
"start": 9255,
"end": 9789
}
|
class ____
implements AuthenticationSuccessHandler, AuthenticationFailureHandler {
@Override
public void onAuthenticationFailure(HttpServletRequest request, HttpServletResponse response,
AuthenticationException exception) {
response.setStatus(HttpStatus.I_AM_A_TEAPOT.value());
}
@Override
public void onAuthenticationSuccess(HttpServletRequest request, HttpServletResponse response,
Authentication authentication) {
response.setStatus(HttpStatus.I_AM_A_TEAPOT.value());
}
}
}
|
TeapotAuthenticationHandler
|
java
|
assertj__assertj-core
|
assertj-core/src/main/java/org/assertj/core/api/AssertProvider.java
|
{
"start": 855,
"end": 1023
}
|
class ____ implements AssertProvider<ButtonAssert> {
* public ButtonAssert assertThat() {
* return new ButtonAssert(this);
* }
* }
*
* public
|
Button
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/nullness/ReturnMissingNullableTest.java
|
{
"start": 1350,
"end": 1997
}
|
class ____ {
public String getMessage(boolean b) {
if (b) {
// BUG: Diagnostic contains: @Nullable
return null;
} else {
return "negative";
}
}
}
""")
.doTest();
}
@Test
public void parenthesizedLiteralNullReturn() {
createCompilationTestHelper()
.addSourceLines(
"com/google/errorprone/bugpatterns/nullness/LiteralNullReturnTest.java",
"""
package com.google.errorprone.bugpatterns.nullness;
public
|
LiteralNullReturnTest
|
java
|
google__guava
|
android/guava-tests/test/com/google/common/collect/AbstractMapEntryTest.java
|
{
"start": 997,
"end": 3223
}
|
class ____ extends TestCase {
private static final @Nullable String NK = null;
private static final @Nullable Integer NV = null;
private static <K extends @Nullable Object, V extends @Nullable Object> Entry<K, V> entry(
K key, V value) {
return new AbstractMapEntry<K, V>() {
@Override
public K getKey() {
return key;
}
@Override
public V getValue() {
return value;
}
};
}
private static <K extends @Nullable Object, V extends @Nullable Object> Entry<K, V> control(
K key, V value) {
return singletonMap(key, value).entrySet().iterator().next();
}
public void testToString() {
assertEquals("foo=1", entry("foo", 1).toString());
}
public void testToStringNull() {
assertEquals("null=1", entry(NK, 1).toString());
assertEquals("foo=null", entry("foo", NV).toString());
assertEquals("null=null", entry(NK, NV).toString());
}
public void testEquals() {
Entry<String, Integer> foo1 = entry("foo", 1);
// Explicitly call `equals`; `assertEquals` might return fast
assertTrue(foo1.equals(foo1));
assertEquals(control("foo", 1), foo1);
assertEquals(control("bar", 2), entry("bar", 2));
assertFalse(control("foo", 1).equals(entry("foo", 2)));
assertFalse(foo1.equals(control("bar", 1)));
assertFalse(foo1.equals(new Object()));
assertFalse(foo1.equals(null));
}
public void testEqualsNull() {
assertEquals(control(NK, 1), entry(NK, 1));
assertEquals(control("bar", NV), entry("bar", NV));
assertFalse(control(NK, 1).equals(entry(NK, 2)));
assertFalse(entry(NK, 1).equals(control("bar", 1)));
assertFalse(entry(NK, 1).equals(new Object()));
assertFalse(entry(NK, 1).equals(null));
}
public void testHashCode() {
assertEquals(control("foo", 1).hashCode(), entry("foo", 1).hashCode());
assertEquals(control("bar", 2).hashCode(), entry("bar", 2).hashCode());
}
public void testHashCodeNull() {
assertEquals(control(NK, 1).hashCode(), entry(NK, 1).hashCode());
assertEquals(control("bar", NV).hashCode(), entry("bar", NV).hashCode());
assertEquals(control(NK, NV).hashCode(), entry(NK, NV).hashCode());
}
}
|
AbstractMapEntryTest
|
java
|
apache__flink
|
flink-connectors/flink-connector-base/src/main/java/org/apache/flink/connector/base/source/reader/RecordEvaluator.java
|
{
"start": 955,
"end": 1131
}
|
interface ____ evaluates whether a de-serialized record should trigger certain control-flow
* operations (e.g. end of stream).
*/
@PublicEvolving
@FunctionalInterface
public
|
that
|
java
|
apache__camel
|
components/camel-cxf/camel-cxf-transport/src/main/java/org/apache/camel/component/cxf/transport/CamelTransportFactory.java
|
{
"start": 1813,
"end": 6939
}
|
class ____ extends AbstractTransportFactory
implements ConduitInitiator, DestinationFactory, CamelContextAware {
public static final String TRANSPORT_ID = "http://cxf.apache.org/transports/camel";
public static final List<String> DEFAULT_NAMESPACES = Arrays.asList(TRANSPORT_ID);
private static final Set<String> URI_PREFIXES = new HashSet<>();
private HeaderFilterStrategy headerFilterStrategy;
private boolean checkException;
private Bus bus;
static {
URI_PREFIXES.add("camel://");
}
private CamelContext camelContext;
public CamelTransportFactory() {
CxfHeaderFilterStrategy defaultHeaderFilterStrategy = new CxfHeaderFilterStrategy();
// Doesn't filter the camel relates headers by default
defaultHeaderFilterStrategy.setOutFilterPattern("");
headerFilterStrategy = defaultHeaderFilterStrategy;
}
public CamelTransportFactory(Bus b) {
super(DEFAULT_NAMESPACES);
bus = b;
registerFactory();
CxfHeaderFilterStrategy defaultHeaderFilterStrategy = new CxfHeaderFilterStrategy();
// Doesn't filter the camel relates headers by default
defaultHeaderFilterStrategy.setOutFilterPattern("");
headerFilterStrategy = defaultHeaderFilterStrategy;
}
public void setCheckException(boolean check) {
checkException = check;
}
public boolean isCheckException() {
return checkException;
}
public Conduit getConduit(EndpointInfo targetInfo) throws IOException {
return getConduit(targetInfo, null, bus);
}
public Conduit getConduit(EndpointInfo endpointInfo, EndpointReferenceType target) throws IOException {
return getConduit(endpointInfo, target, bus);
}
public Destination getDestination(EndpointInfo endpointInfo) throws IOException {
return getDestination(endpointInfo, bus);
}
@Override
public Set<String> getUriPrefixes() {
return URI_PREFIXES;
}
public HeaderFilterStrategy getHeaderFilterStrategy() {
return headerFilterStrategy;
}
public void setHeaderFilterStrategy(HeaderFilterStrategy headerFilterStrategy) {
this.headerFilterStrategy = headerFilterStrategy;
}
@Override
public CamelContext getCamelContext() {
return camelContext;
}
@Override
public void setCamelContext(CamelContext c) {
camelContext = c;
}
@Override
public Destination getDestination(EndpointInfo ei, Bus b) throws IOException {
return new CamelDestination(camelContext, b, this, ei, headerFilterStrategy, checkException);
}
@Override
public Conduit getConduit(EndpointInfo targetInfo, Bus b) throws IOException {
return getConduit(targetInfo, null, b);
}
@Override
public Conduit getConduit(EndpointInfo localInfo, EndpointReferenceType target, Bus b)
throws IOException {
return new CamelConduit(camelContext, b, localInfo, target, headerFilterStrategy);
}
// CXF 2.x support methods
public void setBus(Bus b) {
unregisterFactory();
bus = b;
registerFactory();
}
public final void registerFactory() {
if (null == bus) {
return;
}
DestinationFactoryManager dfm = bus.getExtension(DestinationFactoryManager.class);
if (null != dfm && getTransportIds() != null) {
for (String ns : getTransportIds()) {
dfm.registerDestinationFactory(ns, this);
}
}
ConduitInitiatorManager cim = bus.getExtension(ConduitInitiatorManager.class);
if (cim != null && getTransportIds() != null) {
for (String ns : getTransportIds()) {
cim.registerConduitInitiator(ns, this);
}
}
}
public final void unregisterFactory() {
if (null == bus) {
return;
}
DestinationFactoryManager dfm = bus.getExtension(DestinationFactoryManager.class);
if (null != dfm && getTransportIds() != null) {
unregisterDestinationFactories(dfm);
}
ConduitInitiatorManager cim = bus.getExtension(ConduitInitiatorManager.class);
if (cim != null && getTransportIds() != null) {
unregisterConduitInitiators(cim);
}
}
private void unregisterConduitInitiators(ConduitInitiatorManager cim) {
for (String ns : getTransportIds()) {
try {
if (cim.getConduitInitiator(ns) == this) {
cim.deregisterConduitInitiator(ns);
}
} catch (BusException e) {
//ignore
}
}
}
private void unregisterDestinationFactories(DestinationFactoryManager dfm) {
for (String ns : getTransportIds()) {
try {
if (dfm.getDestinationFactory(ns) == this) {
dfm.deregisterDestinationFactory(ns);
}
} catch (BusException e) {
//ignore
}
}
}
}
|
CamelTransportFactory
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/id/IdentityIdEntityTest.java
|
{
"start": 1041,
"end": 3444
}
|
class ____ {
@AfterEach
public void cleanup(SessionFactoryScope scope) {
scope.getSessionFactory().getSchemaManager().truncate();
}
@Test
@JiraKey(value = "HHH-15561")
@ServiceRegistry(
settings = { @Setting( name = AvailableSettings.USE_GET_GENERATED_KEYS, value = "false") }
)
@DomainModel( annotatedClasses = { IdentityEntity.class } )
@SessionFactory
@RequiresDialect( value = H2Dialect.class )
public void testIdentityEntityWithDisabledGetGeneratedKeys(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
try {
IdentityEntity ie = new IdentityEntity();
ie.setTimestamp( new Date() );
session.persist( ie );
}
catch (Exception e) {
fail( "Creation of an IDENTITY-id-based entity failed when \"hibernate.jdbc.use_get_generated_keys\" was set to false (" + e.getMessage() + ")" );
}
}
);
}
@Test
@JiraKey(value = "HHH-15561")
@ServiceRegistry(
settings = { @Setting( name = "use_jdbc_metadata_defaults", value = "false") }
)
@DomainModel( annotatedClasses = { IdentityEntity.class } )
@SessionFactory
@RequiresDialect( value = H2Dialect.class )
public void testIdentityEntityWithDisabledJdbcMetadataDefaults(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
try {
IdentityEntity ie = new IdentityEntity();
ie.setTimestamp( new Date() );
session.persist( ie );
}
catch (Exception e) {
fail( "Creation of an IDENTITY-id-based entity failed when \"use_jdbc_metadata_defaults\" was set to false (" + e.getMessage() + ")" );
}
}
);
}
@Test
@JiraKey(value = "HHH-16418")
@ServiceRegistry(
settings = { @Setting( name = AvailableSettings.USE_GET_GENERATED_KEYS, value = "false") }
)
@DomainModel( annotatedClasses = { IdentityEntity.class } )
@SessionFactory
@RequiresDialect( value = OracleDialect.class, majorVersion = 12 )
public void testNullSelectIdentityString(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
try {
IdentityEntity ie = new IdentityEntity();
ie.setTimestamp( new Date() );
session.persist( ie );
fail( "A HibernateException should have been thrown" );
}
catch (Exception e) {
assertTrue( e.getMessage().contains( AvailableSettings.USE_GET_GENERATED_KEYS ) );
}
}
);
}
@Entity(name = "id_entity")
public static
|
IdentityIdEntityTest
|
java
|
apache__camel
|
dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/CryptoComponentBuilderFactory.java
|
{
"start": 1437,
"end": 1972
}
|
interface ____ {
/**
* Crypto (JCE) (camel-crypto)
* Sign and verify exchanges using the Signature Service of the Java
* Cryptographic Extension (JCE).
*
* Category: security,transformation
* Since: 2.3
* Maven coordinates: org.apache.camel:camel-crypto
*
* @return the dsl builder
*/
static CryptoComponentBuilder crypto() {
return new CryptoComponentBuilderImpl();
}
/**
* Builder for the Crypto (JCE) component.
*/
|
CryptoComponentBuilderFactory
|
java
|
apache__flink
|
flink-filesystems/flink-hadoop-fs/src/test/java/org/apache/flink/runtime/fs/hdfs/AbstractHadoopFileSystemITTest.java
|
{
"start": 1623,
"end": 6002
}
|
class ____ {
protected static FileSystem fs;
protected static Path basePath;
protected static long consistencyToleranceNS;
private static void checkPathExistence(
Path path, boolean expectedExists, long consistencyToleranceNS)
throws IOException, InterruptedException {
if (consistencyToleranceNS == 0) {
// strongly consistency
assertThat(fs.exists(path)).isEqualTo(expectedExists);
} else {
// eventually consistency
checkPathEventualExistence(fs, path, expectedExists, consistencyToleranceNS);
}
}
protected void checkEmptyDirectory(Path path) throws IOException, InterruptedException {
checkPathExistence(path, true, consistencyToleranceNS);
}
@Test
void testSimpleFileWriteAndRead() throws Exception {
final String testLine = "Hello Upload!";
final Path path = new Path(basePath, "test.txt");
try {
try (FSDataOutputStream out = fs.create(path, FileSystem.WriteMode.OVERWRITE);
OutputStreamWriter writer =
new OutputStreamWriter(out, StandardCharsets.UTF_8)) {
writer.write(testLine);
}
// just in case, wait for the path to exist
checkPathExistence(path, true, consistencyToleranceNS);
try (FSDataInputStream in = fs.open(path);
InputStreamReader ir = new InputStreamReader(in, StandardCharsets.UTF_8);
BufferedReader reader = new BufferedReader(ir)) {
String line = reader.readLine();
assertThat(line).isEqualTo(testLine);
}
} finally {
fs.delete(path, false);
}
checkPathExistence(path, false, consistencyToleranceNS);
}
@Test
void testDirectoryListing() throws Exception {
final Path directory = new Path(basePath, "testdir/");
// directory must not yet exist
assertThat(fs.exists(directory)).isFalse();
try {
// create directory
assertThat(fs.mkdirs(directory)).isTrue();
checkEmptyDirectory(directory);
// directory empty
assertThat(fs.listStatus(directory).length).isZero();
// create some files
final int numFiles = 3;
for (int i = 0; i < numFiles; i++) {
Path file = new Path(directory, "/file-" + i);
try (FSDataOutputStream out = fs.create(file, FileSystem.WriteMode.OVERWRITE);
OutputStreamWriter writer =
new OutputStreamWriter(out, StandardCharsets.UTF_8)) {
writer.write("hello-" + i + "\n");
}
// just in case, wait for the file to exist (should then also be reflected in the
// directory's file list below)
checkPathExistence(file, true, consistencyToleranceNS);
}
FileStatus[] files = fs.listStatus(directory);
assertThat(files).isNotNull();
assertThat(files.length).isEqualTo(3);
for (FileStatus status : files) {
assertThat(status.isDir()).isFalse();
}
// now that there are files, the directory must exist
assertThat(fs.exists(directory)).isTrue();
} finally {
// clean up
cleanupDirectoryWithRetry(fs, directory, consistencyToleranceNS);
}
}
@AfterAll
static void teardown() throws IOException, InterruptedException {
try {
if (fs != null) {
cleanupDirectoryWithRetry(fs, basePath, consistencyToleranceNS);
}
} finally {
FileSystem.initialize(new Configuration());
}
}
private static void cleanupDirectoryWithRetry(
FileSystem fs, Path path, long consistencyToleranceNS)
throws IOException, InterruptedException {
fs.delete(path, true);
long deadline = System.nanoTime() + consistencyToleranceNS;
while (fs.exists(path) && System.nanoTime() - deadline < 0) {
fs.delete(path, true);
Thread.sleep(50L);
}
assertThat(fs.exists(path)).isFalse();
}
}
|
AbstractHadoopFileSystemITTest
|
java
|
netty__netty
|
transport-udt/src/main/java/io/netty/channel/udt/DefaultUdtServerChannelConfig.java
|
{
"start": 1205,
"end": 6209
}
|
class ____ extends DefaultUdtChannelConfig
implements UdtServerChannelConfig {
private volatile int backlog = 64;
public DefaultUdtServerChannelConfig(
final UdtChannel channel, final ChannelUDT channelUDT, final boolean apply) throws IOException {
super(channel, channelUDT, apply);
if (apply) {
apply(channelUDT);
}
}
@Override
protected void apply(final ChannelUDT channelUDT) throws IOException {
// nothing to apply for now.
}
@Override
public int getBacklog() {
return backlog;
}
@SuppressWarnings("unchecked")
@Override
public <T> T getOption(final ChannelOption<T> option) {
if (option == SO_BACKLOG) {
return (T) Integer.valueOf(getBacklog());
}
return super.getOption(option);
}
@Override
public Map<ChannelOption<?>, Object> getOptions() {
return getOptions(super.getOptions(), SO_BACKLOG);
}
@Override
public UdtServerChannelConfig setBacklog(final int backlog) {
this.backlog = backlog;
return this;
}
@Override
public <T> boolean setOption(final ChannelOption<T> option, final T value) {
validate(option, value);
if (option == SO_BACKLOG) {
setBacklog((Integer) value);
} else {
return super.setOption(option, value);
}
return true;
}
@Override
public UdtServerChannelConfig setProtocolReceiveBufferSize(
final int protocolReceiveBufferSize) {
super.setProtocolReceiveBufferSize(protocolReceiveBufferSize);
return this;
}
@Override
public UdtServerChannelConfig setProtocolSendBufferSize(
final int protocolSendBufferSize) {
super.setProtocolSendBufferSize(protocolSendBufferSize);
return this;
}
@Override
public UdtServerChannelConfig setReceiveBufferSize(
final int receiveBufferSize) {
super.setReceiveBufferSize(receiveBufferSize);
return this;
}
@Override
public UdtServerChannelConfig setReuseAddress(final boolean reuseAddress) {
super.setReuseAddress(reuseAddress);
return this;
}
@Override
public UdtServerChannelConfig setSendBufferSize(final int sendBufferSize) {
super.setSendBufferSize(sendBufferSize);
return this;
}
@Override
public UdtServerChannelConfig setSoLinger(final int soLinger) {
super.setSoLinger(soLinger);
return this;
}
@Override
public UdtServerChannelConfig setSystemReceiveBufferSize(
final int systemReceiveBufferSize) {
super.setSystemReceiveBufferSize(systemReceiveBufferSize);
return this;
}
@Override
public UdtServerChannelConfig setSystemSendBufferSize(
final int systemSendBufferSize) {
super.setSystemSendBufferSize(systemSendBufferSize);
return this;
}
@Override
public UdtServerChannelConfig setConnectTimeoutMillis(int connectTimeoutMillis) {
super.setConnectTimeoutMillis(connectTimeoutMillis);
return this;
}
@Override
@Deprecated
public UdtServerChannelConfig setMaxMessagesPerRead(int maxMessagesPerRead) {
super.setMaxMessagesPerRead(maxMessagesPerRead);
return this;
}
@Override
public UdtServerChannelConfig setWriteSpinCount(int writeSpinCount) {
super.setWriteSpinCount(writeSpinCount);
return this;
}
@Override
public UdtServerChannelConfig setAllocator(ByteBufAllocator allocator) {
super.setAllocator(allocator);
return this;
}
@Override
public UdtServerChannelConfig setRecvByteBufAllocator(RecvByteBufAllocator allocator) {
super.setRecvByteBufAllocator(allocator);
return this;
}
@Override
public UdtServerChannelConfig setAutoRead(boolean autoRead) {
super.setAutoRead(autoRead);
return this;
}
@Override
public UdtServerChannelConfig setAutoClose(boolean autoClose) {
super.setAutoClose(autoClose);
return this;
}
@Override
public UdtServerChannelConfig setWriteBufferLowWaterMark(int writeBufferLowWaterMark) {
super.setWriteBufferLowWaterMark(writeBufferLowWaterMark);
return this;
}
@Override
public UdtServerChannelConfig setWriteBufferHighWaterMark(int writeBufferHighWaterMark) {
super.setWriteBufferHighWaterMark(writeBufferHighWaterMark);
return this;
}
@Override
public UdtServerChannelConfig setWriteBufferWaterMark(WriteBufferWaterMark writeBufferWaterMark) {
super.setWriteBufferWaterMark(writeBufferWaterMark);
return this;
}
@Override
public UdtServerChannelConfig setMessageSizeEstimator(MessageSizeEstimator estimator) {
super.setMessageSizeEstimator(estimator);
return this;
}
}
|
DefaultUdtServerChannelConfig
|
java
|
quarkusio__quarkus
|
extensions/smallrye-openapi/deployment/src/test/java/io/quarkus/smallrye/openapi/test/jaxrs/ApplicationContext.java
|
{
"start": 269,
"end": 318
}
|
class ____ extends Application {
}
|
ApplicationContext
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongKeyedBucketOrds.java
|
{
"start": 12516,
"end": 17251
}
|
class ____ extends LongKeyedBucketOrds {
private final LongHash ords;
private final int owningBucketOrdShift;
private final long owningBucketOrdMask;
public FromManySmall(BigArrays bigArrays, int owningBucketOrdShift) {
ords = new LongHash(2, bigArrays);
this.owningBucketOrdShift = owningBucketOrdShift;
this.owningBucketOrdMask = -1L << owningBucketOrdShift;
}
private long encode(long owningBucketOrd, long value) {
// This is in the critical path for collecting some aggs. Be careful of performance.
return (owningBucketOrd << owningBucketOrdShift) | value;
}
@Override
public long add(long owningBucketOrd, long value) {
// This is in the critical path for collecting lots of aggs. Be careful of performance.
long enc = encode(owningBucketOrd, value);
if (owningBucketOrd != (enc >>> owningBucketOrdShift) && (enc & ~owningBucketOrdMask) != value) {
throw new IllegalArgumentException(
String.format(
Locale.ROOT,
"[%s] and [%s] must fit in [%s..%s] bits",
owningBucketOrd,
value,
64 - owningBucketOrdShift,
owningBucketOrdShift
)
);
}
return ords.add(enc);
}
@Override
public long find(long owningBucketOrd, long value) {
if (Long.numberOfLeadingZeros(owningBucketOrd) < owningBucketOrdShift) {
return -1;
}
if ((value & owningBucketOrdMask) != 0) {
return -1;
}
return ords.find(encode(owningBucketOrd, value));
}
@Override
public long get(long ordinal) {
return ords.get(ordinal) & ~owningBucketOrdMask;
}
@Override
public long bucketsInOrd(long owningBucketOrd) {
// TODO it'd be faster to count the number of buckets in a list of these ords rather than one at a time
if (Long.numberOfLeadingZeros(owningBucketOrd) < owningBucketOrdShift) {
return 0;
}
long count = 0;
long enc = owningBucketOrd << owningBucketOrdShift;
for (long i = 0; i < ords.size(); i++) {
if ((ords.get(i) & owningBucketOrdMask) == enc) {
count++;
}
}
return count;
}
@Override
public long size() {
return ords.size();
}
@Override
public long maxOwningBucketOrd() {
// TODO this is fairly expensive to compute. Can we avoid needing it?
long max = -1;
for (long i = 0; i < ords.size(); i++) {
max = Math.max(max, (ords.get(i) & owningBucketOrdMask) >>> owningBucketOrdShift);
}
return max;
}
@Override
public String decribe() {
return "many bucket ords packed using [" + (64 - owningBucketOrdShift) + "/" + owningBucketOrdShift + "] bits";
}
@Override
public BucketOrdsEnum ordsEnum(long owningBucketOrd) {
// TODO it'd be faster to iterate many ords at once rather than one at a time
if (Long.numberOfLeadingZeros(owningBucketOrd) < owningBucketOrdShift) {
return BucketOrdsEnum.EMPTY;
}
final long encodedOwningBucketOrd = owningBucketOrd << owningBucketOrdShift;
return new BucketOrdsEnum() {
private long ord = -1;
private long value;
@Override
public boolean next() {
while (true) {
ord++;
if (ord >= ords.size()) {
return false;
}
long encoded = ords.get(ord);
if ((encoded & owningBucketOrdMask) == encodedOwningBucketOrd) {
value = encoded & ~owningBucketOrdMask;
return true;
}
}
}
@Override
public long value() {
return value;
}
@Override
public long ord() {
return ord;
}
};
}
@Override
public void close() {
super.close();
ords.close();
}
}
}
|
FromManySmall
|
java
|
apache__maven
|
its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng4319PluginExecutionGoalInterpolationTest.java
|
{
"start": 1040,
"end": 1709
}
|
class ____ extends AbstractMavenIntegrationTestCase {
/**
* Test that goals in plugin executions can be interpolated.
*
* @throws Exception in case of failure
*/
@Test
public void testit() throws Exception {
File testDir = extractResources("/mng-4319");
Verifier verifier = newVerifier(testDir.getAbsolutePath());
verifier.setAutoclean(false);
verifier.deleteDirectory("target");
verifier.addCliArgument("validate");
verifier.execute();
verifier.verifyErrorFreeLog();
verifier.verifyFilePresent("target/touch.txt");
}
}
|
MavenITmng4319PluginExecutionGoalInterpolationTest
|
java
|
spring-projects__spring-framework
|
spring-context/src/main/java/org/springframework/context/support/ClassPathXmlApplicationContext.java
|
{
"start": 2243,
"end": 5727
}
|
class ____ extends AbstractXmlApplicationContext {
private Resource @Nullable [] configResources;
/**
* Create a new ClassPathXmlApplicationContext for bean-style configuration.
* @see #setConfigLocation
* @see #setConfigLocations
* @see #afterPropertiesSet()
*/
public ClassPathXmlApplicationContext() {
}
/**
* Create a new ClassPathXmlApplicationContext for bean-style configuration.
* @param parent the parent context
* @see #setConfigLocation
* @see #setConfigLocations
* @see #afterPropertiesSet()
*/
public ClassPathXmlApplicationContext(ApplicationContext parent) {
super(parent);
}
/**
* Create a new ClassPathXmlApplicationContext, loading the definitions
* from the given XML file and automatically refreshing the context.
* @param configLocation resource location
* @throws BeansException if context creation failed
*/
public ClassPathXmlApplicationContext(String configLocation) throws BeansException {
this(new String[] {configLocation}, true, null);
}
/**
* Create a new ClassPathXmlApplicationContext, loading the definitions
* from the given XML files and automatically refreshing the context.
* @param configLocations array of resource locations
* @throws BeansException if context creation failed
*/
public ClassPathXmlApplicationContext(String... configLocations) throws BeansException {
this(configLocations, true, null);
}
/**
* Create a new ClassPathXmlApplicationContext with the given parent,
* loading the definitions from the given XML files and automatically
* refreshing the context.
* @param configLocations array of resource locations
* @param parent the parent context
* @throws BeansException if context creation failed
*/
public ClassPathXmlApplicationContext(String[] configLocations, @Nullable ApplicationContext parent)
throws BeansException {
this(configLocations, true, parent);
}
/**
* Create a new ClassPathXmlApplicationContext, loading the definitions
* from the given XML files.
* @param configLocations array of resource locations
* @param refresh whether to automatically refresh the context,
* loading all bean definitions and creating all singletons.
* Alternatively, call refresh manually after further configuring the context.
* @throws BeansException if context creation failed
* @see #refresh()
*/
public ClassPathXmlApplicationContext(String[] configLocations, boolean refresh) throws BeansException {
this(configLocations, refresh, null);
}
/**
* Create a new ClassPathXmlApplicationContext with the given parent,
* loading the definitions from the given XML files.
* @param configLocations array of resource locations
* @param refresh whether to automatically refresh the context,
* loading all bean definitions and creating all singletons.
* Alternatively, call refresh manually after further configuring the context.
* @param parent the parent context
* @throws BeansException if context creation failed
* @see #refresh()
*/
public ClassPathXmlApplicationContext(
String[] configLocations, boolean refresh, @Nullable ApplicationContext parent)
throws BeansException {
super(parent);
setConfigLocations(configLocations);
if (refresh) {
refresh();
}
}
/**
* Create a new ClassPathXmlApplicationContext, loading the definitions
* from the given XML file and automatically refreshing the context.
* <p>This is a convenience method to load
|
ClassPathXmlApplicationContext
|
java
|
mockito__mockito
|
mockito-core/src/test/java/org/mockito/internal/configuration/MockInjectionTest.java
|
{
"start": 595,
"end": 4027
}
|
class ____ {
private AnObjectWithConstructor withConstructor;
private AnObjectWithoutConstructor withoutConstructor;
@After
public void reset() throws Exception {
withConstructor = null;
withoutConstructor = null;
}
@Test
public void should_not_allow_null_on_field() {
assertThatThrownBy(
() -> {
MockInjection.onField((Field) null, this);
})
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("item in fields should not be null");
}
@Test
public void should_not_allow_null_on_fields() {
assertThatThrownBy(
() -> {
MockInjection.onFields((Set<Field>) null, this);
})
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("fields should not be null");
}
@Test
public void should_not_allow_null_on_instance_owning_the_field() {
assertThatThrownBy(
() -> {
MockInjection.onField(field("withConstructor"), null);
})
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("fieldOwner should not be null");
}
@Test
public void should_not_allow_null_on_mocks() {
assertThatThrownBy(
() -> {
MockInjection.onField(field("withConstructor"), this).withMocks(null);
})
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("mocks should not be null");
}
@Test
public void can_try_constructor_injection() throws Exception {
MockInjection.onField(field("withConstructor"), this)
.withMocks(oneSetMock())
.tryConstructorInjection()
.apply();
assertThat(withConstructor.initializedWithConstructor).isTrue();
}
@Test
public void should_not_fail_if_constructor_injection_is_not_possible() throws Exception {
MockInjection.onField(field("withoutConstructor"), this)
.withMocks(otherKindOfMocks())
.tryConstructorInjection()
.apply();
assertThat(withoutConstructor).isNull();
}
@Test
public void can_try_property_or_setter_injection() throws Exception {
MockInjection.onField(field("withoutConstructor"), this)
.withMocks(oneSetMock())
.tryPropertyOrFieldInjection()
.apply();
assertThat(withoutConstructor.theSet).isNotNull();
}
@Test
public void should_not_fail_if_property_or_field_injection_is_not_possible() throws Exception {
MockInjection.onField(field("withoutConstructor"), this)
.withMocks(otherKindOfMocks())
.tryPropertyOrFieldInjection()
.apply();
assertThat(withoutConstructor.theSet).isNull();
}
private Set oneSetMock() {
return Collections.singleton(mock(Set.class));
}
private Set otherKindOfMocks() {
return Collections.singleton(mock(Observer.class));
}
private Field field(String field) throws NoSuchFieldException {
return getClass().getDeclaredField(field);
}
public static
|
MockInjectionTest
|
java
|
quarkusio__quarkus
|
extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/mapping/timezone/TimezoneDefaultStorageDefaultTest.java
|
{
"start": 345,
"end": 1766
}
|
class ____ extends AbstractTimezoneDefaultStorageTest {

    // Deploys the timezone entity plus the schema/smoke-test helpers with the
    // default (unconfigured) timezone storage strategy.
    @RegisterExtension
    static QuarkusUnitTest TEST = new QuarkusUnitTest()
            .withApplicationRoot((jar) -> jar
                    .addClasses(EntityWithTimezones.class)
                    .addClasses(SchemaUtil.class, SmokeTestUtils.class))
            .withConfigurationResource("application.properties");

    // Default storage must not generate separate *_tz offset columns, and the
    // datetime columns map to TIMESTAMP_WITH_TIMEZONE.
    @Test
    public void schema() throws Exception {
        assertThat(SchemaUtil.getColumnNames(sessionFactory, EntityWithTimezones.class))
                .doesNotContain("zonedDateTime_tz", "offsetDateTime_tz", "offsetTime_tz");
        assertThat(SchemaUtil.getColumnTypeName(sessionFactory, EntityWithTimezones.class, "zonedDateTime"))
                .isEqualTo("TIMESTAMP_WITH_TIMEZONE");
        assertThat(SchemaUtil.getColumnTypeName(sessionFactory, EntityWithTimezones.class, "offsetDateTime"))
                .isEqualTo("TIMESTAMP_WITH_TIMEZONE");
    }

    // Round-trips the test values and checks what survives persistence.
    @Test
    public void persistAndLoad() {
        long id = persistWithValuesToTest();
        assertLoadedValues(id,
                // Native storage preserves the offset, but not the zone ID: https://hibernate.atlassian.net/browse/HHH-16289
                PERSISTED_ZONED_DATE_TIME.withZoneSameInstant(PERSISTED_ZONED_DATE_TIME.getOffset()),
                PERSISTED_OFFSET_DATE_TIME,
                PERSISTED_OFFSET_TIME);
    }
}
|
TimezoneDefaultStorageDefaultTest
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest-csrf/deployment/src/main/java/io/quarkus/csrf/reactive/deployment/CsrfReactiveBuildStep.java
|
{
"start": 1997,
"end": 2178
}
|
class ____ implements BooleanSupplier {

    // Build-time config injected by the framework; read on each evaluation.
    RestCsrfBuildTimeConfig config;

    /**
     * Reports whether the CSRF extension is enabled in build-time config.
     */
    public boolean getAsBoolean() {
        final RestCsrfBuildTimeConfig buildTimeConfig = config;
        return buildTimeConfig.enabled();
    }
}
}
|
IsEnabled
|
java
|
dropwizard__dropwizard
|
dropwizard-lifecycle/src/test/java/io/dropwizard/lifecycle/ServerLifecycleListenerTest.java
|
{
"start": 430,
"end": 5207
}
|
class ____ {

    private static final String APPLICATION = "application";
    private static final String ADMIN = "admin";
    private static final String HOST_1 = "192.168.2.5";
    private static final String HOST_2 = "192.68.4.77";
    private static final String HOST_3 = "192.168.67.20";

    // The listener's getLocalPort must return the first connector's port.
    @Test
    void getLocalPort() {
        int localPort = 5673;
        int adminPort = 12345;
        Server server = configureAndGetSingleConnectorServer(localPort, adminPort);
        ServerLifecycleListener listener = (server1) -> {
        };
        int retrievedLocalPort = listener.getLocalPort(server);
        Assertions.assertThat(retrievedLocalPort).isEqualTo(localPort);
    }

    // The listener's getAdminPort must return the second connector's port.
    @Test
    void getAdminPort() {
        int localPort = 5673;
        int adminPort = 12345;
        Server server = configureAndGetSingleConnectorServer(localPort, adminPort);
        ServerLifecycleListener listener = (server1) -> {
        };
        int retrievedAdminPort = listener.getAdminPort(server);
        Assertions.assertThat(retrievedAdminPort).isEqualTo(adminPort);
    }

    /** Mocks a server with one application connector and one admin connector. */
    private Server configureAndGetSingleConnectorServer(int applicationPort, int adminPort) {
        Server server = mock(Server.class);
        ServerConnector applicationConnector = mock(ServerConnector.class);
        ServerConnector adminConnector = mock(ServerConnector.class);
        Connector[] connectors = {applicationConnector, adminConnector};
        when(server.getConnectors()).thenReturn(connectors);
        configuredServerConnector(applicationConnector, applicationPort, Arrays.asList("ssl", "http/1.1", "http/2"), APPLICATION, HOST_1);
        configuredServerConnector(adminConnector, adminPort, Arrays.asList("tls", "http/2"), ADMIN, HOST_2);
        return server;
    }

    // Every (protocol, port, type, host) combination of every connector must
    // appear in the listener's port-descriptor list.
    @Test
    void getPortDescriptorList() {
        Server server = configureMultiProtocolServer();
        ServerLifecycleListener listener = (server1) -> {
        };
        List<PortDescriptor> portDescriptorList = listener.getPortDescriptorList(server);
        PortDescriptor[] portDescriptors = buildCompletePortDescriptorsArray();
        Assertions.assertThat(portDescriptorList).usingElementComparator((o1, o2) -> {
            if (Objects.equals(o1.getConnectorType(), o2.getConnectorType()) &&
                Objects.equals(o1.getProtocol(), o2.getProtocol()) &&
                o1.getPort() == o2.getPort() &&
                Objects.equals(o1.getHost(), o2.getHost())) {
                return 0;
            } else {
                return -1;
            }
        }).contains(portDescriptors);
    }

    /** Expected descriptors matching {@link #configureMultiProtocolServer()}. */
    private PortDescriptor[] buildCompletePortDescriptorsArray() {
        return Stream.of(getPortDescriptors(5673, ADMIN, new String[]{"ssl", "http/1.1", "http/2"}, HOST_1),
                getPortDescriptors(12345, APPLICATION, new String[]{"tls", "http/2"}, HOST_2),
                getPortDescriptors(1234, APPLICATION, new String[]{"http/1.1", "http/2", "websocket"}, HOST_3))
            .flatMap(Arrays::stream)
            .toArray(PortDescriptor[]::new);
    }

    /** One descriptor per protocol for a single connector. */
    private PortDescriptor[] getPortDescriptors(int port, String type, String[] protocols, String host) {
        return Arrays.stream(protocols)
            .map(protocol -> getPortDescriptor(protocol, port, type, host))
            .toArray(PortDescriptor[]::new);
    }

    private PortDescriptor getPortDescriptor(String protocol, int port, String type, String host) {
        return new PortDescriptor(protocol, port, type, host);
    }

    /** Mocks a server with three connectors, each speaking several protocols. */
    private Server configureMultiProtocolServer() {
        Server server = mock(Server.class);
        ServerConnector connectorMock1 = mock(ServerConnector.class);
        ServerConnector connectorMock2 = mock(ServerConnector.class);
        ServerConnector connectorMock3 = mock(ServerConnector.class);
        Connector[] connectors = {connectorMock1, connectorMock2, connectorMock3};
        when(server.getConnectors()).thenReturn(connectors);
        configuredServerConnector(connectorMock1, 5673, Arrays.asList("ssl", "http/1.1", "http/2"), ADMIN, HOST_1);
        configuredServerConnector(connectorMock2, 12345, Arrays.asList("tls", "http/2"), APPLICATION, HOST_2);
        // CONSISTENCY FIX: use the HOST_3 constant instead of repeating the
        // "192.168.67.20" literal (it was already used via HOST_3 in
        // buildCompletePortDescriptorsArray()).
        configuredServerConnector(connectorMock3, 1234, Arrays.asList("http/1.1", "http/2", "websocket"), APPLICATION, HOST_3);
        return server;
    }

    /** Stubs port, protocols, name and host on a mocked connector. */
    private void configuredServerConnector(ServerConnector connectorMock1, int localPort, List<String> protocols, String portType, String host) {
        when(connectorMock1.getLocalPort()).thenReturn(localPort);
        when(connectorMock1.getProtocols()).thenReturn(protocols);
        when(connectorMock1.getName()).thenReturn(portType);
        when(connectorMock1.getHost()).thenReturn(host);
    }
}
|
ServerLifecycleListenerTest
|
java
|
apache__camel
|
core/camel-core-processor/src/main/java/org/apache/camel/processor/SetHeadersProcessor.java
|
{
"start": 1248,
"end": 3599
}
|
class ____ extends BaseProcessorSupport implements Traceable, IdAware, RouteIdAware {

    // Node and route identifiers injected by Camel via IdAware/RouteIdAware.
    private String id;
    private String routeId;
    // Parallel lists: headerNames.get(i) produces the header key whose value
    // comes from expressions.get(i).
    private final List<Expression> headerNames;
    private final List<Expression> expressions;

    public SetHeadersProcessor(List<Expression> headerNames, List<Expression> expressions) {
        this.headerNames = headerNames;
        this.expressions = expressions;
    }

    /**
     * Evaluates each value expression against the exchange and stores the
     * result as a message header; the header key is itself evaluated as an
     * expression. Always completes synchronously (returns {@code true}).
     */
    @Override
    public boolean process(Exchange exchange, AsyncCallback callback) {
        try {
            int headerIndex = 0;
            for (Expression expression : expressions) {
                Object newHeader = expression.evaluate(exchange, Object.class);
                if (exchange.getException() != null) {
                    // the expression threw an exception so we should break-out
                    callback.done(true);
                    return true;
                }
                Message message = exchange.getMessage();
                String key = headerNames.get(headerIndex++).evaluate(exchange, String.class);
                message.setHeader(key, newHeader);
            }
        } catch (Exception e) {
            // surface evaluation failures on the exchange instead of throwing
            exchange.setException(e);
        }
        callback.done(true);
        return true;
    }

    @Override
    public String toString() {
        return id;
    }

    /**
     * Renders "setHeaders[name, expr; name, expr; ...]" for tracing.
     */
    @Override
    public String getTraceLabel() {
        StringBuilder sb = new StringBuilder(256);
        sb.append("setHeaders[");
        int headerIndex = 0;
        for (Expression expression : expressions) {
            if (headerIndex > 0) {
                sb.append("; ");
            }
            sb.append(headerNames.get(headerIndex++).toString());
            sb.append(", ");
            sb.append(expression.toString());
        }
        sb.append("]");
        return sb.toString();
    }

    @Override
    public String getId() {
        return id;
    }

    @Override
    public void setId(String id) {
        this.id = id;
    }

    @Override
    public String getRouteId() {
        return routeId;
    }

    @Override
    public void setRouteId(String routeId) {
        this.routeId = routeId;
    }

    public List<Expression> getHeaderNames() {
        return headerNames;
    }

    public List<Expression> getExpressions() {
        return expressions;
    }
}
|
SetHeadersProcessor
|
java
|
quarkusio__quarkus
|
extensions/panache/panache-common/runtime/src/main/java/io/quarkus/panache/common/Sort.java
|
{
"start": 1288,
"end": 9463
}
|
class ____ {

    // Column name as used in the generated order-by clause.
    private String name;
    // Sort direction; never implicitly changed after construction.
    private Direction direction;
    // Optional null ordering; stays null unless explicitly provided.
    private NullPrecedence nullPrecedence;

    /** Sorts on {@code name} in ascending order. */
    public Column(String name) {
        this(name, Direction.Ascending);
    }

    /** Sorts on {@code name} in the given direction, with no null precedence. */
    public Column(String name, Direction direction) {
        this(name, direction, null);
    }

    /** Sorts on {@code name} with explicit direction and null precedence. */
    public Column(String name, Direction direction, NullPrecedence nullPrecedence) {
        this.name = name;
        this.direction = direction;
        this.nullPrecedence = nullPrecedence;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Direction getDirection() {
        return direction;
    }

    public void setDirection(Direction direction) {
        this.direction = direction;
    }

    public NullPrecedence getNullPrecedence() {
        return nullPrecedence;
    }

    public void setNullPrecedence(NullPrecedence nullPrecedence) {
        this.nullPrecedence = nullPrecedence;
    }
}
private List<Column> columns = new ArrayList<>();
private boolean escapingEnabled = true;
private Sort() {
}
/**
* Sort by the given column, in ascending order.
*
* @param column the column to sort on, in ascending order.
* @return a new Sort instance which sorts on the given column in ascending order.
* @see #by(String, Direction)
* @see #by(String...)
*/
public static Sort by(String column) {
return new Sort().and(column);
}
/**
* Sort by the given column, in the given order.
*
* @param column the column to sort on, in the given order.
* @param direction the direction to sort on.
* @return a new Sort instance which sorts on the given column in the given order.
* @see #by(String)
* @see #by(String...)
*/
public static Sort by(String column, Direction direction) {
return new Sort().and(column, direction);
}
/**
* Sort by the given column, in the given order and in the given null precedence.
*
* @param column the column to sort on, in the given order.
* @param nullPrecedence the null precedence to use.
* @return a new Sort instance which sorts on the given column in the given order and null precedence.
* @see #by(String)
* @see #by(String...)
*/
public static Sort by(String column, NullPrecedence nullPrecedence) {
return by(column, Direction.Ascending, nullPrecedence);
}
/**
* Sort by the given column, in the given order and in the given null precedence.
*
* @param column the column to sort on, in the given order.
* @param direction the direction to sort on.
* @param nullPrecedence the null precedence to use.
* @return a new Sort instance which sorts on the given column in the given order and null precedence.
* @see #by(String)
* @see #by(String...)
*/
public static Sort by(String column, Direction direction, NullPrecedence nullPrecedence) {
return new Sort().and(column, direction, nullPrecedence);
}
/**
* Sort by the given columns, in ascending order. Equivalent to {@link #ascending(String...)}.
*
* @param columns the columns to sort on, in ascending order.
* @return a new Sort instance which sorts on the given columns in ascending order.
* @see #by(String, Direction)
* @see #by(String)
* @see #ascending(String...)
* @see #descending(String...)
*/
public static Sort by(String... columns) {
Sort sort = new Sort();
for (String column : columns) {
sort.and(column);
}
return sort;
}
/**
* Sort by the given columns, in ascending order. Equivalent to {@link #by(String...)}.
*
* @param columns the columns to sort on, in ascending order.
* @return a new Sort instance which sorts on the given columns in ascending order.
* @see #by(String, Direction)
* @see #by(String)
* @see #by(String...)
* @see #descending(String...)
*/
public static Sort ascending(String... columns) {
return by(columns);
}
/**
* Sort by the given columns, in descending order.
*
* @param columns the columns to sort on, in descending order.
* @return a new Sort instance which sorts on the given columns in descending order.
* @see #by(String, Direction)
* @see #by(String)
* @see #descending(String...)
*/
public static Sort descending(String... columns) {
Sort sort = new Sort();
for (String column : columns) {
sort.and(column, Direction.Descending);
}
return sort;
}
/**
* Sets the order to descending for all current sort columns.
*
* @return this instance, modified.
* @see #ascending()
* @see #direction(Direction)
*/
public Sort descending() {
return direction(Direction.Descending);
}
/**
* Sets the order to ascending for all current sort columns.
*
* @return this instance, modified.
* @see #descending()
* @see #direction(Direction)
*/
public Sort ascending() {
return direction(Direction.Ascending);
}
/**
* Sets the order to all current sort columns.
*
* @param direction the direction to use for all current sort columns.
* @return this instance, modified.
* @see #descending()
* @see #ascending()
*/
public Sort direction(Direction direction) {
for (Column column : columns) {
column.direction = direction;
}
return this;
}
/**
* Adds a sort column, in ascending order.
*
* @param name the new column to sort on, in ascending order.
* @return this instance, modified.
* @see #and(String, Direction)
*/
public Sort and(String name) {
columns.add(new Column(name));
return this;
}
/**
* Adds a sort column, in the given order.
*
* @param name the new column to sort on, in the given order.
* @param direction the direction to sort on.
* @return this instance, modified.
* @see #and(String)
*/
public Sort and(String name, Direction direction) {
columns.add(new Column(name, direction));
return this;
}
/**
* Adds a sort column, in the given null precedence.
*
* @param name the new column to sort on, in the given null precedence.
* @param nullPrecedence the null precedence to use.
* @return this instance, modified.
* @see #and(String)
*/
public Sort and(String name, NullPrecedence nullPrecedence) {
return and(name, Direction.Ascending, nullPrecedence);
}
/**
* Adds a sort column, in the given order and null precedence.
*
* @param name the new column to sort on, in the given order and null precedence.
* @param direction the direction to sort on.
* @param nullPrecedence the null precedence to use.
* @return this instance, modified.
* @see #and(String)
*/
public Sort and(String name, Direction direction, NullPrecedence nullPrecedence) {
columns.add(new Column(name, direction, nullPrecedence));
return this;
}
/**
* Disables escaping of column names with a backticks during HQL Order By clause generation
*
* @return this instance, modified.
*/
public Sort disableEscaping() {
escapingEnabled = false;
return this;
}
/**
* Get the sort columns
*
* @return the sort columns
*/
public List<Column> getColumns() {
return columns;
}
/**
* Creates an Empty Sort instance. Equivalent to {@link #by()}.
*
* @return a new empty Sort instance
* @see #by(String[])
*/
public static Sort empty() {
return by();
}
public boolean isEscapingEnabled() {
return escapingEnabled;
}
}
|
Column
|
java
|
qos-ch__slf4j
|
integration/src/test/java/org/slf4j/MultiBindingAssertionTest.java
|
{
"start": 1423,
"end": 2615
}
|
class ____ {
    // Capture System.err so the SLF4J multi-provider warning can be inspected.
    StringPrintStream sps = new StringPrintStream(System.err);
    PrintStream old = System.err;
    // Random suffix makes the logged message unique per test run.
    int diff = 1024 + new Random().nextInt(10000);

    @Before
    public void setUp() throws Exception {
        System.setErr(sps);
    }

    @After
    public void tearDown() throws Exception {
        // restore the original stream so later tests are unaffected
        System.setErr(old);
    }

    /**
     * With multiple SLF4J providers on the class path, logging must emit the
     * "multiple providers" warning sequence on stderr, in order.
     */
    @Test
    public void test() throws Exception {
        Logger logger = LoggerFactory.getLogger(this.getClass());
        String msg = "hello world " + diff;
        logger.info(msg);
        List<String> list = sps.stringList;
        int line = 0;
        assertMsgContains(list, line++, "Class path contains multiple SLF4J providers.");
        assertMsgContains(list, line++, "Found provider");
        assertMsgContains(list, line++, "Found provider");
        assertMsgContains(list, line++, "See https://www.slf4j.org/codes.html#multiple_bindings for an explanation.");
        //assertMsgContains(list, line++, "SLF4J(D): Connected with provider of type [");
    }

    // Asserts the captured stderr line at index contains the given fragment.
    void assertMsgContains(List<String> strList, int index, String msg) {
        assertTrue(((String) strList.get(index)).contains(msg));
    }
}
|
MultiBindingAssertionTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/discriminator/MultiSingleTableLoadTest.java
|
{
"start": 4730,
"end": 5100
}
|
class ____ extends A {
@ManyToOne(optional = true, cascade = CascadeType.ALL)
@JoinColumn(name = "x_id")
private X x;
public C() {
}
public C(long id, X x) {
super( id );
this.x = x;
}
public X getX() {
return x;
}
}
@Entity(name = "X")
@Table(name = "tbl_x")
@Inheritance(strategy = InheritanceType.SINGLE_TABLE)
public static abstract
|
C
|
java
|
apache__avro
|
lang/java/avro/src/main/java/org/apache/avro/io/parsing/Symbol.java
|
{
"start": 15610,
"end": 15951
}
|
class ____ extends Symbol {
public final int size;
@Deprecated
public IntCheckAction(int size) {
super(Kind.EXPLICIT_ACTION);
this.size = size;
}
}
public static EnumAdjustAction enumAdjustAction(int rsymCount, Object[] adj) {
return new EnumAdjustAction(rsymCount, adj);
}
public static
|
IntCheckAction
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TargetType.java
|
{
"start": 590,
"end": 1216
}
|
enum ____ implements Writeable {
    REGRESSION,
    CLASSIFICATION;

    public static final ParseField TARGET_TYPE = new ParseField("target_type");

    /**
     * Parses a target type from its string form, tolerating surrounding
     * whitespace and any letter case.
     */
    public static TargetType fromString(String name) {
        final String normalized = name.trim().toUpperCase(Locale.ROOT);
        return valueOf(normalized);
    }

    /** Reads a value previously serialized with {@link #writeTo}. */
    public static TargetType fromStream(StreamInput in) throws IOException {
        return in.readEnum(TargetType.class);
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeEnum(this);
    }

    /** Lower-case form of the constant name (inverse of {@link #fromString}). */
    @Override
    public String toString() {
        return name().toLowerCase(Locale.ROOT);
    }
}
|
TargetType
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/query/QueryProducer.java
|
{
"start": 645,
"end": 1879
}
|
interface ____ the same or very similar signatures to operations of
* {@link jakarta.persistence.EntityManager}. They are declared here to allow reuse by
* {@code StatelessSession}.
* <p>
* There are three fundamental ways to express a query:
* <ul>
* <li>in <em>Hibernate Query Language</em>, an object-oriented query dialect of SQL which is
* a superset of the <em>Jakarta Persistence Query Language</em>,
* <li>in the native SQL dialect of the database, or
* <li>using the {@linkplain jakarta.persistence.criteria.CriteriaBuilder Criteria API} defined
* by JPA, along with {@linkplain org.hibernate.query.criteria.HibernateCriteriaBuilder
* extensions} defined by Hibernate.
* </ul>
* <p>
* In each case, the object used to execute the query depends on whether the query is a
* selection query or a mutation query.
* <ul>
* <li>selection queries are executed via an instance of {@link SelectionQuery}, while
* <li>mutation queries are executed via an instance of {@link MutationQuery}, but
* <li>since JPA makes no such distinction within its API, the type {@link Query} is a mixin of
* {@code SelectionQuery}, {@code MutationQuery}, and {@link jakarta.persistence.TypedQuery}.
* </ul>
* This
|
have
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestMRWithDistributedCache.java
|
{
"start": 3622,
"end": 4881
}
|
class ____ {
public void setup(TaskInputOutputContext<?, ?, ?, ?> context)
throws IOException {
Configuration conf = context.getConfiguration();
Path[] localFiles = context.getLocalCacheFiles();
URI[] files = context.getCacheFiles();
Path[] localArchives = context.getLocalCacheArchives();
URI[] archives = context.getCacheArchives();
FileSystem fs = LocalFileSystem.get(conf);
// Check that 2 files and 2 archives are present
assertEquals(2, localFiles.length);
assertEquals(2, localArchives.length);
assertEquals(2, files.length);
assertEquals(2, archives.length);
// Check the file name
assertTrue(files[0].getPath().endsWith("distributed.first"));
assertTrue(files[1].getPath().endsWith("distributed.second.jar"));
// Check lengths of the files
assertEquals(1, fs.getFileStatus(localFiles[0]).getLen());
assertTrue(fs.getFileStatus(localFiles[1]).getLen() > 1);
// Check extraction of the archive
assertTrue(fs.exists(new Path(localArchives[0],
"distributed.jar.inside3")));
assertTrue(fs.exists(new Path(localArchives[1],
"distributed.jar.inside4")));
// Check the
|
DistributedCacheChecker
|
java
|
mybatis__mybatis-3
|
src/main/java/org/apache/ibatis/type/MonthTypeHandler.java
|
{
"start": 894,
"end": 1749
}
|
class ____ extends BaseTypeHandler<Month> {

    /** Stores a {@link Month} as its 1-12 integer value. */
    @Override
    public void setNonNullParameter(PreparedStatement ps, int i, Month month, JdbcType type) throws SQLException {
        ps.setInt(i, month.getValue());
    }

    /** Reads a month by column name; SQL NULL maps to {@code null}. */
    @Override
    public Month getNullableResult(ResultSet rs, String columnName) throws SQLException {
        final int value = rs.getInt(columnName);
        if (value == 0 && rs.wasNull()) {
            return null;
        }
        return Month.of(value);
    }

    /** Reads a month by column index; SQL NULL maps to {@code null}. */
    @Override
    public Month getNullableResult(ResultSet rs, int columnIndex) throws SQLException {
        final int value = rs.getInt(columnIndex);
        if (value == 0 && rs.wasNull()) {
            return null;
        }
        return Month.of(value);
    }

    /** Reads a month from a callable statement; SQL NULL maps to {@code null}. */
    @Override
    public Month getNullableResult(CallableStatement cs, int columnIndex) throws SQLException {
        final int value = cs.getInt(columnIndex);
        if (value == 0 && cs.wasNull()) {
            return null;
        }
        return Month.of(value);
    }
}
|
MonthTypeHandler
|
java
|
quarkusio__quarkus
|
extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/multiplepersistenceunits/model/annotation/user/User.java
|
{
"start": 307,
"end": 875
}
|
class ____ {

    // Sequence-generated identifier (see @GeneratedValue on the getter).
    private long id;
    private String name;

    /** Required by JPA. */
    public User() {
    }

    public User(String name) {
        this.name = name;
    }

    @Id
    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "userSeq")
    public long getId() {
        return id;
    }

    public void setId(long id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    /** Renders as {@code User:<name>} for debugging. */
    @Override
    public String toString() {
        return new StringBuilder("User:").append(name).toString();
    }
}
|
User
|
java
|
apache__camel
|
components/camel-github/src/test/java/org/apache/camel/component/github/services/MockIssueService.java
|
{
"start": 1121,
"end": 2351
}
|
class ____ extends IssueService {

    // Comments returned verbatim by getComments(...).
    private List<Comment> comments = new ArrayList<>();
    // Delegate used to record comments created through this service.
    private MockPullRequestService mockPullRequestService;

    public MockIssueService(MockPullRequestService mockPullRequestService) {
        this.mockPullRequestService = mockPullRequestService;
    }

    @Override
    public List<Comment> getComments(IRepositoryIdProvider repository, int issueNumber) {
        return comments;
    }

    @Override
    public Comment createComment(IRepositoryIdProvider repository, int issueNumber, String commentText) {
        return mockPullRequestService.addComment((long) issueNumber, commentText);
    }

    /**
     * Returns a canned issue regardless of input.
     */
    @Override
    public Issue createIssue(IRepositoryIdProvider repository, Issue issue) {
        // BUG FIX: the original populated the *argument* and returned a fresh,
        // empty Issue, silently discarding the body/title/id. Populate the
        // returned instance instead, mirroring getIssue(...).
        Issue finalIssue = new Issue();
        finalIssue.setBody("There's an error");
        finalIssue.setTitle("Error");
        finalIssue.setId(1L);
        return finalIssue;
    }

    @Override
    public Issue getIssue(IRepositoryIdProvider repository, String issueNumber) {
        Issue issue = new Issue();
        issue.setBody("There's an error");
        issue.setTitle("Error");
        issue.setId(1L);
        return issue;
    }
}
|
MockIssueService
|
java
|
micronaut-projects__micronaut-core
|
inject-java/src/test/groovy/io/micronaut/inject/field/privatewithqualifier/B.java
|
{
"start": 829,
"end": 1025
}
|
class ____ {
    // Injected with the A bean selected by the @One qualifier.
    @Inject
    @One
    private A a;
    // Injected with the A bean named "twoA".
    @Inject
    @Named("twoA")
    private A a2;

    public A getA() {
        return a;
    }

    public A getA2() {
        return a2;
    }
}
|
B
|
java
|
apache__maven
|
api/maven-api-core/src/main/java/org/apache/maven/api/services/ArtifactManager.java
|
{
"start": 1176,
"end": 1587
}
|
interface ____ extends Service {
    /**
     * Returns the path of the file previously associated to this artifact
     * or {@code Optional.empty()} if no path has been associated.
     *
     * @param artifact the artifact to look up; never {@code null}
     * @return the associated path, or empty if none has been set
     */
    @Nonnull
    Optional<Path> getPath(@Nonnull Artifact artifact);

    /**
     * Associates the given file path to the artifact.
     *
     * @param artifact the produced artifact to bind; never {@code null}
     * @param path the file path to associate with the artifact
     */
    void setPath(@Nonnull ProducedArtifact artifact, Path path);
}
|
ArtifactManager
|
java
|
google__dagger
|
hilt-android/main/java/dagger/hilt/android/internal/managers/ApplicationComponentManager.java
|
{
"start": 854,
"end": 1452
}
|
class ____ implements GeneratedComponentManager<Object> {
    // Lazily created component; volatile is required for the safe publication
    // relied on by the double-checked locking in generatedComponent().
    private volatile Object component;
    private final Object componentLock = new Object();
    // Supplies the component instance on first use.
    private final ComponentSupplier componentCreator;

    public ApplicationComponentManager(ComponentSupplier componentCreator) {
        this.componentCreator = componentCreator;
    }

    /**
     * Returns the component, creating it on first call. Double-checked
     * locking ensures the creator runs at most once while keeping the common
     * (already-created) path lock-free.
     */
    @Override
    public Object generatedComponent() {
        if (component == null) {
            synchronized (componentLock) {
                if (component == null) {
                    component = componentCreator.get();
                }
            }
        }
        return component;
    }
}
|
ApplicationComponentManager
|
java
|
bumptech__glide
|
library/src/main/java/com/bumptech/glide/load/model/MultiModelLoaderFactory.java
|
{
"start": 7153,
"end": 7950
}
|
class ____<Model, Data> {
    private final Class<Model> modelClass;
    @Synthetic final Class<Data> dataClass;
    // Factory producing loaders for this (model, data) pair.
    @Synthetic final ModelLoaderFactory<? extends Model, ? extends Data> factory;

    public Entry(
        @NonNull Class<Model> modelClass,
        @NonNull Class<Data> dataClass,
        @NonNull ModelLoaderFactory<? extends Model, ? extends Data> factory) {
        this.modelClass = modelClass;
        this.dataClass = dataClass;
        this.factory = factory;
    }

    /**
     * True if this entry handles the given model type AND its data class is
     * assignable from the given data type.
     */
    public boolean handles(@NonNull Class<?> modelClass, @NonNull Class<?> dataClass) {
        return handles(modelClass) && this.dataClass.isAssignableFrom(dataClass);
    }

    /** True if this entry's model class is assignable from the given type. */
    public boolean handles(@NonNull Class<?> modelClass) {
        return this.modelClass.isAssignableFrom(modelClass);
    }
}
static
|
Entry
|
java
|
elastic__elasticsearch
|
server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RemoveSettingsCommandIT.java
|
{
"start": 1395,
"end": 7423
}
|
class ____ extends ESIntegTestCase {
public void testRemoveSettingsAbortedByUser() throws Exception {
internalCluster().setBootstrapMasterNodeIndex(0);
String node = internalCluster().startNode();
updateClusterSettings(
Settings.builder().put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.getKey(), false)
);
Settings dataPathSettings = internalCluster().dataPathSettings(node);
ensureStableCluster(1);
internalCluster().stopRandomDataNode();
Environment environment = TestEnvironment.newEnvironment(
Settings.builder().put(internalCluster().getDefaultSettings()).put(dataPathSettings).build()
);
expectThrows(
() -> removeSettings(
environment,
true,
new String[] { DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.getKey() }
),
ElasticsearchNodeCommand.ABORTED_BY_USER_MSG
);
}
public void testRemoveSettingsSuccessful() throws Exception {
internalCluster().setBootstrapMasterNodeIndex(0);
String node = internalCluster().startNode();
updateClusterSettings(
Settings.builder().put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.getKey(), false)
);
assertThat(
clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).get().getState().metadata().persistentSettings().keySet(),
contains(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.getKey())
);
Settings dataPathSettings = internalCluster().dataPathSettings(node);
ensureStableCluster(1);
internalCluster().stopRandomDataNode();
Environment environment = TestEnvironment.newEnvironment(
Settings.builder().put(internalCluster().getDefaultSettings()).put(dataPathSettings).build()
);
MockTerminal terminal = removeSettings(
environment,
false,
randomBoolean()
? new String[] { DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.getKey() }
: new String[] { "cluster.routing.allocation.disk.*" }
);
assertThat(terminal.getOutput(), containsString(RemoveSettingsCommand.SETTINGS_REMOVED_MSG));
assertThat(terminal.getOutput(), containsString("The following settings will be removed:"));
assertThat(
terminal.getOutput(),
containsString(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.getKey() + ": " + false)
);
internalCluster().startNode(dataPathSettings);
assertThat(
clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).get().getState().metadata().persistentSettings().keySet(),
not(contains(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.getKey()))
);
}
public void testSettingDoesNotMatch() throws Exception {
internalCluster().setBootstrapMasterNodeIndex(0);
String node = internalCluster().startNode();
updateClusterSettings(
Settings.builder().put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.getKey(), false)
);
assertThat(
clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).get().getState().metadata().persistentSettings().keySet(),
contains(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.getKey())
);
Settings dataPathSettings = internalCluster().dataPathSettings(node);
ensureStableCluster(1);
internalCluster().stopRandomDataNode();
Environment environment = TestEnvironment.newEnvironment(
Settings.builder().put(internalCluster().getDefaultSettings()).put(dataPathSettings).build()
);
UserException ex = expectThrows(
UserException.class,
() -> removeSettings(environment, false, new String[] { "cluster.routing.allocation.disk.bla.*" })
);
assertThat(
ex.getMessage(),
containsString("No persistent cluster settings matching [cluster.routing.allocation.disk.bla.*] were found on this node")
);
}
/**
 * Runs the given offline node command through a mock terminal.
 *
 * @param command     the command under test
 * @param environment node environment pointing at the stopped node's data paths
 * @param abort       if true, feed a non-"y" character so the command aborts at the confirmation prompt
 * @param args        raw command-line arguments parsed by the command's option parser
 * @return the terminal, so callers can make further assertions on its output
 */
private MockTerminal executeCommand(ElasticsearchNodeCommand command, Environment environment, boolean abort, String... args)
    throws Exception {
    final MockTerminal terminal = MockTerminal.create();
    final OptionSet options = command.getParser().parse(args);
    final ProcessInfo processInfo = new ProcessInfo(Map.of(), Map.of(), createTempDir());
    final String input;
    if (abort) {
        // Any single character other than y/Y makes the command abort at the confirmation prompt.
        input = randomValueOtherThanMany(c -> c.equalsIgnoreCase("y"), () -> randomAlphaOfLength(1));
    } else {
        input = randomBoolean() ? "y" : "Y";
    }
    terminal.addTextInput(input);
    try {
        command.execute(terminal, options, environment, processInfo);
    } finally {
        // Whether the command succeeds or aborts, it must always print the stop-the-node warning.
        assertThat(terminal.getOutput(), containsString(ElasticsearchNodeCommand.STOP_WARNING_MSG));
    }
    return terminal;
}
/**
 * Executes the remove-settings command and verifies that both the confirmation
 * prompt and the settings-removed message appear in the terminal output.
 */
private MockTerminal removeSettings(Environment environment, boolean abort, String... args) throws Exception {
    final MockTerminal result = executeCommand(new RemoveSettingsCommand(), environment, abort, args);
    final String output = result.getOutput();
    assertThat(output, containsString(RemoveSettingsCommand.CONFIRMATION_MSG));
    assertThat(output, containsString(RemoveSettingsCommand.SETTINGS_REMOVED_MSG));
    return result;
}
/**
 * Asserts that {@code runnable} throws an {@link ElasticsearchException} whose
 * message contains {@code message}.
 */
private void expectThrows(ThrowingRunnable runnable, String message) {
    final ElasticsearchException thrown = expectThrows(ElasticsearchException.class, runnable);
    assertThat(thrown.getMessage(), containsString(message));
}
}
|
RemoveSettingsCommandIT
|
java
|
quarkusio__quarkus
|
independent-projects/bootstrap/maven-resolver/src/main/java/io/quarkus/bootstrap/resolver/maven/workspace/WorkspaceModulePom.java
|
{
"start": 231,
"end": 2177
}
|
class ____ {
final Path pom;
Model model;
Model effectiveModel;
WorkspaceModulePom parent;
boolean processed;
WorkspaceModulePom(Path pom) {
this(pom, null, null);
}
public WorkspaceModulePom(Path pom, Model model, Model effectiveModel) {
this.pom = pom.normalize().toAbsolutePath();
this.model = model;
this.effectiveModel = effectiveModel;
}
Path getModuleDir() {
var moduleDir = pom.getParent();
return moduleDir == null ? WorkspaceLoader.getFsRootDir() : moduleDir;
}
Model getModel() {
return model == null ? model = WorkspaceLoader.readModel(pom) : model;
}
Path getParentPom() {
if (model == null) {
return null;
}
Path parentPom = null;
final Parent parent = model.getParent();
if (parent != null && parent.getRelativePath() != null && !parent.getRelativePath().isEmpty()) {
parentPom = pom.getParent().resolve(parent.getRelativePath()).normalize();
if (Files.isDirectory(parentPom)) {
parentPom = parentPom.resolve(WorkspaceLoader.POM_XML);
}
} else {
final Path parentDir = pom.getParent().getParent();
if (parentDir != null) {
parentPom = parentDir.resolve(WorkspaceLoader.POM_XML);
}
}
return parentPom != null && Files.exists(parentPom) ? parentPom.normalize().toAbsolutePath() : null;
}
void process(Consumer<WorkspaceModulePom> consumer) {
if (processed) {
return;
}
processed = true;
if (parent != null) {
parent.process(consumer);
}
if (model != null && model != WorkspaceLoader.MISSING_MODEL) {
consumer.accept(this);
}
}
@Override
public String toString() {
return String.valueOf(pom);
}
}
|
WorkspaceModulePom
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/matchers/AnnotationHasArgumentWithValueTest.java
|
{
"start": 1484,
"end": 1793
}
|
class ____ {}
""");
assertCompiles(
annotationMatches(
/* shouldMatch= */ true,
new AnnotationHasArgumentWithValue("stuff", stringLiteral("y"))));
}
@Test
public void matchesExtraParentheses() {
writeFile(
"Thing2.java",
"""
public @
|
A
|
java
|
spring-projects__spring-framework
|
spring-websocket/src/test/java/org/springframework/web/socket/config/annotation/WebSocketHandlerRegistrationTests.java
|
{
"start": 9009,
"end": 9809
}
|
class ____ {
private final WebSocketHandler webSocketHandler;
private final String path;
private final HandshakeHandler handshakeHandler;
private final HandshakeInterceptor[] interceptors;
private final DefaultSockJsService sockJsService;
public Mapping(WebSocketHandler handler, String path, SockJsService sockJsService) {
this.webSocketHandler = handler;
this.path = path;
this.handshakeHandler = null;
this.interceptors = null;
this.sockJsService = (DefaultSockJsService) sockJsService;
}
public Mapping(WebSocketHandler h, String path, HandshakeHandler hh, HandshakeInterceptor[] interceptors) {
this.webSocketHandler = h;
this.path = path;
this.handshakeHandler = hh;
this.interceptors = interceptors;
this.sockJsService = null;
}
}
}
|
Mapping
|
java
|
apache__avro
|
lang/java/avro/src/main/java/org/apache/avro/io/DecoderFactory.java
|
{
"start": 12026,
"end": 12256
}
|
/**
 * Immutable DecoderFactory intended to serve as a shared default instance:
 * every configuration attempt is rejected so the shared settings can never change.
 */
class DefaultDecoderFactory extends DecoderFactory {
    @Override
    public DecoderFactory configureDecoderBufferSize(int bufferSize) {
        // The default factory is shared; configure a private instance instead.
        throw new IllegalArgumentException("This Factory instance is Immutable");
    }
}
}
|
DefaultDecoderFactory
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UGIExceptionMessages.java
|
{
"start": 957,
"end": 2071
}
|
/**
 * Standard message fragments raised by UserGroupInformation (UGI) failures,
 * centralized so callers and tests agree on exact wording.
 * Non-instantiable constants holder.
 */
final class UGIExceptionMessages {

    public static final String FAILURE_TO_LOGIN = "failure to login:";
    public static final String FOR_USER = " for user: ";
    public static final String FOR_PRINCIPAL = " for principal: ";
    public static final String FROM_KEYTAB = " from keytab ";
    public static final String INVALID_UID = "Invalid UID, could not determine effective user";
    public static final String LOGIN_FAILURE = "Login failure";
    public static final String LOGOUT_FAILURE = "Logout failure";
    public static final String MUST_FIRST_LOGIN =
        "login must be done first";
    public static final String MUST_FIRST_LOGIN_FROM_KEYTAB =
        "loginUserFromKeyTab must be done first";
    public static final String SUBJECT_MUST_CONTAIN_PRINCIPAL =
        "Provided Subject must contain a KerberosPrincipal";
    public static final String SUBJECT_MUST_NOT_BE_NULL =
        "Subject must not be null";
    public static final String USING_TICKET_CACHE_FILE =
        " using ticket cache file: ";

    // checkstyle: Utility classes should not have a public or default constructor.
    private UGIExceptionMessages() {
    }
}
|
UGIExceptionMessages
|
java
|
apache__maven
|
compat/maven-artifact/src/main/java/org/apache/maven/artifact/versioning/ComparableVersion.java
|
{
"start": 8789,
"end": 13315
}
|
class ____ implements Item {
private static final List<String> QUALIFIERS =
Arrays.asList("alpha", "beta", "milestone", "rc", "snapshot", "", "sp");
private static final List<String> RELEASE_QUALIFIERS = Arrays.asList("ga", "final", "release");
private static final Properties ALIASES = new Properties();
static {
ALIASES.put("cr", "rc");
}
/**
* A comparable value for the empty-string qualifier. This one is used to determine if a given qualifier makes
* the version older than one without a qualifier, or more recent.
*/
private static final String RELEASE_VERSION_INDEX = String.valueOf(QUALIFIERS.indexOf(""));
private final String value;
StringItem(String value, boolean followedByDigit) {
if (followedByDigit && value.length() == 1) {
// a1 = alpha-1, b1 = beta-1, m1 = milestone-1
switch (value.charAt(0)) {
case 'a':
value = "alpha";
break;
case 'b':
value = "beta";
break;
case 'm':
value = "milestone";
break;
default:
}
}
this.value = ALIASES.getProperty(value, value);
}
@Override
public int getType() {
return STRING_ITEM;
}
@Override
public boolean isNull() {
return value == null || value.isEmpty();
}
/**
* Returns a comparable value for a qualifier.
* <p>
* This method takes into account the ordering of known qualifiers then unknown qualifiers with lexical
* ordering.
* <p>
*
* @param qualifier
* @return an equivalent value that can be used with lexical comparison
*/
public static String comparableQualifier(String qualifier) {
if (RELEASE_QUALIFIERS.contains(qualifier)) {
return String.valueOf(QUALIFIERS.indexOf(""));
}
int i = QUALIFIERS.indexOf(qualifier);
// Just returning an Integer with the index here is faster, but requires a lot of if/then/else to check for
// -1
// or QUALIFIERS.size and then resort to lexical ordering. Most comparisons are decided by the first
// character,
// so this is still fast. If more characters are needed then it requires a lexical sort anyway.
return i == -1 ? (QUALIFIERS.size() + "-" + qualifier) : String.valueOf(i);
}
@Override
public int compareTo(Item item) {
if (item == null) {
// 1-rc < 1, 1-ga > 1
return comparableQualifier(value).compareTo(RELEASE_VERSION_INDEX);
}
switch (item.getType()) {
case INT_ITEM:
case LONG_ITEM:
case BIGINTEGER_ITEM:
return -1; // 1.any < 1.1 ?
case STRING_ITEM:
return comparableQualifier(value).compareTo(comparableQualifier(((StringItem) item).value));
case COMBINATION_ITEM:
int result = this.compareTo(((CombinationItem) item).getStringPart());
if (result == 0) {
return -1;
}
return result;
case LIST_ITEM:
return -1; // 1.any < 1-1
default:
throw new IllegalStateException("invalid item: " + item.getClass());
}
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
StringItem that = (StringItem) o;
return value.equals(that.value);
}
@Override
public int hashCode() {
return value.hashCode();
}
@Override
public String toString() {
return value;
}
}
/**
* Represents a combination in the version item list.
* It is usually a combination of a string and a number, with the string first and the number second.
*/
private static
|
StringItem
|
java
|
junit-team__junit5
|
jupiter-tests/src/test/java/org/junit/jupiter/api/extension/ExecutableInvokerIntegrationTests.java
|
{
"start": 753,
"end": 1496
}
|
class ____ extends AbstractJupiterTestEngineTests {
@Test
void invokeConstructorViaExtensionContext() {
    // One succeeded test, but two constructor invocations: the extension re-invokes
    // the constructor through the ExtensionContext's ExecutableInvoker.
    EngineExecutionResults results = executeTestsForClass(ExecuteConstructorTwiceTestCase.class);
    assertEquals(1, results.testEvents().succeeded().count());
    assertEquals(2, ExecuteConstructorTwiceTestCase.constructorInvocations);
}
@Test
void invokeMethodViaExtensionContext() {
    // One succeeded test, but two test-method invocations: the extension re-invokes
    // the test method through the ExtensionContext's ExecutableInvoker.
    EngineExecutionResults results = executeTestsForClass(ExecuteTestsTwiceTestCase.class);
    assertEquals(1, results.testEvents().succeeded().count());
    assertEquals(2, ExecuteTestsTwiceTestCase.testInvocations);
}
@SuppressWarnings("JUnitMalformedDeclaration")
@ExtendWith(ExecuteTestsTwiceExtension.class)
static
|
ExecutableInvokerIntegrationTests
|
java
|
quarkusio__quarkus
|
extensions/security/spi/src/main/java/io/quarkus/security/spi/PermissionsAllowedMetaAnnotationBuildItem.java
|
{
"start": 753,
"end": 2518
}
|
class ____ extends SimpleBuildItem {
private final List<DotName> metaAnnotationNames;
private final boolean empty;
private final List<AnnotationInstance> transitiveInstances;
public PermissionsAllowedMetaAnnotationBuildItem(List<AnnotationInstance> transitiveInstances,
List<DotName> metaAnnotationNames) {
this.transitiveInstances = List.copyOf(transitiveInstances);
this.metaAnnotationNames = List.copyOf(metaAnnotationNames);
this.empty = transitiveInstances.isEmpty();
}
public boolean hasPermissionsAllowed(MethodInfo methodInfo) {
if (empty) {
return false;
}
return hasPermissionsAllowed(methodInfo.annotations());
}
public boolean hasPermissionsAllowed(ClassInfo classInfo) {
if (empty) {
return false;
}
return hasPermissionsAllowed(classInfo.declaredAnnotations());
}
public List<AnnotationInstance> getTransitiveInstances() {
return transitiveInstances;
}
public boolean hasPermissionsAllowed(List<AnnotationInstance> instances) {
return instances.stream().anyMatch(ai -> metaAnnotationNames.contains(ai.name()));
}
public Optional<AnnotationInstance> findPermissionsAllowedInstance(ClassInfo classInfo) {
if (empty) {
return Optional.empty();
}
return transitiveInstances
.stream()
.filter(ai -> ai.target().kind() == AnnotationTarget.Kind.CLASS)
.filter(ai -> ai.target().asClass().name().equals(classInfo.name()))
// not repeatable on class-level, therefore we can just find the first one
.findFirst();
}
}
|
PermissionsAllowedMetaAnnotationBuildItem
|
java
|
google__guava
|
android/guava-testlib/src/com/google/common/collect/testing/ListIteratorTester.java
|
{
"start": 1604,
"end": 2386
}
|
class ____<E extends @Nullable Object>
extends AbstractIteratorTester<E, ListIterator<E>> {
protected ListIteratorTester(
int steps,
Iterable<E> elementsToInsert,
Iterable<? extends IteratorFeature> features,
Iterable<E> expectedElements,
int startIndex) {
super(steps, elementsToInsert, features, expectedElements, KnownOrder.KNOWN_ORDER, startIndex);
}
@Override
protected final Iterable<? extends Stimulus<E, ? super ListIterator<E>>> getStimulusValues() {
List<Stimulus<E, ? super ListIterator<E>>> list = new ArrayList<>();
Helpers.addAll(list, iteratorStimuli());
Helpers.addAll(list, listIteratorStimuli());
return list;
}
@Override
protected abstract ListIterator<E> newTargetIterator();
}
|
ListIteratorTester
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilderTests.java
|
{
"start": 1284,
"end": 10200
}
|
/**
 * Tests for {@code DistanceFeatureQueryBuilder}: random builder generation, translation to the
 * expected Lucene query for date / date_nanos / geo_point fields, JSON round-trips, and failure
 * modes (unmapped field, unsupported field type).
 */
class DistanceFeatureQueryBuilderTests extends AbstractQueryTestCase<DistanceFeatureQueryBuilder> {

    /** Builds a random query over one of the three supported field types. */
    @Override
    protected DistanceFeatureQueryBuilder doCreateTestQueryBuilder() {
        String field = randomFrom(DATE_FIELD_NAME, DATE_NANOS_FIELD_NAME, GEO_POINT_FIELD_NAME);
        Origin origin;
        String pivot;
        switch (field) {
            case GEO_POINT_FIELD_NAME:
                GeoPoint point = new GeoPoint(randomDouble(), randomDouble());
                origin = randomBoolean() ? new Origin(point) : new Origin(point.geohash());
                pivot = randomFrom(DistanceUnit.values()).toString(randomDouble());
                break;
            case DATE_FIELD_NAME:
                long randomDateMills = randomLongBetween(0, 2_000_000_000_000L);
                origin = randomBoolean() ? new Origin(randomDateMills) : new Origin(Instant.ofEpochMilli(randomDateMills).toString());
                pivot = between(1, 1000) + randomFrom("d", "h", "ms", "s", "m");
                break;
            default: // DATE_NANOS_FIELD_NAME
                randomDateMills = randomLongBetween(0, 2_000_000_000_000L);
                if (randomBoolean()) {
                    origin = new Origin(randomDateMills); // nano_dates long accept milliseconds since epoch
                } else {
                    long randomNanos = randomLongBetween(0, 1_000_000L);
                    Instant randomDateNanos = Instant.ofEpochMilli(randomDateMills).plusNanos(randomNanos);
                    origin = new Origin(randomDateNanos.toString());
                }
                pivot = between(1, 100_000_000) + "nanos";
                break;
        }
        return new DistanceFeatureQueryBuilder(field, origin, pivot);
    }

    /** Checks that the builder translates into the expected Lucene distance-feature query. */
    @Override
    protected void doAssertLuceneQuery(DistanceFeatureQueryBuilder queryBuilder, Query query, SearchExecutionContext context)
        throws IOException {
        String fieldName = expectedFieldName(queryBuilder.fieldName());
        Object origin = queryBuilder.origin().origin();
        String pivot = queryBuilder.pivot();
        final Query expectedQuery;
        if (fieldName.equals(GEO_POINT_FIELD_NAME)) {
            GeoPoint originGeoPoint = (origin instanceof GeoPoint) ? (GeoPoint) origin : GeoUtils.parseFromString((String) origin);
            double pivotDouble = DistanceUnit.DEFAULT.parse(pivot, DistanceUnit.DEFAULT);
            expectedQuery = LatLonPoint.newDistanceFeatureQuery(fieldName, 1.0f, originGeoPoint.lat(), originGeoPoint.lon(), pivotDouble);
        } else { // if (fieldName.equals(DATE_FIELD_NAME))
            DateFieldType fieldType = (DateFieldType) context.getFieldType(fieldName);
            long originLong = fieldType.parseToLong(origin, true, null, null, context::nowInMillis);
            TimeValue pivotVal = TimeValue.parseTimeValue(pivot, DistanceFeatureQueryBuilder.class.getSimpleName() + ".pivot");
            long pivotLong;
            if (fieldType.resolution() == DateFieldMapper.Resolution.MILLISECONDS) {
                pivotLong = pivotVal.getMillis();
            } else { // NANOSECONDS
                pivotLong = pivotVal.getNanos();
            }
            expectedQuery = LongField.newDistanceFeatureQuery(fieldName, 1.0f, originLong, pivotLong);
        }
        assertEquals(expectedQuery, query);
    }

    /** JSON round-trip for a millisecond-resolution date field, origin given as string and as long. */
    public void testFromJsonDateFieldType() throws IOException {
        // origin as string
        String origin = "2018-01-01T13:10:30Z";
        String pivot = "7d";
        String json = Strings.format("""
            {
              "distance_feature": {
                "field": "%s",
                "origin": "%s",
                "pivot": "%s",
                "boost": 1.0
              }
            }""", DATE_FIELD_NAME, origin, pivot);
        DistanceFeatureQueryBuilder parsed = (DistanceFeatureQueryBuilder) parseQuery(json);
        checkGeneratedJson(json, parsed);
        assertEquals(json, origin, parsed.origin().origin());
        assertEquals(json, pivot, parsed.pivot());
        assertEquals(json, 1.0, parsed.boost(), 0.0001);
        // origin as long
        long originLong = 1514812230999L;
        json = Strings.format("""
            {
              "distance_feature": {
                "field": "%s",
                "origin": %s,
                "pivot": "%s",
                "boost": 1.0
              }
            }""", DATE_FIELD_NAME, originLong, pivot);
        parsed = (DistanceFeatureQueryBuilder) parseQuery(json);
        assertEquals(json, originLong, parsed.origin().origin());
    }

    /** JSON round-trip for a nanosecond-resolution date field, origin given as string and as long. */
    public void testFromJsonDateNanosFieldType() throws IOException {
        // origin as string
        String origin = "2018-01-01T13:10:30.323456789Z";
        String pivot = "100000000nanos";
        String json = Strings.format("""
            {
              "distance_feature": {
                "field": "%s",
                "origin": "%s",
                "pivot": "%s",
                "boost": 1.0
              }
            }""", DATE_NANOS_FIELD_NAME, origin, pivot);
        DistanceFeatureQueryBuilder parsed = (DistanceFeatureQueryBuilder) parseQuery(json);
        checkGeneratedJson(json, parsed);
        assertEquals(json, origin, parsed.origin().origin());
        assertEquals(json, pivot, parsed.pivot());
        assertEquals(json, 1.0, parsed.boost(), 0.0001);
        // origin as long
        long originLong = 1514812230999L;
        json = Strings.format("""
            {
              "distance_feature": {
                "field": "%s",
                "origin": %s,
                "pivot": "%s",
                "boost": 1.0
              }
            }""", DATE_NANOS_FIELD_NAME, originLong, pivot);
        parsed = (DistanceFeatureQueryBuilder) parseQuery(json);
        assertEquals(json, originLong, parsed.origin().origin());
    }

    /** JSON round-trip for a geo_point field with the origin expressed as string, array, and object. */
    public void testFromJsonGeoFieldType() throws IOException {
        final GeoPoint origin = new GeoPoint(41.12, -71.34);
        final String pivot = "1km";
        // origin as string
        String json = Strings.format("""
            {
              "distance_feature": {
                "field": "%s",
                "origin": "%s",
                "pivot": "%s",
                "boost": 2.0
              }
            }""", GEO_POINT_FIELD_NAME, origin.toString(), pivot);
        DistanceFeatureQueryBuilder parsed = (DistanceFeatureQueryBuilder) parseQuery(json);
        checkGeneratedJson(json, parsed);
        assertEquals(json, origin.toString(), parsed.origin().origin());
        assertEquals(json, pivot, parsed.pivot());
        assertEquals(json, 2.0, parsed.boost(), 0.0001);
        // origin as array (GeoJSON order: lon, lat)
        json = Strings.format("""
            {
              "distance_feature": {
                "field": "%s",
                "origin": [ %s, %s ],
                "pivot": "%s",
                "boost": 2.0
              }
            }""", GEO_POINT_FIELD_NAME, origin.lon(), origin.lat(), pivot);
        parsed = (DistanceFeatureQueryBuilder) parseQuery(json);
        assertEquals(json, origin, parsed.origin().origin());
        // origin as object
        json = Strings.format("""
            {
              "distance_feature": {
                "field": "%s",
                "origin": {
                  "lat": %s,
                  "lon": %s
                },
                "pivot": "%s",
                "boost": 2.0
              }
            }""", GEO_POINT_FIELD_NAME, origin.lat(), origin.lon(), pivot);
        parsed = (DistanceFeatureQueryBuilder) parseQuery(json);
        assertEquals(json, origin, parsed.origin().origin());
    }

    /** An unmapped field must produce a match-no-docs query rather than an error. */
    public void testQueryMatchNoDocsQueryWithUnmappedField() throws IOException {
        Query expectedQuery = Queries.newMatchNoDocsQuery("Can't run [" + DistanceFeatureQueryBuilder.NAME + "] query on unmapped fields!");
        String queryString = """
            {
              "distance_feature" : {
                "field": "random_unmapped_field",
                "origin": "random_string",
                "pivot" : "random_string"
              }
            }""";
        Query query = parseQuery(queryString).toQuery(createSearchExecutionContext());
        assertEquals(expectedQuery, query);
    }

    /** Running against an unsupported field type (int) must fail with a clear message. */
    public void testQueryFailsWithWrongFieldType() {
        String query = Strings.format("""
            {
              "distance_feature": {
                "field": "%s",
                "origin": 40,
                "pivot": "random_string"
              }
            }""", INT_FIELD_NAME);
        IllegalArgumentException e = expectThrows(
            IllegalArgumentException.class,
            () -> parseQuery(query).toQuery(createSearchExecutionContext())
        );
        assertThat(e.getMessage(), containsString("query can only be run on a date, date_nanos or geo_point field type!"));
    }
}
|
DistanceFeatureQueryBuilderTests
|
java
|
apache__flink
|
flink-core/src/test/java/org/apache/flink/api/java/typeutils/PojoParametrizedTypeExtractionTest.java
|
{
"start": 1251,
"end": 2543
}
|
class ____ {
@Test
void testDirectlyCreateTypeInfo() {
    // Extracting type information directly from the class must yield the expected POJO type.
    final TypeInformation<ParameterizedParentImpl> directTypeInfo =
        TypeExtractor.createTypeInfo(ParameterizedParentImpl.class);
    assertThat(directTypeInfo).isEqualTo(getParameterizedParentTypeInformation());
}
@Test
void testMapReturnTypeInfo() {
    // The return type inferred for a MapFunction producing the parameterized POJO
    // must match the hand-built expected type information.
    TypeInformation<ParameterizedParentImpl> expectedTypeInfo =
        getParameterizedParentTypeInformation();
    TypeInformation<ParameterizedParentImpl> mapReturnTypeInfo =
        TypeExtractor.getMapReturnTypes(new ConcreteMapFunction(), Types.INT);
    assertThat(mapReturnTypeInfo).isEqualTo(expectedTypeInfo);
}
// Builds the expected POJO type information by hand: a double field plus a nested
// Pojo field whose own fields are an int and a String.
private TypeInformation<ParameterizedParentImpl> getParameterizedParentTypeInformation() {
    Map<String, TypeInformation<?>> nestedFields = new HashMap<>();
    nestedFields.put("digits", Types.INT);
    nestedFields.put("letters", Types.STRING);
    Map<String, TypeInformation<?>> fields = new HashMap<>();
    fields.put("precise", Types.DOUBLE);
    fields.put("pojoField", Types.POJO(Pojo.class, nestedFields));
    return Types.POJO(ParameterizedParentImpl.class, fields);
}
/** Representation of Pojo
|
PojoParametrizedTypeExtractionTest
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.