language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | google__error-prone | check_api/src/main/java/com/google/errorprone/matchers/method/MatchState.java | {
"start": 886,
"end": 1158
} | class ____ which a member method or constructor is declared. */
Type ownerType();
/** The method being matched. */
MethodSymbol sym();
/** The method's formal parameter types. */
default List<Type> paramTypes() {
return sym().type.getParameterTypes();
}
}
| in |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/runtime/operators/lifecycle/event/TestCommandAckEvent.java | {
"start": 1043,
"end": 1718
} | class ____ extends TestEvent {
private final TestCommand command;
private final int attemptNumber;
public TestCommandAckEvent(
String operatorId, int subtaskIndex, int attemptNumber, TestCommand command) {
super(operatorId, subtaskIndex, attemptNumber);
this.command = command;
this.attemptNumber = attemptNumber;
}
public TestCommand getCommand() {
return command;
}
public int getAttemptNumber() {
return attemptNumber;
}
@Override
public String toString() {
return super.toString() + ", attemptNumber=" + attemptNumber + ", command=" + command;
}
}
| TestCommandAckEvent |
java | alibaba__fastjson | src/test/java/com/alibaba/json/test/benchmark/basic/IntBenchmark.java | {
"start": 2302,
"end": 2439
} | class ____ {
public int v1;
public int v2;
public int v3;
public int v4;
public int v5;
}
}
| Model |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/window/groupwindow/assigners/PanedWindowAssigner.java | {
"start": 1112,
"end": 1991
} | class ____<W extends Window> extends GroupWindowAssigner<W> {
private static final long serialVersionUID = 1L;
/**
* Given the timestamp and element, returns the pane into which it should be placed.
*
* @param element The element to which windows should be assigned.
* @param timestamp The timestamp of the element when {@link #isEventTime()} returns true, or
* the current system time when {@link #isEventTime()} returns false.
*/
public abstract W assignPane(Object element, long timestamp);
/**
* Splits the given window into panes collection.
*
* @param window the window to be split.
* @return the panes iterable
*/
public abstract Iterable<W> splitIntoPanes(W window);
/** Gets the last window which the pane belongs to. */
public abstract W getLastWindow(W pane);
}
| PanedWindowAssigner |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/GetTaskAttemptReportRequestPBImpl.java | {
"start": 1454,
"end": 3596
} | class ____ extends ProtoBase<GetTaskAttemptReportRequestProto> implements GetTaskAttemptReportRequest {
GetTaskAttemptReportRequestProto proto = GetTaskAttemptReportRequestProto.getDefaultInstance();
GetTaskAttemptReportRequestProto.Builder builder = null;
boolean viaProto = false;
private TaskAttemptId taskAttemptId = null;
public GetTaskAttemptReportRequestPBImpl() {
builder = GetTaskAttemptReportRequestProto.newBuilder();
}
public GetTaskAttemptReportRequestPBImpl(GetTaskAttemptReportRequestProto proto) {
this.proto = proto;
viaProto = true;
}
public GetTaskAttemptReportRequestProto getProto() {
mergeLocalToProto();
proto = viaProto ? proto : builder.build();
viaProto = true;
return proto;
}
private void mergeLocalToBuilder() {
if (this.taskAttemptId != null) {
builder.setTaskAttemptId(convertToProtoFormat(this.taskAttemptId));
}
}
private void mergeLocalToProto() {
if (viaProto)
maybeInitBuilder();
mergeLocalToBuilder();
proto = builder.build();
viaProto = true;
}
private void maybeInitBuilder() {
if (viaProto || builder == null) {
builder = GetTaskAttemptReportRequestProto.newBuilder(proto);
}
viaProto = false;
}
@Override
public TaskAttemptId getTaskAttemptId() {
GetTaskAttemptReportRequestProtoOrBuilder p = viaProto ? proto : builder;
if (this.taskAttemptId != null) {
return this.taskAttemptId;
}
if (!p.hasTaskAttemptId()) {
return null;
}
this.taskAttemptId = convertFromProtoFormat(p.getTaskAttemptId());
return this.taskAttemptId;
}
@Override
public void setTaskAttemptId(TaskAttemptId taskAttemptId) {
maybeInitBuilder();
if (taskAttemptId == null)
builder.clearTaskAttemptId();
this.taskAttemptId = taskAttemptId;
}
private TaskAttemptIdPBImpl convertFromProtoFormat(TaskAttemptIdProto p) {
return new TaskAttemptIdPBImpl(p);
}
private TaskAttemptIdProto convertToProtoFormat(TaskAttemptId t) {
return ((TaskAttemptIdPBImpl)t).getProto();
}
}
| GetTaskAttemptReportRequestPBImpl |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/SSLConnectionConfigurator.java | {
"start": 1494,
"end": 2527
} | class ____ implements ConnectionConfigurator {
private final SSLFactory factory;
private final SSLSocketFactory sf;
private final HostnameVerifier hv;
private final int connectTimeout;
private final int readTimeout;
SSLConnectionConfigurator(int connectTimeout, int readTimeout,
Configuration conf) throws IOException, GeneralSecurityException {
factory = new SSLFactory(SSLFactory.Mode.CLIENT, conf);
factory.init();
sf = factory.createSSLSocketFactory();
hv = factory.getHostnameVerifier();
this.connectTimeout = connectTimeout;
this.readTimeout = readTimeout;
}
@Override
public HttpURLConnection configure(HttpURLConnection conn) {
if (conn instanceof HttpsURLConnection) {
HttpsURLConnection c = (HttpsURLConnection) conn;
c.setSSLSocketFactory(sf);
c.setHostnameVerifier(hv);
}
conn.setConnectTimeout(connectTimeout);
conn.setReadTimeout(readTimeout);
return conn;
}
void destroy() {
factory.destroy();
}
}
| SSLConnectionConfigurator |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeFile.java | {
"start": 3246,
"end": 5451
} | class ____ extends INodeWithAdditionalFields
implements INodeFileAttributes, BlockCollection {
/**
* Erasure Coded striped blocks have replication factor of 1.
*/
public static final short DEFAULT_REPL_FOR_STRIPED_BLOCKS = 1;
/** The same as valueOf(inode, path, false). */
public static INodeFile valueOf(INode inode, String path
) throws FileNotFoundException {
return valueOf(inode, path, false);
}
/** Cast INode to INodeFile. */
public static INodeFile valueOf(INode inode, String path, boolean acceptNull)
throws FileNotFoundException {
if (inode == null) {
if (acceptNull) {
return null;
} else {
throw new FileNotFoundException("File does not exist: " + path);
}
}
if (!inode.isFile()) {
throw new FileNotFoundException("Path is not a file: " + path);
}
return inode.asFile();
}
/**
* Bit format:
* [4-bit storagePolicyID][12-bit BLOCK_LAYOUT_AND_REDUNDANCY]
* [48-bit preferredBlockSize]
*
* BLOCK_LAYOUT_AND_REDUNDANCY contains 12 bits and describes the layout and
* redundancy of a block. We use the highest 1 bit to determine whether the
* block is replica or erasure coded. For replica blocks, the tail 11 bits
* stores the replication factor. For erasure coded blocks, the tail 11 bits
* stores the EC policy ID, and in the future, we may further divide these
* 11 bits to store both the EC policy ID and replication factor for erasure
* coded blocks. The layout of this section is demonstrated as below.
*
* Another possible future extension is for future block types, in which case
* the 'Replica or EC' bit may be extended into the 11 bit field.
*
* +---------------+-------------------------------+
* | 1 bit | 11 bit |
* +---------------+-------------------------------+
* | Replica or EC |Replica factor or EC policy ID |
* +---------------+-------------------------------+
*
* BLOCK_LAYOUT_AND_REDUNDANCY format for replicated block:
* 0 [11-bit replication]
*
* BLOCK_LAYOUT_AND_REDUNDANCY format for striped block:
* 1 [11-bit ErasureCodingPolicy ID]
*/
| INodeFile |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/event/qualifier/EventDefaultQualifierTest.java | {
"start": 4153,
"end": 4217
} | class ____ {
}
@ApplicationScoped
public static | Payload |
java | spring-projects__spring-boot | module/spring-boot-couchbase/src/main/java/org/springframework/boot/couchbase/autoconfigure/CouchbaseProperties.java | {
"start": 1155,
"end": 2197
} | class ____ {
/**
* Connection string used to locate the Couchbase cluster.
*/
private @Nullable String connectionString;
/**
* Cluster username.
*/
private @Nullable String username;
/**
* Cluster password.
*/
private @Nullable String password;
private final Authentication authentication = new Authentication();
private final Env env = new Env();
public @Nullable String getConnectionString() {
return this.connectionString;
}
public void setConnectionString(@Nullable String connectionString) {
this.connectionString = connectionString;
}
public @Nullable String getUsername() {
return this.username;
}
public void setUsername(@Nullable String username) {
this.username = username;
}
public @Nullable String getPassword() {
return this.password;
}
public void setPassword(@Nullable String password) {
this.password = password;
}
public Authentication getAuthentication() {
return this.authentication;
}
public Env getEnv() {
return this.env;
}
public static | CouchbaseProperties |
java | spring-projects__spring-framework | spring-beans/src/main/java/org/springframework/beans/propertyeditors/CustomCollectionEditor.java | {
"start": 5253,
"end": 7407
} | interface ____ Collection
* @param initialCapacity the initial capacity
* @return the new Collection instance
*/
@SuppressWarnings({"rawtypes", "unchecked"})
protected Collection<Object> createCollection(Class<? extends Collection> collectionType, int initialCapacity) {
if (!collectionType.isInterface()) {
try {
return ReflectionUtils.accessibleConstructor(collectionType).newInstance();
}
catch (Throwable ex) {
throw new IllegalArgumentException(
"Could not instantiate collection class: " + collectionType.getName(), ex);
}
}
else if (List.class == collectionType) {
return new ArrayList<>(initialCapacity);
}
else if (SortedSet.class == collectionType) {
return new TreeSet<>();
}
else {
return new LinkedHashSet<>(initialCapacity);
}
}
/**
* Return whether to always create a new Collection,
* even if the type of the passed-in Collection already matches.
* <p>Default is "false"; can be overridden to enforce creation of a
* new Collection, for example to convert elements in any case.
* @see #convertElement
*/
protected boolean alwaysCreateNewCollection() {
return false;
}
/**
* Hook to convert each encountered Collection/array element.
* The default implementation simply returns the passed-in element as-is.
* <p>Can be overridden to perform conversion of certain elements,
* for example String to Integer if a String array comes in and
* should be converted to a Set of Integer objects.
* <p>Only called if actually creating a new Collection!
* This is by default not the case if the type of the passed-in Collection
* already matches. Override {@link #alwaysCreateNewCollection()} to
* enforce creating a new Collection in every case.
* @param element the source element
* @return the element to be used in the target Collection
* @see #alwaysCreateNewCollection()
*/
protected Object convertElement(Object element) {
return element;
}
/**
* This implementation returns {@code null} to indicate that
* there is no appropriate text representation.
*/
@Override
public @Nullable String getAsText() {
return null;
}
}
| of |
java | spring-projects__spring-framework | spring-aop/src/main/java/org/springframework/aop/framework/JdkDynamicAopProxy.java | {
"start": 1966,
"end": 2095
} | class ____ internal
* to Spring's AOP framework and need not be used directly by client code.
*
* <p>Proxies created using this | is |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/search/QueryParserHelper.java | {
"start": 1013,
"end": 7506
} | class ____ {
private QueryParserHelper() {}
/**
* Convert a list of field names encoded with optional boosts to a map that associates
* the field name and its boost.
* @param fields The list of fields encoded with optional boosts (e.g. ^0.35).
* @return The converted map with field names and associated boosts.
*/
public static Map<String, Float> parseFieldsAndWeights(List<String> fields) {
final Map<String, Float> fieldsAndWeights = new HashMap<>();
for (String field : fields) {
int boostIndex = field.indexOf('^');
String fieldName;
float boost = 1.0f;
if (boostIndex != -1) {
fieldName = field.substring(0, boostIndex);
boost = Float.parseFloat(field.substring(boostIndex + 1));
} else {
fieldName = field;
}
// handle duplicates
if (fieldsAndWeights.containsKey(field)) {
boost *= fieldsAndWeights.get(field);
}
fieldsAndWeights.put(fieldName, boost);
}
return fieldsAndWeights;
}
public static Map<String, Float> resolveMappingFields(SearchExecutionContext context, Map<String, Float> fieldsAndWeights) {
return resolveMappingFields(context, fieldsAndWeights, null);
}
/**
* Resolve all the field names and patterns present in the provided map with the
* {@link SearchExecutionContext} and returns a new map containing all the expanded fields with their original boost.
* @param context The context of the query.
* @param fieldsAndWeights The map of fields and weights to expand.
* @param fieldSuffix The suffix name to add to the expanded field names if a mapping exists for that name.
* The original name of the field is kept if adding the suffix to the field name does not point to a valid field
* in the mapping.
*/
static Map<String, Float> resolveMappingFields(
SearchExecutionContext context,
Map<String, Float> fieldsAndWeights,
String fieldSuffix
) {
Map<String, Float> resolvedFields = new HashMap<>();
for (Map.Entry<String, Float> fieldEntry : fieldsAndWeights.entrySet()) {
boolean allField = Regex.isMatchAllPattern(fieldEntry.getKey());
boolean multiField = Regex.isSimpleMatchPattern(fieldEntry.getKey());
float weight = fieldEntry.getValue() == null ? 1.0f : fieldEntry.getValue();
Map<String, Float> fieldMap = resolveMappingField(
context,
fieldEntry.getKey(),
weight,
multiField == false,
allField == false,
fieldSuffix
);
for (Map.Entry<String, Float> field : fieldMap.entrySet()) {
float boost = field.getValue();
if (resolvedFields.containsKey(field.getKey())) {
boost *= resolvedFields.get(field.getKey());
}
resolvedFields.put(field.getKey(), boost);
}
}
checkForTooManyFields(resolvedFields.size(), null);
return resolvedFields;
}
/**
* Resolves the provided pattern or field name from the {@link SearchExecutionContext} and return a map of
* the expanded fields with their original boost.
* @param context The context of the query
* @param fieldOrPattern The field name or the pattern to resolve
* @param weight The weight for the field
* @param acceptAllTypes Whether all field type should be added when a pattern is expanded.
* If false, only searchable field types are added.
* @param acceptMetadataField Whether metadata fields should be added when a pattern is expanded.
* @param fieldSuffix The suffix name to add to the expanded field names if a mapping exists for that name.
* The original name of the field is kept if adding the suffix to the field name does not point to a valid field
* in the mapping.
*/
static Map<String, Float> resolveMappingField(
SearchExecutionContext context,
String fieldOrPattern,
float weight,
boolean acceptAllTypes,
boolean acceptMetadataField,
String fieldSuffix
) {
Set<String> allFields = context.getMatchingFieldNames(fieldOrPattern);
Map<String, Float> fields = new HashMap<>();
for (String fieldName : allFields) {
if (fieldSuffix != null && context.isFieldMapped(fieldName + fieldSuffix)) {
fieldName = fieldName + fieldSuffix;
}
MappedFieldType fieldType = context.getFieldType(fieldName);
if (acceptMetadataField == false && fieldType.name().startsWith("_")) {
// Ignore metadata fields
continue;
}
if (acceptAllTypes == false) {
if (fieldType.getTextSearchInfo() == TextSearchInfo.NONE || fieldType.mayExistInIndex(context) == false) {
continue;
}
}
// Deduplicate aliases and their concrete fields.
String resolvedFieldName = fieldType.name();
if (allFields.contains(resolvedFieldName)) {
fieldName = resolvedFieldName;
}
float w = fields.getOrDefault(fieldName, 1.0F);
fields.put(fieldName, w * weight);
}
return fields;
}
static void checkForTooManyFields(int numberOfFields, @Nullable String inputPattern) {
int limit = IndexSearcher.getMaxClauseCount();
if (numberOfFields > limit) {
StringBuilder errorMsg = new StringBuilder("field expansion ");
if (inputPattern != null) {
errorMsg.append("for [" + inputPattern + "] ");
}
errorMsg.append("matches too many fields, limit: " + limit + ", got: " + numberOfFields);
throw new IllegalArgumentException(errorMsg.toString());
}
}
/**
* Returns true if any of the fields is the wildcard {@code *}, false otherwise.
* @param fields A collection of field names
*/
public static boolean hasAllFieldsWildcard(Collection<String> fields) {
return fields.stream().anyMatch(Regex::isMatchAllPattern);
}
}
| QueryParserHelper |
java | apache__camel | components/camel-datasonnet/src/test/java/org/apache/camel/language/datasonnet/Manufacturer.java | {
"start": 884,
"end": 2169
} | class ____ {
private String manufacturerName;
private String manufacturerCode;
public String getManufacturerName() {
return manufacturerName;
}
public void setManufacturerName(String manufacturerName) {
this.manufacturerName = manufacturerName;
}
public String getManufacturerCode() {
return manufacturerCode;
}
public void setManufacturerCode(String manufacturerCode) {
this.manufacturerCode = manufacturerCode;
}
@Override
public String toString() {
return "Manufacturer{" +
"manufacturerName='" + manufacturerName + '\'' +
", manufacturerCode='" + manufacturerCode + '\'' +
'}';
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Manufacturer that = (Manufacturer) o;
return Objects.equals(getManufacturerName(), that.getManufacturerName()) &&
Objects.equals(getManufacturerCode(), that.getManufacturerCode());
}
@Override
public int hashCode() {
return Objects.hash(getManufacturerName(), getManufacturerCode());
}
}
| Manufacturer |
java | apache__logging-log4j2 | log4j-api/src/main/java/org/apache/logging/log4j/message/ParameterizedMessageFactory.java | {
"start": 1375,
"end": 5528
} | class ____ extends AbstractMessageFactory {
/**
* Instance of ParameterizedMessageFactory.
*/
public static final ParameterizedMessageFactory INSTANCE = new ParameterizedMessageFactory();
private static final long serialVersionUID = -8970940216592525651L;
/**
* Constructs a message factory.
*/
public ParameterizedMessageFactory() {}
/**
* Creates {@link ParameterizedMessage} instances.
*
* @param message The message pattern.
* @param params The message parameters.
* @return The Message.
*
* @see MessageFactory#newMessage(String, Object...)
*/
@Override
public Message newMessage(final String message, final Object... params) {
return new ParameterizedMessage(message, params);
}
/**
* @since 2.6.1
*/
@Override
public Message newMessage(final String message, final Object p0) {
return new ParameterizedMessage(message, p0);
}
/**
* @since 2.6.1
*/
@Override
public Message newMessage(final String message, final Object p0, final Object p1) {
return new ParameterizedMessage(message, p0, p1);
}
/**
* @since 2.6.1
*/
@Override
public Message newMessage(final String message, final Object p0, final Object p1, final Object p2) {
return new ParameterizedMessage(message, p0, p1, p2);
}
/**
* @since 2.6.1
*/
@Override
public Message newMessage(
final String message, final Object p0, final Object p1, final Object p2, final Object p3) {
return new ParameterizedMessage(message, p0, p1, p2, p3);
}
/**
* @since 2.6.1
*/
@Override
public Message newMessage(
final String message, final Object p0, final Object p1, final Object p2, final Object p3, final Object p4) {
return new ParameterizedMessage(message, p0, p1, p2, p3, p4);
}
/**
* @since 2.6.1
*/
@Override
public Message newMessage(
final String message,
final Object p0,
final Object p1,
final Object p2,
final Object p3,
final Object p4,
final Object p5) {
return new ParameterizedMessage(message, p0, p1, p2, p3, p4, p5);
}
/**
* @since 2.6.1
*/
@Override
public Message newMessage(
final String message,
final Object p0,
final Object p1,
final Object p2,
final Object p3,
final Object p4,
final Object p5,
final Object p6) {
return new ParameterizedMessage(message, p0, p1, p2, p3, p4, p5, p6);
}
/**
* @since 2.6.1
*/
@Override
public Message newMessage(
final String message,
final Object p0,
final Object p1,
final Object p2,
final Object p3,
final Object p4,
final Object p5,
final Object p6,
final Object p7) {
return new ParameterizedMessage(message, p0, p1, p2, p3, p4, p5, p6, p7);
}
/**
* @since 2.6.1
*/
@Override
public Message newMessage(
final String message,
final Object p0,
final Object p1,
final Object p2,
final Object p3,
final Object p4,
final Object p5,
final Object p6,
final Object p7,
final Object p8) {
return new ParameterizedMessage(message, p0, p1, p2, p3, p4, p5, p6, p7, p8);
}
/**
* @since 2.6.1
*/
@Override
public Message newMessage(
final String message,
final Object p0,
final Object p1,
final Object p2,
final Object p3,
final Object p4,
final Object p5,
final Object p6,
final Object p7,
final Object p8,
final Object p9) {
return new ParameterizedMessage(message, p0, p1, p2, p3, p4, p5, p6, p7, p8, p9);
}
}
| ParameterizedMessageFactory |
java | apache__kafka | group-coordinator/group-coordinator-api/src/main/java/org/apache/kafka/coordinator/group/api/assignor/GroupAssignment.java | {
"start": 971,
"end": 1874
} | class ____ {
/**
* The member assignments keyed by member id.
*/
private final Map<String, MemberAssignment> members;
public GroupAssignment(
Map<String, MemberAssignment> members
) {
this.members = Objects.requireNonNull(members);
}
/**
* @return Member assignments keyed by member Ids.
*/
public Map<String, MemberAssignment> members() {
return members;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
GroupAssignment that = (GroupAssignment) o;
return members.equals(that.members);
}
@Override
public int hashCode() {
return members.hashCode();
}
@Override
public String toString() {
return "GroupAssignment(members=" + members + ')';
}
}
| GroupAssignment |
java | micronaut-projects__micronaut-core | json-core/src/main/java/io/micronaut/json/tree/JsonObject.java | {
"start": 973,
"end": 2236
} | class ____ extends JsonContainer {
private final Map<String, JsonNode> values;
JsonObject(Map<String, JsonNode> values) {
this.values = values;
}
@Override
public Object getValue() {
Map<String, Object> newMap = CollectionUtils.newLinkedHashMap(values.size());
for (Map.Entry<String, JsonNode> e : values.entrySet()) {
newMap.put(e.getKey(), e.getValue().getValue());
}
return newMap;
}
@Override
public int size() {
return values.size();
}
@Override
public boolean isObject() {
return true;
}
@Override
public JsonNode get(@NonNull String fieldName) {
return values.get(fieldName);
}
@Override
public JsonNode get(int index) {
return null;
}
@Override
@NonNull
public Iterable<JsonNode> values() {
return values.values();
}
@Override
@NonNull
public Iterable<Map.Entry<String, JsonNode>> entries() {
return values.entrySet();
}
@Override
public boolean equals(Object o) {
return o instanceof JsonObject jo && jo.values.equals(values);
}
@Override
public int hashCode() {
return values.hashCode();
}
}
| JsonObject |
java | google__dagger | dagger-compiler/main/java/dagger/internal/codegen/xprocessing/XCodeBlocks.java | {
"start": 8151,
"end": 9255
} | class ____ {
private final String delimiter;
private final XCodeBlock.Builder builder;
private boolean first = true;
XCodeBlockJoiner(String delimiter, XCodeBlock.Builder builder) {
this.delimiter = delimiter;
this.builder = builder;
}
@CanIgnoreReturnValue
XCodeBlockJoiner add(XCodeBlock codeBlock) {
if (!first) {
if (!toKotlinPoet(codeBlock).isEmpty()) {
toKotlinPoet(builder).add(delimiter);
}
if (!toJavaPoet(codeBlock).isEmpty()) {
toJavaPoet(builder).add(delimiter);
}
}
first = false;
if (!toKotlinPoet(codeBlock).isEmpty()) {
toKotlinPoet(builder).add(toKotlinPoet(codeBlock));
}
if (!toJavaPoet(codeBlock).isEmpty()) {
toJavaPoet(builder).add(toJavaPoet(codeBlock));
}
return this;
}
@CanIgnoreReturnValue
XCodeBlockJoiner merge(XCodeBlockJoiner other) {
add(other.builder.build());
return this;
}
XCodeBlock join() {
return builder.build();
}
}
private XCodeBlocks() {}
}
| XCodeBlockJoiner |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/strategy/SchedulingTopology.java | {
"start": 1140,
"end": 2675
} | interface ____
extends Topology<
ExecutionVertexID,
IntermediateResultPartitionID,
SchedulingExecutionVertex,
SchedulingResultPartition,
SchedulingPipelinedRegion> {
/**
* Looks up the {@link SchedulingExecutionVertex} for the given {@link ExecutionVertexID}.
*
* @param executionVertexId identifying the respective scheduling vertex
* @return The respective scheduling vertex
* @throws IllegalArgumentException If the vertex does not exist
*/
SchedulingExecutionVertex getVertex(ExecutionVertexID executionVertexId);
/**
* Looks up the {@link SchedulingResultPartition} for the given {@link
* IntermediateResultPartitionID}.
*
* @param intermediateResultPartitionId identifying the respective scheduling result partition
* @return The respective scheduling result partition
* @throws IllegalArgumentException If the partition does not exist
*/
SchedulingResultPartition getResultPartition(
IntermediateResultPartitionID intermediateResultPartitionId);
/**
* Register a scheduling topology listener. The listener will be notified by {@link
* SchedulingTopologyListener#notifySchedulingTopologyUpdated(SchedulingTopology, List)} when
* the scheduling topology is updated.
*
* @param listener the registered listener.
*/
void registerSchedulingTopologyListener(SchedulingTopologyListener listener);
}
| SchedulingTopology |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamAction.java | {
"start": 1355,
"end": 1860
} | class ____ implements Writeable, ToXContentObject {
private static final ParseField DATA_STREAM = new ParseField("data_stream");
private static final ParseField INDEX = new ParseField("index");
private static final ParseField FAILURE_STORE = new ParseField("failure_store");
private static final ParseField ADD_BACKING_INDEX = new ParseField("add_backing_index");
private static final ParseField REMOVE_BACKING_INDEX = new ParseField("remove_backing_index");
public | DataStreamAction |
java | micronaut-projects__micronaut-core | core-processor/src/main/java/io/micronaut/inject/ast/ElementFactory.java | {
"start": 5196,
"end": 5316
} | enum ____
* @param elementAnnotationMetadataFactory The element annotation metadata factory
* @return The | constant |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceAggregatorFactory.java | {
"start": 896,
"end": 2591
} | class ____ extends AggregatorFactory {
protected final Map<String, ValuesSourceConfig> configs;
protected final DocValueFormat format;
public MultiValuesSourceAggregatorFactory(
String name,
Map<String, ValuesSourceConfig> configs,
DocValueFormat format,
AggregationContext context,
AggregatorFactory parent,
AggregatorFactories.Builder subFactoriesBuilder,
Map<String, Object> metadata
) throws IOException {
super(name, context, parent, subFactoriesBuilder, metadata);
this.configs = configs;
this.format = format;
}
@Override
public Aggregator createInternal(Aggregator parent, CardinalityUpperBound cardinality, Map<String, Object> metadata)
throws IOException {
return doCreateInternal(configs, format, parent, cardinality, metadata);
}
/**
* Create an aggregator that won't collect anything but will return an
* appropriate empty aggregation.
*/
protected abstract Aggregator createUnmapped(Aggregator parent, Map<String, Object> metadata) throws IOException;
/**
* Create the {@linkplain Aggregator}.
*
* @param cardinality Upper bound of the number of {@code owningBucketOrd}s
* that the {@link Aggregator} created by this method
* will be asked to collect.
*/
protected abstract Aggregator doCreateInternal(
Map<String, ValuesSourceConfig> configs,
DocValueFormat format,
Aggregator parent,
CardinalityUpperBound cardinality,
Map<String, Object> metadata
) throws IOException;
}
| MultiValuesSourceAggregatorFactory |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/targetthis/TargetThisMappingTest.java | {
"start": 784,
"end": 5715
} | class ____ {
@ProcessorTest
@WithClasses( SimpleMapper.class )
public void testTargetingThis() {
CustomerDTO ce = new CustomerDTO();
ce.setName( "customer name" );
ItemDTO e = new ItemDTO();
e.setId( "item id" );
e.setStatus( 1 );
ce.setItem( e );
CustomerItem c = SimpleMapper.INSTANCE.map( ce );
assertThat( c ).isNotNull();
assertThat( c.getName() ).isNull();
assertThat( c.getId() ).isEqualTo( ce.getItem().getId() );
assertThat( c.getStatus() ).isEqualTo( ce.getItem().getStatus() );
}
@ProcessorTest
@WithClasses( NestedMapper.class )
public void testTargetingThisWithNestedLevels() {
CustomerDTO customerDTO = new CustomerDTO();
customerDTO.setName( "customer name" );
ItemDTO itemDTO = new ItemDTO();
itemDTO.setId( "item id" );
itemDTO.setStatus( 1 );
customerDTO.setItem( itemDTO );
OrderDTO order = new OrderDTO();
order.setCustomer( customerDTO );
OrderItem c = NestedMapper.INSTANCE.map( order );
assertThat( c ).isNotNull();
assertThat( c.getId() ).isEqualTo( customerDTO.getItem().getId() );
assertThat( c.getStatus() ).isEqualTo( customerDTO.getItem().getStatus() );
}
@ProcessorTest
@WithClasses( SimpleMapperWithIgnore.class )
public void testTargetingThisWithIgnore() {
CustomerDTO ce = new CustomerDTO();
ce.setName( "customer name" );
ItemDTO e = new ItemDTO();
e.setId( "item id" );
e.setStatus( 1 );
ce.setItem( e );
CustomerItem c = SimpleMapperWithIgnore.INSTANCE.map( ce );
assertThat( c ).isNotNull();
assertThat( c.getName() ).isEqualTo( "customer name" );
assertThat( c.getId() ).isNull();
assertThat( c.getStatus() ).isEqualTo( ce.getItem().getStatus() );
}
@ProcessorTest
@WithClasses( ErroneousNestedMapper.class )
@ExpectedCompilationOutcome(
value = CompilationResult.FAILED,
diagnostics = {
@Diagnostic(type = ErroneousNestedMapper.class,
kind = javax.tools.Diagnostic.Kind.ERROR,
line = 22,
message = "Several possible source properties for target property \"id\"."),
@Diagnostic(type = ErroneousNestedMapper.class,
kind = javax.tools.Diagnostic.Kind.ERROR,
line = 22,
message = "Several possible source properties for target property \"status\".")
}
)
public void testNestedDuplicates() {
}
@ProcessorTest
@WithClasses( ConfictsResolvedNestedMapper.class )
public void testWithConflictsResolved() {
OrderDTO orderDTO = new OrderDTO();
orderDTO.setItem( new ItemDTO() );
orderDTO.getItem().setId( "item1" );
orderDTO.getItem().setStatus( 1 );
orderDTO.setCustomer( new CustomerDTO() );
orderDTO.getCustomer().setName( "customer name" );
orderDTO.getCustomer().setItem( new ItemDTO() );
orderDTO.getCustomer().getItem().setId( "item2" );
orderDTO.getCustomer().getItem().setStatus( 2 );
OrderItem c = ConfictsResolvedNestedMapper.INSTANCE.map( orderDTO );
assertThat( c ).isNotNull();
assertThat( c.getStatus() ).isEqualTo( orderDTO.getItem().getStatus() );
assertThat( c.getId() ).isEqualTo( orderDTO.getCustomer().getItem().getId() );
}
@ProcessorTest
@WithClasses( FlatteningMapper.class )
public void testFlattening() {
FlatteningMapper.CustomerDTO customerDTO = new FlatteningMapper.CustomerDTO();
customerDTO.setName( new FlatteningMapper.NameDTO() );
customerDTO.getName().setName( "john doe" );
customerDTO.getName().setId( "1" );
customerDTO.setAccount( new FlatteningMapper.AccountDTO() );
customerDTO.getAccount().setDetails( "nice guys" );
customerDTO.getAccount().setNumber( "11223344" );
FlatteningMapper.Customer customer = FlatteningMapper.INSTANCE.flatten( customerDTO );
assertThat( customer ).isNotNull();
assertThat( customer.getName() ).isEqualTo( "john doe" );
assertThat( customer.getId() ).isEqualTo( "1" );
assertThat( customer.getDetails() ).isEqualTo( "nice guys" );
assertThat( customer.getNumber() ).isEqualTo( "11223344" );
FlatteningMapper.CustomerDTO customerDTO2 = FlatteningMapper.INSTANCE.expand( customer );
assertThat( customerDTO2 ).isNotNull();
assertThat( customerDTO2.getName().getName() ).isEqualTo( "john doe" );
assertThat( customerDTO2.getName().getId() ).isEqualTo( "1" );
assertThat( customerDTO2.getAccount().getDetails() ).isEqualTo( "nice guys" );
assertThat( customerDTO2.getAccount().getNumber() ).isEqualTo( "11223344" );
}
}
| TargetThisMappingTest |
java | apache__kafka | trogdor/src/main/java/org/apache/kafka/trogdor/agent/WorkerManager.java | {
"start": 3898,
"end": 5839
} | class ____ implements AutoCloseable {
AtomicBoolean closed = new AtomicBoolean(false);
@Override
public void close() {
if (closed.compareAndSet(false, true)) {
synchronized (ShutdownManager.this) {
refCount--;
if (shutdown && (refCount == 0)) {
ShutdownManager.this.notifyAll();
}
}
}
}
}
synchronized Reference takeReference() {
if (shutdown) {
throw new KafkaException("WorkerManager is shut down.");
}
refCount++;
return new Reference();
}
synchronized boolean shutdown() {
if (shutdown) {
return false;
}
shutdown = true;
if (refCount == 0) {
this.notifyAll();
}
return true;
}
synchronized void waitForQuiescence() throws InterruptedException {
while ((!shutdown) || (refCount > 0)) {
this.wait();
}
}
}
WorkerManager(Platform platform, Scheduler scheduler) {
this.platform = platform;
this.nodeName = platform.curNode().name();
this.scheduler = scheduler;
this.time = scheduler.time();
this.workers = new HashMap<>();
this.stateChangeExecutor = Executors.newSingleThreadScheduledExecutor(
ThreadUtils.createThreadFactory("WorkerManagerStateThread", false));
this.workerCleanupExecutor = Executors.newCachedThreadPool(
ThreadUtils.createThreadFactory("WorkerCleanupThread%d", false));
this.shutdownExecutor = Executors.newScheduledThreadPool(0,
ThreadUtils.createThreadFactory("WorkerManagerShutdownThread%d", false));
}
| Reference |
java | redisson__redisson | redisson/src/main/java/org/redisson/client/protocol/convertor/TrueReplayConvertor.java | {
"start": 708,
"end": 853
} | class ____ implements Convertor<Boolean> {
@Override
public Boolean convert(Object obj) {
return true;
}
}
| TrueReplayConvertor |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/path/PathAssert_hasNoExtension_Test.java | {
"start": 785,
"end": 1102
} | class ____ extends PathAssertBaseTest {
@Override
protected PathAssert invoke_api_method() {
return assertions.hasNoExtension();
}
@Override
protected void verify_internal_effects() {
verify(paths).assertHasNoExtension(getInfo(assertions), getActual(assertions));
}
}
| PathAssert_hasNoExtension_Test |
java | spring-projects__spring-boot | module/spring-boot-devtools/src/test/java/org/springframework/boot/devtools/classpath/ClassPathFileSystemWatcherTests.java | {
"start": 4089,
"end": 4437
} | class ____ implements ApplicationListener<ClassPathChangedEvent> {
private final List<ClassPathChangedEvent> events = new CopyOnWriteArrayList<>();
@Override
public void onApplicationEvent(ClassPathChangedEvent event) {
this.events.add(event);
}
List<ClassPathChangedEvent> getEvents() {
return this.events;
}
}
static | Listener |
java | alibaba__nacos | api/src/test/java/com/alibaba/nacos/api/ai/model/mcp/McpServerVersionInfoTest.java | {
"start": 1128,
"end": 4317
} | class ____ extends BasicRequestTest {
@Test
void testSerialize() throws JsonProcessingException {
McpServerVersionInfo mcpServerVersionInfo = new McpServerVersionInfo();
mcpServerVersionInfo.setId(UUID.randomUUID().toString());
mcpServerVersionInfo.setName("testVersionInfo");
mcpServerVersionInfo.setLatestPublishedVersion("1.0.0");
mcpServerVersionInfo.setVersionDetail(new ServerVersionDetail());
mcpServerVersionInfo.getVersionDetail().setVersion("1.0.0");
mcpServerVersionInfo.getVersionDetail().setRelease_date("2023-07-01T00:00:00Z");
mcpServerVersionInfo.getVersionDetail().setIs_latest(true);
mcpServerVersionInfo.setVersions(Collections.singletonList(mcpServerVersionInfo.getVersionDetail()));
String json = mapper.writeValueAsString(mcpServerVersionInfo);
assertNotNull(json);
assertTrue(json.contains(String.format("\"id\":\"%s\"", mcpServerVersionInfo.getId())));
assertTrue(json.contains("\"name\":\"testVersionInfo\""));
assertTrue(json.contains("\"versionDetail\":{"));
assertTrue(json.contains("\"version\":\"1.0.0\""));
assertTrue(json.contains("\"release_date\":\"2023-07-01T00:00:00Z\""));
assertTrue(json.contains("\"is_latest\":true"));
assertTrue(json.contains("\"latestPublishedVersion\":\"1.0.0\""));
assertTrue(json.contains("\"versionDetails\":[{"));
}
@Test
void testDeserialize() throws JsonProcessingException {
String json = "{\"id\":\"b646506e-901b-41a1-8790-a4378d11055e\",\"name\":\"testVersionInfo\",\"versionDetail\":"
+ "{\"version\":\"1.0.0\",\"release_date\":\"2023-07-01T00:00:00Z\",\"is_latest\":true},\"enabled\":true,"
+ "\"latestPublishedVersion\":\"1.0.0\",\"versionDetails\":[{\"version\":\"1.0.0\",\"release_date\":"
+ "\"2023-07-01T00:00:00Z\",\"is_latest\":true}]}";
McpServerVersionInfo mcpServerVersionInfo = mapper.readValue(json, McpServerVersionInfo.class);
assertNotNull(mcpServerVersionInfo);
assertEquals("b646506e-901b-41a1-8790-a4378d11055e", mcpServerVersionInfo.getId());
assertEquals("testVersionInfo", mcpServerVersionInfo.getName());
assertNotNull(mcpServerVersionInfo.getVersionDetail());
assertEquals("1.0.0", mcpServerVersionInfo.getVersionDetail().getVersion());
assertEquals("2023-07-01T00:00:00Z", mcpServerVersionInfo.getVersionDetail().getRelease_date());
assertTrue(mcpServerVersionInfo.getVersionDetail().getIs_latest());
assertTrue(mcpServerVersionInfo.isEnabled());
assertEquals("1.0.0", mcpServerVersionInfo.getLatestPublishedVersion());
assertNotNull(mcpServerVersionInfo.getVersionDetails());
assertEquals(1, mcpServerVersionInfo.getVersionDetails().size());
ServerVersionDetail versionDetail = mcpServerVersionInfo.getVersionDetails().get(0);
assertEquals("1.0.0", versionDetail.getVersion());
assertEquals("2023-07-01T00:00:00Z", versionDetail.getRelease_date());
assertTrue(versionDetail.getIs_latest());
}
} | McpServerVersionInfoTest |
java | dropwizard__dropwizard | dropwizard-servlets/src/test/java/io/dropwizard/servlets/ServletsTest.java | {
"start": 297,
"end": 1074
} | class ____ {
private final HttpServletRequest request = mock(HttpServletRequest.class);
private final HttpServletRequest fullRequest = mock(HttpServletRequest.class);
@BeforeEach
void setUp() throws Exception {
when(request.getRequestURI()).thenReturn("/one/two");
when(fullRequest.getRequestURI()).thenReturn("/one/two");
when(fullRequest.getQueryString()).thenReturn("one=two&three=four");
}
@Test
void formatsBasicURIs() throws Exception {
assertThat(Servlets.getFullUrl(request))
.isEqualTo("/one/two");
}
@Test
void formatsFullURIs() throws Exception {
assertThat(Servlets.getFullUrl(fullRequest))
.isEqualTo("/one/two?one=two&three=four");
}
}
| ServletsTest |
java | apache__camel | components/camel-mail/src/test/java/org/apache/camel/component/mail/MailEndpointTest.java | {
"start": 3265,
"end": 3317
} | class ____ extends MailBinding {
}
}
| MyMailBinding |
java | elastic__elasticsearch | libs/gpu-codec/src/main/java/org/elasticsearch/gpu/codec/MergedQuantizedVectorValues.java | {
"start": 11697,
"end": 13232
} | class ____ extends QuantizedByteVectorValues {
private final QuantizedByteVectorValues in;
private final VectorSimilarityFunction vectorSimilarityFunction;
private final ScalarQuantizer scalarQuantizer, oldScalarQuantizer;
OffsetCorrectedQuantizedByteVectorValues(
QuantizedByteVectorValues in,
VectorSimilarityFunction vectorSimilarityFunction,
ScalarQuantizer scalarQuantizer,
ScalarQuantizer oldScalarQuantizer
) {
this.in = in;
this.vectorSimilarityFunction = vectorSimilarityFunction;
this.scalarQuantizer = scalarQuantizer;
this.oldScalarQuantizer = oldScalarQuantizer;
}
@Override
public float getScoreCorrectionConstant(int ord) throws IOException {
return scalarQuantizer.recalculateCorrectiveOffset(in.vectorValue(ord), oldScalarQuantizer, vectorSimilarityFunction);
}
@Override
public int dimension() {
return in.dimension();
}
@Override
public int size() {
return in.size();
}
@Override
public byte[] vectorValue(int ord) throws IOException {
return in.vectorValue(ord);
}
@Override
public int ordToDoc(int ord) {
return in.ordToDoc(ord);
}
@Override
public DocIndexIterator iterator() {
return in.iterator();
}
}
}
| OffsetCorrectedQuantizedByteVectorValues |
java | apache__camel | components/camel-microprofile/camel-microprofile-fault-tolerance/src/test/java/org/apache/camel/component/microprofile/faulttolerance/FaultToleranceRefConfigurationNoReflectionTest.java | {
"start": 1417,
"end": 3397
} | class ____ extends CamelTestSupport {
private BeanIntrospection bi;
@Override
protected CamelContext createCamelContext() throws Exception {
CamelContext context = super.createCamelContext();
bi = PluginHelper.getBeanIntrospection(context);
bi.setLoggingLevel(LoggingLevel.INFO);
bi.resetCounters();
FaultToleranceConfigurationDefinition config = new FaultToleranceConfigurationDefinition();
config.setTimeoutPoolSize("5");
config.setFailureRatio("25");
config.setRequestVolumeThreshold("10");
config.setDelay("5000");
context.getRegistry().bind("myConfig", config);
return context;
}
@Test
public void testFaultTolerance() throws Exception {
assertEquals(0, bi.getInvokedCounter());
getMockEndpoint("mock:result").expectedBodiesReceived("Bye World");
getMockEndpoint("mock:result").expectedPropertyReceived(CircuitBreakerConstants.RESPONSE_SUCCESSFUL_EXECUTION, true);
getMockEndpoint("mock:result").expectedPropertyReceived(CircuitBreakerConstants.RESPONSE_FROM_FALLBACK, false);
template.sendBody("direct:start", "Hello World");
MockEndpoint.assertIsSatisfied(context);
assertEquals(0, bi.getInvokedCounter());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").circuitBreaker().configuration("myConfig").faultToleranceConfiguration().delay(2000)
.timeoutEnabled(true).timeoutDuration(5000).end()
.to("direct:foo").to("log:foo").onFallback().transform().constant("Fallback message").end()
.to("log:result").to("mock:result");
from("direct:foo").transform().constant("Bye World");
}
};
}
}
| FaultToleranceRefConfigurationNoReflectionTest |
java | apache__camel | components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/issues/SpringTwoCamelContextDirectEndpointTest.java | {
"start": 1224,
"end": 2646
} | class ____ {
protected AbstractXmlApplicationContext createApplicationContext() {
return new ClassPathXmlApplicationContext("org/apache/camel/spring/issues/SpringTwoCamelContextDirectEndpointTest.xml");
}
@Test
public void testSpringTwoCamelContextDirectEndpoint() throws Exception {
AbstractXmlApplicationContext ac = createApplicationContext();
ac.start();
CamelContext camel1 = ac.getBean("camel-1", CamelContext.class);
CamelContext camel2 = ac.getBean("camel-2", CamelContext.class);
Endpoint start1 = camel1.getEndpoint("direct:start");
Endpoint start2 = camel2.getEndpoint("direct:start");
assertNotSame(start1, start2);
Endpoint foo1 = camel1.getEndpoint("direct:foo");
Endpoint foo2 = camel2.getEndpoint("direct:foo");
assertNotSame(foo1, foo2);
MockEndpoint mock1 = camel1.getEndpoint("mock:a", MockEndpoint.class);
mock1.expectedBodiesReceived("Hello World");
MockEndpoint mock2 = camel2.getEndpoint("mock:b", MockEndpoint.class);
mock2.expectedBodiesReceived("Bye World");
camel1.createProducerTemplate().sendBody("direct:start", "Hello World");
camel2.createProducerTemplate().sendBody("direct:start", "Bye World");
mock1.assertIsSatisfied();
mock2.assertIsSatisfied();
ac.stop();
}
}
| SpringTwoCamelContextDirectEndpointTest |
java | ReactiveX__RxJava | src/jmh/java/io/reactivex/rxjava3/xmapz/FlowableSwitchMapMaybeEmptyPerf.java | {
"start": 1111,
"end": 2751
} | class ____ {
@Param({ "1", "10", "100", "1000", "10000", "100000", "1000000" })
public int count;
Flowable<Integer> flowableConvert;
Flowable<Integer> flowableDedicated;
Flowable<Integer> flowablePlain;
@Setup
public void setup() {
Integer[] sourceArray = new Integer[count];
Arrays.fill(sourceArray, 777);
Flowable<Integer> source = Flowable.fromArray(sourceArray);
flowablePlain = source.switchMap(new Function<Integer, Publisher<? extends Integer>>() {
@Override
public Publisher<? extends Integer> apply(Integer v) {
return Flowable.empty();
}
});
flowableConvert = source.switchMap(new Function<Integer, Publisher<? extends Integer>>() {
@Override
public Publisher<? extends Integer> apply(Integer v) {
return Maybe.<Integer>empty().toFlowable();
}
});
flowableDedicated = source.switchMapMaybe(new Function<Integer, Maybe<Integer>>() {
@Override
public Maybe<Integer> apply(Integer v) {
return Maybe.empty();
}
});
}
@Benchmark
public Object flowablePlain(Blackhole bh) {
return flowablePlain.subscribeWith(new PerfConsumer(bh));
}
@Benchmark
public Object flowableConvert(Blackhole bh) {
return flowableConvert.subscribeWith(new PerfConsumer(bh));
}
@Benchmark
public Object flowableDedicated(Blackhole bh) {
return flowableDedicated.subscribeWith(new PerfConsumer(bh));
}
}
| FlowableSwitchMapMaybeEmptyPerf |
java | apache__camel | components/camel-whatsapp/src/main/java/org/apache/camel/component/whatsapp/model/Org.java | {
"start": 862,
"end": 1438
} | class ____ {
private String company;
private String department;
private String title;
public Org() {
}
public String getCompany() {
return company;
}
public void setCompany(String company) {
this.company = company;
}
public String getDepartment() {
return department;
}
public void setDepartment(String department) {
this.department = department;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
}
| Org |
java | spring-projects__spring-boot | module/spring-boot-health/src/test/java/org/springframework/boot/health/application/AvailabilityStateHealthIndicatorTests.java | {
"start": 1546,
"end": 4803
} | class ____ {
@Mock
@SuppressWarnings("NullAway.Init")
private ApplicationAvailability applicationAvailability;
@Test
@SuppressWarnings("NullAway") // Test null check
void createWhenApplicationAvailabilityIsNullThrowsException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> new AvailabilityStateHealthIndicator(null, LivenessState.class, (statusMappings) -> {
}))
.withMessage("'applicationAvailability' must not be null");
}
@Test
@SuppressWarnings("NullAway") // Test null check
void createWhenStateTypeIsNullThrowsException() {
assertThatIllegalArgumentException().isThrownBy(
() -> new AvailabilityStateHealthIndicator(this.applicationAvailability, null, (statusMappings) -> {
}))
.withMessage("'stateType' must not be null");
}
@Test
@SuppressWarnings("NullAway") // Test null check
void createWhenStatusMappingIsNullThrowsException() {
assertThatIllegalArgumentException()
.isThrownBy(
() -> new AvailabilityStateHealthIndicator(this.applicationAvailability, LivenessState.class, null))
.withMessage("'statusMappings' must not be null");
}
@Test
void createWhenStatusMappingDoesNotCoverAllEnumsThrowsException() {
assertThatIllegalStateException()
.isThrownBy(() -> new AvailabilityStateHealthIndicator(this.applicationAvailability, LivenessState.class,
(statusMappings) -> statusMappings.add(LivenessState.CORRECT, Status.UP)))
.withMessage("StatusMappings does not include BROKEN");
}
@Test
void healthReturnsMappedStatus() {
AvailabilityStateHealthIndicator indicator = new AvailabilityStateHealthIndicator(this.applicationAvailability,
LivenessState.class, (statusMappings) -> {
statusMappings.add(LivenessState.CORRECT, Status.UP);
statusMappings.add(LivenessState.BROKEN, Status.DOWN);
});
given(this.applicationAvailability.getState(LivenessState.class)).willReturn(LivenessState.BROKEN);
Health health = indicator.health(false);
assertThat(health).isNotNull();
assertThat(health.getStatus()).isEqualTo(Status.DOWN);
}
@Test
void healthReturnsDefaultStatus() {
AvailabilityStateHealthIndicator indicator = new AvailabilityStateHealthIndicator(this.applicationAvailability,
LivenessState.class, (statusMappings) -> {
statusMappings.add(LivenessState.CORRECT, Status.UP);
statusMappings.addDefaultStatus(Status.UNKNOWN);
});
given(this.applicationAvailability.getState(LivenessState.class)).willReturn(LivenessState.BROKEN);
Health health = indicator.health(false);
assertThat(health).isNotNull();
assertThat(health.getStatus()).isEqualTo(Status.UNKNOWN);
}
@Test
void healthWhenNotEnumReturnsMappedStatus() {
AvailabilityStateHealthIndicator indicator = new AvailabilityStateHealthIndicator(this.applicationAvailability,
TestAvailabilityState.class, (statusMappings) -> {
statusMappings.add(TestAvailabilityState.ONE, Status.UP);
statusMappings.addDefaultStatus(Status.DOWN);
});
given(this.applicationAvailability.getState(TestAvailabilityState.class)).willReturn(TestAvailabilityState.TWO);
Health health = indicator.health(false);
assertThat(health).isNotNull();
assertThat(health.getStatus()).isEqualTo(Status.DOWN);
}
static | AvailabilityStateHealthIndicatorTests |
java | apache__camel | core/camel-management/src/main/java/org/apache/camel/management/mbean/ManagedRoute.java | {
"start": 3027,
"end": 40552
} | class ____ extends ManagedPerformanceCounter implements TimerListener, ManagedRouteMBean {
public static final String VALUE_UNKNOWN = "Unknown";
private static final Logger LOG = LoggerFactory.getLogger(ManagedRoute.class);
protected final Route route;
protected final String description;
protected final String note;
protected final String configurationId;
protected final String sourceLocation;
protected final String sourceLocationShort;
protected final CamelContext context;
private final LoadTriplet load = new LoadTriplet();
private final LoadThroughput thp = new LoadThroughput();
private final String jmxDomain;
public ManagedRoute(CamelContext context, Route route) {
this.route = route;
this.context = context;
this.description = route.getDescription();
this.note = route.getNote();
this.configurationId = route.getConfigurationId();
this.sourceLocation = route.getSourceLocation();
this.sourceLocationShort = route.getSourceLocationShort();
this.jmxDomain = context.getManagementStrategy().getManagementAgent().getMBeanObjectDomainName();
}
@Override
public void init(ManagementStrategy strategy) {
super.init(strategy);
boolean enabled
= context.getManagementStrategy().getManagementAgent().getStatisticsLevel() != ManagementStatisticsLevel.Off;
setStatisticsEnabled(enabled);
}
public Route getRoute() {
return route;
}
public CamelContext getContext() {
return context;
}
@Override
public String getRouteId() {
String id = route.getId();
if (id == null) {
id = VALUE_UNKNOWN;
}
return id;
}
@Override
public String getNodePrefixId() {
return route.getNodePrefixId();
}
@Override
public String getRouteGroup() {
return route.getGroup();
}
@Override
public boolean isCreatedByRouteTemplate() {
return "true".equals(route.getProperties().getOrDefault(Route.TEMPLATE_PROPERTY, "false"));
}
@Override
public boolean isCreatedByKamelet() {
return "true".equals(route.getProperties().getOrDefault(Route.KAMELET_PROPERTY, "false"));
}
@Override
public TabularData getRouteProperties() {
try {
final Map<String, Object> properties = route.getProperties();
final TabularData answer = new TabularDataSupport(CamelOpenMBeanTypes.camelRoutePropertiesTabularType());
final CompositeType ct = CamelOpenMBeanTypes.camelRoutePropertiesCompositeType();
// gather route properties
for (Map.Entry<String, Object> entry : properties.entrySet()) {
final String key = entry.getKey();
final String val = context.getTypeConverter().convertTo(String.class, entry.getValue());
CompositeData data = new CompositeDataSupport(
ct,
new String[] { "key", "value" },
new Object[] { key, val });
answer.put(data);
}
return answer;
} catch (Exception e) {
throw RuntimeCamelException.wrapRuntimeCamelException(e);
}
}
@Override
public String getDescription() {
return description;
}
@Override
public String getNote() {
return note;
}
@Override
public Boolean getAutoStartup() {
return route.isAutoStartup();
}
@Override
public String getSourceLocation() {
return sourceLocation;
}
@Override
public String getSourceLocationShort() {
return sourceLocationShort;
}
@Override
public String getRouteConfigurationId() {
return configurationId;
}
@Override
public String getEndpointUri() {
if (route.getEndpoint() != null) {
return route.getEndpoint().getEndpointUri();
}
return VALUE_UNKNOWN;
}
@Override
public String getState() {
// must use String type to be sure remote JMX can read the attribute without requiring Camel classes.
ServiceStatus status = context.getRouteController().getRouteStatus(route.getId());
// if no status exists then its stopped
if (status == null) {
status = ServiceStatus.Stopped;
}
return status.name();
}
@Override
public String getUptime() {
return route.getUptime();
}
@Override
public long getUptimeMillis() {
return route.getUptimeMillis();
}
@Override
public String getCamelId() {
return context.getName();
}
@Override
public String getCamelManagementName() {
return context.getManagementName();
}
@Override
public Boolean getTracing() {
return route.isTracing();
}
@Override
public void setTracing(Boolean tracing) {
route.setTracing(tracing);
}
@Override
public Boolean getMessageHistory() {
return route.isMessageHistory();
}
@Override
public Boolean getLogMask() {
return route.isLogMask();
}
@Override
public String getRoutePolicyList() {
List<RoutePolicy> policyList = route.getRoutePolicyList();
if (policyList == null || policyList.isEmpty()) {
// return an empty string to have it displayed nicely in JMX consoles
return "";
}
StringBuilder sb = new StringBuilder();
for (int i = 0; i < policyList.size(); i++) {
RoutePolicy policy = policyList.get(i);
sb.append(policy.getClass().getSimpleName());
sb.append("(").append(ObjectHelper.getIdentityHashCode(policy)).append(")");
if (i < policyList.size() - 1) {
sb.append(", ");
}
}
return sb.toString();
}
@Override
public String getLoad01() {
double load1 = load.getLoad1();
if (Double.isNaN(load1)) {
// empty string if load statistics is disabled
return "";
} else {
return String.format("%.2f", load1);
}
}
@Override
public String getLoad05() {
double load5 = load.getLoad5();
if (Double.isNaN(load5)) {
// empty string if load statistics is disabled
return "";
} else {
return String.format("%.2f", load5);
}
}
@Override
public String getLoad15() {
double load15 = load.getLoad15();
if (Double.isNaN(load15)) {
// empty string if load statistics is disabled
return "";
} else {
return String.format("%.2f", load15);
}
}
@Override
public String getThroughput() {
double d = thp.getThroughput();
if (Double.isNaN(d)) {
// empty string if load statistics is disabled
return "";
} else {
return String.format("%.2f", d);
}
}
@Override
public void onTimer() {
load.update(getInflightExchanges());
thp.update(getExchangesTotal());
}
@Override
public void start() throws Exception {
if (!context.getStatus().isStarted()) {
throw new IllegalArgumentException("CamelContext is not started");
}
try {
context.getRouteController().startRoute(getRouteId());
} catch (Exception e) {
LOG.warn("Error starting route: {} due to: {}. This exception is ignored.", getRouteId(), e.getMessage(), e);
throw e;
}
}
@Override
public void stop() throws Exception {
if (!context.getStatus().isStarted()) {
throw new IllegalArgumentException("CamelContext is not started");
}
try {
context.getRouteController().stopRoute(getRouteId());
} catch (Exception e) {
LOG.warn("Error stopping route: {} due to: {}. This exception is ignored.", getRouteId(), e.getMessage(), e);
throw e;
}
}
@Override
public void stopAndFail() throws Exception {
if (!context.getStatus().isStarted()) {
throw new IllegalArgumentException("CamelContext is not started");
}
Throwable cause = new RejectedExecutionException("Route " + getRouteId() + " is forced stopped and marked as failed");
context.getRouteController().stopRoute(getRouteId(), cause);
}
@Override
public void stop(long timeout) throws Exception {
if (!context.getStatus().isStarted()) {
throw new IllegalArgumentException("CamelContext is not started");
}
context.getRouteController().stopRoute(getRouteId(), timeout, TimeUnit.SECONDS);
}
@Override
public boolean stop(Long timeout, Boolean abortAfterTimeout) throws Exception {
if (!context.getStatus().isStarted()) {
throw new IllegalArgumentException("CamelContext is not started");
}
return context.getRouteController().stopRoute(getRouteId(), timeout, TimeUnit.SECONDS, abortAfterTimeout);
}
/**
* @deprecated not in use
*/
@Deprecated(since = "4.8.0")
public void shutdown() throws Exception {
if (!context.getStatus().isStarted()) {
throw new IllegalArgumentException("CamelContext is not started");
}
String routeId = getRouteId();
context.getRouteController().stopRoute(routeId);
context.removeRoute(routeId);
}
/**
* @deprecated not in use
*/
@Deprecated(since = "4.8.0")
public void shutdown(long timeout) throws Exception {
if (!context.getStatus().isStarted()) {
throw new IllegalArgumentException("CamelContext is not started");
}
String routeId = getRouteId();
context.getRouteController().stopRoute(routeId, timeout, TimeUnit.SECONDS);
context.removeRoute(routeId);
}
@Override
public boolean remove() throws Exception {
if (!context.getStatus().isStarted()) {
throw new IllegalArgumentException("CamelContext is not started");
}
return context.removeRoute(getRouteId());
}
@Override
public void restart() throws Exception {
restart(1);
}
@Override
public void restart(long delay) throws Exception {
stop();
if (delay > 0) {
try {
LOG.debug("Sleeping {} seconds before starting route: {}", delay, getRouteId());
Thread.sleep(delay * 1000);
} catch (InterruptedException e) {
LOG.info("Interrupted while waiting before starting the route");
Thread.currentThread().interrupt();
}
}
start();
}
@Override
public String dumpRouteAsXml() throws Exception {
return dumpRouteAsXml(false);
}
@Override
public String dumpRouteAsXml(boolean resolvePlaceholders) throws Exception {
return dumpRouteAsXml(resolvePlaceholders, true);
}
@Override
public String dumpRouteAsXml(boolean resolvePlaceholders, boolean generatedIds) throws Exception {
return dumpRouteAsXml(resolvePlaceholders, true, false);
}
@Override
public String dumpRouteAsXml(boolean resolvePlaceholders, boolean generatedIds, boolean sourceLocation) throws Exception {
String id = route.getId();
RouteDefinition def = context.getCamelContextExtension().getContextPlugin(Model.class).getRouteDefinition(id);
if (def != null) {
// if we are debugging then ids is needed for the debugger
if (context.isDebugging()) {
generatedIds = true;
}
return PluginHelper.getModelToXMLDumper(context).dumpModelAsXml(context, def, resolvePlaceholders, generatedIds,
sourceLocation);
}
return null;
}
@Override
public String dumpRouteAsYaml() throws Exception {
return dumpRouteAsYaml(false, false);
}
@Override
public String dumpRouteAsYaml(boolean resolvePlaceholders) throws Exception {
return dumpRouteAsYaml(resolvePlaceholders, false);
}
@Override
public String dumpRouteAsYaml(boolean resolvePlaceholders, boolean uriAsParameters) throws Exception {
return dumpRouteAsYaml(resolvePlaceholders, uriAsParameters, true, false);
}
@Override
public String dumpRouteAsYaml(
boolean resolvePlaceholders, boolean uriAsParameters, boolean generatedIds, boolean sourceLocation)
throws Exception {
String id = route.getId();
RouteDefinition def = context.getCamelContextExtension().getContextPlugin(Model.class).getRouteDefinition(id);
if (def != null) {
return PluginHelper.getModelToYAMLDumper(context).dumpModelAsYaml(context, def, resolvePlaceholders,
uriAsParameters, generatedIds, sourceLocation);
}
return null;
}
@Override
public String dumpRouteStatsAsXml(boolean fullStats, boolean includeProcessors) throws Exception {
// in this logic we need to calculate the accumulated processing time for the processor in the route
// and hence why the logic is a bit more complicated to do this, as we need to calculate that from
// the bottom -> top of the route but this information is valuable for profiling routes
StringBuilder sb = new StringBuilder();
// need to calculate this value first, as we need that value for the route stat
long processorAccumulatedTime = 0L;
// gather all the processors for this route, which requires JMX
if (includeProcessors) {
sb.append(" <processorStats>\n");
MBeanServer server = getContext().getManagementStrategy().getManagementAgent().getMBeanServer();
if (server != null) {
// get all the processor mbeans and sort them accordingly to their index
String prefix = getContext().getManagementStrategy().getManagementAgent().getIncludeHostName() ? "*/" : "";
ObjectName query = ObjectName.getInstance(
jmxDomain + ":context=" + prefix + getContext().getManagementName() + ",type=processors,*");
Set<ObjectName> names = server.queryNames(query, null);
List<ManagedProcessorMBean> mps = new ArrayList<>();
for (ObjectName on : names) {
ManagedProcessorMBean processor = context.getManagementStrategy().getManagementAgent().newProxyClient(on,
ManagedProcessorMBean.class);
// the processor must belong to this route
if (getRouteId().equals(processor.getRouteId())) {
mps.add(processor);
}
}
mps.sort(new OrderProcessorMBeans());
// walk the processors in reverse order, and calculate the accumulated total time
Map<String, Long> accumulatedTimes = new HashMap<>();
Collections.reverse(mps);
for (ManagedProcessorMBean processor : mps) {
processorAccumulatedTime += processor.getTotalProcessingTime();
accumulatedTimes.put(processor.getProcessorId(), processorAccumulatedTime);
}
// and reverse back again
Collections.reverse(mps);
// and now add the sorted list of processors to the xml output
for (ManagedProcessorMBean processor : mps) {
int line = processor.getSourceLineNumber() != null ? processor.getSourceLineNumber() : -1;
sb.append(" <processorStat")
.append(String.format(
" id=\"%s\" index=\"%s\" state=\"%s\" disabled=\"%s\" sourceLineNumber=\"%s\"",
processor.getProcessorId(), processor.getIndex(), processor.getState(),
processor.getDisabled(), line));
// do we have an accumulated time then append that
Long accTime = accumulatedTimes.get(processor.getProcessorId());
if (accTime != null) {
sb.append(" accumulatedProcessingTime=\"").append(accTime).append("\"");
}
// use substring as we only want the attributes
sb.append(" ").append(processor.dumpStatsAsXml(fullStats).substring(7)).append("\n");
}
}
sb.append(" </processorStats>\n");
}
// route self time is route total - processor accumulated total)
long routeSelfTime = getTotalProcessingTime() - processorAccumulatedTime;
if (routeSelfTime < 0) {
// ensure we don't calculate that as negative
routeSelfTime = 0;
}
StringBuilder answer = new StringBuilder();
answer.append("<routeStat").append(String.format(" id=\"%s\"", route.getId()))
.append(String.format(" state=\"%s\"", getState()))
.append(String.format(" uptime=\"%s\"", getUptimeMillis()));
if (getRouteGroup() != null) {
answer.append(String.format(" group=\"%s\"", getRouteGroup()));
}
if (sourceLocation != null) {
answer.append(String.format(" sourceLocation=\"%s\"", getSourceLocation()));
}
// use substring as we only want the attributes
String stat = dumpStatsAsXml(fullStats);
answer.append(" exchangesInflight=\"").append(getInflightExchanges()).append("\"");
answer.append(" selfProcessingTime=\"").append(routeSelfTime).append("\"");
InflightRepository.InflightExchange oldest = getOldestInflightEntry();
if (oldest == null) {
answer.append(" oldestInflightExchangeId=\"\"");
answer.append(" oldestInflightDuration=\"\"");
} else {
answer.append(" oldestInflightExchangeId=\"").append(oldest.getExchange().getExchangeId()).append("\"");
answer.append(" oldestInflightDuration=\"").append(oldest.getDuration()).append("\"");
}
answer.append(" ").append(stat, 7, stat.length() - 2).append(">\n");
if (includeProcessors) {
answer.append(sb);
}
answer.append("</routeStat>");
return answer.toString();
}
@Override
public String dumpRouteStatsAsJSon(boolean fullStats, boolean includeProcessors) throws Exception {
// Builds a JSON document with this route's statistics; when includeProcessors is true the
// per-processor statistics (including their accumulated processing time) are nested under "processors".
// in this logic we need to calculate the accumulated processing time for the processor in the route
// and hence why the logic is a bit more complicated to do this, as we need to calculate that from
// the bottom -> top of the route but this information is valuable for profiling routes
JsonObject root = new JsonObject();
root.put("id", getRouteId());
root.put("state", getState());
root.put("uptime", getUptimeMillis());
if (getRouteGroup() != null) {
root.put("group", getRouteGroup());
}
if (sourceLocation != null) {
root.put("sourceLocation", sourceLocation);
}
// merge the common per-route counters into the root object
statsAsJSon(root, fullStats);
root.put("exchangesInflight", getInflightExchanges());
InflightRepository.InflightExchange oldest = getOldestInflightEntry();
if (oldest != null) {
root.put("oldestInflightExchangeId", oldest.getExchange().getExchangeId());
root.put("oldestInflightDuration", oldest.getDuration());
}
// need to calculate this value first, as we need that value for the route stat
long processorAccumulatedTime = 0L;
// gather all the processors for this route, which requires JMX
JsonArray arr = null;
if (includeProcessors) {
arr = new JsonArray();
MBeanServer server = getContext().getManagementStrategy().getManagementAgent().getMBeanServer();
if (server != null) {
// get all the processor mbeans and sort them accordingly to their index
String prefix = getContext().getManagementStrategy().getManagementAgent().getIncludeHostName() ? "*/" : "";
ObjectName query = ObjectName.getInstance(
jmxDomain + ":context=" + prefix + getContext().getManagementName() + ",type=processors,*");
Set<ObjectName> names = server.queryNames(query, null);
List<ManagedProcessorMBean> mps = new ArrayList<>();
for (ObjectName on : names) {
ManagedProcessorMBean processor = context.getManagementStrategy().getManagementAgent().newProxyClient(on,
ManagedProcessorMBean.class);
// the processor must belong to this route
if (getRouteId().equals(processor.getRouteId())) {
mps.add(processor);
}
}
mps.sort(new OrderProcessorMBeans());
// walk the processors in reverse order, and calculate the accumulated total time
Map<String, Long> accumulatedTimes = new HashMap<>();
Collections.reverse(mps);
for (ManagedProcessorMBean processor : mps) {
processorAccumulatedTime += processor.getTotalProcessingTime();
accumulatedTimes.put(processor.getProcessorId(), processorAccumulatedTime);
}
// and reverse back again
Collections.reverse(mps);
// and now add the sorted list of processors to the JSON output
for (ManagedProcessorMBean processor : mps) {
JsonObject jo = new JsonObject();
arr.add(jo);
processor.statsAsJSon(jo, fullStats);
int line = processor.getSourceLineNumber() != null ? processor.getSourceLineNumber() : -1;
jo.put("id", processor.getProcessorId());
jo.put("index", processor.getIndex());
jo.put("state", processor.getState());
jo.put("disabled", processor.getDisabled());
jo.put("sourceLineNumber", line);
// do we have an accumulated time then append that
Long accTime = accumulatedTimes.get(processor.getProcessorId());
if (accTime != null) {
jo.put("accumulatedProcessingTime", accTime);
}
}
}
}
// route self time is route total - processor accumulated total
long routeSelfTime = getTotalProcessingTime() - processorAccumulatedTime;
if (routeSelfTime < 0) {
// ensure we don't calculate that as negative
routeSelfTime = 0;
}
root.put("selfProcessingTime", routeSelfTime);
if (arr != null) {
// processors should be last
root.put("processors", arr);
}
return root.toJson();
}
@Override
public String dumpStepStatsAsXml(boolean fullStats) throws Exception {
// Builds an XML <routeStat> document for this route whose nested <stepStats> section lists the
// statistics of every step MBean belonging to this route, gathered via JMX and sorted by index.
StringBuilder sb = new StringBuilder();
// gather all the steps for this route, which requires JMX
sb.append(" <stepStats>\n");
MBeanServer server = getContext().getManagementStrategy().getManagementAgent().getMBeanServer();
if (server != null) {
// get all the step mbeans and sort them accordingly to their index
String prefix = getContext().getManagementStrategy().getManagementAgent().getIncludeHostName() ? "*/" : "";
ObjectName query = ObjectName
.getInstance(jmxDomain + ":context=" + prefix + getContext().getManagementName() + ",type=steps,*");
Set<ObjectName> names = server.queryNames(query, null);
List<ManagedStepMBean> mps = new ArrayList<>();
for (ObjectName on : names) {
ManagedStepMBean step
= context.getManagementStrategy().getManagementAgent().newProxyClient(on, ManagedStepMBean.class);
// the step must belong to this route
if (getRouteId().equals(step.getRouteId())) {
mps.add(step);
}
}
mps.sort(new OrderProcessorMBeans());
// and now add the sorted list of steps to the xml output
for (ManagedStepMBean step : mps) {
int line = step.getSourceLineNumber() != null ? step.getSourceLineNumber() : -1;
sb.append(" <stepStat")
.append(String.format(" id=\"%s\" index=\"%s\" state=\"%s\" sourceLineNumber=\"%s\"",
step.getProcessorId(),
step.getIndex(), step.getState(), line));
// use substring as we only want the attributes
sb.append(" ").append(step.dumpStatsAsXml(fullStats).substring(7)).append("\n");
}
}
sb.append(" </stepStats>\n");
StringBuilder answer = new StringBuilder();
answer.append("<routeStat").append(String.format(" id=\"%s\"", route.getId()))
.append(String.format(" state=\"%s\"", getState()))
.append(String.format(" uptime=\"%s\"", getUptimeMillis()));
if (getRouteGroup() != null) {
answer.append(String.format(" group=\"%s\"", getRouteGroup()));
}
if (sourceLocation != null) {
answer.append(String.format(" sourceLocation=\"%s\"", getSourceLocation()));
}
// use substring as we only want the attributes
String stat = dumpStatsAsXml(fullStats);
answer.append(" exchangesInflight=\"").append(getInflightExchanges()).append("\"");
InflightRepository.InflightExchange oldest = getOldestInflightEntry();
if (oldest == null) {
answer.append(" oldestInflightExchangeId=\"\"");
answer.append(" oldestInflightDuration=\"\"");
} else {
answer.append(" oldestInflightExchangeId=\"").append(oldest.getExchange().getExchangeId()).append("\"");
answer.append(" oldestInflightDuration=\"").append(oldest.getDuration()).append("\"");
}
answer.append(" ").append(stat, 7, stat.length() - 2).append(">\n");
answer.append(sb);
answer.append("</routeStat>");
return answer.toString();
}
@Override
public String dumpRouteSourceLocationsAsXml() throws Exception {
    // Emits an XML listing of the source locations (location + line number) of this route's
    // input (consumer) and of every processor in the route, gathered from the JMX MBeans.
    StringBuilder sb = new StringBuilder();
    sb.append("<routeLocations>");
    MBeanServer server = getContext().getManagementStrategy().getManagementAgent().getMBeanServer();
    if (server != null) {
        String prefix = getContext().getManagementStrategy().getManagementAgent().getIncludeHostName() ? "*/" : "";
        List<ManagedProcessorMBean> processors = new ArrayList<>();
        // gather all the processors for this CamelContext, which requires JMX
        ObjectName query = ObjectName
                .getInstance(jmxDomain + ":context=" + prefix + getContext().getManagementName() + ",type=processors,*");
        Set<ObjectName> names = server.queryNames(query, null);
        for (ObjectName on : names) {
            ManagedProcessorMBean processor
                    = context.getManagementStrategy().getManagementAgent().newProxyClient(on, ManagedProcessorMBean.class);
            // keep only the processors that belong to this route
            if (getRouteId().equals(processor.getRouteId())) {
                processors.add(processor);
            }
        }
        processors.sort(new OrderProcessorMBeans());
        // the route input (consumer) is listed first with index 0
        RouteDefinition rd = ((ModelCamelContext) context).getRouteDefinition(route.getRouteId());
        if (rd != null) {
            String id = rd.getRouteId();
            int line = rd.getInput().getLineNumber();
            String location = getSourceLocation() != null ? getSourceLocation() : "";
            sb.append("\n <routeLocation")
                    .append(String.format(
                            " routeId=\"%s\" id=\"%s\" index=\"%s\" sourceLocation=\"%s\" sourceLineNumber=\"%s\"/>",
                            route.getRouteId(), id, 0, location, line));
        }
        for (ManagedProcessorMBean processor : processors) {
            // the processors list was already filtered to this route above, so no re-check is needed
            int line = processor.getSourceLineNumber() != null ? processor.getSourceLineNumber() : -1;
            String location = processor.getSourceLocation() != null ? processor.getSourceLocation() : "";
            sb.append("\n <routeLocation")
                    .append(String.format(
                            " routeId=\"%s\" id=\"%s\" index=\"%s\" sourceLocation=\"%s\" sourceLineNumber=\"%s\"/>",
                            route.getRouteId(), processor.getProcessorId(), processor.getIndex(), location, line));
        }
    }
    sb.append("\n</routeLocations>");
    return sb.toString();
}
@Override
public void reset(boolean includeProcessors) throws Exception {
    // clear this route's own statistics (counters, load averages, throughput)
    reset();
    load.reset();
    thp.reset();
    if (!includeProcessors) {
        return;
    }
    // also reset every processor MBean belonging to this route, looked up via JMX
    MBeanServer mbeanServer = getContext().getManagementStrategy().getManagementAgent().getMBeanServer();
    if (mbeanServer == null) {
        return;
    }
    String contextPrefix = getContext().getManagementStrategy().getManagementAgent().getIncludeHostName() ? "*/" : "";
    ObjectName pattern = ObjectName.getInstance(
            jmxDomain + ":context=" + contextPrefix + getContext().getManagementName() + ",type=processors,*");
    // restrict the query to MBeans whose RouteId attribute equals this route's id
    QueryExp routeIdMatches = Query.match(new AttributeValueExp("RouteId"), new StringValueExp(getRouteId()));
    for (ObjectName processorName : mbeanServer.queryNames(pattern, routeIdMatches)) {
        mbeanServer.invoke(processorName, "reset", null, null);
    }
}
@Override
public void updateRouteFromXml(String xml) throws Exception {
// Replaces this route's definition with the one parsed from the given XML; the route id in the
// XML (when present) must match this route, and runtime updates must be enabled on the agent.
// check whether this is allowed
if (!isUpdateRouteEnabled()) {
throw new IllegalAccessException("Updating route is not enabled");
}
// convert to model from xml
ExtendedCamelContext ecc = context.getCamelContextExtension();
InputStream is = context.getTypeConverter().convertTo(InputStream.class, xml);
RoutesDefinition routes = LwModelHelper.loadRoutesDefinition(is);
if (routes == null || routes.getRoutes().isEmpty()) {
return;
}
RouteDefinition def = routes.getRoutes().get(0);
// if the xml does not contain the route-id then we fix this by adding the actual route id
// this may be needed if the route-id was auto-generated, as the intent is to update this route
// and not add a new one; to add a new route, use the MBean operation on ManagedCamelContext instead.
if (ObjectHelper.isEmpty(def.getId())) {
def.setId(getRouteId());
} else if (!def.getId().equals(getRouteId())) {
throw new IllegalArgumentException(
"Cannot update route from XML as routeIds does not match. routeId: "
+ getRouteId() + ", routeId from XML: " + def.getId());
}
LOG.debug("Updating route: {} from xml: {}", def.getId(), xml);
try {
// add will remove existing route first
ecc.getContextPlugin(Model.class).addRouteDefinition(def);
} catch (Exception e) {
// log the error as warn as the management api may be invoked remotely over JMX which does not propagate such exception
String msg = "Error updating route: " + def.getId() + " from xml: " + xml + " due: " + e.getMessage();
LOG.warn(msg, e);
throw e;
}
}
@Override
public boolean isUpdateRouteEnabled() {
    // runtime route updates must be explicitly enabled on the management agent;
    // an absent (null) setting counts as disabled
    return Boolean.TRUE.equals(context.getManagementStrategy().getManagementAgent().getUpdateRouteEnabled());
}
@Override
public boolean isRemoteEndpoint() {
    // a route without an endpoint is considered local
    var endpoint = route.getEndpoint();
    return endpoint != null && endpoint.isRemote();
}
@Override
public boolean equals(Object o) {
    // two managed routes are equal when they wrap the same underlying route
    if (this == o) {
        return true;
    }
    if (o == null || getClass() != o.getClass()) {
        return false;
    }
    return route.equals(((ManagedRoute) o).route);
}
@Override
public int hashCode() {
// the hash is delegated to the wrapped route, keeping it consistent with equals
return route.hashCode();
}
// Looks up the longest-running inflight exchange for this route, or null when nothing is inflight.
private InflightRepository.InflightExchange getOldestInflightEntry() {
return getContext().getInflightRepository().oldest(getRouteId());
}
@Override
public Long getOldestInflightDuration() {
    // null when there is currently no inflight exchange on this route
    InflightRepository.InflightExchange oldest = getOldestInflightEntry();
    return oldest != null ? oldest.getDuration() : null;
}
@Override
public String getOldestInflightExchangeId() {
    // null when there is currently no inflight exchange on this route
    InflightRepository.InflightExchange oldest = getOldestInflightEntry();
    return oldest != null ? oldest.getExchange().getExchangeId() : null;
}
@Override
public Boolean getHasRouteController() {
    // answers whether a route controller is currently managing this route
    return Boolean.valueOf(route.getRouteController() != null);
}
@Override
public RouteError getLastError() {
// Adapts the SPI-level route error (if any) to the management API's RouteError view.
org.apache.camel.spi.RouteError error = route.getLastError();
if (error == null) {
return null;
} else {
return new RouteError() {
@Override
public Phase getPhase() {
// translate the SPI phase enum to the equivalent management API phase;
// an unexpected constant indicates the two enums drifted apart, hence the hard failure
if (error.getPhase() != null) {
switch (error.getPhase()) {
case START:
return Phase.START;
case STOP:
return Phase.STOP;
case SUSPEND:
return Phase.SUSPEND;
case RESUME:
return Phase.RESUME;
case SHUTDOWN:
return Phase.SHUTDOWN;
case REMOVE:
return Phase.REMOVE;
default:
throw new IllegalStateException();
}
}
return null;
}
@Override
public Throwable getException() {
return error.getException();
}
@Override
public Date getDate() {
return error.getDate();
}
};
}
}
@Override
public Collection<String> processorIds() throws Exception {
    // collects the ids of all processors belonging to this route, discovered via JMX
    List<String> answer = new ArrayList<>();
    MBeanServer mbeanServer = getContext().getManagementStrategy().getManagementAgent().getMBeanServer();
    if (mbeanServer == null) {
        return answer;
    }
    String contextPrefix = getContext().getManagementStrategy().getManagementAgent().getIncludeHostName() ? "*/" : "";
    // gather all the processors for this CamelContext, which requires JMX
    ObjectName pattern = ObjectName
            .getInstance(jmxDomain + ":context=" + contextPrefix + getContext().getManagementName() + ",type=processors,*");
    for (ObjectName objectName : mbeanServer.queryNames(pattern, null)) {
        ManagedProcessorMBean proxy
                = context.getManagementStrategy().getManagementAgent().newProxyClient(objectName, ManagedProcessorMBean.class);
        // keep only processors that belong to this route
        if (getRouteId().equals(proxy.getRouteId())) {
            answer.add(proxy.getProcessorId());
        }
    }
    return answer;
}
// Narrows the parent MBean's long inflight counter to the Integer the stats output expects.
private Integer getInflightExchanges() {
return (int) super.getExchangesInflight();
}
/**
* Used for sorting the processor mbeans accordingly to their index.
*/
private static final | ManagedRoute |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/EnumUtils.java | {
"start": 2722,
"end": 3562
} | enum ____ or has more than 64 values.
* @since 3.0.1
*/
private static <E extends Enum<E>> Class<E> checkBitVectorable(final Class<E> enumClass) {
final E[] constants = asEnum(enumClass).getEnumConstants();
Validate.isTrue(constants.length <= Long.SIZE, CANNOT_STORE_S_S_VALUES_IN_S_BITS, Integer.valueOf(constants.length), enumClass.getSimpleName(),
Integer.valueOf(Long.SIZE));
return enumClass;
}
/**
* Creates a long bit vector representation of the given array of Enum values.
*
* <p>This generates a value that is usable by {@link EnumUtils#processBitVector}.</p>
*
* <p>Do not use this method if you have more than 64 values in your Enum, as this
* would create a value greater than a long can hold.</p>
*
* @param enumClass the | class |
java | apache__avro | lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniRecordWriterBase.java | {
"start": 1456,
"end": 1787
} | class ____ <code>RecordWriter</code>s that writes Trevni
* container files.
*
* @param <K> The type of key the record writer should generate.
* @param <V> The type of value the record wrtier should generate.
* @param <T> The type of the entries within the Trevni container file being
* written.
*/
public abstract | for |
java | micronaut-projects__micronaut-core | http/src/main/java/io/micronaut/http/body/TypedMessageBodyWriter.java | {
"start": 963,
"end": 1262
} | interface ____<T> extends MessageBodyWriter<T> {
/**
* @return The body type.
*/
@NonNull
Argument<T> getType();
@Override
default boolean isWriteable(Argument<T> type, MediaType mediaType) {
return getType().isAssignableFrom(type);
}
}
| TypedMessageBodyWriter |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java | {
"start": 2002,
"end": 9000
} | class ____ extends ObjectMapper.Builder {
// Controls whether subfields are configured as time-series dimensions.
protected Explicit<Boolean> timeSeriesDimensionSubFields = Explicit.IMPLICIT_FALSE;
// Controls which pass-through fields take precedence in case of conflicting aliases.
protected int priority = -1;
public Builder(String name) {
// Subobjects are not currently supported.
super(name, Explicit.implicit(Subobjects.DISABLED));
}
@Override
public PassThroughObjectMapper.Builder add(Mapper.Builder builder) {
if (timeSeriesDimensionSubFields.value() && builder instanceof FieldMapper.DimensionBuilder dimensionBuilder) {
dimensionBuilder.setInheritDimensionParameterFromParentObject();
}
super.add(builder);
return this;
}
public PassThroughObjectMapper.Builder setContainsDimensions() {
timeSeriesDimensionSubFields = Explicit.EXPLICIT_TRUE;
return this;
}
public PassThroughObjectMapper.Builder setPriority(int priority) {
this.priority = priority;
return this;
}
@Override
public PassThroughObjectMapper build(MapperBuilderContext context) {
return new PassThroughObjectMapper(
leafName(),
context.buildFullName(leafName()),
enabled,
sourceKeepMode,
dynamic,
buildMappers(context.createChildContext(leafName(), timeSeriesDimensionSubFields.value(), dynamic)),
timeSeriesDimensionSubFields,
priority
);
}
}
// If set, its subfields are marked as time-series dimensions (for the types supporting this).
private final Explicit<Boolean> timeSeriesDimensionSubFields;
private final int priority;
PassThroughObjectMapper(
String name,
String fullPath,
Explicit<Boolean> enabled,
Optional<SourceKeepMode> sourceKeepMode,
Dynamic dynamic,
Map<String, Mapper> mappers,
Explicit<Boolean> timeSeriesDimensionSubFields,
int priority
) {
// Subobjects are not currently supported.
super(name, fullPath, enabled, Explicit.implicit(Subobjects.DISABLED), sourceKeepMode, dynamic, mappers);
this.timeSeriesDimensionSubFields = timeSeriesDimensionSubFields;
this.priority = priority;
if (priority < 0) {
throw new MapperException("Pass-through object [" + fullPath + "] is missing a non-negative value for parameter [priority]");
}
}
@Override
PassThroughObjectMapper withoutMappers() {
return new PassThroughObjectMapper(
leafName(),
fullPath(),
enabled,
sourceKeepMode,
dynamic,
Map.of(),
timeSeriesDimensionSubFields,
priority
);
}
@Override
public String typeName() {
return CONTENT_TYPE;
}
public boolean containsDimensions() {
return timeSeriesDimensionSubFields.value();
}
public int priority() {
return priority;
}
public Explicit<Boolean> timeSeriesDimensionSubFields() {
return timeSeriesDimensionSubFields;
}
@Override
public PassThroughObjectMapper.Builder newBuilder(IndexVersion indexVersionCreated) {
PassThroughObjectMapper.Builder builder = new PassThroughObjectMapper.Builder(leafName());
builder.enabled = enabled;
builder.dynamic = dynamic;
builder.timeSeriesDimensionSubFields = timeSeriesDimensionSubFields;
builder.priority = priority;
return builder;
}
@Override
public PassThroughObjectMapper merge(Mapper mergeWith, MapperMergeContext parentBuilderContext) {
if (mergeWith instanceof ObjectMapper == false) {
MapperErrors.throwObjectMappingConflictError(mergeWith.fullPath());
}
ObjectMapper mergeWithObjectMapper = (ObjectMapper) mergeWith;
if (mergeWithObjectMapper instanceof PassThroughObjectMapper mergeWithPassThrough) {
final var mergeResult = MergeResult.build(this, mergeWithPassThrough, parentBuilderContext);
final Explicit<Boolean> containsDimensions = (mergeWithPassThrough.timeSeriesDimensionSubFields.explicit())
? mergeWithPassThrough.timeSeriesDimensionSubFields
: this.timeSeriesDimensionSubFields;
return new PassThroughObjectMapper(
leafName(),
fullPath(),
mergeResult.enabled(),
mergeResult.sourceKeepMode(),
mergeResult.dynamic(),
mergeResult.mappers(),
containsDimensions,
Math.max(priority, mergeWithPassThrough.priority)
);
}
if (mergeWithObjectMapper instanceof NestedObjectMapper) {
MapperErrors.throwNestedMappingConflictError(fullPath());
}
if (isEligibleForMerge(mergeWithObjectMapper) == false) {
MapperErrors.throwPassThroughMappingConflictError(fullPath());
}
MergeResult mergeResult = MergeResult.build(this, mergeWithObjectMapper, parentBuilderContext);
return new PassThroughObjectMapper(
leafName(),
fullPath(),
mergeResult.enabled(),
mergeResult.sourceKeepMode(),
mergeResult.dynamic(),
mergeResult.mappers(),
timeSeriesDimensionSubFields,
priority
);
}
/**
* An object mapper is compatible to be merged with a passthrough mapper if
* - It is not a root mapper
* - If it does not have subobjects true
*/
static boolean isEligibleForMerge(ObjectMapper objectMapper) {
return objectMapper.isRoot() == false
&& (objectMapper.subobjects == null
|| objectMapper.subobjects.explicit() == false
|| objectMapper.subobjects.value().equals(Subobjects.DISABLED));
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(leafName());
builder.field("type", CONTENT_TYPE);
if (timeSeriesDimensionSubFields.explicit()) {
builder.field(TimeSeriesParams.TIME_SERIES_DIMENSION_PARAM, timeSeriesDimensionSubFields.value());
}
if (priority >= 0) {
builder.field(PRIORITY_PARAM_NAME, priority);
}
if (dynamic != null) {
builder.field("dynamic", dynamic.name().toLowerCase(Locale.ROOT));
}
if (isEnabled() != Defaults.ENABLED) {
builder.field("enabled", enabled.value());
}
serializeMappers(builder, params);
return builder.endObject();
}
public static | Builder |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/dialect/HANADialect.java | {
"start": 58411,
"end": 61593
} | class ____ extends NClobJdbcType {
/** serial version uid. */
@Serial
private static final long serialVersionUID = 5651116091681647859L;
final int maxLobPrefetchSize;
public HANANClobJdbcType(int maxLobPrefetchSize) {
this.maxLobPrefetchSize = maxLobPrefetchSize;
}
@Override
public String toString() {
return "HANANClobTypeDescriptor";
}
@Override
public <X> BasicBinder<X> getNClobBinder(final JavaType<X> javaType) {
return new BasicBinder<>( javaType, this ) {
@Override
protected void doBind(final PreparedStatement st, final X value, final int index, final WrapperOptions options) throws SQLException {
final CharacterStream characterStream = javaType.unwrap( value, CharacterStream.class, options );
if ( value instanceof NClobImplementer) {
try ( Reader r = new CloseSuppressingReader( characterStream.asReader() ) ) {
st.setCharacterStream( index, r, characterStream.getLength() );
}
catch (IOException e) {
// can't happen => ignore
}
}
else {
st.setCharacterStream( index, characterStream.asReader(), characterStream.getLength() );
}
}
@Override
protected void doBind(CallableStatement st, X value, String name, WrapperOptions options) throws SQLException {
final CharacterStream characterStream = javaType.unwrap( value, CharacterStream.class, options );
if ( value instanceof NClobImplementer ) {
try ( Reader r = new CloseSuppressingReader( characterStream.asReader() ) ) {
st.setCharacterStream( name, r, characterStream.getLength() );
}
catch (IOException e) {
// can't happen => ignore
}
}
else {
st.setCharacterStream( name, characterStream.asReader(), characterStream.getLength() );
}
}
};
}
@Override
public <X> ValueExtractor<X> getExtractor(JavaType<X> javaType) {
return new BasicExtractor<>( javaType, this ) {
private X doExtract(NClob nclob, WrapperOptions options) throws SQLException {
final X result;
if ( nclob == null ) {
result = getJavaType().wrap( null, options );
}
else if ( nclob.length() < maxLobPrefetchSize ) {
result = javaType.wrap(nclob, options);
nclob.free();
}
else {
final MaterializedNClob materialized = new MaterializedNClob( extractString( nclob ) );
nclob.free();
result = getJavaType().wrap( materialized, options );
}
return result;
}
@Override
protected X doExtract(ResultSet rs, int paramIndex, WrapperOptions options) throws SQLException {
return doExtract( rs.getNClob( paramIndex ), options );
}
@Override
protected X doExtract(CallableStatement statement, int index, WrapperOptions options) throws SQLException {
return doExtract( statement.getNClob( index ), options );
}
@Override
protected X doExtract(CallableStatement statement, String name, WrapperOptions options) throws SQLException {
return doExtract( statement.getNClob( name ), options );
}
};
}
public int getMaxLobPrefetchSize() {
return maxLobPrefetchSize;
}
}
public static | HANANClobJdbcType |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/aggregate/asyncwindow/buffers/AsyncStateRecordsWindowBuffer.java | {
"start": 2642,
"end": 7836
} | class ____ implements AsyncStateWindowBuffer {
private final AsyncStateRecordsCombiner combineFunction;
private final WindowBytesMultiMap recordsBuffer;
private final WindowKey reuseWindowKey;
private final AbstractRowDataSerializer<RowData> recordSerializer;
private final ZoneId shiftTimeZone;
private final RecordEqualiser keyEqualiser;
private final AsyncStateKeyContext keyContext;
private long minSliceEnd = Long.MAX_VALUE;
public AsyncStateRecordsWindowBuffer(
Object operatorOwner,
MemoryManager memoryManager,
long memorySize,
AsyncStateRecordsCombiner combineFunction,
PagedTypeSerializer<RowData> keySer,
AbstractRowDataSerializer<RowData> inputSer,
RecordEqualiser keyEqualiser,
AsyncStateKeyContext keyContext,
ZoneId shiftTimeZone) {
this.combineFunction = combineFunction;
this.recordsBuffer =
new WindowBytesMultiMap(
operatorOwner, memoryManager, memorySize, keySer, inputSer.getArity());
this.recordSerializer = inputSer;
this.keyEqualiser = keyEqualiser;
this.keyContext = keyContext;
this.reuseWindowKey = new WindowKeySerializer(keySer).createInstance();
this.shiftTimeZone = shiftTimeZone;
}
@Override
public StateFuture<Void> addElement(RowData dataKey, long sliceEnd, RowData element)
throws Exception {
StateFuture<Void> resultFuture = REUSABLE_VOID_STATE_FUTURE;
// track the lowest trigger time, if watermark exceeds the trigger time,
// it means there are some elements in the buffer belong to a window going to be fired,
// and we need to flush the buffer into state for firing.
minSliceEnd = Math.min(sliceEnd, minSliceEnd);
reuseWindowKey.replace(sliceEnd, dataKey);
BytesMap.LookupInfo<WindowKey, Iterator<RowData>> lookup =
recordsBuffer.lookup(reuseWindowKey);
try {
recordsBuffer.append(lookup, recordSerializer.toBinaryRow(element));
} catch (EOFException e) {
// buffer is full, flush it to state
resultFuture = flush(dataKey);
// remember to add the input element again
addElement(dataKey, sliceEnd, element);
}
return resultFuture;
}
@Override
public StateFuture<Void> advanceProgress(@Nullable RowData currentKey, long progress)
throws Exception {
if (isWindowFired(minSliceEnd, progress, shiftTimeZone)) {
// there should be some window to be fired, flush buffer to state first
return flush(currentKey);
}
return REUSABLE_VOID_STATE_FUTURE;
}
@Override
public StateFuture<Void> flush(@Nullable RowData currentKey) throws Exception {
StateFuture<Void> flushFuture = REUSABLE_VOID_STATE_FUTURE;
if (recordsBuffer.getNumKeys() > 0) {
// due to the delayed processing of async requests, all objects cannot be reused, so
// they must be copied.
KeyValueIterator<WindowKey, Iterator<RowData>> entryIterator =
recordsBuffer.getEntryIterator(true);
while (entryIterator.advanceNext()) {
WindowKey windowKey = entryIterator.getKey();
long window = windowKey.getWindow();
List<RowData> allData = itertorToList(entryIterator.getValue());
if (currentKey != null && keyEqualiser.equals(currentKey, windowKey.getKey())) {
flushFuture = combineFunction.asyncCombine(window, allData.iterator());
} else {
// no need to wait for combining the records excluding current key
keyContext.asyncProcessWithKey(
windowKey.getKey(),
() -> combineFunction.asyncCombine(window, allData.iterator()));
}
}
recordsBuffer.reset();
// reset trigger time
minSliceEnd = Long.MAX_VALUE;
}
return flushFuture;
}
/**
* Convert iterator to list.
*
* <p>This may put some pressure on heap memory since the data in the iterator comes from
* managed memory. We can optimize this method once we come up with a better approach.
*/
private List<RowData> itertorToList(Iterator<RowData> records) {
List<RowData> list = new ArrayList<>();
while (records.hasNext()) {
list.add(records.next());
}
return list;
}
@Override
public void close() throws Exception {
recordsBuffer.free();
combineFunction.close();
}
// ------------------------------------------------------------------------------------------
// Factory
// ------------------------------------------------------------------------------------------
/**
* Factory to create {@link AsyncStateRecordsWindowBuffer} with {@link
* AsyncStateRecordsCombiner.Factory}.
*/
public static final | AsyncStateRecordsWindowBuffer |
java | quarkusio__quarkus | extensions/panache/hibernate-reactive-rest-data-panache/deployment/src/test/java/io/quarkus/hibernate/reactive/rest/data/panache/deployment/build/BuildConditionsWithResourceDisabledTest.java | {
"start": 360,
"end": 898
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest TEST = new QuarkusUnitTest()
.overrideConfigKey("collections.endpoint", "disable")
.setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class)
.addClasses(Collection.class, CollectionsResource.class));
@Test
void shouldResourceNotBeFound() {
given().accept("application/json")
.when().get("/collections")
.then().statusCode(404);
}
}
| BuildConditionsWithResourceDisabledTest |
java | spring-projects__spring-boot | module/spring-boot-graphql/src/main/java/org/springframework/boot/graphql/autoconfigure/rsocket/RSocketGraphQlClientAutoConfiguration.java | {
"start": 2125,
"end": 2483
} | class ____ {
@Bean
@Scope(ConfigurableBeanFactory.SCOPE_PROTOTYPE)
@ConditionalOnMissingBean
RSocketGraphQlClient.Builder<?> rsocketGraphQlClientBuilder(RSocketRequester.Builder rsocketRequesterBuilder) {
return RSocketGraphQlClient.builder(rsocketRequesterBuilder.dataMimeType(MimeTypeUtils.APPLICATION_JSON));
}
}
| RSocketGraphQlClientAutoConfiguration |
java | redisson__redisson | redisson/src/main/java/org/redisson/api/annotation/REntity.java | {
"start": 1125,
"end": 1279
} | class ____ a Live Object.
*
* @author Rui Gu (https://github.com/jackygurui)
*/
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.TYPE})
public @ | is |
java | google__guava | android/guava-tests/test/com/google/common/html/HtmlEscapersTest.java | {
"start": 943,
"end": 2401
} | class ____ extends TestCase {
public void testHtmlEscaper() throws Exception {
assertEquals("xxx", htmlEscaper().escape("xxx"));
assertEquals(""test"", htmlEscaper().escape("\"test\""));
assertEquals("'test'", htmlEscaper().escape("\'test'"));
assertEquals("test & test & test", htmlEscaper().escape("test & test & test"));
assertEquals("test << 1", htmlEscaper().escape("test << 1"));
assertEquals("test >> 1", htmlEscaper().escape("test >> 1"));
assertEquals("<tab>", htmlEscaper().escape("<tab>"));
// Test simple escape of '&'.
assertEquals("foo&bar", htmlEscaper().escape("foo&bar"));
// If the string contains no escapes, it should return the arg.
// Note: assert<b>Same</b> for this implementation.
String s = "blah blah farhvergnugen";
assertSame(s, htmlEscaper().escape(s));
// Tests escapes at begin and end of string.
assertEquals("<p>", htmlEscaper().escape("<p>"));
// Test all escapes.
assertEquals("a"b<c>d&", htmlEscaper().escape("a\"b<c>d&"));
// Test two escapes in a row.
assertEquals("foo&&bar", htmlEscaper().escape("foo&&bar"));
// Test many non-escaped characters.
s =
"!@#$%^*()_+=-/?\\|]}[{,.;:"
+ "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
+ "1234567890";
assertSame(s, htmlEscaper().escape(s));
}
}
| HtmlEscapersTest |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/FileEndpointBuilderFactory.java | {
"start": 114247,
"end": 129312
} | interface ____
extends
EndpointProducerBuilder {
default AdvancedFileEndpointProducerBuilder advanced() {
return (AdvancedFileEndpointProducerBuilder) this;
}
/**
* This option is used to specify the encoding of the file. You can use
* this on the consumer, to specify the encodings of the files, which
* allow Camel to know the charset it should load the file content in
* case the file content is being accessed. Likewise when writing a
* file, you can use this option to specify which charset to write the
* file as well. Do mind that when writing the file Camel may have to
* read the message content into memory to be able to convert the data
* into the configured charset, so do not use this if you have big
* messages.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param charset the value to set
* @return the dsl builder
*/
default FileEndpointProducerBuilder charset(String charset) {
doSetProperty("charset", charset);
return this;
}
/**
* Producer: If provided, then Camel will write a 2nd done file when the
* original file has been written. The done file will be empty. This
* option configures what file name to use. Either you can specify a
* fixed name. Or you can use dynamic placeholders. The done file will
* always be written in the same folder as the original file. Consumer:
* If provided, Camel will only consume files if a done file exists.
* This option configures what file name to use. Either you can specify
* a fixed name. Or you can use dynamic placeholders.The done file is
* always expected in the same folder as the original file. Only
* ${file.name} and ${file.name.next} is supported as dynamic
* placeholders.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param doneFileName the value to set
* @return the dsl builder
*/
default FileEndpointProducerBuilder doneFileName(String doneFileName) {
doSetProperty("doneFileName", doneFileName);
return this;
}
/**
* Use Expression such as File Language to dynamically set the filename.
* For consumers, it's used as a filename filter. For producers, it's
* used to evaluate the filename to write. If an expression is set, it
* take precedence over the CamelFileName header. (Note: The header
* itself can also be an Expression). The expression options support
* both String and Expression types. If the expression is a String type,
* it is always evaluated using the File Language. If the expression is
* an Expression type, the specified Expression type is used - this
* allows you, for instance, to use OGNL expressions. For the consumer,
* you can use it to filter filenames, so you can for instance consume
* today's file using the File Language syntax:
* mydata-${date:now:yyyyMMdd}.txt. The producers support the
* CamelOverruleFileName header which takes precedence over any existing
* CamelFileName header; the CamelOverruleFileName is a header that is
* used only once, and makes it easier as this avoids to temporary store
* CamelFileName and have to restore it afterwards.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param fileName the value to set
* @return the dsl builder
*/
default FileEndpointProducerBuilder fileName(String fileName) {
doSetProperty("fileName", fileName);
return this;
}
/**
* Used to append characters (text) after writing files. This can for
* example be used to add new lines or other separators when writing and
* appending new files or existing files. To specify new-line (slash-n
* or slash-r) or tab (slash-t) characters then escape with an extra
* slash, eg slash-slash-n.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param appendChars the value to set
* @return the dsl builder
*/
default FileEndpointProducerBuilder appendChars(String appendChars) {
doSetProperty("appendChars", appendChars);
return this;
}
/**
* If provided, then Camel will write a checksum file when the original
* file has been written. The checksum file will contain the checksum
* created with the provided algorithm for the original file. The
* checksum file will always be written in the same folder as the
* original file.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param checksumFileAlgorithm the value to set
* @return the dsl builder
*/
default FileEndpointProducerBuilder checksumFileAlgorithm(String checksumFileAlgorithm) {
doSetProperty("checksumFileAlgorithm", checksumFileAlgorithm);
return this;
}
/**
* What to do if a file already exists with the same name. Override,
* which is the default, replaces the existing file. - Append - adds
* content to the existing file. - Fail - throws a
* GenericFileOperationException, indicating that there is already an
* existing file. - Ignore - silently ignores the problem and does not
* override the existing file, but assumes everything is okay. - Move -
* option requires to use the moveExisting option to be configured as
* well. The option eagerDeleteTargetFile can be used to control what to
* do if an moving the file, and there exists already an existing file,
* otherwise causing the move operation to fail. The Move option will
* move any existing files, before writing the target file. - TryRename
* is only applicable if tempFileName option is in use. This allows to
* try renaming the file from the temporary name to the actual name,
* without doing any exists check. This check may be faster on some file
* systems and especially FTP servers.
*
* The option is a:
* <code>org.apache.camel.component.file.GenericFileExist</code> type.
*
* Default: Override
* Group: producer
*
* @param fileExist the value to set
* @return the dsl builder
*/
default FileEndpointProducerBuilder fileExist(org.apache.camel.component.file.GenericFileExist fileExist) {
doSetProperty("fileExist", fileExist);
return this;
}
/**
* What to do if a file already exists with the same name. Override,
* which is the default, replaces the existing file. - Append - adds
* content to the existing file. - Fail - throws a
* GenericFileOperationException, indicating that there is already an
* existing file. - Ignore - silently ignores the problem and does not
* override the existing file, but assumes everything is okay. - Move -
* option requires to use the moveExisting option to be configured as
* well. The option eagerDeleteTargetFile can be used to control what to
* do if an moving the file, and there exists already an existing file,
* otherwise causing the move operation to fail. The Move option will
* move any existing files, before writing the target file. - TryRename
* is only applicable if tempFileName option is in use. This allows to
* try renaming the file from the temporary name to the actual name,
* without doing any exists check. This check may be faster on some file
* systems and especially FTP servers.
*
* The option will be converted to a
* <code>org.apache.camel.component.file.GenericFileExist</code> type.
*
* Default: Override
* Group: producer
*
* @param fileExist the value to set
* @return the dsl builder
*/
default FileEndpointProducerBuilder fileExist(String fileExist) {
doSetProperty("fileExist", fileExist);
return this;
}
/**
* Flatten is used to flatten the file name path to strip any leading
* paths, so it's just the file name. This allows you to consume
* recursively into sub-directories, but when you eg write the files to
* another directory they will be written in a single directory. Setting
* this to true on the producer enforces that any file name in
* CamelFileName header will be stripped for any leading paths.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer
*
* @param flatten the value to set
* @return the dsl builder
*/
default FileEndpointProducerBuilder flatten(boolean flatten) {
doSetProperty("flatten", flatten);
return this;
}
/**
* Flatten is used to flatten the file name path to strip any leading
* paths, so it's just the file name. This allows you to consume
* recursively into sub-directories, but when you eg write the files to
* another directory they will be written in a single directory. Setting
* this to true on the producer enforces that any file name in
* CamelFileName header will be stripped for any leading paths.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: producer
*
* @param flatten the value to set
* @return the dsl builder
*/
default FileEndpointProducerBuilder flatten(String flatten) {
doSetProperty("flatten", flatten);
return this;
}
/**
* Used for jailing (restricting) writing files to the starting
* directory (and sub) only. This is enabled by default to not allow
* Camel to write files to outside directories (to be more secured out
* of the box). You can turn this off to allow writing files to
* directories outside the starting directory, such as parent or root
* folders.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: producer
*
* @param jailStartingDirectory the value to set
* @return the dsl builder
*/
default FileEndpointProducerBuilder jailStartingDirectory(boolean jailStartingDirectory) {
doSetProperty("jailStartingDirectory", jailStartingDirectory);
return this;
}
/**
* Used for jailing (restricting) writing files to the starting
* directory (and sub) only. This is enabled by default to not allow
* Camel to write files to outside directories (to be more secured out
* of the box). You can turn this off to allow writing files to
* directories outside the starting directory, such as parent or root
* folders.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: true
* Group: producer
*
* @param jailStartingDirectory the value to set
* @return the dsl builder
*/
default FileEndpointProducerBuilder jailStartingDirectory(String jailStartingDirectory) {
doSetProperty("jailStartingDirectory", jailStartingDirectory);
return this;
}
/**
* Expression (such as File Language) used to compute file name to use
* when fileExist=Move is configured. To move files into a backup
* subdirectory just enter backup. This option only supports the
* following File Language tokens: file:name, file:name.ext,
* file:name.noext, file:onlyname, file:onlyname.noext, file:ext, and
* file:parent. Notice the file:parent is not supported by the FTP
* component, as the FTP component can only move any existing files to a
* relative directory based on current dir as base.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param moveExisting the value to set
* @return the dsl builder
*/
default FileEndpointProducerBuilder moveExisting(String moveExisting) {
doSetProperty("moveExisting", moveExisting);
return this;
}
/**
* The same as tempPrefix option but offering a more fine grained
* control on the naming of the temporary filename as it uses the File
* Language. The location for tempFilename is relative to the final file
* location in the option 'fileName', not the target directory in the
* base uri. For example if option fileName includes a directory prefix:
* dir/finalFilename then tempFileName is relative to that subdirectory
* dir.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param tempFileName the value to set
* @return the dsl builder
*/
default FileEndpointProducerBuilder tempFileName(String tempFileName) {
doSetProperty("tempFileName", tempFileName);
return this;
}
/**
* This option is used to write the file using a temporary name and
* then, after the write is complete, rename it to the real name. Can be
* used to identify files being written and also avoid consumers (not
* using exclusive read locks) reading in progress files. Is often used
* by FTP when uploading big files.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param tempPrefix the value to set
* @return the dsl builder
*/
default FileEndpointProducerBuilder tempPrefix(String tempPrefix) {
doSetProperty("tempPrefix", tempPrefix);
return this;
}
}
/**
* Advanced builder for endpoint producers for the File component.
*/
public | FileEndpointProducerBuilder |
java | apache__flink | flink-state-backends/flink-statebackend-rocksdb/src/main/java/org/apache/flink/state/rocksdb/RocksDBKeyedStateBackend.java | {
"start": 8987,
"end": 9370
} | interface ____ {
<K, N, SV, S extends State, IS extends S> IS createState(
StateDescriptor<S, SV> stateDesc,
Tuple2<ColumnFamilyHandle, RegisteredKeyValueStateBackendMetaInfo<N, SV>>
registerResult,
RocksDBKeyedStateBackend<K> backend)
throws Exception;
}
private | StateCreateFactory |
java | eclipse-vertx__vert.x | vertx-core/src/main/java/io/vertx/core/http/impl/http2/multiplex/Http2MultiplexServerChannelInitializer.java | {
"start": 1272,
"end": 6132
} | class ____ implements Http2ServerChannelInitializer {
private final CompressionManager compressionManager;
private final boolean decompressionSupported;
private final Http2Settings initialSettings;
private final Http2MultiplexConnectionFactory connectionFactory;
private final int rstFloodMaxRstFramePerWindow;
private final int rstFloodWindowDuration;
private final boolean logEnabled;
public Http2MultiplexServerChannelInitializer(ContextInternal context,
CompressionManager compressionManager,
boolean decompressionSupported,
HttpServerMetrics<?, ?, ?> serverMetrics,
Object connectionMetric,
Supplier<ContextInternal> streamContextSupplier,
Handler<HttpServerConnection> connectionHandler,
Http2Settings initialSettings,
int rstFloodMaxRstFramePerWindow,
int rstFloodWindowDuration,
boolean logEnabled) {
Http2MultiplexConnectionFactory connectionFactory = (handler, chctx) -> {
Http2MultiplexServerConnection connection = new Http2MultiplexServerConnection(
handler,
compressionManager,
serverMetrics,
chctx,
context,
streamContextSupplier,
connectionHandler);
connection.metric(connectionMetric);
return connection;
};
this.initialSettings = initialSettings;
this.connectionFactory = connectionFactory;
this.compressionManager = compressionManager;
this.decompressionSupported = decompressionSupported;
this.rstFloodMaxRstFramePerWindow = rstFloodMaxRstFramePerWindow;
this.rstFloodWindowDuration = rstFloodWindowDuration;
this.logEnabled = logEnabled;
}
@Override
public void configureHttp2(ContextInternal context, ChannelPipeline pipeline, boolean ssl) {
Http2MultiplexHandler handler = new Http2MultiplexHandler(
pipeline.channel(),
context,
connectionFactory,
initialSettings);
Http2FrameCodec frameCodec = new Http2CustomFrameCodecBuilder(compressionManager, decompressionSupported)
.server(true)
.decoderEnforceMaxRstFramesPerWindow(rstFloodMaxRstFramePerWindow, rstFloodWindowDuration)
.encoderEnforceMaxRstFramesPerWindow(rstFloodMaxRstFramePerWindow, rstFloodWindowDuration)
.initialSettings(initialSettings)
.logEnabled(logEnabled)
.build();
frameCodec.connection().addListener(handler);
if (ssl) {
pipeline.remove("chunkedWriter");
}
pipeline.remove("handler");
pipeline.addLast("codec", frameCodec);
pipeline.addLast("multiplex", new io.netty.handler.codec.http2.Http2MultiplexHandler(handler));
pipeline.addLast("handler", handler);
}
@Override
public void configureHttp1OrH2CUpgradeHandler(ContextInternal context, ChannelPipeline pipeline, SslChannelProvider sslChannelProvider, SslContextManager sslContextManager) {
HttpServerUpgradeHandler.UpgradeCodecFactory upgradeCodecFactory = protocol -> {
if (AsciiString.contentEquals(Http2CodecUtil.HTTP_UPGRADE_PROTOCOL_NAME, protocol)) {
Http2MultiplexHandler handler = new Http2MultiplexHandler(
pipeline.channel(),
context,
connectionFactory,
initialSettings);
Http2FrameCodec frameCodec = new Http2CustomFrameCodecBuilder(compressionManager, decompressionSupported)
.server(true)
.initialSettings(initialSettings)
.logEnabled(logEnabled)
.build();
frameCodec.connection().addListener(handler);
io.netty.handler.codec.http2.Http2MultiplexHandler http2MultiplexHandler = new io.netty.handler.codec.http2.Http2MultiplexHandler(handler);
return new Http2ServerUpgradeCodec(
frameCodec,
http2MultiplexHandler,
handler);
} else {
return null;
}
};
HttpServerUpgradeHandler.SourceCodec sourceCodec = ctx -> {
ChannelPipeline p = ctx.pipeline();
if (p.get("chunkedWriter") != null) {
p.remove("chunkedWriter");
}
p.remove("httpDecoder");
p.remove("httpEncoder");
p.remove("handler");
if (decompressionSupported) {
p.remove("inflater");
}
if (compressionManager != null) {
p.remove("deflater");
}
};
// Make max buffered configurable
pipeline.addBefore("handler", "h2c", new HttpServerUpgradeHandler(sourceCodec, upgradeCodecFactory, 128 * 1024));
}
}
| Http2MultiplexServerChannelInitializer |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/injection/guice/internal/MoreTypes.java | {
"start": 1533,
"end": 11643
} | class ____ {
public static final Type[] EMPTY_TYPE_ARRAY = new Type[] {};
private MoreTypes() {}
private static final Map<TypeLiteral<?>, TypeLiteral<?>> PRIMITIVE_TO_WRAPPER = Map.of(
TypeLiteral.get(boolean.class),
TypeLiteral.get(Boolean.class),
TypeLiteral.get(byte.class),
TypeLiteral.get(Byte.class),
TypeLiteral.get(short.class),
TypeLiteral.get(Short.class),
TypeLiteral.get(int.class),
TypeLiteral.get(Integer.class),
TypeLiteral.get(long.class),
TypeLiteral.get(Long.class),
TypeLiteral.get(float.class),
TypeLiteral.get(Float.class),
TypeLiteral.get(double.class),
TypeLiteral.get(Double.class),
TypeLiteral.get(char.class),
TypeLiteral.get(Character.class),
TypeLiteral.get(void.class),
TypeLiteral.get(Void.class)
);
/**
* Returns an equivalent type that's safe for use in a key. The returned type will be free of
* primitive types. Type literals of primitives will return the corresponding wrapper types.
*
* @throws ConfigurationException if {@code type} contains a type variable
*/
public static <T> TypeLiteral<T> makeKeySafe(TypeLiteral<T> type) {
if (isFullySpecified(type.getType()) == false) {
String message = type + " cannot be used as a key; It is not fully specified.";
throw new ConfigurationException(singleton(new Message(message)));
}
@SuppressWarnings("unchecked")
TypeLiteral<T> wrappedPrimitives = (TypeLiteral<T>) PRIMITIVE_TO_WRAPPER.get(type);
return wrappedPrimitives != null ? wrappedPrimitives : type;
}
/**
* Returns true if {@code type} is free from type variables.
*/
private static boolean isFullySpecified(Type type) {
if (type instanceof Class) {
return true;
} else if (type instanceof CompositeType) {
return ((CompositeType) type).isFullySpecified();
} else if (type instanceof TypeVariable) {
return false;
} else {
return ((CompositeType) canonicalize(type)).isFullySpecified();
}
}
/**
* Returns a type that is functionally equal but not necessarily equal
* according to {@link Object#equals(Object) Object.equals()}.
*/
public static Type canonicalize(Type type) {
if (type instanceof ParameterizedTypeImpl || type instanceof GenericArrayTypeImpl || type instanceof WildcardTypeImpl) {
return type;
} else if (type instanceof ParameterizedType p) {
return new ParameterizedTypeImpl(p.getOwnerType(), p.getRawType(), p.getActualTypeArguments());
} else if (type instanceof GenericArrayType g) {
return new GenericArrayTypeImpl(g.getGenericComponentType());
} else if (type instanceof Class<?> c && ((Class<?>) type).isArray()) {
return new GenericArrayTypeImpl(c.getComponentType());
} else if (type instanceof WildcardType w) {
return new WildcardTypeImpl(w.getUpperBounds(), w.getLowerBounds());
} else {
// type is either serializable as-is or unsupported
return type;
}
}
public static Class<?> getRawType(Type type) {
if (type instanceof Class<?>) {
// type is a normal class.
return (Class<?>) type;
} else if (type instanceof ParameterizedType parameterizedType) {
// I'm not exactly sure why getRawType() returns Type instead of Class.
// Neal isn't either but suspects some pathological case related
// to nested classes exists.
Type rawType = parameterizedType.getRawType();
if ((rawType instanceof Class) == false) {
throw new IllegalArgumentException("Expected a Class, but <" + type + "> is of type " + type.getClass().getName());
}
return (Class<?>) rawType;
} else if (type instanceof GenericArrayType) {
// TODO: Is this sufficient?
return Object[].class;
} else if (type instanceof TypeVariable) {
// we could use the variable's bounds, but that'll won't work if there are multiple.
// having a raw type that's more general than necessary is okay
return Object.class;
} else {
throw new IllegalArgumentException(
"Expected a Class, ParameterizedType, or " + "GenericArrayType, but <" + type + "> is of type " + type.getClass().getName()
);
}
}
/**
* Returns true if {@code a} and {@code b} are equal.
*/
public static boolean equals(Type a, Type b) {
if (a == b) {
// also handles (a == null && b == null)
return true;
} else if (a instanceof Class) {
// Class already specifies equals().
return a.equals(b);
} else if (a instanceof ParameterizedType pa) {
if ((b instanceof ParameterizedType) == false) {
return false;
}
// TODO: save a .clone() call
ParameterizedType pb = (ParameterizedType) b;
return Objects.equals(pa.getOwnerType(), pb.getOwnerType())
&& pa.getRawType().equals(pb.getRawType())
&& Arrays.equals(pa.getActualTypeArguments(), pb.getActualTypeArguments());
} else if (a instanceof GenericArrayType ga) {
if ((b instanceof GenericArrayType) == false) {
return false;
}
GenericArrayType gb = (GenericArrayType) b;
return equals(ga.getGenericComponentType(), gb.getGenericComponentType());
} else if (a instanceof WildcardType wa) {
if ((b instanceof WildcardType) == false) {
return false;
}
WildcardType wb = (WildcardType) b;
return Arrays.equals(wa.getUpperBounds(), wb.getUpperBounds()) && Arrays.equals(wa.getLowerBounds(), wb.getLowerBounds());
} else if (a instanceof TypeVariable<?> va) {
if ((b instanceof TypeVariable) == false) {
return false;
}
TypeVariable<?> vb = (TypeVariable<?>) b;
return va.getGenericDeclaration() == vb.getGenericDeclaration() && va.getName().equals(vb.getName());
} else {
// This isn't a type we support. Could be a generic array type, wildcard type, etc.
return false;
}
}
/**
* Returns the hashCode of {@code type}.
*/
public static int hashCode(Type type) {
if (type instanceof Class) {
// Class specifies hashCode().
return type.hashCode();
} else if (type instanceof ParameterizedType p) {
return Arrays.hashCode(p.getActualTypeArguments()) ^ p.getRawType().hashCode() ^ hashCodeOrZero(p.getOwnerType());
} else if (type instanceof GenericArrayType) {
return hashCode(((GenericArrayType) type).getGenericComponentType());
} else if (type instanceof WildcardType w) {
return Arrays.hashCode(w.getLowerBounds()) ^ Arrays.hashCode(w.getUpperBounds());
} else {
// This isn't a type we support. Probably a type variable
return hashCodeOrZero(type);
}
}
private static int hashCodeOrZero(Object o) {
return o != null ? o.hashCode() : 0;
}
public static String toString(Type type) {
if (type instanceof Class<?>) {
return ((Class<?>) type).getName();
} else if (type instanceof ParameterizedType parameterizedType) {
Type[] arguments = parameterizedType.getActualTypeArguments();
Type ownerType = parameterizedType.getOwnerType();
StringBuilder stringBuilder = new StringBuilder();
if (ownerType != null) {
stringBuilder.append(toString(ownerType)).append(".");
}
stringBuilder.append(toString(parameterizedType.getRawType()));
if (arguments.length > 0) {
stringBuilder.append("<").append(toString(arguments[0]));
for (int i = 1; i < arguments.length; i++) {
stringBuilder.append(", ").append(toString(arguments[i]));
}
}
return stringBuilder.append(">").toString();
} else if (type instanceof GenericArrayType) {
return toString(((GenericArrayType) type).getGenericComponentType()) + "[]";
} else if (type instanceof WildcardType wildcardType) {
Type[] lowerBounds = wildcardType.getLowerBounds();
Type[] upperBounds = wildcardType.getUpperBounds();
if (upperBounds.length != 1 || lowerBounds.length > 1) {
throw new UnsupportedOperationException("Unsupported wildcard type " + type);
}
if (lowerBounds.length == 1) {
if (upperBounds[0] != Object.class) {
throw new UnsupportedOperationException("Unsupported wildcard type " + type);
}
return "? super " + toString(lowerBounds[0]);
} else if (upperBounds[0] == Object.class) {
return "?";
} else {
return "? extends " + toString(upperBounds[0]);
}
} else {
return type.toString();
}
}
/**
* Returns {@code Field.class}, {@code Method.class} or {@code Constructor.class}.
*/
public static Class<? extends Member> memberType(Member member) {
Objects.requireNonNull(member, "member");
if (member instanceof Field) {
return Field.class;
} else if (member instanceof Method) {
return Method.class;
} else if (member instanceof Constructor) {
return Constructor.class;
} else {
throw new IllegalArgumentException("Unsupported implementation | MoreTypes |
java | square__javapoet | src/test/java/com/squareup/javapoet/CodeBlockTest.java | {
"start": 951,
"end": 12069
} | class ____ {
@Test public void equalsAndHashCode() {
CodeBlock a = CodeBlock.builder().build();
CodeBlock b = CodeBlock.builder().build();
assertThat(a.equals(b)).isTrue();
assertThat(a.hashCode()).isEqualTo(b.hashCode());
a = CodeBlock.builder().add("$L", "taco").build();
b = CodeBlock.builder().add("$L", "taco").build();
assertThat(a.equals(b)).isTrue();
assertThat(a.hashCode()).isEqualTo(b.hashCode());
}
@Test public void of() {
CodeBlock a = CodeBlock.of("$L taco", "delicious");
assertThat(a.toString()).isEqualTo("delicious taco");
}
@Test public void isEmpty() {
assertTrue(CodeBlock.builder().isEmpty());
assertTrue(CodeBlock.builder().add("").isEmpty());
assertFalse(CodeBlock.builder().add(" ").isEmpty());
}
@Test public void indentCannotBeIndexed() {
try {
CodeBlock.builder().add("$1>", "taco").build();
fail();
} catch (IllegalArgumentException exp) {
assertThat(exp)
.hasMessageThat()
.isEqualTo("$$, $>, $<, $[, $], $W, and $Z may not have an index");
}
}
@Test public void deindentCannotBeIndexed() {
try {
CodeBlock.builder().add("$1<", "taco").build();
fail();
} catch (IllegalArgumentException exp) {
assertThat(exp)
.hasMessageThat()
.isEqualTo("$$, $>, $<, $[, $], $W, and $Z may not have an index");
}
}
@Test public void dollarSignEscapeCannotBeIndexed() {
try {
CodeBlock.builder().add("$1$", "taco").build();
fail();
} catch (IllegalArgumentException exp) {
assertThat(exp)
.hasMessageThat()
.isEqualTo("$$, $>, $<, $[, $], $W, and $Z may not have an index");
}
}
@Test public void statementBeginningCannotBeIndexed() {
try {
CodeBlock.builder().add("$1[", "taco").build();
fail();
} catch (IllegalArgumentException exp) {
assertThat(exp)
.hasMessageThat()
.isEqualTo("$$, $>, $<, $[, $], $W, and $Z may not have an index");
}
}
@Test public void statementEndingCannotBeIndexed() {
try {
CodeBlock.builder().add("$1]", "taco").build();
fail();
} catch (IllegalArgumentException exp) {
assertThat(exp)
.hasMessageThat()
.isEqualTo("$$, $>, $<, $[, $], $W, and $Z may not have an index");
}
}
@Test public void nameFormatCanBeIndexed() {
CodeBlock block = CodeBlock.builder().add("$1N", "taco").build();
assertThat(block.toString()).isEqualTo("taco");
}
@Test public void literalFormatCanBeIndexed() {
CodeBlock block = CodeBlock.builder().add("$1L", "taco").build();
assertThat(block.toString()).isEqualTo("taco");
}
@Test public void stringFormatCanBeIndexed() {
CodeBlock block = CodeBlock.builder().add("$1S", "taco").build();
assertThat(block.toString()).isEqualTo("\"taco\"");
}
@Test public void typeFormatCanBeIndexed() {
CodeBlock block = CodeBlock.builder().add("$1T", String.class).build();
assertThat(block.toString()).isEqualTo("java.lang.String");
}
@Test public void simpleNamedArgument() {
Map<String, Object> map = new LinkedHashMap<>();
map.put("text", "taco");
CodeBlock block = CodeBlock.builder().addNamed("$text:S", map).build();
assertThat(block.toString()).isEqualTo("\"taco\"");
}
@Test public void repeatedNamedArgument() {
Map<String, Object> map = new LinkedHashMap<>();
map.put("text", "tacos");
CodeBlock block = CodeBlock.builder()
.addNamed("\"I like \" + $text:S + \". Do you like \" + $text:S + \"?\"", map)
.build();
assertThat(block.toString()).isEqualTo(
"\"I like \" + \"tacos\" + \". Do you like \" + \"tacos\" + \"?\"");
}
@Test public void namedAndNoArgFormat() {
Map<String, Object> map = new LinkedHashMap<>();
map.put("text", "tacos");
CodeBlock block = CodeBlock.builder()
.addNamed("$>\n$text:L for $$3.50", map).build();
assertThat(block.toString()).isEqualTo("\n tacos for $3.50");
}
@Test public void missingNamedArgument() {
try {
Map<String, Object> map = new LinkedHashMap<>();
CodeBlock.builder().addNamed("$text:S", map).build();
fail();
} catch(IllegalArgumentException expected) {
assertThat(expected).hasMessageThat().isEqualTo("Missing named argument for $text");
}
}
@Test public void lowerCaseNamed() {
try {
Map<String, Object> map = new LinkedHashMap<>();
map.put("Text", "tacos");
CodeBlock block = CodeBlock.builder().addNamed("$Text:S", map).build();
fail();
} catch(IllegalArgumentException expected) {
assertThat(expected).hasMessageThat().isEqualTo("argument 'Text' must start with a lowercase character");
}
}
@Test public void multipleNamedArguments() {
Map<String, Object> map = new LinkedHashMap<>();
map.put("pipe", System.class);
map.put("text", "tacos");
CodeBlock block = CodeBlock.builder()
.addNamed("$pipe:T.out.println(\"Let's eat some $text:L\");", map)
.build();
assertThat(block.toString()).isEqualTo(
"java.lang.System.out.println(\"Let's eat some tacos\");");
}
@Test public void namedNewline() {
Map<String, Object> map = new LinkedHashMap<>();
map.put("clazz", Integer.class);
CodeBlock block = CodeBlock.builder().addNamed("$clazz:T\n", map).build();
assertThat(block.toString()).isEqualTo("java.lang.Integer\n");
}
@Test public void danglingNamed() {
Map<String, Object> map = new LinkedHashMap<>();
map.put("clazz", Integer.class);
try {
CodeBlock.builder().addNamed("$clazz:T$", map).build();
fail();
} catch(IllegalArgumentException expected) {
assertThat(expected).hasMessageThat().isEqualTo("dangling $ at end");
}
}
@Test public void indexTooHigh() {
try {
CodeBlock.builder().add("$2T", String.class).build();
fail();
} catch (IllegalArgumentException expected) {
assertThat(expected).hasMessageThat().isEqualTo("index 2 for '$2T' not in range (received 1 arguments)");
}
}
@Test public void indexIsZero() {
try {
CodeBlock.builder().add("$0T", String.class).build();
fail();
} catch (IllegalArgumentException expected) {
assertThat(expected).hasMessageThat().isEqualTo("index 0 for '$0T' not in range (received 1 arguments)");
}
}
@Test public void indexIsNegative() {
try {
CodeBlock.builder().add("$-1T", String.class).build();
fail();
} catch (IllegalArgumentException expected) {
assertThat(expected).hasMessageThat().isEqualTo("invalid format string: '$-1T'");
}
}
@Test public void indexWithoutFormatType() {
try {
CodeBlock.builder().add("$1", String.class).build();
fail();
} catch (IllegalArgumentException expected) {
assertThat(expected).hasMessageThat().isEqualTo("dangling format characters in '$1'");
}
}
@Test public void indexWithoutFormatTypeNotAtStringEnd() {
try {
CodeBlock.builder().add("$1 taco", String.class).build();
fail();
} catch (IllegalArgumentException expected) {
assertThat(expected).hasMessageThat().isEqualTo("invalid format string: '$1 taco'");
}
}
@Test public void indexButNoArguments() {
try {
CodeBlock.builder().add("$1T").build();
fail();
} catch (IllegalArgumentException expected) {
assertThat(expected).hasMessageThat().isEqualTo("index 1 for '$1T' not in range (received 0 arguments)");
}
}
@Test public void formatIndicatorAlone() {
try {
CodeBlock.builder().add("$", String.class).build();
fail();
} catch (IllegalArgumentException expected) {
assertThat(expected).hasMessageThat().isEqualTo("dangling format characters in '$'");
}
}
@Test public void formatIndicatorWithoutIndexOrFormatType() {
try {
CodeBlock.builder().add("$ tacoString", String.class).build();
fail();
} catch (IllegalArgumentException expected) {
assertThat(expected).hasMessageThat().isEqualTo("invalid format string: '$ tacoString'");
}
}
@Test public void sameIndexCanBeUsedWithDifferentFormats() {
CodeBlock block = CodeBlock.builder()
.add("$1T.out.println($1S)", ClassName.get(System.class))
.build();
assertThat(block.toString()).isEqualTo("java.lang.System.out.println(\"java.lang.System\")");
}
@Test public void tooManyStatementEnters() {
CodeBlock codeBlock = CodeBlock.builder().add("$[$[").build();
try {
// We can't report this error until rendering type because code blocks might be composed.
codeBlock.toString();
fail();
} catch (IllegalStateException expected) {
assertThat(expected).hasMessageThat().isEqualTo("statement enter $[ followed by statement enter $[");
}
}
@Test public void statementExitWithoutStatementEnter() {
CodeBlock codeBlock = CodeBlock.builder().add("$]").build();
try {
// We can't report this error until rendering type because code blocks might be composed.
codeBlock.toString();
fail();
} catch (IllegalStateException expected) {
assertThat(expected).hasMessageThat().isEqualTo("statement exit $] has no matching statement enter $[");
}
}
@Test public void join() {
List<CodeBlock> codeBlocks = new ArrayList<>();
codeBlocks.add(CodeBlock.of("$S", "hello"));
codeBlocks.add(CodeBlock.of("$T", ClassName.get("world", "World")));
codeBlocks.add(CodeBlock.of("need tacos"));
CodeBlock joined = CodeBlock.join(codeBlocks, " || ");
assertThat(joined.toString()).isEqualTo("\"hello\" || world.World || need tacos");
}
@Test public void joining() {
List<CodeBlock> codeBlocks = new ArrayList<>();
codeBlocks.add(CodeBlock.of("$S", "hello"));
codeBlocks.add(CodeBlock.of("$T", ClassName.get("world", "World")));
codeBlocks.add(CodeBlock.of("need tacos"));
CodeBlock joined = codeBlocks.stream().collect(CodeBlock.joining(" || "));
assertThat(joined.toString()).isEqualTo("\"hello\" || world.World || need tacos");
}
@Test public void joiningSingle() {
List<CodeBlock> codeBlocks = new ArrayList<>();
codeBlocks.add(CodeBlock.of("$S", "hello"));
CodeBlock joined = codeBlocks.stream().collect(CodeBlock.joining(" || "));
assertThat(joined.toString()).isEqualTo("\"hello\"");
}
@Test public void joiningWithPrefixAndSuffix() {
List<CodeBlock> codeBlocks = new ArrayList<>();
codeBlocks.add(CodeBlock.of("$S", "hello"));
codeBlocks.add(CodeBlock.of("$T", ClassName.get("world", "World")));
codeBlocks.add(CodeBlock.of("need tacos"));
CodeBlock joined = codeBlocks.stream().collect(CodeBlock.joining(" || ", "start {", "} end"));
assertThat(joined.toString()).isEqualTo("start {\"hello\" || world.World || need tacos} end");
}
@Test public void clear() {
CodeBlock block = CodeBlock.builder()
.addStatement("$S", "Test string")
.clear()
.build();
assertThat(block.toString()).isEmpty();
}
}
| CodeBlockTest |
java | spring-projects__spring-security | core/src/main/java/org/springframework/security/authentication/AuthenticationDetailsSource.java | {
"start": 906,
"end": 1181
} | class ____ it wishes a new authentication details instance to be
* created.
* @param context the request object, which may be used by the authentication details
* object
* @return a fully-configured authentication details instance
*/
T buildDetails(C context);
}
| when |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/params/ParameterizedTestExtensionTests.java | {
"start": 13040,
"end": 13222
} | class ____ {
@ParameterizedTest
@ArgumentsSource(ArgumentsProviderWithCloseHandler.class)
void method(String parameter) {
}
}
static | ArgumentsProviderWithCloseHandlerTestCase |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/metadata/TotalFeatureImportance.java | {
"start": 5418,
"end": 8301
} | class ____ implements ToXContentObject, Writeable {
private static final String NAME = "importance";
// These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly
public static final ConstructingObjectParser<Importance, Void> LENIENT_PARSER = createParser(true);
public static final ConstructingObjectParser<Importance, Void> STRICT_PARSER = createParser(false);
private static ConstructingObjectParser<Importance, Void> createParser(boolean ignoreUnknownFields) {
ConstructingObjectParser<Importance, Void> parser = new ConstructingObjectParser<>(
NAME,
ignoreUnknownFields,
a -> new Importance((double) a[0], (double) a[1], (double) a[2])
);
parser.declareDouble(ConstructingObjectParser.constructorArg(), MEAN_MAGNITUDE);
parser.declareDouble(ConstructingObjectParser.constructorArg(), MIN);
parser.declareDouble(ConstructingObjectParser.constructorArg(), MAX);
return parser;
}
private final double meanMagnitude;
private final double min;
private final double max;
public Importance(double meanMagnitude, double min, double max) {
this.meanMagnitude = meanMagnitude;
this.min = min;
this.max = max;
}
public Importance(StreamInput in) throws IOException {
this.meanMagnitude = in.readDouble();
this.min = in.readDouble();
this.max = in.readDouble();
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Importance that = (Importance) o;
return Double.compare(that.meanMagnitude, meanMagnitude) == 0
&& Double.compare(that.min, min) == 0
&& Double.compare(that.max, max) == 0;
}
@Override
public int hashCode() {
return Objects.hash(meanMagnitude, min, max);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeDouble(meanMagnitude);
out.writeDouble(min);
out.writeDouble(max);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
return builder.map(asMap());
}
private Map<String, Object> asMap() {
Map<String, Object> map = new LinkedHashMap<>();
map.put(MEAN_MAGNITUDE.getPreferredName(), meanMagnitude);
map.put(MIN.getPreferredName(), min);
map.put(MAX.getPreferredName(), max);
return map;
}
}
public static | Importance |
java | processing__processing4 | java/src/processing/mode/java/RuntimePathBuilder.java | {
"start": 5076,
"end": 5835
} | class ____ and
* import recommendations) and that are required to be re-calculated due to different events.
*
* The following collections determine which types of paths apply in each and are assigned in the
* constructor. Note that often factories are included in more than one of these collections
* and are cached independently as their values are invalidated at different events.
*/
// Path caches that are invalidated by one or more events within processing.
private final List<CachedRuntimePathFactory> libraryDependentCaches;
private final List<CachedRuntimePathFactory> libraryImportsDependentCaches;
private final List<CachedRuntimePathFactory> codeFolderDependentCaches;
// Path factories involved in determining sketch | path |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/CustomDeserializersTest.java | {
"start": 4369,
"end": 4698
} | class ____ extends KeyDeserializer {
@Override
public CustomKey deserializeKey(String key, DeserializationContext ctxt) {
return new CustomKey(Integer.valueOf(key));
}
}
// [databind#375]
@Target({ElementType.FIELD})
@Retention(RetentionPolicy.RUNTIME)
@ | CustomKeyDeserializer |
java | apache__spark | sql/api/src/main/java/org/apache/spark/sql/connector/catalog/IdentityColumnSpec.java | {
"start": 982,
"end": 2752
} | class ____ {
private final long start;
private final long step;
private final boolean allowExplicitInsert;
/**
* Creates an identity column specification.
* @param start the start value to generate the identity values
* @param step the step value to generate the identity values
* @param allowExplicitInsert whether the identity column allows explicit insertion of values
*/
public IdentityColumnSpec(long start, long step, boolean allowExplicitInsert) {
this.start = start;
this.step = step;
this.allowExplicitInsert = allowExplicitInsert;
}
/**
* @return the start value to generate the identity values
*/
public long getStart() {
return start;
}
/**
* @return the step value to generate the identity values
*/
public long getStep() {
return step;
}
/**
* @return whether the identity column allows explicit insertion of values
*/
public boolean isAllowExplicitInsert() {
return allowExplicitInsert;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
IdentityColumnSpec that = (IdentityColumnSpec) o;
return start == that.start &&
step == that.step &&
allowExplicitInsert == that.allowExplicitInsert;
}
@Override
public int hashCode() {
return Objects.hash(start, step, allowExplicitInsert);
}
@Override
public String toString() {
return "IdentityColumnSpec{" +
"start=" + start +
", step=" + step +
", allowExplicitInsert=" + allowExplicitInsert +
"}";
}
}
| IdentityColumnSpec |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/errors/InvalidVoterKeyException.java | {
"start": 847,
"end": 1139
} | class ____ extends ApiException {
private static final long serialVersionUID = 1;
public InvalidVoterKeyException(String s) {
super(s);
}
public InvalidVoterKeyException(String message, Throwable cause) {
super(message, cause);
}
}
| InvalidVoterKeyException |
java | spring-projects__spring-boot | cli/spring-boot-cli/src/main/java/org/springframework/boot/cli/command/NoHelpCommandArgumentsException.java | {
"start": 803,
"end": 1019
} | class ____ extends CommandException {
private static final long serialVersionUID = 1L;
public NoHelpCommandArgumentsException() {
super(Option.SHOW_USAGE, Option.HIDE_MESSAGE);
}
}
| NoHelpCommandArgumentsException |
java | google__guava | android/guava-tests/test/com/google/common/util/concurrent/FuturesTest.java | {
"start": 123977,
"end": 134484
} | class ____ test has
* a bug!), switch the second branch to call untimed future.get() instead of
* pseudoTimedGet.
*/
return inputs.hasDelayed(iFuture, jFuture)
? pseudoTimedGetUninterruptibly(future, timeout, unit)
: pseudoTimedGetUninterruptibly(future, 2500, MILLISECONDS);
}
@J2ktIncompatible
@GwtIncompatible // threads
public void testAllAsList_extensive() throws InterruptedException {
runExtensiveMergerTest(Merger.allMerger);
}
@J2ktIncompatible
@GwtIncompatible // threads
public void testSuccessfulAsList_extensive() throws InterruptedException {
runExtensiveMergerTest(Merger.successMerger);
}
public void testSuccessfulAsList() throws Exception {
// Create input and output
SettableFuture<String> future1 = SettableFuture.create();
SettableFuture<String> future2 = SettableFuture.create();
SettableFuture<String> future3 = SettableFuture.create();
ListenableFuture<List<String>> compound = successfulAsList(future1, future2, future3);
// Attach a listener
SingleCallListener listener = new SingleCallListener();
compound.addListener(listener, directExecutor());
// Satisfy each input and check the output
assertFalse(compound.isDone());
future1.set(DATA1);
assertFalse(compound.isDone());
future2.set(DATA2);
assertFalse(compound.isDone());
listener.expectCall();
future3.set(DATA3);
assertTrue(listener.wasCalled());
List<String> results = getDone(compound);
assertThat(results).containsExactly(DATA1, DATA2, DATA3).inOrder();
}
public void testSuccessfulAsList_emptyList() throws Exception {
SingleCallListener listener = new SingleCallListener();
listener.expectCall();
List<ListenableFuture<String>> futures = ImmutableList.of();
ListenableFuture<List<String>> compound = successfulAsList(futures);
compound.addListener(listener, directExecutor());
assertThat(getDone(compound)).isEmpty();
assertTrue(listener.wasCalled());
}
public void testSuccessfulAsList_emptyArray() throws Exception {
SingleCallListener listener = new SingleCallListener();
listener.expectCall();
ListenableFuture<List<String>> compound = successfulAsList();
compound.addListener(listener, directExecutor());
assertThat(getDone(compound)).isEmpty();
assertTrue(listener.wasCalled());
}
public void testSuccessfulAsList_partialFailure() throws Exception {
SingleCallListener listener = new SingleCallListener();
SettableFuture<String> future1 = SettableFuture.create();
SettableFuture<String> future2 = SettableFuture.create();
ListenableFuture<List<String>> compound = successfulAsList(future1, future2);
compound.addListener(listener, directExecutor());
assertFalse(compound.isDone());
future1.setException(new Throwable("failed1"));
assertFalse(compound.isDone());
listener.expectCall();
future2.set(DATA2);
assertTrue(listener.wasCalled());
List<String> results = getDone(compound);
assertThat(results).containsExactly(null, DATA2).inOrder();
}
public void testSuccessfulAsList_totalFailure() throws Exception {
SingleCallListener listener = new SingleCallListener();
SettableFuture<String> future1 = SettableFuture.create();
SettableFuture<String> future2 = SettableFuture.create();
ListenableFuture<List<String>> compound = successfulAsList(future1, future2);
compound.addListener(listener, directExecutor());
assertFalse(compound.isDone());
future1.setException(new Throwable("failed1"));
assertFalse(compound.isDone());
listener.expectCall();
future2.setException(new Throwable("failed2"));
assertTrue(listener.wasCalled());
List<String> results = getDone(compound);
assertThat(results).containsExactly(null, null).inOrder();
}
public void testSuccessfulAsList_cancelled() throws Exception {
SingleCallListener listener = new SingleCallListener();
SettableFuture<String> future1 = SettableFuture.create();
SettableFuture<String> future2 = SettableFuture.create();
ListenableFuture<List<String>> compound = successfulAsList(future1, future2);
compound.addListener(listener, directExecutor());
assertFalse(compound.isDone());
future1.cancel(true);
assertFalse(compound.isDone());
listener.expectCall();
future2.set(DATA2);
assertTrue(listener.wasCalled());
List<String> results = getDone(compound);
assertThat(results).containsExactly(null, DATA2).inOrder();
}
public void testSuccessfulAsList_resultCancelled() throws Exception {
SettableFuture<String> future1 = SettableFuture.create();
SettableFuture<String> future2 = SettableFuture.create();
ListenableFuture<List<String>> compound = successfulAsList(future1, future2);
future2.set(DATA2);
assertFalse(compound.isDone());
assertTrue(compound.cancel(false));
assertTrue(compound.isCancelled());
assertTrue(future1.isCancelled());
assertFalse(future1.wasInterrupted());
}
public void testSuccessfulAsList_resultCancelledRacingInputDone() throws Exception {
TestLogHandler listenerLoggerHandler = new TestLogHandler();
Logger exceptionLogger = Logger.getLogger(AbstractFuture.class.getName());
exceptionLogger.addHandler(listenerLoggerHandler);
try {
doTestSuccessfulAsListResultCancelledRacingInputDone();
assertWithMessage("Nothing should be logged")
.that(listenerLoggerHandler.getStoredLogRecords())
.isEmpty();
} finally {
exceptionLogger.removeHandler(listenerLoggerHandler);
}
}
private static void doTestSuccessfulAsListResultCancelledRacingInputDone() throws Exception {
// Simple (combined.cancel -> input.cancel -> setOneValue):
successfulAsList(ImmutableList.of(SettableFuture.create())).cancel(true);
/*
* Complex (combined.cancel -> input.cancel -> other.set -> setOneValue),
* to show that this isn't just about problems with the input future we just
* cancelled:
*/
SettableFuture<String> future1 = SettableFuture.create();
SettableFuture<String> future2 = SettableFuture.create();
ListenableFuture<List<String>> compound = successfulAsList(future1, future2);
future1.addListener(
new Runnable() {
@Override
public void run() {
assertTrue(future1.isCancelled());
/*
* This test relies on behavior that's unspecified but currently
* guaranteed by the implementation: Cancellation of inputs is
* performed in the order they were provided to the constructor. Verify
* that as a sanity check:
*/
assertFalse(future2.isCancelled());
// Now attempt to trigger the exception:
future2.set(DATA2);
}
},
directExecutor());
assertTrue(compound.cancel(false));
assertTrue(compound.isCancelled());
assertTrue(future1.isCancelled());
assertFalse(future2.isCancelled());
try {
getDone(compound);
fail();
} catch (CancellationException expected) {
}
}
public void testSuccessfulAsList_resultInterrupted() throws Exception {
SettableFuture<String> future1 = SettableFuture.create();
SettableFuture<String> future2 = SettableFuture.create();
ListenableFuture<List<String>> compound = successfulAsList(future1, future2);
future2.set(DATA2);
assertFalse(compound.isDone());
assertTrue(compound.cancel(true));
assertTrue(compound.isCancelled());
assertTrue(future1.isCancelled());
assertTrue(future1.wasInterrupted());
}
public void testSuccessfulAsList_mixed() throws Exception {
SingleCallListener listener = new SingleCallListener();
SettableFuture<String> future1 = SettableFuture.create();
SettableFuture<String> future2 = SettableFuture.create();
SettableFuture<String> future3 = SettableFuture.create();
ListenableFuture<List<String>> compound = successfulAsList(future1, future2, future3);
compound.addListener(listener, directExecutor());
// First is cancelled, second fails, third succeeds
assertFalse(compound.isDone());
future1.cancel(true);
assertFalse(compound.isDone());
future2.setException(new Throwable("failed2"));
assertFalse(compound.isDone());
listener.expectCall();
future3.set(DATA3);
assertTrue(listener.wasCalled());
List<String> results = getDone(compound);
assertThat(results).containsExactly(null, null, DATA3).inOrder();
}
/** Non-Error exceptions are never logged. */
@J2ktIncompatible // TODO(b/324550390): Enable
public void testSuccessfulAsList_logging_exception() throws Exception {
assertEquals(
newArrayList((Object) null),
getDone(successfulAsList(immediateFailedFuture(new MyException()))));
assertWithMessage("Nothing should be logged")
.that(aggregateFutureLogHandler.getStoredLogRecords())
.isEmpty();
// Not even if there are a bunch of failures.
assertEquals(
newArrayList(null, null, null),
getDone(
successfulAsList(
immediateFailedFuture(new MyException()),
immediateFailedFuture(new MyException()),
immediateFailedFuture(new MyException()))));
assertWithMessage("Nothing should be logged")
.that(aggregateFutureLogHandler.getStoredLogRecords())
.isEmpty();
}
/** Ensure that errors are always logged. */
@J2ktIncompatible // TODO(b/324550390): Enable
public void testSuccessfulAsList_logging_error() throws Exception {
assertEquals(
newArrayList((Object) null),
getDone(successfulAsList(immediateFailedFuture(new SomeError()))));
List<LogRecord> logged = aggregateFutureLogHandler.getStoredLogRecords();
assertThat(logged).hasSize(1); // errors are always logged
assertThat(logged.get(0).getThrown()).isInstanceOf(SomeError.class);
}
public void testSuccessfulAsList_failureLoggedEvenAfterOutputCancelled() throws Exception {
ListenableFuture<String> input = new CancelPanickingFuture<>();
ListenableFuture<List<String>> output = successfulAsList(input);
output.cancel(false);
List<LogRecord> logged = aggregateFutureLogHandler.getStoredLogRecords();
assertThat(logged).hasSize(1);
assertThat(logged.get(0).getThrown()).hasMessageThat().isEqualTo("You can't fire me, I quit.");
}
private static final | under |
java | apache__camel | components/camel-hl7/src/main/java/org/apache/camel/component/hl7/HL7Charset.java | {
"start": 1258,
"end": 4721
} | enum ____ {
ISO_8859_1("8859/1", "ISO-8859-1"),
ISO_8859_2("8859/2", "ISO-8859-2"),
ISO_8859_3("8859/3", "ISO-8859-3"),
ISO_8859_4("8859/4", "ISO-8859-4"),
ISO_8859_5("8859/5", "ISO-8859-5"),
ISO_8859_6("8859/1", "ISO-8859-6"),
ISO_8859_7("8859/1", "ISO-8859-7"),
ISO_8859_8("8859/1", "ISO-8859-8"),
ISO_8859_9("8859/1", "ISO-8859-9"),
ASCII("ASCII", "US-ASCII"),
BIG_5("BIG-5", "Big5"),
CNS("CNS 11643-1992", "ISO-2022-CN"),
GB_1830_2000("GB 18030-2000", ""),
ISO_IR14("ISO IR14", "ISO-2022-JP"),
ISO_IR159("ISO IR159", "EUC-JP"),
ISO_IR87("ISO IR87", "EUC-JP"),
KS_X_1001("KS X 1001", "EUC-KR"),
UNICODE("UNICODE", "UTF-8"),
UTF_16("UNICODE UTF-16", "UTF-16"),
UTF_32("UNICODE UTF-32", "UTF-32"),
UTF_8("UNICODE UTF-8", "UTF-8");
private final String hl7CharsetName;
private final String javaCharsetName;
HL7Charset(String hl7CharsetName, String javaCharsetName) {
this.hl7CharsetName = hl7CharsetName;
this.javaCharsetName = javaCharsetName;
}
public String getHL7CharsetName() {
return hl7CharsetName;
}
public String getJavaCharsetName() {
return javaCharsetName;
}
/**
* Returns the HL7Charset that matches the parameter
*
* @param s charset string
* @return HL7Charset enum
*/
public static HL7Charset getHL7Charset(String s) {
if (s != null && s.length() > 0) {
for (HL7Charset charset : HL7Charset.values()) {
if (charset.hl7CharsetName.equals(s) || charset.javaCharsetName.equals(s)) {
return charset;
}
}
}
return null;
}
/**
* Returns the charset to be used for marshalling HL7 messages. If MSH-18 is empty, the charset configured in
* Camel's charset properties/headers is returned.
*
* @param message HL7 message
* @param exchange Exchange
* @return Java charset name
*/
public static String getCharsetName(Message message, Exchange exchange) throws HL7Exception {
String defaultCharsetName = ExchangeHelper.getCharsetName(exchange);
String msh18 = ((Segment) message.get("MSH")).getField(18, 0).toString();
return getCharsetName(msh18, defaultCharsetName);
}
/**
* Returns the charset to be used for unmarshalling HL7 messages. If MSH-18 is empty, the temporary charset name is
* returned.
*
* @param bytes HL7 message as byte array
* @param guessedCharsetName the temporary charset guessed to be able to read MSH-18
* @return Java charset name
*
* @see org.apache.camel.component.hl7.HL7DataFormat#guessCharsetName(byte[],
* org.apache.camel.Exchange)
*/
public static String getCharsetName(byte[] bytes, String guessedCharsetName)
throws UnsupportedEncodingException, HL7Exception {
String tmp = new String(bytes, guessedCharsetName);
String msh18 = PreParser.getFields(tmp, "MSH-18")[0];
return getCharsetName(msh18, guessedCharsetName);
}
private static String getCharsetName(String msh18, String defaultCharsetName) {
HL7Charset charset = HL7Charset.getHL7Charset(msh18);
return charset != null ? charset.getJavaCharsetName() : defaultCharsetName;
}
}
| HL7Charset |
java | apache__flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/SchemaTranslator.java | {
"start": 22648,
"end": 24638
} | class ____ {
/**
* Data type expected from the first table ecosystem operator for input conversion. The data
* type might not be a row type and can possibly be nullable.
*/
private final DataType physicalDataType;
/**
* Whether the first table ecosystem operator should treat the physical record as top-level
* record and thus perform implicit flattening. Otherwise the record needs to be wrapped in
* a top-level row.
*/
private final boolean isTopLevelRecord;
/**
* Schema derived from the physical data type. It does not include the projections of the
* user-provided schema.
*/
private final Schema schema;
/**
* List of indices to adjust the presents and order of columns from {@link #schema} for the
* final column structure.
*/
private final @Nullable List<String> projections;
private ConsumingResult(
DataType physicalDataType,
boolean isTopLevelRecord,
Schema schema,
@Nullable List<String> projections) {
this.physicalDataType = physicalDataType;
this.isTopLevelRecord = isTopLevelRecord;
this.schema = schema;
this.projections = projections;
}
public DataType getPhysicalDataType() {
return physicalDataType;
}
public boolean isTopLevelRecord() {
return isTopLevelRecord;
}
public Schema getSchema() {
return schema;
}
public @Nullable List<String> getProjections() {
return projections;
}
}
/**
* Result of {@link #createConsumingResult(DataTypeFactory, TypeInformation, Schema)}.
*
* <p>The result should be applied as: projections -> schema -> physical data type.
*/
@Internal
public static final | ConsumingResult |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/configurers/oauth2/server/authorization/OAuth2ClientCredentialsGrantTests.java | {
"start": 28257,
"end": 29285
} | class ____ extends AuthorizationServerConfiguration {
// @formatter:off
@Bean
SecurityFilterChain authorizationServerSecurityFilterChain(HttpSecurity http) throws Exception {
http
.oauth2AuthorizationServer((authorizationServer) ->
authorizationServer
.tokenEndpoint((tokenEndpoint) ->
tokenEndpoint
.accessTokenRequestConverter(authenticationConverter)
.accessTokenRequestConverters(authenticationConvertersConsumer)
.authenticationProvider(authenticationProvider)
.authenticationProviders(authenticationProvidersConsumer)
.accessTokenResponseHandler(authenticationSuccessHandler)
.errorResponseHandler(authenticationFailureHandler))
)
.authorizeHttpRequests((authorize) ->
authorize.anyRequest().authenticated()
);
return http.build();
}
// @formatter:on
}
@EnableWebSecurity
@Configuration(proxyBeanMethods = false)
static | AuthorizationServerConfigurationCustomTokenEndpoint |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/wrappedio/impl/DynamicWrappedIO.java | {
"start": 14918,
"end": 15115
} | class ____?
* @return true if the instance is loaded.
*/
public static boolean isAvailable() {
return instance().loaded();
}
/**
* Open a file.
* <p>
* If the WrappedIO | loaded |
java | apache__spark | sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/StUtilsSuite.java | {
"start": 1206,
"end": 6439
} | class ____ {
/** Common test data used across multiple tests below. */
private final byte[] testWkb = new byte[] {0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, (byte)0xF0, 0x3F, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40};
// A sample Geography byte array for testing purposes, representing a POINT(1 2) with SRID 4326.
private final int testGeographySrid = 4326;
private final byte[] testGeographyBytes;
// A sample Geometry byte array for testing purposes, representing a POINT(1 2) with SRID 0.
private final int testGeometrySrid = 0;
private final byte[] testGeometryBytes;
{
// Initialize headers.
ByteOrder end = Geo.DEFAULT_ENDIANNESS;
int sridLen = Geo.HEADER_SIZE;
byte[] geogSrid = ByteBuffer.allocate(sridLen).order(end).putInt(testGeographySrid).array();
byte[] geomSrid = ByteBuffer.allocate(sridLen).order(end).putInt(testGeometrySrid).array();
// Initialize GEOGRAPHY.
int wkbLen = testWkb.length;
testGeographyBytes = new byte[sridLen + wkbLen];
System.arraycopy(geogSrid, 0, testGeographyBytes, 0, sridLen);
System.arraycopy(testWkb, 0, testGeographyBytes, sridLen, wkbLen);
// Initialize GEOMETRY.
testGeometryBytes = new byte[sridLen + wkbLen];
System.arraycopy(geomSrid, 0, testGeometryBytes, 0, sridLen);
System.arraycopy(testWkb, 0, testGeometryBytes, sridLen, wkbLen);
}
/** Geospatial type casting utility methods. */
@Test
void testGeographyToGeometry() {
GeographyVal geographyVal = GeographyVal.fromBytes(testGeographyBytes);
GeometryVal geometryVal = STUtils.geographyToGeometry(geographyVal);
assertNotNull(geometryVal);
assertArrayEquals(geographyVal.getBytes(), geometryVal.getBytes());
}
/** Tests for ST expression utility methods. */
// ST_AsBinary
@Test
void testStAsBinaryGeography() {
GeographyVal geographyVal = GeographyVal.fromBytes(testGeographyBytes);
byte[] geographyWkb = STUtils.stAsBinary(geographyVal);
assertNotNull(geographyWkb);
assertArrayEquals(testWkb, geographyWkb);
}
@Test
void testStAsBinaryGeometry() {
GeometryVal geometryVal = GeometryVal.fromBytes(testGeometryBytes);
byte[] geometryWkb = STUtils.stAsBinary(geometryVal);
assertNotNull(geometryWkb);
assertArrayEquals(testWkb, geometryWkb);
}
// ST_GeogFromWKB
@Test
void testStGeogFromWKB() {
GeographyVal geographyVal = STUtils.stGeogFromWKB(testWkb);
assertNotNull(geographyVal);
assertArrayEquals(testGeographyBytes, geographyVal.getBytes());
}
// ST_GeomFromWKB
@Test
void testStGeomFromWKB() {
GeometryVal geometryVal = STUtils.stGeomFromWKB(testWkb);
assertNotNull(geometryVal);
assertArrayEquals(testGeometryBytes, geometryVal.getBytes());
}
// ST_Srid
@Test
void testStSridGeography() {
GeographyVal geographyVal = GeographyVal.fromBytes(testGeographyBytes);
assertEquals(testGeographySrid, STUtils.stSrid(geographyVal));
}
@Test
void testStSridGeometry() {
GeometryVal geometryVal = GeometryVal.fromBytes(testGeometryBytes);
assertEquals(testGeometrySrid, STUtils.stSrid(geometryVal));
}
// ST_SetSrid
@Test
void testStSetSridGeography() {
for (int validGeographySrid : new int[]{4326}) {
GeographyVal geographyVal = GeographyVal.fromBytes(testGeographyBytes);
GeographyVal updatedGeographyVal = STUtils.stSetSrid(geographyVal, validGeographySrid);
assertNotNull(updatedGeographyVal);
Geography updatedGeography = Geography.fromBytes(updatedGeographyVal.getBytes());
assertEquals(validGeographySrid, updatedGeography.srid());
}
}
@Test
void testStSetSridGeographyInvalidSrid() {
for (int invalidGeographySrid : new int[]{-9999, -2, -1, 0, 1, 2, 3857, 9999}) {
GeographyVal geographyVal = GeographyVal.fromBytes(testGeographyBytes);
SparkIllegalArgumentException exception = assertThrows(SparkIllegalArgumentException.class,
() -> STUtils.stSetSrid(geographyVal, invalidGeographySrid));
assertEquals("ST_INVALID_SRID_VALUE", exception.getCondition());
assertTrue(exception.getMessage().contains("value: " + invalidGeographySrid + "."));
}
}
@Test
void testStSetSridGeometry() {
for (int validGeographySrid : new int[]{0, 3857, 4326}) {
GeometryVal geometryVal = GeometryVal.fromBytes(testGeometryBytes);
GeometryVal updatedGeometryVal = STUtils.stSetSrid(geometryVal, validGeographySrid);
assertNotNull(updatedGeometryVal);
Geometry updatedGeometry = Geometry.fromBytes(updatedGeometryVal.getBytes());
assertEquals(validGeographySrid, updatedGeometry.srid());
}
}
@Test
void testStSetSridGeometryInvalidSrid() {
for (int invalidGeometrySrid : new int[]{-9999, -2, -1, 1, 2, 9999}) {
GeometryVal geometryVal = GeometryVal.fromBytes(testGeometryBytes);
SparkIllegalArgumentException exception = assertThrows(SparkIllegalArgumentException.class,
() -> STUtils.stSetSrid(geometryVal, invalidGeometrySrid));
assertEquals("ST_INVALID_SRID_VALUE", exception.getCondition());
assertTrue(exception.getMessage().contains("value: " + invalidGeometrySrid + "."));
}
}
}
| STUtilsSuite |
java | google__guice | extensions/assistedinject/test/com/google/inject/assistedinject/FactoryProvider2Test.java | {
"start": 28650,
"end": 29445
} | interface ____ {
Mustang create(Color color);
}
@Test
public void testFactoryBuildingConcreteTypes() {
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(double.class).toInstance(5.0d);
// note there is no 'thatMakes()' call here:
bind(MustangFactory.class)
.toProvider(FactoryProvider.newFactory(MustangFactory.class, Mustang.class));
}
});
MustangFactory factory = injector.getInstance(MustangFactory.class);
Mustang mustang = factory.create(Color.RED);
assertSame(Color.RED, mustang.color);
assertEquals(5.0d, mustang.engineSize, 0.0);
}
static | MustangFactory |
java | apache__flink | flink-connectors/flink-connector-files/src/main/java/org/apache/flink/connector/file/table/stream/compact/CompactMessages.java | {
"start": 4520,
"end": 4988
} | class ____ implements Serializable {
private static final long serialVersionUID = 1L;
private final Map<String, List<Path>> compactedFiles;
public CompactOutput(Map<String, List<Path>> compactedFiles) {
this.compactedFiles = compactedFiles;
}
public Map<String, List<Path>> getCompactedFiles() {
return compactedFiles;
}
}
/** A flag to end compaction. */
public static | CompactOutput |
java | apache__maven | api/maven-api-core/src/main/java/org/apache/maven/api/services/ArtifactCoordinatesFactory.java | {
"start": 1224,
"end": 3461
} | interface ____ extends Service {
/**
* Creates artifact coordinates.
*
* @param request the request holding coordinates creation parameters
* @return an {@code ArtifactCoordinates}, never {@code null}
* @throws IllegalArgumentException if {@code request} is null or {@code request.session} is null or invalid
*/
@Nonnull
ArtifactCoordinates create(@Nonnull ArtifactCoordinatesFactoryRequest request);
/**
* Creates coordinates out of string that is formatted like:
* {@code <groupId>:<artifactId>[:<extension>[:<classifier>]]:<version>}
*
* @param session the session.
* @param coordinatesString the string having "standard" coordinates.
* @return an {@code ArtifactCoordinates}, never {@code null}
* @throws IllegalArgumentException if {@code session} is null or invalid
*/
@Nonnull
default ArtifactCoordinates create(@Nonnull Session session, @Nonnull String coordinatesString) {
return create(ArtifactCoordinatesFactoryRequest.build(session, coordinatesString));
}
@Nonnull
default ArtifactCoordinates create(
@Nonnull Session session, String groupId, String artifactId, String version, String extension) {
return create(ArtifactCoordinatesFactoryRequest.build(session, groupId, artifactId, version, extension));
}
@Nonnull
default ArtifactCoordinates create(
@Nonnull Session session,
String groupId,
String artifactId,
String version,
String classifier,
String extension,
String type) {
return create(ArtifactCoordinatesFactoryRequest.build(
session, groupId, artifactId, version, classifier, extension, type));
}
@Nonnull
default ArtifactCoordinates create(@Nonnull Session session, Artifact artifact) {
return create(ArtifactCoordinatesFactoryRequest.build(
session,
artifact.getGroupId(),
artifact.getArtifactId(),
artifact.getVersion().toString(),
artifact.getClassifier(),
artifact.getExtension(),
null));
}
}
| ArtifactCoordinatesFactory |
java | google__guice | extensions/persist/src/com/google/inject/persist/PersistFilter.java | {
"start": 1843,
"end": 2445
} | class ____ extends ServletModule {
* public void configureServlets() {
* filter("/*").through(PersistFilter.class);
*
* serve("/index.html").with(MyHtmlServlet.class);
* // Etc.
* }
* }
* }</pre>
*
* <p>This filter is thread safe and allows you to create injectors concurrently and deploy multiple
* guice-persist modules within the same injector, or even multiple injectors with persist modules
* withing the same JVM or web app.
*
* <p>This filter requires the Guice Servlet extension.
*
* @author Dhanji R. Prasanna (dhanji@gmail.com)
*/
@Singleton
public final | MyModule |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/AllFirstDoubleByTimestampAggregator.java | {
"start": 1450,
"end": 2051
} | class ____ generated. Edit `X-AllValueByTimestampAggregator.java.st` instead.
*/
@Aggregator(
{
@IntermediateState(name = "timestamps", type = "LONG"),
@IntermediateState(name = "values", type = "DOUBLE"),
@IntermediateState(name = "seen", type = "BOOLEAN"),
@IntermediateState(name = "hasValue", type = "BOOLEAN") }
)
@GroupingAggregator(
{
@IntermediateState(name = "timestamps", type = "LONG_BLOCK"),
@IntermediateState(name = "values", type = "DOUBLE_BLOCK"),
@IntermediateState(name = "hasValues", type = "BOOLEAN_BLOCK") }
)
public | is |
java | apache__rocketmq | proxy/src/test/java/org/apache/rocketmq/proxy/service/receipt/DefaultReceiptHandleManagerTest.java | {
"start": 3065,
"end": 26051
} | class ____ extends BaseServiceTest {
private DefaultReceiptHandleManager receiptHandleManager;
@Mock
protected MessagingProcessor messagingProcessor;
@Mock
protected MetadataService metadataService;
@Mock
protected ConsumerManager consumerManager;
private static final ProxyContext PROXY_CONTEXT = ProxyContext.create();
private static final String GROUP = "group";
private static final String TOPIC = "topic";
private static final String BROKER_NAME = "broker";
private static final int QUEUE_ID = 1;
private static final String MESSAGE_ID = "messageId";
private static final long OFFSET = 123L;
private static final long INVISIBLE_TIME = 60000L;
private static final int RECONSUME_TIMES = 1;
private static final String MSG_ID = MessageClientIDSetter.createUniqID();
private MessageReceiptHandle messageReceiptHandle;
private String receiptHandle;
@Before
public void setup() {
receiptHandleManager = new DefaultReceiptHandleManager(metadataService, consumerManager, new StateEventListener<RenewEvent>() {
@Override
public void fireEvent(RenewEvent event) {
MessageReceiptHandle messageReceiptHandle = event.getMessageReceiptHandle();
ReceiptHandle handle = ReceiptHandle.decode(messageReceiptHandle.getReceiptHandleStr());
messagingProcessor.changeInvisibleTime(PROXY_CONTEXT, handle, messageReceiptHandle.getMessageId(),
messageReceiptHandle.getGroup(), messageReceiptHandle.getTopic(), event.getRenewTime())
.whenComplete((v, t) -> {
if (t != null) {
event.getFuture().completeExceptionally(t);
return;
}
event.getFuture().complete(v);
});
}
});
ProxyConfig config = ConfigurationManager.getProxyConfig();
receiptHandle = ReceiptHandle.builder()
.startOffset(0L)
.retrieveTime(System.currentTimeMillis() - INVISIBLE_TIME + config.getRenewAheadTimeMillis() - 5)
.invisibleTime(INVISIBLE_TIME)
.reviveQueueId(1)
.topicType(ReceiptHandle.NORMAL_TOPIC)
.brokerName(BROKER_NAME)
.queueId(QUEUE_ID)
.offset(OFFSET)
.commitLogOffset(0L)
.build().encode();
PROXY_CONTEXT.withVal(ContextVariable.CLIENT_ID, "channel-id");
PROXY_CONTEXT.withVal(ContextVariable.CHANNEL, new LocalChannel());
Mockito.doNothing().when(consumerManager).appendConsumerIdsChangeListener(Mockito.any(ConsumerIdsChangeListener.class));
messageReceiptHandle = new MessageReceiptHandle(GROUP, TOPIC, QUEUE_ID, receiptHandle, MESSAGE_ID, OFFSET,
RECONSUME_TIMES);
}
@Test
public void testAddReceiptHandle() {
Channel channel = new LocalChannel();
receiptHandleManager.addReceiptHandle(PROXY_CONTEXT, channel, GROUP, MSG_ID, messageReceiptHandle);
Mockito.when(metadataService.getSubscriptionGroupConfig(Mockito.any(), Mockito.eq(GROUP))).thenReturn(new SubscriptionGroupConfig());
Mockito.when(consumerManager.findChannel(Mockito.eq(GROUP), Mockito.eq(channel))).thenReturn(Mockito.mock(ClientChannelInfo.class));
receiptHandleManager.scheduleRenewTask();
Mockito.verify(messagingProcessor, Mockito.timeout(1000).times(1))
.changeInvisibleTime(Mockito.any(ProxyContext.class), Mockito.any(ReceiptHandle.class), Mockito.eq(MESSAGE_ID),
Mockito.eq(GROUP), Mockito.eq(TOPIC), Mockito.eq(ConfigurationManager.getProxyConfig().getDefaultInvisibleTimeMills()));
}
@Test
public void testAddDuplicationMessage() {
ProxyConfig config = ConfigurationManager.getProxyConfig();
Channel channel = PROXY_CONTEXT.getVal(ContextVariable.CHANNEL);
{
String receiptHandle = ReceiptHandle.builder()
.startOffset(0L)
.retrieveTime(System.currentTimeMillis() - INVISIBLE_TIME + config.getRenewAheadTimeMillis() - 1000)
.invisibleTime(INVISIBLE_TIME)
.reviveQueueId(1)
.topicType(ReceiptHandle.NORMAL_TOPIC)
.brokerName(BROKER_NAME)
.queueId(QUEUE_ID)
.offset(OFFSET)
.commitLogOffset(0L)
.build().encode();
MessageReceiptHandle messageReceiptHandle = new MessageReceiptHandle(GROUP, TOPIC, QUEUE_ID, receiptHandle, MESSAGE_ID, OFFSET,
RECONSUME_TIMES);
receiptHandleManager.addReceiptHandle(PROXY_CONTEXT, channel, GROUP, MSG_ID, messageReceiptHandle);
}
receiptHandleManager.addReceiptHandle(PROXY_CONTEXT, channel, GROUP, MSG_ID, messageReceiptHandle);
Mockito.when(metadataService.getSubscriptionGroupConfig(Mockito.any(), Mockito.eq(GROUP))).thenReturn(new SubscriptionGroupConfig());
Mockito.when(consumerManager.findChannel(Mockito.eq(GROUP), Mockito.eq(channel))).thenReturn(Mockito.mock(ClientChannelInfo.class));
receiptHandleManager.scheduleRenewTask();
ArgumentCaptor<ReceiptHandle> handleArgumentCaptor = ArgumentCaptor.forClass(ReceiptHandle.class);
Mockito.verify(messagingProcessor, Mockito.timeout(1000).times(1))
.changeInvisibleTime(Mockito.any(ProxyContext.class), handleArgumentCaptor.capture(), Mockito.eq(MESSAGE_ID),
Mockito.eq(GROUP), Mockito.eq(TOPIC), Mockito.eq(ConfigurationManager.getProxyConfig().getDefaultInvisibleTimeMills()));
assertEquals(receiptHandle, handleArgumentCaptor.getValue().encode());
}
@Test
public void testRenewReceiptHandle() {
ProxyConfig config = ConfigurationManager.getProxyConfig();
Channel channel = PROXY_CONTEXT.getVal(ContextVariable.CHANNEL);
receiptHandleManager.addReceiptHandle(PROXY_CONTEXT, channel, GROUP, MSG_ID, messageReceiptHandle);
SubscriptionGroupConfig groupConfig = new SubscriptionGroupConfig();
Mockito.when(metadataService.getSubscriptionGroupConfig(Mockito.any(), Mockito.eq(GROUP))).thenReturn(groupConfig);
Mockito.when(consumerManager.findChannel(Mockito.eq(GROUP), Mockito.eq(channel))).thenReturn(Mockito.mock(ClientChannelInfo.class));
long newInvisibleTime = 18000L;
ReceiptHandle newReceiptHandleClass = ReceiptHandle.builder()
.startOffset(0L)
.retrieveTime(System.currentTimeMillis() - newInvisibleTime + config.getRenewAheadTimeMillis() - 5)
.invisibleTime(newInvisibleTime)
.reviveQueueId(1)
.topicType(ReceiptHandle.NORMAL_TOPIC)
.brokerName(BROKER_NAME)
.queueId(QUEUE_ID)
.offset(OFFSET)
.commitLogOffset(0L)
.build();
String newReceiptHandle = newReceiptHandleClass.encode();
RetryPolicy retryPolicy = new RenewStrategyPolicy();
AtomicInteger times = new AtomicInteger(0);
AckResult ackResult = new AckResult();
ackResult.setStatus(AckStatus.OK);
ackResult.setExtraInfo(newReceiptHandle);
Mockito.when(messagingProcessor.changeInvisibleTime(Mockito.any(ProxyContext.class), Mockito.any(ReceiptHandle.class), Mockito.eq(MESSAGE_ID),
Mockito.eq(GROUP), Mockito.eq(TOPIC), Mockito.eq(retryPolicy.nextDelayDuration(times.get()))))
.thenReturn(CompletableFuture.completedFuture(ackResult));
receiptHandleManager.scheduleRenewTask();
Mockito.verify(messagingProcessor, Mockito.timeout(1000).times(1))
.changeInvisibleTime(Mockito.any(ProxyContext.class), Mockito.argThat(r -> r.getInvisibleTime() == INVISIBLE_TIME), Mockito.eq(MESSAGE_ID),
Mockito.eq(GROUP), Mockito.eq(TOPIC), Mockito.eq(retryPolicy.nextDelayDuration(times.get())));
receiptHandleManager.scheduleRenewTask();
Mockito.verify(messagingProcessor, Mockito.timeout(1000).times(1))
.changeInvisibleTime(Mockito.any(ProxyContext.class), Mockito.argThat(r -> r.getInvisibleTime() == newInvisibleTime), Mockito.eq(MESSAGE_ID),
Mockito.eq(GROUP), Mockito.eq(TOPIC), Mockito.eq(retryPolicy.nextDelayDuration(times.incrementAndGet())));
receiptHandleManager.scheduleRenewTask();
}
@Test
public void testRenewExceedMaxRenewTimes() {
Channel channel = PROXY_CONTEXT.getVal(ContextVariable.CHANNEL);
Mockito.when(consumerManager.findChannel(Mockito.eq(GROUP), Mockito.eq(channel))).thenReturn(Mockito.mock(ClientChannelInfo.class));
receiptHandleManager.addReceiptHandle(PROXY_CONTEXT, channel, GROUP, MSG_ID, messageReceiptHandle);
CompletableFuture<AckResult> ackResultFuture = new CompletableFuture<>();
ackResultFuture.completeExceptionally(new MQClientException(0, "error"));
RetryPolicy retryPolicy = new RenewStrategyPolicy();
Mockito.when(messagingProcessor.changeInvisibleTime(Mockito.any(ProxyContext.class), Mockito.any(ReceiptHandle.class), Mockito.eq(MESSAGE_ID),
Mockito.eq(GROUP), Mockito.eq(TOPIC), Mockito.eq(retryPolicy.nextDelayDuration(messageReceiptHandle.getRenewTimes()))))
.thenReturn(ackResultFuture);
await().atMost(Duration.ofSeconds(3)).until(() -> {
receiptHandleManager.scheduleRenewTask();
try {
ReceiptHandleGroup receiptHandleGroup = receiptHandleManager.receiptHandleGroupMap.values().stream().findFirst().get();
return receiptHandleGroup.isEmpty();
} catch (Exception e) {
return false;
}
});
Mockito.verify(messagingProcessor, Mockito.times(3))
.changeInvisibleTime(Mockito.any(ProxyContext.class), Mockito.any(ReceiptHandle.class), Mockito.eq(MESSAGE_ID),
Mockito.eq(GROUP), Mockito.eq(TOPIC), Mockito.eq(retryPolicy.nextDelayDuration(messageReceiptHandle.getRenewTimes())));
}
@Test
public void testRenewWithInvalidHandle() {
Channel channel = PROXY_CONTEXT.getVal(ContextVariable.CHANNEL);
Mockito.when(consumerManager.findChannel(Mockito.eq(GROUP), Mockito.eq(channel))).thenReturn(Mockito.mock(ClientChannelInfo.class));
receiptHandleManager.addReceiptHandle(PROXY_CONTEXT, channel, GROUP, MSG_ID, messageReceiptHandle);
CompletableFuture<AckResult> ackResultFuture = new CompletableFuture<>();
ackResultFuture.completeExceptionally(new ProxyException(ProxyExceptionCode.INVALID_RECEIPT_HANDLE, "error"));
Mockito.when(messagingProcessor.changeInvisibleTime(Mockito.any(ProxyContext.class), Mockito.any(ReceiptHandle.class), Mockito.eq(MESSAGE_ID),
Mockito.eq(GROUP), Mockito.eq(TOPIC), Mockito.eq(ConfigurationManager.getProxyConfig().getDefaultInvisibleTimeMills())))
.thenReturn(ackResultFuture);
await().atMost(Duration.ofSeconds(1)).until(() -> {
receiptHandleManager.scheduleRenewTask();
try {
ReceiptHandleGroup receiptHandleGroup = receiptHandleManager.receiptHandleGroupMap.values().stream().findFirst().get();
return receiptHandleGroup.isEmpty();
} catch (Exception e) {
return false;
}
});
}
@Test
public void testRenewWithErrorThenOK() {
ProxyConfig config = ConfigurationManager.getProxyConfig();
Channel channel = PROXY_CONTEXT.getVal(ContextVariable.CHANNEL);
Mockito.when(consumerManager.findChannel(Mockito.eq(GROUP), Mockito.eq(channel))).thenReturn(Mockito.mock(ClientChannelInfo.class));
receiptHandleManager.addReceiptHandle(PROXY_CONTEXT, channel, GROUP, MSG_ID, messageReceiptHandle);
AtomicInteger count = new AtomicInteger(0);
List<CompletableFuture<AckResult>> futureList = new ArrayList<>();
{
CompletableFuture<AckResult> ackResultFuture = new CompletableFuture<>();
ackResultFuture.completeExceptionally(new MQClientException(0, "error"));
futureList.add(ackResultFuture);
futureList.add(ackResultFuture);
}
{
long newInvisibleTime = 2000L;
ReceiptHandle newReceiptHandleClass = ReceiptHandle.builder()
.startOffset(0L)
.retrieveTime(System.currentTimeMillis() - newInvisibleTime + config.getRenewAheadTimeMillis() - 5)
.invisibleTime(newInvisibleTime)
.reviveQueueId(1)
.topicType(ReceiptHandle.NORMAL_TOPIC)
.brokerName(BROKER_NAME)
.queueId(QUEUE_ID)
.offset(OFFSET)
.commitLogOffset(0L)
.build();
String newReceiptHandle = newReceiptHandleClass.encode();
AckResult ackResult = new AckResult();
ackResult.setStatus(AckStatus.OK);
ackResult.setExtraInfo(newReceiptHandle);
futureList.add(CompletableFuture.completedFuture(ackResult));
}
{
CompletableFuture<AckResult> ackResultFuture = new CompletableFuture<>();
ackResultFuture.completeExceptionally(new MQClientException(0, "error"));
futureList.add(ackResultFuture);
futureList.add(ackResultFuture);
futureList.add(ackResultFuture);
futureList.add(ackResultFuture);
}
RetryPolicy retryPolicy = new RenewStrategyPolicy();
AtomicInteger times = new AtomicInteger(0);
for (int i = 0; i < 6; i++) {
Mockito.doAnswer((Answer<CompletableFuture<AckResult>>) mock -> {
return futureList.get(count.getAndIncrement());
}).when(messagingProcessor).changeInvisibleTime(Mockito.any(ProxyContext.class), Mockito.any(ReceiptHandle.class), Mockito.eq(MESSAGE_ID),
Mockito.eq(GROUP), Mockito.eq(TOPIC), Mockito.eq(retryPolicy.nextDelayDuration(times.getAndIncrement())));
}
await().pollDelay(Duration.ZERO).pollInterval(Duration.ofMillis(10)).atMost(Duration.ofSeconds(10)).until(() -> {
receiptHandleManager.scheduleRenewTask();
try {
ReceiptHandleGroup receiptHandleGroup = receiptHandleManager.receiptHandleGroupMap.values().stream().findFirst().get();
return receiptHandleGroup.isEmpty();
} catch (Exception e) {
return false;
}
});
assertEquals(6, count.get());
}
@Test
public void testRenewReceiptHandleWhenTimeout() {
long newInvisibleTime = 200L;
long maxRenewMs = ConfigurationManager.getProxyConfig().getRenewMaxTimeMillis();
String newReceiptHandle = ReceiptHandle.builder()
.startOffset(0L)
.retrieveTime(System.currentTimeMillis() - maxRenewMs)
.invisibleTime(newInvisibleTime)
.reviveQueueId(1)
.topicType(ReceiptHandle.NORMAL_TOPIC)
.brokerName(BROKER_NAME)
.queueId(QUEUE_ID)
.offset(OFFSET)
.commitLogOffset(0L)
.build().encode();
messageReceiptHandle = new MessageReceiptHandle(GROUP, TOPIC, QUEUE_ID, newReceiptHandle, MESSAGE_ID, OFFSET,
RECONSUME_TIMES);
Channel channel = PROXY_CONTEXT.getVal(ContextVariable.CHANNEL);
receiptHandleManager.addReceiptHandle(PROXY_CONTEXT, channel, GROUP, MSG_ID, messageReceiptHandle);
Mockito.when(consumerManager.findChannel(Mockito.eq(GROUP), Mockito.eq(channel))).thenReturn(Mockito.mock(ClientChannelInfo.class));
SubscriptionGroupConfig groupConfig = new SubscriptionGroupConfig();
Mockito.when(metadataService.getSubscriptionGroupConfig(Mockito.any(), Mockito.eq(GROUP))).thenReturn(groupConfig);
Mockito.when(messagingProcessor.changeInvisibleTime(Mockito.any(), Mockito.any(), Mockito.anyString(), Mockito.anyString(), Mockito.anyString(), Mockito.anyLong()))
.thenReturn(CompletableFuture.completedFuture(new AckResult()));
receiptHandleManager.scheduleRenewTask();
Mockito.verify(messagingProcessor, Mockito.timeout(1000).times(1))
.changeInvisibleTime(Mockito.any(ProxyContext.class), Mockito.any(ReceiptHandle.class), Mockito.eq(MESSAGE_ID),
Mockito.eq(GROUP), Mockito.eq(TOPIC), Mockito.eq(groupConfig.getGroupRetryPolicy().getRetryPolicy().nextDelayDuration(RECONSUME_TIMES)));
await().atMost(Duration.ofSeconds(1)).untilAsserted(() -> {
ReceiptHandleGroup receiptHandleGroup = receiptHandleManager.receiptHandleGroupMap.values().stream().findFirst().get();
assertTrue(receiptHandleGroup.isEmpty());
});
}
@Test
public void testRenewReceiptHandleWhenTimeoutWithNoSubscription() {
long newInvisibleTime = 0L;
String newReceiptHandle = ReceiptHandle.builder()
.startOffset(0L)
.retrieveTime(0)
.invisibleTime(newInvisibleTime)
.reviveQueueId(1)
.topicType(ReceiptHandle.NORMAL_TOPIC)
.brokerName(BROKER_NAME)
.queueId(QUEUE_ID)
.offset(OFFSET)
.commitLogOffset(0L)
.build().encode();
messageReceiptHandle = new MessageReceiptHandle(GROUP, TOPIC, QUEUE_ID, newReceiptHandle, MESSAGE_ID, OFFSET,
RECONSUME_TIMES);
Channel channel = PROXY_CONTEXT.getVal(ContextVariable.CHANNEL);
receiptHandleManager.addReceiptHandle(PROXY_CONTEXT, channel, GROUP, MSG_ID, messageReceiptHandle);
Mockito.when(consumerManager.findChannel(Mockito.eq(GROUP), Mockito.eq(channel))).thenReturn(Mockito.mock(ClientChannelInfo.class));
Mockito.when(metadataService.getSubscriptionGroupConfig(Mockito.any(), Mockito.eq(GROUP))).thenReturn(null);
Mockito.when(messagingProcessor.changeInvisibleTime(Mockito.any(), Mockito.any(), Mockito.anyString(), Mockito.anyString(), Mockito.anyString(), Mockito.anyLong()))
.thenReturn(CompletableFuture.completedFuture(new AckResult()));
receiptHandleManager.scheduleRenewTask();
await().atMost(Duration.ofSeconds(1)).until(() -> {
try {
ReceiptHandleGroup receiptHandleGroup = receiptHandleManager.receiptHandleGroupMap.values().stream().findFirst().get();
return receiptHandleGroup.isEmpty();
} catch (Exception e) {
return false;
}
});
Mockito.verify(messagingProcessor, Mockito.timeout(1000).times(0))
.changeInvisibleTime(Mockito.any(ProxyContext.class), Mockito.any(ReceiptHandle.class), Mockito.anyString(),
Mockito.anyString(), Mockito.anyString(), Mockito.anyLong());
}
@Test
public void testRenewReceiptHandleWhenNotArrivingTime() {
String newReceiptHandle = ReceiptHandle.builder()
.startOffset(0L)
.retrieveTime(System.currentTimeMillis())
.invisibleTime(INVISIBLE_TIME)
.reviveQueueId(1)
.topicType(ReceiptHandle.NORMAL_TOPIC)
.brokerName(BROKER_NAME)
.queueId(QUEUE_ID)
.offset(OFFSET)
.commitLogOffset(0L)
.build().encode();
messageReceiptHandle = new MessageReceiptHandle(GROUP, TOPIC, QUEUE_ID, newReceiptHandle, MESSAGE_ID, OFFSET,
RECONSUME_TIMES);
Channel channel = PROXY_CONTEXT.getVal(ContextVariable.CHANNEL);
receiptHandleManager.addReceiptHandle(PROXY_CONTEXT, channel, GROUP, MSG_ID, messageReceiptHandle);
SubscriptionGroupConfig groupConfig = new SubscriptionGroupConfig();
Mockito.when(metadataService.getSubscriptionGroupConfig(Mockito.any(), Mockito.eq(GROUP))).thenReturn(groupConfig);
Mockito.when(consumerManager.findChannel(Mockito.eq(GROUP), Mockito.eq(channel))).thenReturn(Mockito.mock(ClientChannelInfo.class));
receiptHandleManager.scheduleRenewTask();
Mockito.verify(messagingProcessor, Mockito.timeout(1000).times(0))
.changeInvisibleTime(Mockito.any(ProxyContext.class), Mockito.any(ReceiptHandle.class), Mockito.anyString(),
Mockito.anyString(), Mockito.anyString(), Mockito.anyLong());
}
@Test
public void testRemoveReceiptHandle() {
Channel channel = PROXY_CONTEXT.getVal(ContextVariable.CHANNEL);
receiptHandleManager.addReceiptHandle(PROXY_CONTEXT, channel, GROUP, MSG_ID, messageReceiptHandle);
receiptHandleManager.removeReceiptHandle(PROXY_CONTEXT, channel, GROUP, MSG_ID, receiptHandle);
SubscriptionGroupConfig groupConfig = new SubscriptionGroupConfig();
Mockito.when(metadataService.getSubscriptionGroupConfig(Mockito.any(), Mockito.eq(GROUP))).thenReturn(groupConfig);
receiptHandleManager.scheduleRenewTask();
Mockito.verify(messagingProcessor, Mockito.timeout(1000).times(0))
.changeInvisibleTime(Mockito.any(ProxyContext.class), Mockito.any(ReceiptHandle.class), Mockito.anyString(),
Mockito.anyString(), Mockito.anyString(), Mockito.anyLong());
}
@Test
public void testClearGroup() {
Channel channel = PROXY_CONTEXT.getVal(ContextVariable.CHANNEL);
receiptHandleManager.addReceiptHandle(PROXY_CONTEXT, channel, GROUP, MSG_ID, messageReceiptHandle);
receiptHandleManager.clearGroup(new ReceiptHandleGroupKey(channel, GROUP));
SubscriptionGroupConfig groupConfig = new SubscriptionGroupConfig();
Mockito.when(metadataService.getSubscriptionGroupConfig(Mockito.any(), Mockito.eq(GROUP))).thenReturn(groupConfig);
receiptHandleManager.scheduleRenewTask();
Mockito.verify(messagingProcessor, Mockito.timeout(1000).times(1))
.changeInvisibleTime(Mockito.any(ProxyContext.class), Mockito.any(ReceiptHandle.class), Mockito.eq(MESSAGE_ID),
Mockito.eq(GROUP), Mockito.eq(TOPIC), Mockito.eq(ConfigurationManager.getProxyConfig().getInvisibleTimeMillisWhenClear()));
}
@Test
public void testClientOffline() {
ArgumentCaptor<ConsumerIdsChangeListener> listenerArgumentCaptor = ArgumentCaptor.forClass(ConsumerIdsChangeListener.class);
Mockito.verify(consumerManager, Mockito.times(1)).appendConsumerIdsChangeListener(listenerArgumentCaptor.capture());
Channel channel = PROXY_CONTEXT.getVal(ContextVariable.CHANNEL);
receiptHandleManager.addReceiptHandle(PROXY_CONTEXT, channel, GROUP, MSG_ID, messageReceiptHandle);
listenerArgumentCaptor.getValue().handle(ConsumerGroupEvent.CLIENT_UNREGISTER, GROUP, new ClientChannelInfo(channel, "", LanguageCode.JAVA, 0));
assertTrue(receiptHandleManager.receiptHandleGroupMap.isEmpty());
}
} | DefaultReceiptHandleManagerTest |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/resolve/Resolve_AllColumn_Test_1.java | {
"start": 330,
"end": 883
} | class ____ extends TestCase {
public void test_resolve() throws Exception {
SchemaRepository repository = new SchemaRepository(DbType.mysql);
repository.acceptDDL("create table t_emp(emp_id bigint, name varchar(20));");
SQLStatement stmt = SQLUtils.parseSingleMysqlStatement("select 1 as tag, * from t_emp");
repository.resolve(stmt, SchemaResolveVisitor.Option.ResolveAllColumn);
assertEquals("SELECT 1 AS tag, emp_id, name\n" +
"FROM t_emp", stmt.toString());
}
}
| Resolve_AllColumn_Test_1 |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/bug/Bug_for_wangran1.java | {
"start": 1768,
"end": 2195
} | class ____ {
public Queue() {
}
private int id;
private String name;
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
}
| Queue |
java | apache__kafka | streams/test-utils/src/main/java/org/apache/kafka/streams/TopologyTestDriver.java | {
"start": 56206,
"end": 57377
} | class ____ implements Time {
private final AtomicLong timeMs;
private final AtomicLong highResTimeNs;
MockTime(final long startTimestampMs) {
this.timeMs = new AtomicLong(startTimestampMs);
this.highResTimeNs = new AtomicLong(startTimestampMs * 1000L * 1000L);
}
@Override
public long milliseconds() {
return timeMs.get();
}
@Override
public long nanoseconds() {
return highResTimeNs.get();
}
@Override
public long hiResClockMs() {
return TimeUnit.NANOSECONDS.toMillis(nanoseconds());
}
@Override
public void sleep(final long ms) {
if (ms < 0) {
throw new IllegalArgumentException("Sleep ms cannot be negative.");
}
timeMs.addAndGet(ms);
highResTimeNs.addAndGet(TimeUnit.MILLISECONDS.toNanos(ms));
}
@Override
public void waitObject(final Object obj, final Supplier<Boolean> condition, final long timeoutMs) {
throw new UnsupportedOperationException();
}
}
static | MockTime |
java | spring-projects__spring-security | config/src/main/java/org/springframework/security/config/authentication/JdbcUserServiceBeanDefinitionParser.java | {
"start": 985,
"end": 2903
} | class ____ extends AbstractUserDetailsServiceBeanDefinitionParser {
static final String ATT_DATA_SOURCE = "data-source-ref";
static final String ATT_USERS_BY_USERNAME_QUERY = "users-by-username-query";
static final String ATT_AUTHORITIES_BY_USERNAME_QUERY = "authorities-by-username-query";
static final String ATT_GROUP_AUTHORITIES_QUERY = "group-authorities-by-username-query";
static final String ATT_ROLE_PREFIX = "role-prefix";
@Override
protected String getBeanClassName(Element element) {
return "org.springframework.security.provisioning.JdbcUserDetailsManager";
}
@Override
protected void doParse(Element element, ParserContext parserContext, BeanDefinitionBuilder builder) {
String dataSource = element.getAttribute(ATT_DATA_SOURCE);
if (StringUtils.hasText(dataSource)) {
builder.addPropertyReference("dataSource", dataSource);
}
else {
parserContext.getReaderContext()
.error(ATT_DATA_SOURCE + " is required for " + Elements.JDBC_USER_SERVICE,
parserContext.extractSource(element));
}
String usersQuery = element.getAttribute(ATT_USERS_BY_USERNAME_QUERY);
String authoritiesQuery = element.getAttribute(ATT_AUTHORITIES_BY_USERNAME_QUERY);
String groupAuthoritiesQuery = element.getAttribute(ATT_GROUP_AUTHORITIES_QUERY);
String rolePrefix = element.getAttribute(ATT_ROLE_PREFIX);
if (StringUtils.hasText(rolePrefix)) {
builder.addPropertyValue("rolePrefix", rolePrefix);
}
if (StringUtils.hasText(usersQuery)) {
builder.addPropertyValue("usersByUsernameQuery", usersQuery);
}
if (StringUtils.hasText(authoritiesQuery)) {
builder.addPropertyValue("authoritiesByUsernameQuery", authoritiesQuery);
}
if (StringUtils.hasText(groupAuthoritiesQuery)) {
builder.addPropertyValue("enableGroups", Boolean.TRUE);
builder.addPropertyValue("groupAuthoritiesByUsernameQuery", groupAuthoritiesQuery);
}
}
}
| JdbcUserServiceBeanDefinitionParser |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/inlineme/InlinerTest.java | {
"start": 8807,
"end": 9094
} | class ____ {
public void doTest() {
String str = Client.<String>before();
}
}
""")
.addOutputLines(
"out/Caller.java",
"""
package com.google.foo;
public final | Caller |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/MiloClientEndpointBuilderFactory.java | {
"start": 81605,
"end": 81942
} | class ____ extends AbstractEndpointBuilder implements MiloClientEndpointBuilder, AdvancedMiloClientEndpointBuilder {
public MiloClientEndpointBuilderImpl(String path) {
super(componentName, path);
}
}
return new MiloClientEndpointBuilderImpl(path);
}
} | MiloClientEndpointBuilderImpl |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestSetUpgradeModeAction.java | {
"start": 1011,
"end": 1786
} | class ____ extends BaseRestHandler {
@Override
public List<Route> routes() {
return List.of(new Route(POST, BASE_PATH + "set_upgrade_mode"));
}
@Override
public String getName() {
return "ml_set_upgrade_mode_action";
}
@Override
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
SetUpgradeModeAction.Request request = new SetUpgradeModeAction.Request(
getMasterNodeTimeout(restRequest),
getAckTimeout(restRequest),
restRequest.paramAsBoolean("enabled", false)
);
return channel -> client.execute(SetUpgradeModeAction.INSTANCE, request, new RestToXContentListener<>(channel));
}
}
| RestSetUpgradeModeAction |
java | quarkusio__quarkus | extensions/grpc/stubs/src/main/java/io/quarkus/grpc/stubs/MultiStreamObserver.java | {
"start": 129,
"end": 608
} | class ____<T> implements StreamObserver<T> {
private final MultiEmitter<? super T> emitter;
public MultiStreamObserver(MultiEmitter<? super T> emitter) {
this.emitter = emitter;
}
@Override
public void onNext(T item) {
emitter.emit(item);
}
@Override
public void onError(Throwable failure) {
emitter.fail(failure);
}
@Override
public void onCompleted() {
emitter.complete();
}
}
| MultiStreamObserver |
java | apache__logging-log4j2 | log4j-core-test/src/test/java/org/apache/logging/log4j/core/layout/Log4j2_1482_CoreTest.java | {
"start": 1009,
"end": 1519
} | class ____ extends Log4j2_1482_Test {
@Override
protected void log(final int runNumber) {
if (runNumber == 2) {
// System.out.println("Set a breakpoint here.");
}
final Logger logger = LogManager.getLogger("auditcsvfile");
final int val1 = 9, val2 = 11, val3 = 12;
logger.info("Info Message!", val1, val2, val3);
logger.info("Info Message!", val1, val2, val3);
logger.info("Info Message!", val1, val2, val3);
}
}
| Log4j2_1482_CoreTest |
java | elastic__elasticsearch | x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/DeleteSearchApplicationAction.java | {
"start": 1452,
"end": 3594
} | class ____ extends LegacyActionRequest implements ToXContentObject {
private final String name;
public static final ParseField NAME_FIELD = new ParseField("name");
public Request(StreamInput in) throws IOException {
super(in);
this.name = in.readString();
}
public Request(String name) {
this.name = name;
}
@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = null;
if (name == null || name.isEmpty()) {
validationException = addValidationError("Name missing", validationException);
}
return validationException;
}
public String getName() {
return name;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(name);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Request that = (Request) o;
return Objects.equals(name, that.name);
}
@Override
public int hashCode() {
return Objects.hash(name);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(NAME_FIELD.getPreferredName(), name);
builder.endObject();
return builder;
}
@SuppressWarnings("unchecked")
private static final ConstructingObjectParser<Request, Void> PARSER = new ConstructingObjectParser<>(
"delete_search_application_action_request",
p -> new Request((String) p[0])
);
static {
PARSER.declareString(constructorArg(), NAME_FIELD);
}
public static Request parse(XContentParser parser) {
return PARSER.apply(parser, null);
}
}
}
| Request |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java | {
"start": 17947,
"end": 19215
} | class ____ extends RawLocalFileSystem {
protected static HashMap<String,String> owners = new HashMap<String,String>();
protected static HashMap<String,String> groups = new HashMap<String,String>();
@Override
public FSDataOutputStream create(Path p) throws IOException {
//owners.remove(p);
//groups.remove(p);
return super.create(p);
}
@Override
public void setOwner(Path p, String username, String groupname)
throws IOException {
String f = makeQualified(p).toString();
if (username != null) {
owners.put(f, username);
}
if (groupname != null) {
groups.put(f, groupname);
}
}
@Override
public FileStatus getFileStatus(Path p) throws IOException {
String f = makeQualified(p).toString();
FileStatus stat = super.getFileStatus(p);
stat.getPermission();
if (owners.containsKey(f)) {
stat.setOwner("STUB-"+owners.get(f));
} else {
stat.setOwner("REAL-"+stat.getOwner());
}
if (groups.containsKey(f)) {
stat.setGroup("STUB-"+groups.get(f));
} else {
stat.setGroup("REAL-"+stat.getGroup());
}
return stat;
}
}
static | RawLocalFileSystemExtn |
java | apache__avro | lang/java/ipc/src/test/java/org/apache/avro/compiler/specific/TestSpecificCompiler.java | {
"start": 5476,
"end": 29164
} | class ____$ extends"));
assertTrue(contents.contains("volatile$ short$;"));
assertCompilesWithJavaCompiler(new File(INPUT_DIR, testInfo.getTestMethod().get().getName()), outputs);
}
@Test
void manglingForEnums(TestInfo testInfo) throws IOException {
String enumSchema = "" + "{ \"name\": \"instanceof\", \"type\": \"enum\","
+ " \"symbols\": [\"new\", \"super\", \"switch\"] }";
Collection<OutputFile> outputs = new SpecificCompiler(new Schema.Parser().parse(enumSchema)).compile();
assertEquals(1, outputs.size());
String contents = outputs.iterator().next().contents;
assertTrue(contents.contains("new$"));
assertCompilesWithJavaCompiler(new File(INPUT_DIR, testInfo.getTestMethod().get().getName()), outputs);
}
@Test
void schemaSplit(TestInfo testInfo) throws IOException {
SpecificCompiler compiler = new SpecificCompiler(new Schema.Parser().parse(SCHEMA));
compiler.maxStringChars = 10;
Collection<OutputFile> files = compiler.compile();
assertCompilesWithJavaCompiler(new File(INPUT_DIR, testInfo.getTestMethod().get().getName()), files);
}
@Test
void protocolSplit(TestInfo testInfo) throws IOException {
SpecificCompiler compiler = new SpecificCompiler(Protocol.parse(PROTOCOL));
compiler.maxStringChars = 10;
Collection<OutputFile> files = compiler.compile();
assertCompilesWithJavaCompiler(new File(INPUT_DIR, testInfo.getTestMethod().get().getName()), files);
}
@Test
void schemaWithDocs() {
Collection<OutputFile> outputs = new SpecificCompiler(new Schema.Parser().parse(TestSchema.SCHEMA_WITH_DOC_TAGS))
.compile();
assertEquals(3, outputs.size());
int count = 0;
for (OutputFile o : outputs) {
if (o.path.endsWith("outer_record.java")) {
count++;
assertTrue(o.contents.contains("/** This is not a world record. */"));
assertTrue(o.contents.contains("/** Inner Fixed */"));
assertTrue(o.contents.contains("/** Inner Enum */"));
assertTrue(o.contents.contains("/** Inner String */"));
}
if (o.path.endsWith("very_inner_fixed.java")) {
count++;
assertTrue(o.contents.contains("/** Very Inner Fixed */"));
assertTrue(o.contents.contains("@org.apache.avro.specific.FixedSize(1)"));
}
if (o.path.endsWith("very_inner_enum.java")) {
count++;
assertTrue(o.contents.contains("/** Very Inner Enum */"));
}
}
assertEquals(3, count);
}
@Test
void protocolWithDocs() throws IOException {
Protocol protocol = TestProtocolParsing.getSimpleProtocol();
Collection<OutputFile> out = new SpecificCompiler(protocol).compile();
assertEquals(6, out.size());
int count = 0;
for (OutputFile o : out) {
if (o.path.endsWith("Simple.java")) {
count++;
assertTrue(o.contents.contains("/** Protocol used for testing. */"));
assertTrue(o.contents.contains("* Send a greeting"));
}
}
assertEquals(1, count, "Missed generated protocol!");
}
@Test
void needCompile() throws IOException, InterruptedException {
String schema = "" + "{ \"name\": \"Foo\", \"type\": \"record\", "
+ " \"fields\": [ {\"name\": \"package\", \"type\": \"string\" },"
+ " {\"name\": \"short\", \"type\": \"Foo\" } ] }";
File inputFile = new File(INPUT_DIR.getPath(), "input.avsc");
try (FileWriter fw = new FileWriter(inputFile)) {
fw.write(schema);
}
File outputDir = OUTPUT_DIR;
File outputFile = new File(outputDir, "Foo.java");
outputFile.delete();
assertFalse(outputFile.exists());
outputDir.delete();
assertFalse(outputDir.exists());
SpecificCompiler.compileSchema(inputFile, outputDir);
assertTrue(outputDir.exists());
assertTrue(outputFile.exists());
long lastModified = outputFile.lastModified();
Thread.sleep(1000); // granularity of JVM doesn't seem to go below 1 sec
SpecificCompiler.compileSchema(inputFile, outputDir);
assertEquals(lastModified, outputFile.lastModified());
try (FileWriter fw = new FileWriter(inputFile)) {
fw.write(schema);
}
SpecificCompiler.compileSchema(inputFile, outputDir);
assertTrue(lastModified != outputFile.lastModified());
}
/**
* Creates a record with the given name, error status, and fields.
*
* @param name the name of the schema.
* @param isError true if the schema represents an error; false otherwise.
* @param fields the field(s) to add to the schema.
* @return the schema.
*/
private Schema createRecord(String name, boolean isError, Field... fields) {
Schema record = Schema.createRecord(name, null, null, isError);
record.setFields(Arrays.asList(fields));
return record;
}
@Test
void generateGetMethod() {
Field height = new Field("height", Schema.create(Type.INT), null, null);
Field Height = new Field("Height", Schema.create(Type.INT), null, null);
Field height_and_width = new Field("height_and_width", Schema.create(Type.STRING), null, null);
Field message = new Field("message", Schema.create(Type.STRING), null, null);
Field Message = new Field("Message", Schema.create(Type.STRING), null, null);
Field cause = new Field("cause", Schema.create(Type.STRING), null, null);
Field clasz = new Field("class", Schema.create(Type.STRING), null, null);
Field schema = new Field("schema", Schema.create(Type.STRING), null, null);
Field Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
assertEquals("getHeight", SpecificCompiler.generateGetMethod(createRecord("test", false, height), height));
assertEquals("getHeightAndWidth",
SpecificCompiler.generateGetMethod(createRecord("test", false, height_and_width), height_and_width));
assertEquals("getMessage", SpecificCompiler.generateGetMethod(createRecord("test", false, message), message));
message = new Field("message", Schema.create(Type.STRING), null, null);
assertEquals("getMessage$", SpecificCompiler.generateGetMethod(createRecord("test", true, message), message));
assertEquals("getCause", SpecificCompiler.generateGetMethod(createRecord("test", false, cause), cause));
cause = new Field("cause", Schema.create(Type.STRING), null, null);
assertEquals("getCause$", SpecificCompiler.generateGetMethod(createRecord("test", true, cause), cause));
assertEquals("getClass$", SpecificCompiler.generateGetMethod(createRecord("test", false, clasz), clasz));
clasz = new Field("class", Schema.create(Type.STRING), null, null);
assertEquals("getClass$", SpecificCompiler.generateGetMethod(createRecord("test", true, clasz), clasz));
assertEquals("getSchema$", SpecificCompiler.generateGetMethod(createRecord("test", false, schema), schema));
schema = new Field("schema", Schema.create(Type.STRING), null, null);
assertEquals("getSchema$", SpecificCompiler.generateGetMethod(createRecord("test", true, schema), schema));
height = new Field("height", Schema.create(Type.INT), null, null);
Height = new Field("Height", Schema.create(Type.INT), null, null);
assertEquals("getHeight", SpecificCompiler.generateGetMethod(createRecord("test", false, Height), Height));
height = new Field("height", Schema.create(Type.INT), null, null);
Height = new Field("Height", Schema.create(Type.INT), null, null);
assertEquals("getHeight", SpecificCompiler.generateGetMethod(createRecord("test", false, height, Height), height));
height = new Field("height", Schema.create(Type.INT), null, null);
Height = new Field("Height", Schema.create(Type.INT), null, null);
assertEquals("getHeight$0",
SpecificCompiler.generateGetMethod(createRecord("test", false, height, Height), Height));
message = new Field("message", Schema.create(Type.STRING), null, null);
Message = new Field("Message", Schema.create(Type.STRING), null, null);
assertEquals("getMessage$", SpecificCompiler.generateGetMethod(createRecord("test", true, Message), Message));
message = new Field("message", Schema.create(Type.STRING), null, null);
Message = new Field("Message", Schema.create(Type.STRING), null, null);
assertEquals("getMessage$",
SpecificCompiler.generateGetMethod(createRecord("test", true, message, Message), message));
message = new Field("message", Schema.create(Type.STRING), null, null);
Message = new Field("Message", Schema.create(Type.STRING), null, null);
assertEquals("getMessage$0",
SpecificCompiler.generateGetMethod(createRecord("test", true, message, Message), Message));
schema = new Field("schema", Schema.create(Type.STRING), null, null);
Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
assertEquals("getSchema$", SpecificCompiler.generateGetMethod(createRecord("test", false, Schema$), Schema$));
schema = new Field("schema", Schema.create(Type.STRING), null, null);
Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
assertEquals("getSchema$",
SpecificCompiler.generateGetMethod(createRecord("test", false, schema, Schema$), schema));
schema = new Field("schema", Schema.create(Type.STRING), null, null);
Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
assertEquals("getSchema$0",
SpecificCompiler.generateGetMethod(createRecord("test", false, schema, Schema$), Schema$));
}
@Test
void generateSetMethod() {
Field height = new Field("height", Schema.create(Type.INT), null, null);
Field Height = new Field("Height", Schema.create(Type.INT), null, null);
Field height_and_width = new Field("height_and_width", Schema.create(Type.STRING), null, null);
Field message = new Field("message", Schema.create(Type.STRING), null, null);
Field Message = new Field("Message", Schema.create(Type.STRING), null, null);
Field cause = new Field("cause", Schema.create(Type.STRING), null, null);
Field clasz = new Field("class", Schema.create(Type.STRING), null, null);
Field schema = new Field("schema", Schema.create(Type.STRING), null, null);
Field Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
assertEquals("setHeight", SpecificCompiler.generateSetMethod(createRecord("test", false, height), height));
assertEquals("setHeightAndWidth",
SpecificCompiler.generateSetMethod(createRecord("test", false, height_and_width), height_and_width));
assertEquals("setMessage", SpecificCompiler.generateSetMethod(createRecord("test", false, message), message));
message = new Field("message", Schema.create(Type.STRING), null, null);
assertEquals("setMessage$", SpecificCompiler.generateSetMethod(createRecord("test", true, message), message));
assertEquals("setCause", SpecificCompiler.generateSetMethod(createRecord("test", false, cause), cause));
cause = new Field("cause", Schema.create(Type.STRING), null, null);
assertEquals("setCause$", SpecificCompiler.generateSetMethod(createRecord("test", true, cause), cause));
assertEquals("setClass$", SpecificCompiler.generateSetMethod(createRecord("test", false, clasz), clasz));
clasz = new Field("class", Schema.create(Type.STRING), null, null);
assertEquals("setClass$", SpecificCompiler.generateSetMethod(createRecord("test", true, clasz), clasz));
assertEquals("setSchema$", SpecificCompiler.generateSetMethod(createRecord("test", false, schema), schema));
schema = new Field("schema", Schema.create(Type.STRING), null, null);
assertEquals("setSchema$", SpecificCompiler.generateSetMethod(createRecord("test", true, schema), schema));
height = new Field("height", Schema.create(Type.INT), null, null);
Height = new Field("Height", Schema.create(Type.INT), null, null);
assertEquals("setHeight", SpecificCompiler.generateSetMethod(createRecord("test", false, Height), Height));
height = new Field("height", Schema.create(Type.INT), null, null);
Height = new Field("Height", Schema.create(Type.INT), null, null);
assertEquals("setHeight", SpecificCompiler.generateSetMethod(createRecord("test", false, height, Height), height));
height = new Field("height", Schema.create(Type.INT), null, null);
Height = new Field("Height", Schema.create(Type.INT), null, null);
assertEquals("setHeight$0",
SpecificCompiler.generateSetMethod(createRecord("test", false, height, Height), Height));
message = new Field("message", Schema.create(Type.STRING), null, null);
Message = new Field("Message", Schema.create(Type.STRING), null, null);
assertEquals("setMessage$", SpecificCompiler.generateSetMethod(createRecord("test", true, Message), Message));
message = new Field("message", Schema.create(Type.STRING), null, null);
Message = new Field("Message", Schema.create(Type.STRING), null, null);
assertEquals("setMessage$",
SpecificCompiler.generateSetMethod(createRecord("test", true, message, Message), message));
message = new Field("message", Schema.create(Type.STRING), null, null);
Message = new Field("Message", Schema.create(Type.STRING), null, null);
assertEquals("setMessage$0",
SpecificCompiler.generateSetMethod(createRecord("test", true, message, Message), Message));
schema = new Field("schema", Schema.create(Type.STRING), null, null);
Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
assertEquals("setSchema$", SpecificCompiler.generateSetMethod(createRecord("test", false, Schema$), Schema$));
schema = new Field("schema", Schema.create(Type.STRING), null, null);
Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
assertEquals("setSchema$",
SpecificCompiler.generateSetMethod(createRecord("test", false, schema, Schema$), schema));
schema = new Field("schema", Schema.create(Type.STRING), null, null);
Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
assertEquals("setSchema$0",
SpecificCompiler.generateSetMethod(createRecord("test", false, schema, Schema$), Schema$));
}
@Test
void generateHasMethod() {
Field height = new Field("height", Schema.create(Type.INT), null, null);
Field Height = new Field("Height", Schema.create(Type.INT), null, null);
Field height_and_width = new Field("height_and_width", Schema.create(Type.STRING), null, null);
Field message = new Field("message", Schema.create(Type.STRING), null, null);
Field Message = new Field("Message", Schema.create(Type.STRING), null, null);
Field cause = new Field("cause", Schema.create(Type.STRING), null, null);
Field clasz = new Field("class", Schema.create(Type.STRING), null, null);
Field schema = new Field("schema", Schema.create(Type.STRING), null, null);
Field Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
assertEquals("hasHeight", SpecificCompiler.generateHasMethod(createRecord("test", false, height), height));
assertEquals("hasHeightAndWidth",
SpecificCompiler.generateHasMethod(createRecord("test", false, height_and_width), height_and_width));
assertEquals("hasMessage", SpecificCompiler.generateHasMethod(createRecord("test", false, message), message));
message = new Field("message", Schema.create(Type.STRING), null, null);
assertEquals("hasMessage$", SpecificCompiler.generateHasMethod(createRecord("test", true, message), message));
assertEquals("hasCause", SpecificCompiler.generateHasMethod(createRecord("test", false, cause), cause));
cause = new Field("cause", Schema.create(Type.STRING), null, null);
assertEquals("hasCause$", SpecificCompiler.generateHasMethod(createRecord("test", true, cause), cause));
assertEquals("hasClass$", SpecificCompiler.generateHasMethod(createRecord("test", false, clasz), clasz));
clasz = new Field("class", Schema.create(Type.STRING), null, null);
assertEquals("hasClass$", SpecificCompiler.generateHasMethod(createRecord("test", true, clasz), clasz));
assertEquals("hasSchema$", SpecificCompiler.generateHasMethod(createRecord("test", false, schema), schema));
schema = new Field("schema", Schema.create(Type.STRING), null, null);
assertEquals("hasSchema$", SpecificCompiler.generateHasMethod(createRecord("test", true, schema), schema));
height = new Field("height", Schema.create(Type.INT), null, null);
Height = new Field("Height", Schema.create(Type.INT), null, null);
assertEquals("hasHeight", SpecificCompiler.generateHasMethod(createRecord("test", false, Height), Height));
height = new Field("height", Schema.create(Type.INT), null, null);
Height = new Field("Height", Schema.create(Type.INT), null, null);
assertEquals("hasHeight", SpecificCompiler.generateHasMethod(createRecord("test", false, height, Height), height));
height = new Field("height", Schema.create(Type.INT), null, null);
Height = new Field("Height", Schema.create(Type.INT), null, null);
assertEquals("hasHeight$0",
SpecificCompiler.generateHasMethod(createRecord("test", false, height, Height), Height));
message = new Field("message", Schema.create(Type.STRING), null, null);
Message = new Field("Message", Schema.create(Type.STRING), null, null);
assertEquals("hasMessage$", SpecificCompiler.generateHasMethod(createRecord("test", true, Message), Message));
message = new Field("message", Schema.create(Type.STRING), null, null);
Message = new Field("Message", Schema.create(Type.STRING), null, null);
assertEquals("hasMessage$",
SpecificCompiler.generateHasMethod(createRecord("test", true, message, Message), message));
message = new Field("message", Schema.create(Type.STRING), null, null);
Message = new Field("Message", Schema.create(Type.STRING), null, null);
assertEquals("hasMessage$0",
SpecificCompiler.generateHasMethod(createRecord("test", true, message, Message), Message));
schema = new Field("schema", Schema.create(Type.STRING), null, null);
Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
assertEquals("hasSchema$", SpecificCompiler.generateHasMethod(createRecord("test", false, Schema$), Schema$));
schema = new Field("schema", Schema.create(Type.STRING), null, null);
Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
assertEquals("hasSchema$",
SpecificCompiler.generateHasMethod(createRecord("test", false, schema, Schema$), schema));
schema = new Field("schema", Schema.create(Type.STRING), null, null);
Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
assertEquals("hasSchema$0",
SpecificCompiler.generateHasMethod(createRecord("test", false, schema, Schema$), Schema$));
}
@Test
void generateClearMethod() {
Field height = new Field("height", Schema.create(Type.INT), null, null);
Field Height = new Field("Height", Schema.create(Type.INT), null, null);
Field height_and_width = new Field("height_and_width", Schema.create(Type.STRING), null, null);
Field message = new Field("message", Schema.create(Type.STRING), null, null);
Field Message = new Field("Message", Schema.create(Type.STRING), null, null);
Field cause = new Field("cause", Schema.create(Type.STRING), null, null);
Field clasz = new Field("class", Schema.create(Type.STRING), null, null);
Field schema = new Field("schema", Schema.create(Type.STRING), null, null);
Field Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
assertEquals("clearHeight", SpecificCompiler.generateClearMethod(createRecord("test", false, height), height));
assertEquals("clearHeightAndWidth",
SpecificCompiler.generateClearMethod(createRecord("test", false, height_and_width), height_and_width));
assertEquals("clearMessage", SpecificCompiler.generateClearMethod(createRecord("test", false, message), message));
message = new Field("message", Schema.create(Type.STRING), null, null);
assertEquals("clearMessage$", SpecificCompiler.generateClearMethod(createRecord("test", true, message), message));
assertEquals("clearCause", SpecificCompiler.generateClearMethod(createRecord("test", false, cause), cause));
cause = new Field("cause", Schema.create(Type.STRING), null, null);
assertEquals("clearCause$", SpecificCompiler.generateClearMethod(createRecord("test", true, cause), cause));
assertEquals("clearClass$", SpecificCompiler.generateClearMethod(createRecord("test", false, clasz), clasz));
clasz = new Field("class", Schema.create(Type.STRING), null, null);
assertEquals("clearClass$", SpecificCompiler.generateClearMethod(createRecord("test", true, clasz), clasz));
assertEquals("clearSchema$", SpecificCompiler.generateClearMethod(createRecord("test", false, schema), schema));
schema = new Field("schema", Schema.create(Type.STRING), null, null);
assertEquals("clearSchema$", SpecificCompiler.generateClearMethod(createRecord("test", true, schema), schema));
height = new Field("height", Schema.create(Type.INT), null, null);
Height = new Field("Height", Schema.create(Type.INT), null, null);
assertEquals("clearHeight", SpecificCompiler.generateClearMethod(createRecord("test", false, Height), Height));
height = new Field("height", Schema.create(Type.INT), null, null);
Height = new Field("Height", Schema.create(Type.INT), null, null);
assertEquals("clearHeight",
SpecificCompiler.generateClearMethod(createRecord("test", false, height, Height), height));
height = new Field("height", Schema.create(Type.INT), null, null);
Height = new Field("Height", Schema.create(Type.INT), null, null);
assertEquals("clearHeight$0",
SpecificCompiler.generateClearMethod(createRecord("test", false, height, Height), Height));
message = new Field("message", Schema.create(Type.STRING), null, null);
Message = new Field("Message", Schema.create(Type.STRING), null, null);
assertEquals("clearMessage$", SpecificCompiler.generateClearMethod(createRecord("test", true, Message), Message));
message = new Field("message", Schema.create(Type.STRING), null, null);
Message = new Field("Message", Schema.create(Type.STRING), null, null);
assertEquals("clearMessage$",
SpecificCompiler.generateClearMethod(createRecord("test", true, message, Message), message));
message = new Field("message", Schema.create(Type.STRING), null, null);
Message = new Field("Message", Schema.create(Type.STRING), null, null);
assertEquals("clearMessage$0",
SpecificCompiler.generateClearMethod(createRecord("test", true, message, Message), Message));
schema = new Field("schema", Schema.create(Type.STRING), null, null);
Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
assertEquals("clearSchema$", SpecificCompiler.generateClearMethod(createRecord("test", false, Schema$), Schema$));
schema = new Field("schema", Schema.create(Type.STRING), null, null);
Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
assertEquals("clearSchema$",
SpecificCompiler.generateClearMethod(createRecord("test", false, schema, Schema$), schema));
schema = new Field("schema", Schema.create(Type.STRING), null, null);
Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
assertEquals("clearSchema$0",
SpecificCompiler.generateClearMethod(createRecord("test", false, schema, Schema$), Schema$));
}
@Test
void annotations() throws Exception {
// an | volatile |
java | apache__camel | core/camel-api/src/main/java/org/apache/camel/support/jsse/SSLContextServerParameters.java | {
"start": 6587,
"end": 7691
} | class ____ no bearing on {@code SSLSocketFactory} instances and therefore provides no configurers for that
* purpose.
*/
@Override
protected List<Configurer<SSLSocketFactory>> getSSLSocketFactoryConfigurers(SSLContext context) {
return Collections.emptyList();
}
@Override
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append("SSLContextServerParameters[clientAuthentication=");
builder.append(clientAuthentication);
builder.append(", getCipherSuites()=");
builder.append(getCipherSuites());
builder.append(", getCipherSuitesFilter()=");
builder.append(getCipherSuitesFilter());
builder.append(", getSecureSocketProtocols()=");
builder.append(getSecureSocketProtocols());
builder.append(", getSecureSocketProtocolsFilter()=");
builder.append(getSecureSocketProtocolsFilter());
builder.append(", getSessionTimeout()=");
builder.append(getSessionTimeout());
builder.append("]");
return builder.toString();
}
}
| has |
java | apache__kafka | clients/src/test/java/org/apache/kafka/common/requests/ApiVersionsResponseTest.java | {
"start": 2145,
"end": 13214
} | class ____ {
@ParameterizedTest
@EnumSource(ApiMessageType.ListenerType.class)
public void shouldHaveCorrectDefaultApiVersionsResponse(ApiMessageType.ListenerType scope) {
ApiVersionsResponse defaultResponse = TestUtils.defaultApiVersionsResponse(scope);
assertEquals(ApiKeys.apisForListener(scope).size(), defaultResponse.data().apiKeys().size(),
"API versions for all API keys must be maintained.");
for (ApiKeys key : ApiKeys.apisForListener(scope)) {
ApiVersion version = defaultResponse.apiVersion(key.id);
assertNotNull(version, "Could not find ApiVersion for API " + key.name);
if (key == ApiKeys.PRODUCE)
assertEquals(ApiKeys.PRODUCE_API_VERSIONS_RESPONSE_MIN_VERSION, version.minVersion(), "Incorrect min version for Api " + key.name);
else
assertEquals(key.oldestVersion(), version.minVersion(), "Incorrect min version for Api " + key.name);
assertEquals(key.latestVersion(), version.maxVersion(), "Incorrect max version for Api " + key.name);
// Check if versions less than min version are indeed set as null, i.e., removed.
for (int i = 0; i < version.minVersion(); ++i) {
assertNull(key.messageType.requestSchemas()[i],
"Request version " + i + " for API " + version.apiKey() + " must be null");
assertNull(key.messageType.responseSchemas()[i],
"Response version " + i + " for API " + version.apiKey() + " must be null");
}
// The min version returned in ApiResponse for Produce is not the actual min version, so adjust it
var minVersion = (key == ApiKeys.PRODUCE && scope == ListenerType.BROKER) ?
ApiKeys.PRODUCE.oldestVersion() : version.minVersion();
// Check if versions between min and max versions are non null, i.e., valid.
for (int i = minVersion; i <= version.maxVersion(); ++i) {
assertNotNull(key.messageType.requestSchemas()[i],
"Request version " + i + " for API " + version.apiKey() + " must not be null");
assertNotNull(key.messageType.responseSchemas()[i],
"Response version " + i + " for API " + version.apiKey() + " must not be null");
}
}
assertTrue(defaultResponse.data().supportedFeatures().isEmpty());
assertTrue(defaultResponse.data().finalizedFeatures().isEmpty());
assertEquals(ApiVersionsResponse.UNKNOWN_FINALIZED_FEATURES_EPOCH, defaultResponse.data().finalizedFeaturesEpoch());
}
@Test
public void shouldHaveCommonlyAgreedApiVersionResponseWithControllerOnForwardableAPIs() {
final ApiKeys forwardableAPIKey = ApiKeys.CREATE_ACLS;
final ApiKeys nonForwardableAPIKey = ApiKeys.JOIN_GROUP;
final short minVersion = 2;
final short maxVersion = 3;
Map<ApiKeys, ApiVersion> activeControllerApiVersions = Utils.mkMap(
Utils.mkEntry(forwardableAPIKey, new ApiVersion()
.setApiKey(forwardableAPIKey.id)
.setMinVersion(minVersion)
.setMaxVersion(maxVersion)),
Utils.mkEntry(nonForwardableAPIKey, new ApiVersion()
.setApiKey(nonForwardableAPIKey.id)
.setMinVersion(minVersion)
.setMaxVersion(maxVersion))
);
ApiVersionCollection commonResponse = ApiVersionsResponse.intersectForwardableApis(
ApiMessageType.ListenerType.BROKER,
activeControllerApiVersions,
true,
false
);
verifyVersions(forwardableAPIKey.id, minVersion, maxVersion, commonResponse);
verifyVersions(nonForwardableAPIKey.id, ApiKeys.JOIN_GROUP.oldestVersion(),
ApiKeys.JOIN_GROUP.latestVersion(), commonResponse);
}
@Test
public void shouldReturnAllKeysWhenThrottleMsIsDefaultThrottle() {
ApiVersionsResponse response = new ApiVersionsResponse.Builder().
setThrottleTimeMs(AbstractResponse.DEFAULT_THROTTLE_TIME).
setApiVersions(ApiVersionsResponse.filterApis(
ListenerType.BROKER,
true,
true)).
setSupportedFeatures(Features.emptySupportedFeatures()).
setFinalizedFeatures(Collections.emptyMap()).
setFinalizedFeaturesEpoch(ApiVersionsResponse.UNKNOWN_FINALIZED_FEATURES_EPOCH).
build();
assertEquals(new HashSet<>(ApiKeys.apisForListener(ListenerType.BROKER)), apiKeysInResponse(response));
assertEquals(AbstractResponse.DEFAULT_THROTTLE_TIME, response.throttleTimeMs());
assertTrue(response.data().supportedFeatures().isEmpty());
assertTrue(response.data().finalizedFeatures().isEmpty());
assertEquals(ApiVersionsResponse.UNKNOWN_FINALIZED_FEATURES_EPOCH, response.data().finalizedFeaturesEpoch());
}
@Test
public void shouldCreateApiResponseWithTelemetryWhenEnabled() {
ApiVersionsResponse response = new ApiVersionsResponse.Builder().
setThrottleTimeMs(10).
setApiVersions(ApiVersionsResponse.filterApis(
ListenerType.BROKER,
true,
true)).
setSupportedFeatures(Features.emptySupportedFeatures()).
setFinalizedFeatures(Collections.emptyMap()).
setFinalizedFeaturesEpoch(ApiVersionsResponse.UNKNOWN_FINALIZED_FEATURES_EPOCH).
build();
verifyApiKeysForTelemetry(response, 2);
}
@Test
public void shouldNotCreateApiResponseWithTelemetryWhenDisabled() {
ApiVersionsResponse response = new ApiVersionsResponse.Builder().
setThrottleTimeMs(10).
setApiVersions(ApiVersionsResponse.filterApis(
ListenerType.BROKER,
true,
false)).
setSupportedFeatures(Features.emptySupportedFeatures()).
setFinalizedFeatures(Collections.emptyMap()).
setFinalizedFeaturesEpoch(ApiVersionsResponse.UNKNOWN_FINALIZED_FEATURES_EPOCH).
build();
verifyApiKeysForTelemetry(response, 0);
}
@Test
public void testBrokerApisAreEnabled() {
ApiVersionsResponse response = new ApiVersionsResponse.Builder().
setThrottleTimeMs(AbstractResponse.DEFAULT_THROTTLE_TIME).
setApiVersions(ApiVersionsResponse.filterApis(
ListenerType.BROKER,
true,
true)).
setSupportedFeatures(Features.emptySupportedFeatures()).
setFinalizedFeatures(Collections.emptyMap()).
setFinalizedFeaturesEpoch(ApiVersionsResponse.UNKNOWN_FINALIZED_FEATURES_EPOCH).
build();
Set<ApiKeys> exposed = apiKeysInResponse(response);
Arrays.stream(ApiKeys.values())
.filter(key -> key.messageType.listeners().contains(ListenerType.BROKER))
.forEach(key -> assertTrue(exposed.contains(key)));
Arrays.stream(ApiKeys.values())
.filter(key -> key.messageType.listeners()
.stream().noneMatch(listener -> listener == ListenerType.BROKER))
.forEach(key -> assertFalse(exposed.contains(key)));
}
@Test
public void testIntersect() {
assertFalse(ApiVersionsResponse.intersect(null, null).isPresent());
assertThrows(IllegalArgumentException.class,
() -> ApiVersionsResponse.intersect(new ApiVersion().setApiKey((short) 10), new ApiVersion().setApiKey((short) 3)));
short min = 0;
short max = 10;
ApiVersion thisVersion = new ApiVersion()
.setApiKey(ApiKeys.FETCH.id)
.setMinVersion(min)
.setMaxVersion(Short.MAX_VALUE);
ApiVersion other = new ApiVersion()
.setApiKey(ApiKeys.FETCH.id)
.setMinVersion(Short.MIN_VALUE)
.setMaxVersion(max);
ApiVersion expected = new ApiVersion()
.setApiKey(ApiKeys.FETCH.id)
.setMinVersion(min)
.setMaxVersion(max);
assertFalse(ApiVersionsResponse.intersect(thisVersion, null).isPresent());
assertFalse(ApiVersionsResponse.intersect(null, other).isPresent());
assertEquals(expected, ApiVersionsResponse.intersect(thisVersion, other).get());
// test for symmetric
assertEquals(expected, ApiVersionsResponse.intersect(other, thisVersion).get());
}
@ParameterizedTest
@ValueSource(booleans = {false, true})
public void testAlterV0Features(boolean alterV0Features) {
Features<SupportedVersionRange> supported =
Features.supportedFeatures(Collections.singletonMap("my.feature",
new SupportedVersionRange((short) 0, (short) 1)));
ApiVersionsResponse response = new ApiVersionsResponse.Builder().
setApiVersions(ApiVersionsResponse.filterApis(
ListenerType.BROKER,
true,
true)).
setSupportedFeatures(supported).
setFinalizedFeatures(Collections.emptyMap()).
setFinalizedFeaturesEpoch(ApiVersionsResponse.UNKNOWN_FINALIZED_FEATURES_EPOCH).
setAlterFeatureLevel0(alterV0Features).
build();
if (alterV0Features) {
assertNull(response.data().supportedFeatures().find("my.feature"));
} else {
assertEquals(new SupportedFeatureKey().
setName("my.feature").
setMinVersion((short) 0).
setMaxVersion((short) 1),
response.data().supportedFeatures().find("my.feature"));
}
}
private void verifyVersions(short forwardableAPIKey,
short minVersion,
short maxVersion,
ApiVersionCollection commonResponse) {
ApiVersion expectedVersionsForForwardableAPI =
new ApiVersion()
.setApiKey(forwardableAPIKey)
.setMinVersion(minVersion)
.setMaxVersion(maxVersion);
assertEquals(expectedVersionsForForwardableAPI, commonResponse.find(forwardableAPIKey));
}
private void verifyApiKeysForTelemetry(ApiVersionsResponse response, int expectedCount) {
int count = 0;
for (ApiVersion version : response.data().apiKeys()) {
if (version.apiKey() == ApiKeys.GET_TELEMETRY_SUBSCRIPTIONS.id || version.apiKey() == ApiKeys.PUSH_TELEMETRY.id) {
count++;
}
}
assertEquals(expectedCount, count);
}
private HashSet<ApiKeys> apiKeysInResponse(ApiVersionsResponse apiVersions) {
HashSet<ApiKeys> apiKeys = new HashSet<>();
for (ApiVersion version : apiVersions.data().apiKeys()) {
apiKeys.add(ApiKeys.forId(version.apiKey()));
}
return apiKeys;
}
}
| ApiVersionsResponseTest |
java | spring-projects__spring-framework | spring-beans/src/main/java/org/springframework/beans/factory/support/AbstractAutowireCapableBeanFactory.java | {
"start": 52341,
"end": 78317
} | class ____ the bean
* @param beanName the name of the bean
* @return the candidate constructors, or {@code null} if none specified
* @throws org.springframework.beans.BeansException in case of errors
* @see org.springframework.beans.factory.config.SmartInstantiationAwareBeanPostProcessor#determineCandidateConstructors
*/
protected Constructor<?> @Nullable [] determineConstructorsFromBeanPostProcessors(@Nullable Class<?> beanClass, String beanName)
throws BeansException {
if (beanClass != null && hasInstantiationAwareBeanPostProcessors()) {
for (SmartInstantiationAwareBeanPostProcessor bp : getBeanPostProcessorCache().smartInstantiationAware) {
Constructor<?>[] ctors = bp.determineCandidateConstructors(beanClass, beanName);
if (ctors != null) {
return ctors;
}
}
}
return null;
}
/**
* Instantiate the given bean using its default constructor.
* @param beanName the name of the bean
* @param mbd the bean definition for the bean
* @return a BeanWrapper for the new instance
*/
protected BeanWrapper instantiateBean(String beanName, RootBeanDefinition mbd) {
try {
Object beanInstance = getInstantiationStrategy().instantiate(mbd, beanName, this);
BeanWrapper bw = new BeanWrapperImpl(beanInstance);
initBeanWrapper(bw);
return bw;
}
catch (Throwable ex) {
throw new BeanCreationException(mbd.getResourceDescription(), beanName, ex.getMessage(), ex);
}
}
/**
* Instantiate the bean using a named factory method. The method may be static, if the
* mbd parameter specifies a class, rather than a factoryBean, or an instance variable
* on a factory object itself configured using Dependency Injection.
* @param beanName the name of the bean
* @param mbd the bean definition for the bean
* @param explicitArgs argument values passed in programmatically via the getBean method,
* or {@code null} if none (implying the use of constructor argument values from bean definition)
* @return a BeanWrapper for the new instance
* @see #getBean(String, Object[])
*/
protected BeanWrapper instantiateUsingFactoryMethod(
String beanName, RootBeanDefinition mbd, @Nullable Object @Nullable [] explicitArgs) {
return new ConstructorResolver(this).instantiateUsingFactoryMethod(beanName, mbd, explicitArgs);
}
/**
* "autowire constructor" (with constructor arguments by type) behavior.
* Also applied if explicit constructor argument values are specified,
* matching all remaining arguments with beans from the bean factory.
* <p>This corresponds to constructor injection: In this mode, a Spring
* bean factory is able to host components that expect constructor-based
* dependency resolution.
* @param beanName the name of the bean
* @param mbd the bean definition for the bean
* @param ctors the chosen candidate constructors
* @param explicitArgs argument values passed in programmatically via the getBean method,
* or {@code null} if none (implying the use of constructor argument values from bean definition)
* @return a BeanWrapper for the new instance
*/
protected BeanWrapper autowireConstructor(
String beanName, RootBeanDefinition mbd, Constructor<?> @Nullable [] ctors, @Nullable Object @Nullable [] explicitArgs) {
return new ConstructorResolver(this).autowireConstructor(beanName, mbd, ctors, explicitArgs);
}
/**
* Populate the bean instance in the given BeanWrapper with the property values
* from the bean definition.
* @param beanName the name of the bean
* @param mbd the bean definition for the bean
* @param bw the BeanWrapper with bean instance
*/
protected void populateBean(String beanName, RootBeanDefinition mbd, @Nullable BeanWrapper bw) {
if (bw == null) {
if (mbd.hasPropertyValues()) {
throw new BeanCreationException(
mbd.getResourceDescription(), beanName, "Cannot apply property values to null instance");
}
else {
// Skip property population phase for null instance.
return;
}
}
if (bw.getWrappedClass().isRecord()) {
if (mbd.hasPropertyValues()) {
throw new BeanCreationException(
mbd.getResourceDescription(), beanName, "Cannot apply property values to a record");
}
else {
// Skip property population phase for records since they are immutable.
return;
}
}
// Give any InstantiationAwareBeanPostProcessors the opportunity to modify the
// state of the bean before properties are set. This can be used, for example,
// to support styles of field injection.
if (!mbd.isSynthetic() && hasInstantiationAwareBeanPostProcessors()) {
for (InstantiationAwareBeanPostProcessor bp : getBeanPostProcessorCache().instantiationAware) {
if (!bp.postProcessAfterInstantiation(bw.getWrappedInstance(), beanName)) {
return;
}
}
}
PropertyValues pvs = (mbd.hasPropertyValues() ? mbd.getPropertyValues() : null);
int resolvedAutowireMode = mbd.getResolvedAutowireMode();
if (resolvedAutowireMode == AUTOWIRE_BY_NAME || resolvedAutowireMode == AUTOWIRE_BY_TYPE) {
MutablePropertyValues newPvs = new MutablePropertyValues(pvs);
// Add property values based on autowire by name if applicable.
if (resolvedAutowireMode == AUTOWIRE_BY_NAME) {
autowireByName(beanName, mbd, bw, newPvs);
}
// Add property values based on autowire by type if applicable.
if (resolvedAutowireMode == AUTOWIRE_BY_TYPE) {
autowireByType(beanName, mbd, bw, newPvs);
}
pvs = newPvs;
}
if (hasInstantiationAwareBeanPostProcessors()) {
if (pvs == null) {
pvs = mbd.getPropertyValues();
}
for (InstantiationAwareBeanPostProcessor bp : getBeanPostProcessorCache().instantiationAware) {
PropertyValues pvsToUse = bp.postProcessProperties(pvs, bw.getWrappedInstance(), beanName);
if (pvsToUse == null) {
return;
}
pvs = pvsToUse;
}
}
boolean needsDepCheck = (mbd.getDependencyCheck() != AbstractBeanDefinition.DEPENDENCY_CHECK_NONE);
if (needsDepCheck) {
PropertyDescriptor[] filteredPds = filterPropertyDescriptorsForDependencyCheck(bw, mbd.allowCaching);
checkDependencies(beanName, mbd, filteredPds, pvs);
}
if (pvs != null) {
applyPropertyValues(beanName, mbd, bw, pvs);
}
}
/**
* Fill in any missing property values with references to
* other beans in this factory if autowire is set to "byName".
* @param beanName the name of the bean we're wiring up.
* Useful for debugging messages; not used functionally.
* @param mbd bean definition to update through autowiring
* @param bw the BeanWrapper from which we can obtain information about the bean
* @param pvs the PropertyValues to register wired objects with
*/
protected void autowireByName(
String beanName, AbstractBeanDefinition mbd, BeanWrapper bw, MutablePropertyValues pvs) {
String[] propertyNames = unsatisfiedNonSimpleProperties(mbd, bw);
for (String propertyName : propertyNames) {
if (containsBean(propertyName)) {
Object bean = getBean(propertyName);
pvs.add(propertyName, bean);
registerDependentBean(propertyName, beanName);
if (logger.isTraceEnabled()) {
logger.trace("Added autowiring by name from bean name '" + beanName +
"' via property '" + propertyName + "' to bean named '" + propertyName + "'");
}
}
else {
if (logger.isTraceEnabled()) {
logger.trace("Not autowiring property '" + propertyName + "' of bean '" + beanName +
"' by name: no matching bean found");
}
}
}
}
/**
* Abstract method defining "autowire by type" (bean properties by type) behavior.
* <p>This is like PicoContainer default, in which there must be exactly one bean
* of the property type in the bean factory. This makes bean factories simple to
* configure for small namespaces, but doesn't work as well as standard Spring
* behavior for bigger applications.
* @param beanName the name of the bean to autowire by type
* @param mbd the merged bean definition to update through autowiring
* @param bw the BeanWrapper from which we can obtain information about the bean
* @param pvs the PropertyValues to register wired objects with
*/
protected void autowireByType(
String beanName, AbstractBeanDefinition mbd, BeanWrapper bw, MutablePropertyValues pvs) {
TypeConverter converter = getCustomTypeConverter();
if (converter == null) {
converter = bw;
}
String[] propertyNames = unsatisfiedNonSimpleProperties(mbd, bw);
Set<String> autowiredBeanNames = new LinkedHashSet<>(propertyNames.length * 2);
for (String propertyName : propertyNames) {
try {
PropertyDescriptor pd = bw.getPropertyDescriptor(propertyName);
// Don't try autowiring by type for type Object: never makes sense,
// even if it technically is an unsatisfied, non-simple property.
if (Object.class != pd.getPropertyType()) {
MethodParameter methodParam = BeanUtils.getWriteMethodParameter(pd);
// Do not allow eager init for type matching in case of a prioritized post-processor.
boolean eager = !(bw.getWrappedInstance() instanceof PriorityOrdered);
DependencyDescriptor desc = new AutowireByTypeDependencyDescriptor(methodParam, eager);
Object autowiredArgument = resolveDependency(desc, beanName, autowiredBeanNames, converter);
if (autowiredArgument != null) {
pvs.add(propertyName, autowiredArgument);
}
for (String autowiredBeanName : autowiredBeanNames) {
registerDependentBean(autowiredBeanName, beanName);
if (logger.isTraceEnabled()) {
logger.trace("Autowiring by type from bean name '" + beanName + "' via property '" +
propertyName + "' to bean named '" + autowiredBeanName + "'");
}
}
autowiredBeanNames.clear();
}
}
catch (BeansException ex) {
throw new UnsatisfiedDependencyException(mbd.getResourceDescription(), beanName, propertyName, ex);
}
}
}
/**
* Return an array of non-simple bean properties that are unsatisfied.
* These are probably unsatisfied references to other beans in the
* factory. Does not include simple properties like primitives or Strings.
* @param mbd the merged bean definition the bean was created with
* @param bw the BeanWrapper the bean was created with
* @return an array of bean property names
* @see org.springframework.beans.BeanUtils#isSimpleProperty
*/
protected String[] unsatisfiedNonSimpleProperties(AbstractBeanDefinition mbd, BeanWrapper bw) {
Set<String> result = new TreeSet<>();
PropertyValues pvs = mbd.getPropertyValues();
PropertyDescriptor[] pds = bw.getPropertyDescriptors();
for (PropertyDescriptor pd : pds) {
if (pd.getWriteMethod() != null && !isExcludedFromDependencyCheck(pd) && !pvs.contains(pd.getName()) &&
!BeanUtils.isSimpleProperty(pd.getPropertyType())) {
result.add(pd.getName());
}
}
return StringUtils.toStringArray(result);
}
/**
* Extract a filtered set of PropertyDescriptors from the given BeanWrapper,
* excluding ignored dependency types or properties defined on ignored dependency interfaces.
* @param bw the BeanWrapper the bean was created with
* @param cache whether to cache filtered PropertyDescriptors for the given bean Class
* @return the filtered PropertyDescriptors
* @see #isExcludedFromDependencyCheck
* @see #filterPropertyDescriptorsForDependencyCheck(org.springframework.beans.BeanWrapper)
*/
protected PropertyDescriptor[] filterPropertyDescriptorsForDependencyCheck(BeanWrapper bw, boolean cache) {
PropertyDescriptor[] filtered = this.filteredPropertyDescriptorsCache.get(bw.getWrappedClass());
if (filtered == null) {
filtered = filterPropertyDescriptorsForDependencyCheck(bw);
if (cache) {
PropertyDescriptor[] existing =
this.filteredPropertyDescriptorsCache.putIfAbsent(bw.getWrappedClass(), filtered);
if (existing != null) {
filtered = existing;
}
}
}
return filtered;
}
/**
* Extract a filtered set of PropertyDescriptors from the given BeanWrapper,
* excluding ignored dependency types or properties defined on ignored dependency interfaces.
* @param bw the BeanWrapper the bean was created with
* @return the filtered PropertyDescriptors
* @see #isExcludedFromDependencyCheck
*/
protected PropertyDescriptor[] filterPropertyDescriptorsForDependencyCheck(BeanWrapper bw) {
List<PropertyDescriptor> pds = new ArrayList<>(Arrays.asList(bw.getPropertyDescriptors()));
pds.removeIf(this::isExcludedFromDependencyCheck);
return pds.toArray(new PropertyDescriptor[0]);
}
/**
* Determine whether the given bean property is excluded from dependency checks.
* <p>This implementation excludes properties defined by CGLIB and
* properties whose type matches an ignored dependency type or which
* are defined by an ignored dependency interface.
* @param pd the PropertyDescriptor of the bean property
* @return whether the bean property is excluded
* @see #ignoreDependencyType(Class)
* @see #ignoreDependencyInterface(Class)
*/
protected boolean isExcludedFromDependencyCheck(PropertyDescriptor pd) {
return (AutowireUtils.isExcludedFromDependencyCheck(pd) ||
this.ignoredDependencyTypes.contains(pd.getPropertyType()) ||
AutowireUtils.isSetterDefinedInInterface(pd, this.ignoredDependencyInterfaces));
}
/**
* Perform a dependency check that all properties exposed have been set,
* if desired. Dependency checks can be objects (collaborating beans),
* simple (primitives and String), or all (both).
* @param beanName the name of the bean
* @param mbd the merged bean definition the bean was created with
* @param pds the relevant property descriptors for the target bean
* @param pvs the property values to be applied to the bean
* @see #isExcludedFromDependencyCheck(java.beans.PropertyDescriptor)
*/
protected void checkDependencies(
String beanName, AbstractBeanDefinition mbd, PropertyDescriptor[] pds, @Nullable PropertyValues pvs)
throws UnsatisfiedDependencyException {
int dependencyCheck = mbd.getDependencyCheck();
for (PropertyDescriptor pd : pds) {
if (pd.getWriteMethod() != null && (pvs == null || !pvs.contains(pd.getName()))) {
boolean isSimple = BeanUtils.isSimpleProperty(pd.getPropertyType());
boolean unsatisfied = (dependencyCheck == AbstractBeanDefinition.DEPENDENCY_CHECK_ALL) ||
(isSimple && dependencyCheck == AbstractBeanDefinition.DEPENDENCY_CHECK_SIMPLE) ||
(!isSimple && dependencyCheck == AbstractBeanDefinition.DEPENDENCY_CHECK_OBJECTS);
if (unsatisfied) {
throw new UnsatisfiedDependencyException(mbd.getResourceDescription(), beanName, pd.getName(),
"Set this property value or disable dependency checking for this bean.");
}
}
}
}
/**
* Apply the given property values, resolving any runtime references
* to other beans in this bean factory. Must use deep copy, so we
* don't permanently modify this property.
* @param beanName the bean name passed for better exception information
* @param mbd the merged bean definition
* @param bw the BeanWrapper wrapping the target object
* @param pvs the new property values
*/
protected void applyPropertyValues(String beanName, BeanDefinition mbd, BeanWrapper bw, PropertyValues pvs) {
if (pvs.isEmpty()) {
return;
}
MutablePropertyValues mpvs = null;
List<PropertyValue> original;
if (pvs instanceof MutablePropertyValues _mpvs) {
mpvs = _mpvs;
if (mpvs.isConverted()) {
// Shortcut: use the pre-converted values as-is.
try {
bw.setPropertyValues(mpvs);
return;
}
catch (BeansException ex) {
throw new BeanCreationException(
mbd.getResourceDescription(), beanName, "Error setting property values", ex);
}
}
original = mpvs.getPropertyValueList();
}
else {
original = Arrays.asList(pvs.getPropertyValues());
}
TypeConverter converter = getCustomTypeConverter();
if (converter == null) {
converter = bw;
}
BeanDefinitionValueResolver valueResolver = new BeanDefinitionValueResolver(this, beanName, mbd, converter);
// Create a deep copy, resolving any references for values.
List<PropertyValue> deepCopy = new ArrayList<>(original.size());
boolean resolveNecessary = false;
for (PropertyValue pv : original) {
if (pv.isConverted()) {
deepCopy.add(pv);
}
else {
String propertyName = pv.getName();
Object originalValue = pv.getValue();
if (originalValue == AutowiredPropertyMarker.INSTANCE) {
Method writeMethod = bw.getPropertyDescriptor(propertyName).getWriteMethod();
if (writeMethod == null) {
throw new IllegalArgumentException("Autowire marker for property without write method: " + pv);
}
originalValue = new DependencyDescriptor(new MethodParameter(writeMethod, 0), true);
}
Object resolvedValue = valueResolver.resolveValueIfNecessary(pv, originalValue);
Object convertedValue = resolvedValue;
boolean convertible = isConvertibleProperty(propertyName, bw);
if (convertible) {
convertedValue = convertForProperty(resolvedValue, propertyName, bw, converter);
}
// Possibly store converted value in merged bean definition,
// in order to avoid re-conversion for every created bean instance.
if (resolvedValue == originalValue) {
if (convertible) {
pv.setConvertedValue(convertedValue);
}
deepCopy.add(pv);
}
else if (convertible && originalValue instanceof TypedStringValue typedStringValue &&
!typedStringValue.isDynamic() &&
!(convertedValue instanceof Collection || ObjectUtils.isArray(convertedValue))) {
pv.setConvertedValue(convertedValue);
deepCopy.add(pv);
}
else {
resolveNecessary = true;
deepCopy.add(new PropertyValue(pv, convertedValue));
}
}
}
if (mpvs != null && !resolveNecessary) {
mpvs.setConverted();
}
// Set our (possibly massaged) deep copy.
try {
bw.setPropertyValues(new MutablePropertyValues(deepCopy));
}
catch (BeansException ex) {
throw new BeanCreationException(mbd.getResourceDescription(), beanName, ex.getMessage(), ex);
}
}
/**
* Determine whether the factory should cache a converted value for the given property.
*/
private boolean isConvertibleProperty(String propertyName, BeanWrapper bw) {
try {
return !PropertyAccessorUtils.isNestedOrIndexedProperty(propertyName) &&
BeanUtils.hasUniqueWriteMethod(bw.getPropertyDescriptor(propertyName));
}
catch (InvalidPropertyException ex) {
return false;
}
}
/**
* Convert the given value for the specified target property.
*/
private @Nullable Object convertForProperty(
@Nullable Object value, String propertyName, BeanWrapper bw, TypeConverter converter) {
if (converter instanceof BeanWrapperImpl beanWrapper) {
return beanWrapper.convertForProperty(value, propertyName);
}
else {
PropertyDescriptor pd = bw.getPropertyDescriptor(propertyName);
MethodParameter methodParam = BeanUtils.getWriteMethodParameter(pd);
return converter.convertIfNecessary(value, pd.getPropertyType(), methodParam);
}
}
/**
* Initialize the given bean instance, applying factory callbacks
* as well as init methods and bean post processors.
* <p>Called from {@link #createBean} for traditionally defined beans,
* and from {@link #initializeBean} for existing bean instances.
* @param beanName the bean name in the factory (for debugging purposes)
* @param bean the new bean instance we may need to initialize
* @param mbd the bean definition that the bean was created with
* (can also be {@code null}, if given an existing bean instance)
* @return the initialized bean instance (potentially wrapped)
* @see BeanNameAware
* @see BeanClassLoaderAware
* @see BeanFactoryAware
* @see #applyBeanPostProcessorsBeforeInitialization
* @see #invokeInitMethods
* @see #applyBeanPostProcessorsAfterInitialization
*/
@SuppressWarnings("deprecation")
protected Object initializeBean(String beanName, Object bean, @Nullable RootBeanDefinition mbd) {
// Skip initialization of a NullBean
if (bean.getClass() == NullBean.class) {
return bean;
}
invokeAwareMethods(beanName, bean);
Object wrappedBean = bean;
if (mbd == null || !mbd.isSynthetic()) {
wrappedBean = applyBeanPostProcessorsBeforeInitialization(wrappedBean, beanName);
}
try {
invokeInitMethods(beanName, wrappedBean, mbd);
}
catch (Throwable ex) {
throw new BeanCreationException(
(mbd != null ? mbd.getResourceDescription() : null), beanName, ex.getMessage(), ex);
}
if (mbd == null || !mbd.isSynthetic()) {
wrappedBean = applyBeanPostProcessorsAfterInitialization(wrappedBean, beanName);
}
return wrappedBean;
}
private void invokeAwareMethods(String beanName, Object bean) {
if (bean instanceof Aware) {
if (bean instanceof BeanNameAware beanNameAware) {
beanNameAware.setBeanName(beanName);
}
if (bean instanceof BeanClassLoaderAware beanClassLoaderAware) {
ClassLoader bcl = getBeanClassLoader();
if (bcl != null) {
beanClassLoaderAware.setBeanClassLoader(bcl);
}
}
if (bean instanceof BeanFactoryAware beanFactoryAware) {
beanFactoryAware.setBeanFactory(AbstractAutowireCapableBeanFactory.this);
}
}
}
/**
* Give a bean a chance to initialize itself after all its properties are set,
* and a chance to know about its owning bean factory (this object).
* <p>This means checking whether the bean implements {@link InitializingBean}
* or defines any custom init methods, and invoking the necessary callback(s)
* if it does.
* @param beanName the bean name in the factory (for debugging purposes)
* @param bean the new bean instance we may need to initialize
* @param mbd the merged bean definition that the bean was created with
* (can also be {@code null}, if given an existing bean instance)
* @throws Throwable if thrown by init methods or by the invocation process
* @see #invokeCustomInitMethod
*/
protected void invokeInitMethods(String beanName, Object bean, @Nullable RootBeanDefinition mbd)
throws Throwable {
boolean isInitializingBean = (bean instanceof InitializingBean);
if (isInitializingBean && (mbd == null || !mbd.hasAnyExternallyManagedInitMethod("afterPropertiesSet"))) {
if (logger.isTraceEnabled()) {
logger.trace("Invoking afterPropertiesSet() on bean with name '" + beanName + "'");
}
((InitializingBean) bean).afterPropertiesSet();
}
if (mbd != null && bean.getClass() != NullBean.class) {
String[] initMethodNames = mbd.getInitMethodNames();
if (initMethodNames != null) {
for (String initMethodName : initMethodNames) {
if (StringUtils.hasLength(initMethodName) &&
!(isInitializingBean && "afterPropertiesSet".equals(initMethodName)) &&
!mbd.hasAnyExternallyManagedInitMethod(initMethodName)) {
invokeCustomInitMethod(beanName, bean, mbd, initMethodName);
}
}
}
}
}
/**
* Invoke the specified custom init method on the given bean.
* <p>Called by {@link #invokeInitMethods(String, Object, RootBeanDefinition)}.
* <p>Can be overridden in subclasses for custom resolution of init methods
* with arguments.
* @see #invokeInitMethods
*/
protected void invokeCustomInitMethod(String beanName, Object bean, RootBeanDefinition mbd, String initMethodName)
throws Throwable {
Class<?> beanClass = bean.getClass();
MethodDescriptor descriptor = MethodDescriptor.create(beanName, beanClass, initMethodName);
String methodName = descriptor.methodName();
Method initMethod = (mbd.isNonPublicAccessAllowed() ?
BeanUtils.findMethod(descriptor.declaringClass(), methodName) :
ClassUtils.getMethodIfAvailable(beanClass, methodName));
if (initMethod == null) {
if (mbd.isEnforceInitMethod()) {
throw new BeanDefinitionValidationException("Could not find an init method named '" +
methodName + "' on bean with name '" + beanName + "'");
}
else {
if (logger.isTraceEnabled()) {
logger.trace("No default init method named '" + methodName +
"' found on bean with name '" + beanName + "'");
}
// Ignore non-existent default lifecycle methods.
return;
}
}
if (logger.isTraceEnabled()) {
logger.trace("Invoking init method '" + methodName + "' on bean with name '" + beanName + "'");
}
Method methodToInvoke = ClassUtils.getPubliclyAccessibleMethodIfPossible(initMethod, beanClass);
try {
ReflectionUtils.makeAccessible(methodToInvoke);
methodToInvoke.invoke(bean);
}
catch (InvocationTargetException ex) {
throw ex.getTargetException();
}
}
/**
* Applies the {@code postProcessAfterInitialization} callback of all
* registered BeanPostProcessors, giving them a chance to post-process the
* object obtained from FactoryBeans (for example, to auto-proxy them).
* @see #applyBeanPostProcessorsAfterInitialization
*/
@SuppressWarnings("deprecation")
@Override
protected Object postProcessObjectFromFactoryBean(Object object, String beanName) {
return applyBeanPostProcessorsAfterInitialization(object, beanName);
}
/**
* Overridden to clear FactoryBean instance cache as well.
*/
@Override
protected void removeSingleton(String beanName) {
super.removeSingleton(beanName);
this.factoryBeanInstanceCache.remove(beanName);
}
/**
* Overridden to clear FactoryBean instance cache as well.
*/
@Override
protected void clearSingletonCache() {
super.clearSingletonCache();
this.factoryBeanInstanceCache.clear();
}
/**
* Expose the logger to collaborating delegates.
* @since 5.0.7
*/
Log getLogger() {
return logger;
}
/**
* {@link RootBeanDefinition} subclass for {@code #createBean} calls with
* flexible selection of a Kotlin primary / single public / single non-public
* constructor candidate in addition to the default constructor.
* @see BeanUtils#getResolvableConstructor(Class)
*/
@SuppressWarnings("serial")
private static | of |
java | square__javapoet | src/main/java/com/squareup/javapoet/CodeBlock.java | {
"start": 15600,
"end": 16305
} | class ____ {
private final String delimiter;
private final Builder builder;
private boolean first = true;
CodeBlockJoiner(String delimiter, Builder builder) {
this.delimiter = delimiter;
this.builder = builder;
}
CodeBlockJoiner add(CodeBlock codeBlock) {
if (!first) {
builder.add(delimiter);
}
first = false;
builder.add(codeBlock);
return this;
}
CodeBlockJoiner merge(CodeBlockJoiner other) {
CodeBlock otherBlock = other.builder.build();
if (!otherBlock.isEmpty()) {
add(otherBlock);
}
return this;
}
CodeBlock join() {
return builder.build();
}
}
}
| CodeBlockJoiner |
java | apache__hadoop | hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/DefaultInputDemuxer.java | {
"start": 1195,
"end": 2042
} | class ____ implements InputDemuxer {
String name;
InputStream input;
@Override
public void bindTo(Path path, Configuration conf) throws IOException {
if (name != null) { // re-binding before the previous one was consumed.
close();
}
name = path.getName();
input = new PossiblyDecompressedInputStream(path, conf);
return;
}
@Override
public Pair<String, InputStream> getNext() throws IOException {
if (name != null) {
Pair<String, InputStream> ret =
new Pair<String, InputStream>(name, input);
name = null;
input = null;
return ret;
}
return null;
}
@Override
public void close() throws IOException {
try {
if (input != null) {
input.close();
}
} finally {
name = null;
input = null;
}
}
}
| DefaultInputDemuxer |
java | quarkusio__quarkus | extensions/arc/deployment/src/test/java/io/quarkus/arc/test/deployment/TestArchiveMatching.java | {
"start": 330,
"end": 1577
} | class ____ {
public static final String GROUP_ID = "io.quarkus";
public static final String ARTIFACT_ID = "test";
public static final String CLASSIFIER = "classifier";
@Test
public void testMatch() {
ArtifactKey key = GACT.fromString(GROUP_ID + ":" + ARTIFACT_ID);
ArtifactKey keyWithClassifier = GACT.fromString(GROUP_ID + ":" + ARTIFACT_ID + ":" + CLASSIFIER);
assertFalse(archiveMatches(key, GROUP_ID + ".different", Optional.empty(), Optional.empty()));
assertTrue(archiveMatches(key, GROUP_ID, Optional.empty(), Optional.empty()));
assertTrue(archiveMatches(key, GROUP_ID, Optional.of(ARTIFACT_ID), Optional.empty()));
assertFalse(archiveMatches(key, GROUP_ID, Optional.of(ARTIFACT_ID), Optional.of(CLASSIFIER)));
assertFalse(archiveMatches(key, GROUP_ID, Optional.of("test1"), Optional.empty()));
assertTrue(archiveMatches(keyWithClassifier, GROUP_ID, Optional.of(ARTIFACT_ID), Optional.of(CLASSIFIER)));
assertFalse(archiveMatches(keyWithClassifier, GROUP_ID, Optional.of("test1"), Optional.of(CLASSIFIER)));
assertFalse(archiveMatches(keyWithClassifier, GROUP_ID, Optional.of(ARTIFACT_ID), Optional.empty()));
}
}
| TestArchiveMatching |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/io/network/partition/consumer/SingleInputGateFactory.java | {
"start": 19757,
"end": 20118
} | class ____ {
int numLocalChannels;
int numRemoteChannels;
int numUnknownChannels;
@Override
public String toString() {
return String.format(
"local: %s, remote: %s, unknown: %s",
numLocalChannels, numRemoteChannels, numUnknownChannels);
}
}
}
| ChannelStatistics |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/PartitionInfo.java | {
"start": 1317,
"end": 2676
} | class ____ {
@XmlElement(name = "resourceAvailable")
private ResourceInfo resourceAvailable;
public PartitionInfo() {
}
public PartitionInfo(ResourceInfo resourceAvailable) {
this.resourceAvailable = resourceAvailable;
}
public ResourceInfo getResourceAvailable() {
return resourceAvailable;
}
/**
* This method will generate a new PartitionInfo object based on two PartitionInfo objects.
* The combination process is mainly based on the Resources. Add method.
*
* @param left left PartitionInfo Object.
* @param right right PartitionInfo Object.
* @return new PartitionInfo Object.
*/
public static PartitionInfo addTo(PartitionInfo left, PartitionInfo right) {
Resource leftResource = Resource.newInstance(0, 0);
if (left != null && left.getResourceAvailable() != null) {
ResourceInfo leftResourceInfo = left.getResourceAvailable();
leftResource = leftResourceInfo.getResource();
}
Resource rightResource = Resource.newInstance(0, 0);
if (right != null && right.getResourceAvailable() != null) {
ResourceInfo rightResourceInfo = right.getResourceAvailable();
rightResource = rightResourceInfo.getResource();
}
Resource resource = Resources.addTo(leftResource, rightResource);
return new PartitionInfo(new ResourceInfo(resource));
}
}
| PartitionInfo |
java | netty__netty | codec-classes-quic/src/main/java/io/netty/handler/codec/quic/HmacSignQuicResetTokenGenerator.java | {
"start": 1002,
"end": 1456
} | class ____ implements QuicResetTokenGenerator {
static final QuicResetTokenGenerator INSTANCE = new HmacSignQuicResetTokenGenerator();
private HmacSignQuicResetTokenGenerator() {
}
@Override
public ByteBuffer newResetToken(ByteBuffer cid) {
ObjectUtil.checkNotNull(cid, "cid");
ObjectUtil.checkPositive(cid.remaining(), "cid");
return Hmac.sign(cid, Quic.RESET_TOKEN_LEN);
}
}
| HmacSignQuicResetTokenGenerator |
java | elastic__elasticsearch | server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/AllocationFailuresResetIT.java | {
"start": 1683,
"end": 7721
} | class ____ extends ESIntegTestCase {
private static final String INDEX = "index-1";
private static final int SHARD = 0;
@Override
protected List<Class<? extends Plugin>> nodePlugins() {
return List.of(MockIndexEventListener.TestPlugin.class);
}
private void injectAllocationFailures(String node) {
internalCluster().getInstance(MockIndexEventListener.TestEventListener.class, node).setNewDelegate(new IndexEventListener() {
@Override
public void beforeIndexShardCreated(ShardRouting routing, Settings indexSettings) {
throw new RuntimeException("shard allocation failure");
}
});
}
private void removeAllocationFailuresInjection(String node) {
internalCluster().getInstance(MockIndexEventListener.TestEventListener.class, node).setNewDelegate(new IndexEventListener() {
});
}
private void awaitShardAllocMaxRetries() throws Exception {
var maxRetries = MaxRetryAllocationDecider.SETTING_ALLOCATION_MAX_RETRY.get(internalCluster().getDefaultSettings());
assertBusy(() -> {
var state = safeGet(clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).execute()).getState();
var index = state.getRoutingTable().index(INDEX);
assertNotNull(index);
var shard = index.shard(SHARD).primaryShard();
assertNotNull(shard);
var unassigned = shard.unassignedInfo();
assertNotNull(unassigned);
assertEquals(maxRetries.intValue(), unassigned.failedAllocations());
});
}
private void awaitShardAllocSucceed() throws Exception {
assertBusy(() -> {
var state = safeGet(clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).execute()).getState();
var index = state.getRoutingTable().index(INDEX);
assertNotNull(index);
var shard = index.shard(SHARD).primaryShard();
assertNotNull(shard);
assertTrue(shard.assignedToNode());
assertTrue(shard.started());
});
}
public void testResetAllocationFailuresOnNodeJoin() throws Exception {
var node1 = internalCluster().startNode();
injectAllocationFailures(node1);
prepareCreate(INDEX, indexSettings(1, 0)).execute();
awaitShardAllocMaxRetries();
removeAllocationFailuresInjection(node1);
try (var mockLog = MockLog.capture(RoutingNodes.class)) {
var shardId = internalCluster().clusterService().state().routingTable().index(INDEX).shard(SHARD).shardId();
mockLog.addExpectation(
new MockLog.SeenEventExpectation(
"log resetting failed allocations",
RoutingNodes.class.getName(),
Level.INFO,
Strings.format(RoutingNodes.RESET_FAILED_ALLOCATION_COUNTER_LOG_MSG, 1, List.of(shardId))
)
);
internalCluster().startNode();
awaitShardAllocSucceed();
mockLog.assertAllExpectationsMatched();
}
}
public void testResetRelocationFailuresOnNodeJoin() throws Exception {
String node1 = internalCluster().startNode();
createIndex(INDEX, 1, 0);
ensureGreen(INDEX);
final var failRelocation = new AtomicBoolean(true);
String node2 = internalCluster().startNode();
internalCluster().getInstance(MockIndexEventListener.TestEventListener.class, node2).setNewDelegate(new IndexEventListener() {
@Override
public void beforeIndexCreated(Index index, Settings indexSettings) {
if (failRelocation.get()) {
throw new RuntimeException("FAIL");
}
}
});
updateIndexSettings(Settings.builder().put(INDEX_ROUTING_EXCLUDE_GROUP_PREFIX + "._name", node1), INDEX);
ensureGreen(INDEX);
// await all relocation attempts are exhausted
var maxAttempts = MaxRetryAllocationDecider.SETTING_ALLOCATION_MAX_RETRY.get(Settings.EMPTY);
assertBusy(() -> {
var state = safeGet(clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).execute()).getState();
var shard = state.routingTable().index(INDEX).shard(SHARD).primaryShard();
assertThat(shard, notNullValue());
assertThat(shard.relocationFailureInfo().failedRelocations(), equalTo(maxAttempts));
});
// ensure the shard remain started
var state = safeGet(clusterAdmin().prepareState(TEST_REQUEST_TIMEOUT).execute()).getState();
var shard = state.routingTable().index(INDEX).shard(SHARD).primaryShard();
assertThat(shard, notNullValue());
assertThat(shard.state(), equalTo(ShardRoutingState.STARTED));
assertThat(state.nodes().get(shard.currentNodeId()).getName(), equalTo(node1));
failRelocation.set(false);
// A new node joining should reset the counter and allow more relocation retries
try (var mockLog = MockLog.capture(RoutingNodes.class)) {
mockLog.addExpectation(
new MockLog.SeenEventExpectation(
"log resetting failed relocations",
RoutingNodes.class.getName(),
Level.INFO,
Strings.format(RoutingNodes.RESET_FAILED_RELOCATION_COUNTER_LOG_MSG, 1, List.of(shard.shardId()))
)
);
internalCluster().startNode();
assertBusy(() -> {
var stateAfterNodeJoin = internalCluster().clusterService().state();
var relocatedShard = stateAfterNodeJoin.routingTable().index(INDEX).shard(SHARD).primaryShard();
assertThat(relocatedShard, notNullValue());
assertThat(stateAfterNodeJoin.nodes().get(relocatedShard.currentNodeId()).getName(), not(equalTo(node1)));
});
mockLog.assertAllExpectationsMatched();
}
}
}
| AllocationFailuresResetIT |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToTimeDurationTests.java | {
"start": 1642,
"end": 4074
} | class ____ extends AbstractScalarFunctionTestCase {
public ToTimeDurationTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) {
this.testCase = testCaseSupplier.get();
}
@ParametersFactory
public static Iterable<Object[]> parameters() {
List<TestCaseSupplier> suppliers = new ArrayList<>();
suppliers.add(new TestCaseSupplier(List.of(TIME_DURATION), () -> {
Duration field = (Duration) randomLiteral(TIME_DURATION).value();
return new TestCaseSupplier.TestCase(
List.of(new TestCaseSupplier.TypedData(field, TIME_DURATION, "field").forceLiteral()),
matchesPattern("LiteralsEvaluator.*"),
TIME_DURATION,
equalTo(field)
).withoutEvaluator();
}));
for (EsqlDataTypeConverter.INTERVALS interval : TIME_DURATIONS) {
for (DataType inputType : List.of(KEYWORD, TEXT)) {
suppliers.add(new TestCaseSupplier(List.of(inputType), () -> {
BytesRef field = new BytesRef(
" ".repeat(randomIntBetween(0, 10)) + (randomBoolean() ? "" : "-") + randomIntBetween(0, Integer.MAX_VALUE) + " "
.repeat(randomIntBetween(1, 10)) + interval.toString() + " ".repeat(randomIntBetween(0, 10))
);
TemporalAmount result = EsqlDataTypeConverter.parseTemporalAmount(field.utf8ToString(), TIME_DURATION);
return new TestCaseSupplier.TestCase(
List.of(new TestCaseSupplier.TypedData(field, inputType, "field").forceLiteral()),
matchesPattern("LiteralsEvaluator.*"),
TIME_DURATION,
equalTo(result)
).withoutEvaluator();
}));
}
}
return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers);
}
@Override
protected Expression build(Source source, List<Expression> args) {
return new ToTimeDuration(source, args.get(0));
}
@Override
public void testSerializationOfSimple() {
assertTrue("Serialization test does not apply", true);
}
@Override
protected Expression serializeDeserializeExpression(Expression expression) {
// Can't be serialized
return expression;
}
}
| ToTimeDurationTests |
java | apache__maven | impl/maven-core/src/main/java/org/apache/maven/lifecycle/internal/DefaultProjectArtifactFactory.java | {
"start": 1795,
"end": 6307
} | class ____ implements ProjectArtifactFactory {
private final ArtifactFactory artifactFactory;
@Inject
public DefaultProjectArtifactFactory(ArtifactFactory artifactFactory) {
this.artifactFactory = artifactFactory;
}
@Override
public Set<Artifact> createArtifacts(MavenProject project) throws InvalidDependencyVersionException {
return createArtifacts(artifactFactory, project.getDependencies(), null, null, project);
}
public static Set<Artifact> createArtifacts(
ArtifactFactory artifactFactory,
List<Dependency> dependencies,
String inheritedScope,
ArtifactFilter dependencyFilter,
MavenProject project)
throws InvalidDependencyVersionException {
Set<Artifact> artifacts = new LinkedHashSet<>();
for (Dependency d : dependencies) {
Artifact dependencyArtifact;
try {
dependencyArtifact = createDependencyArtifact(artifactFactory, d, inheritedScope, dependencyFilter);
} catch (InvalidVersionSpecificationException e) {
throw new InvalidDependencyVersionException(project.getId(), d, project.getFile(), e);
}
if (dependencyArtifact != null) {
artifacts.add(dependencyArtifact);
}
}
return artifacts;
}
private static Artifact createDependencyArtifact(
ArtifactFactory factory, Dependency dependency, String inheritedScope, ArtifactFilter inheritedFilter)
throws InvalidVersionSpecificationException {
String effectiveScope = getEffectiveScope(dependency.getScope(), inheritedScope);
if (effectiveScope == null) {
return null;
}
VersionRange versionRange = VersionRange.createFromVersionSpec(dependency.getVersion());
Artifact dependencyArtifact = factory.createDependencyArtifact(
dependency.getGroupId(),
dependency.getArtifactId(),
versionRange,
dependency.getType(),
dependency.getClassifier(),
effectiveScope,
dependency.isOptional());
if (inheritedFilter != null && !inheritedFilter.include(dependencyArtifact)) {
return null;
}
if (Artifact.SCOPE_SYSTEM.equals(effectiveScope)) {
dependencyArtifact.setFile(new File(dependency.getSystemPath()));
}
dependencyArtifact.setDependencyFilter(createDependencyFilter(dependency, inheritedFilter));
return dependencyArtifact;
}
private static String getEffectiveScope(String originalScope, String inheritedScope) {
String effectiveScope = Artifact.SCOPE_RUNTIME;
if (originalScope == null) {
originalScope = Artifact.SCOPE_COMPILE;
}
if (inheritedScope == null) {
// direct dependency retains its scope
effectiveScope = originalScope;
} else if (Artifact.SCOPE_TEST.equals(originalScope) || Artifact.SCOPE_PROVIDED.equals(originalScope)) {
// test and provided are not transitive, so exclude them
effectiveScope = null;
} else if (Artifact.SCOPE_SYSTEM.equals(originalScope)) {
// system scope come through unchanged...
effectiveScope = Artifact.SCOPE_SYSTEM;
} else if (Artifact.SCOPE_COMPILE.equals(originalScope) && Artifact.SCOPE_COMPILE.equals(inheritedScope)) {
// added to retain compile scope. Remove if you want to compile inherited as runtime
effectiveScope = Artifact.SCOPE_COMPILE;
} else if (Artifact.SCOPE_TEST.equals(inheritedScope)) {
effectiveScope = Artifact.SCOPE_TEST;
} else if (Artifact.SCOPE_PROVIDED.equals(inheritedScope)) {
effectiveScope = Artifact.SCOPE_PROVIDED;
}
return effectiveScope;
}
private static ArtifactFilter createDependencyFilter(Dependency dependency, ArtifactFilter inheritedFilter) {
ArtifactFilter effectiveFilter = inheritedFilter;
if (!dependency.getExclusions().isEmpty()) {
effectiveFilter = new ExclusionArtifactFilter(dependency.getExclusions());
if (inheritedFilter != null) {
effectiveFilter = new AndArtifactFilter(Arrays.asList(inheritedFilter, effectiveFilter));
}
}
return effectiveFilter;
}
}
| DefaultProjectArtifactFactory |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/support/GenericApplicationContextTests.java | {
"start": 31486,
"end": 31791
} | class ____ implements ProtocolResolver {
@Override
public Resource resolve(String location, ResourceLoader resourceLoader) {
if (location.startsWith("ping:")) {
return new ByteArrayResource(("pong:" + location.substring(5)).getBytes(UTF_8));
}
return null;
}
}
}
| PingPongProtocolResolver |
java | apache__flink | flink-clients/src/main/java/org/apache/flink/client/deployment/application/executors/EmbeddedExecutor.java | {
"start": 2499,
"end": 9804
} | class ____ implements PipelineExecutor {
private static final Logger LOG = LoggerFactory.getLogger(EmbeddedExecutor.class);
private final ExecutorService executorService =
Executors.newFixedThreadPool(
1, new ExecutorThreadFactory("Flink-EmbeddedClusterExecutor-IO"));
public static final String NAME = "embedded";
private final Collection<JobID> submittedJobIds;
private final DispatcherGateway dispatcherGateway;
private final EmbeddedJobClientCreator jobClientCreator;
private final List<JobStatusChangedListener> jobStatusChangedListeners;
/**
* Creates a {@link EmbeddedExecutor}.
*
* @param submittedJobIds a list that is going to be filled with the job ids of the new jobs
* that will be submitted. This is essentially used to return the submitted job ids to the
* caller.
* @param dispatcherGateway the dispatcher of the cluster which is going to be used to submit
* jobs.
* @param configuration the flink application configuration
* @param jobClientCreator the job client creator
*/
public EmbeddedExecutor(
final Collection<JobID> submittedJobIds,
final DispatcherGateway dispatcherGateway,
final Configuration configuration,
final EmbeddedJobClientCreator jobClientCreator) {
this.submittedJobIds = checkNotNull(submittedJobIds);
this.dispatcherGateway = checkNotNull(dispatcherGateway);
this.jobClientCreator = checkNotNull(jobClientCreator);
this.jobStatusChangedListeners =
JobStatusChangedListenerUtils.createJobStatusChangedListeners(
Thread.currentThread().getContextClassLoader(),
configuration,
executorService);
}
@Override
public CompletableFuture<JobClient> execute(
final Pipeline pipeline,
final Configuration configuration,
ClassLoader userCodeClassloader)
throws Exception {
checkNotNull(pipeline);
checkNotNull(configuration);
final Optional<JobID> optJobId =
configuration
.getOptional(PipelineOptionsInternal.PIPELINE_FIXED_JOB_ID)
.map(JobID::fromHexString);
if (optJobId.isPresent() && submittedJobIds.contains(optJobId.get())) {
return getJobClientFuture(optJobId.get(), userCodeClassloader);
}
return submitAndGetJobClientFuture(pipeline, configuration, userCodeClassloader);
}
private CompletableFuture<JobClient> getJobClientFuture(
final JobID jobId, final ClassLoader userCodeClassloader) {
LOG.info("Job {} was recovered successfully.", jobId);
return CompletableFuture.completedFuture(
jobClientCreator.getJobClient(jobId, userCodeClassloader));
}
private CompletableFuture<JobClient> submitAndGetJobClientFuture(
final Pipeline pipeline,
final Configuration configuration,
final ClassLoader userCodeClassloader)
throws Exception {
final Duration timeout = configuration.get(ClientOptions.CLIENT_TIMEOUT);
final StreamGraph streamGraph =
PipelineExecutorUtils.getStreamGraph(pipeline, configuration);
final JobID actualJobId = streamGraph.getJobID();
this.submittedJobIds.add(actualJobId);
LOG.info("Job {} is submitted.", actualJobId);
if (LOG.isDebugEnabled()) {
LOG.debug("Effective Configuration: {}", configuration);
}
final CompletableFuture<JobID> jobSubmissionFuture =
submitJob(configuration, dispatcherGateway, streamGraph, timeout);
return jobSubmissionFuture
.thenApplyAsync(
FunctionUtils.uncheckedFunction(
jobId -> {
org.apache.flink.client.ClientUtils
.waitUntilJobInitializationFinished(
() ->
dispatcherGateway
.requestJobStatus(
jobId, timeout)
.get(),
() ->
dispatcherGateway
.requestJobResult(
jobId, timeout)
.get(),
userCodeClassloader);
return jobId;
}))
.thenApplyAsync(
jobID -> jobClientCreator.getJobClient(actualJobId, userCodeClassloader))
.whenCompleteAsync(
(jobClient, throwable) -> {
if (throwable == null) {
PipelineExecutorUtils.notifyJobStatusListeners(
pipeline, streamGraph, jobStatusChangedListeners);
} else {
LOG.error(
"Failed to submit job graph to application cluster",
throwable);
}
});
}
private static CompletableFuture<JobID> submitJob(
final Configuration configuration,
final DispatcherGateway dispatcherGateway,
final StreamGraph streamGraph,
final Duration rpcTimeout) {
checkNotNull(streamGraph);
LOG.info("Submitting Job with JobId={}.", streamGraph.getJobID());
return dispatcherGateway
.getBlobServerPort(rpcTimeout)
.thenCombine(
dispatcherGateway.getBlobServerAddress(rpcTimeout),
(blobServerPort, blobServerAddress) ->
new InetSocketAddress(
blobServerAddress.getHostName(), blobServerPort))
.thenCompose(
blobServerAddress -> {
try {
ClientUtils.extractAndUploadExecutionPlanFiles(
streamGraph,
() -> new BlobClient(blobServerAddress, configuration));
streamGraph.serializeUserDefinedInstances();
} catch (Exception e) {
throw new CompletionException(e);
}
return dispatcherGateway.submitJob(streamGraph, rpcTimeout);
})
.thenApply(ack -> streamGraph.getJobID());
}
}
| EmbeddedExecutor |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/time/JavaPeriodGetDaysTest.java | {
"start": 5609,
"end": 6003
} | class ____ {
static {
long months = Period.ZERO.getMonths();
}
}
""")
.doTest();
}
@Test
public void getDaysInStaticBlock() {
compilationHelper
.addSourceLines(
"test/TestCase.java",
"""
package test;
import java.time.Period;
public | TestCase |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java | {
"start": 16767,
"end": 24149
} | enum ____ {
// Do not change the order of these flags we use
// the ordinal for encoding! Only append to the end!
Positions,
Offsets,
Payloads,
FieldStatistics,
TermStatistics
}
/**
* populates a request object (pre-populated with defaults) based on a parser.
*/
public static void parseRequest(TermVectorsRequest termVectorsRequest, XContentParser parser, RestApiVersion restApiVersion)
throws IOException {
XContentParser.Token token;
String currentFieldName = null;
List<String> fields = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (currentFieldName != null) {
if (FIELDS.match(currentFieldName, parser.getDeprecationHandler())) {
if (token == XContentParser.Token.START_ARRAY) {
while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
fields.add(parser.text());
}
} else {
throw new ElasticsearchParseException("failed to parse term vectors request. field [fields] must be an array");
}
} else if (OFFSETS.match(currentFieldName, parser.getDeprecationHandler())) {
termVectorsRequest.offsets(parser.booleanValue());
} else if (POSITIONS.match(currentFieldName, parser.getDeprecationHandler())) {
termVectorsRequest.positions(parser.booleanValue());
} else if (PAYLOADS.match(currentFieldName, parser.getDeprecationHandler())) {
termVectorsRequest.payloads(parser.booleanValue());
} else if (currentFieldName.equals("term_statistics") || currentFieldName.equals("termStatistics")) {
termVectorsRequest.termStatistics(parser.booleanValue());
} else if (currentFieldName.equals("field_statistics") || currentFieldName.equals("fieldStatistics")) {
termVectorsRequest.fieldStatistics(parser.booleanValue());
} else if (DFS.match(currentFieldName, parser.getDeprecationHandler())) {
throw new IllegalArgumentException("distributed frequencies is not supported anymore for term vectors");
} else if (currentFieldName.equals("per_field_analyzer") || currentFieldName.equals("perFieldAnalyzer")) {
termVectorsRequest.perFieldAnalyzer(readPerFieldAnalyzer(parser.map()));
} else if (FILTER.match(currentFieldName, parser.getDeprecationHandler())) {
termVectorsRequest.filterSettings(readFilterSettings(parser));
} else if (INDEX.match(currentFieldName, parser.getDeprecationHandler())) {
// the following is important for multi request parsing.
termVectorsRequest.index = parser.text();
} else if (ID.match(currentFieldName, parser.getDeprecationHandler())) {
if (termVectorsRequest.doc != null) {
throw new ElasticsearchParseException(
"failed to parse term vectors request. " + "either [id] or [doc] can be specified, but not both!"
);
}
termVectorsRequest.id = parser.text();
} else if (DOC.match(currentFieldName, parser.getDeprecationHandler())) {
if (termVectorsRequest.id != null) {
throw new ElasticsearchParseException(
"failed to parse term vectors request. " + "either [id] or [doc] can be specified, but not both!"
);
}
termVectorsRequest.doc(jsonBuilder().copyCurrentStructure(parser));
} else if (ROUTING.match(currentFieldName, parser.getDeprecationHandler())) {
termVectorsRequest.routing = parser.text();
} else if (VERSION.match(currentFieldName, parser.getDeprecationHandler())) {
termVectorsRequest.version = parser.longValue();
} else if (VERSION_TYPE.match(currentFieldName, parser.getDeprecationHandler())) {
termVectorsRequest.versionType = VersionType.fromString(parser.text());
} else {
throw new ElasticsearchParseException("failed to parse term vectors request. unknown field [{}]", currentFieldName);
}
}
}
if (fields.size() > 0) {
String[] fieldsAsArray = new String[fields.size()];
termVectorsRequest.selectedFields(fields.toArray(fieldsAsArray));
}
}
public static Map<String, String> readPerFieldAnalyzer(Map<String, Object> map) {
Map<String, String> mapStrStr = new HashMap<>();
for (Map.Entry<String, Object> e : map.entrySet()) {
if (e.getValue() instanceof String) {
mapStrStr.put(e.getKey(), (String) e.getValue());
} else {
throw new ElasticsearchParseException(
"expecting the analyzer at [{}] to be a String, but found [{}] instead",
e.getKey(),
e.getValue().getClass()
);
}
}
return mapStrStr;
}
private static FilterSettings readFilterSettings(XContentParser parser) throws IOException {
FilterSettings settings = new FilterSettings();
XContentParser.Token token;
String currentFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (currentFieldName != null) {
if (currentFieldName.equals("max_num_terms")) {
settings.maxNumTerms = parser.intValue();
} else if (currentFieldName.equals("min_term_freq")) {
settings.minTermFreq = parser.intValue();
} else if (currentFieldName.equals("max_term_freq")) {
settings.maxTermFreq = parser.intValue();
} else if (currentFieldName.equals("min_doc_freq")) {
settings.minDocFreq = parser.intValue();
} else if (currentFieldName.equals("max_doc_freq")) {
settings.maxDocFreq = parser.intValue();
} else if (currentFieldName.equals("min_word_length")) {
settings.minWordLength = parser.intValue();
} else if (currentFieldName.equals("max_word_length")) {
settings.maxWordLength = parser.intValue();
} else {
throw new ElasticsearchParseException(
"failed to parse term vectors request. "
+ "the field [{}] is not valid for filter parameter for term vector request",
currentFieldName
);
}
}
}
return settings;
}
}
| Flag |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/StringConcatToTextBlockTest.java | {
"start": 977,
"end": 1439
} | class ____ {
private final CompilationTestHelper testHelper =
CompilationTestHelper.newInstance(StringConcatToTextBlock.class, getClass());
private final BugCheckerRefactoringTestHelper refactoringHelper =
BugCheckerRefactoringTestHelper.newInstance(StringConcatToTextBlock.class, getClass());
@Test
public void negative() {
testHelper
.addSourceLines(
"Test.java",
"""
| StringConcatToTextBlockTest |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.