language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | elastic__elasticsearch | build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/RestTestTransformGlobalTeardown.java | {
"start": 736,
"end": 1070
} | interface ____ {
/**
* @param teardownNodeParent The parent of an existing "teardown" ObjectNode, null otherwise. If null implementations may create choose
* to create the section.
*/
ObjectNode transformTeardown(@Nullable ObjectNode teardownNodeParent);
}
| RestTestTransformGlobalTeardown |
java | hibernate__hibernate-orm | hibernate-envers/src/main/java/org/hibernate/envers/query/internal/property/RevisionPropertyPropertyName.java | {
"start": 415,
"end": 749
} | class ____ implements PropertyNameGetter {
private final String propertyName;
public RevisionPropertyPropertyName(String propertyName) {
this.propertyName = propertyName;
}
@Override
public String get(Configuration configuration) {
return configuration.getRevisionPropertyPath( propertyName );
}
}
| RevisionPropertyPropertyName |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/sql/ast/tree/expression/SqlTuple.java | {
"start": 817,
"end": 3011
} | class ____ implements Expression, SqlTupleContainer, DomainResultProducer, Assignable {
private final List<? extends Expression> expressions;
private final MappingModelExpressible<?> valueMapping;
public SqlTuple(List<? extends Expression> expressions, MappingModelExpressible<?> valueMapping) {
this.expressions = expressions;
this.valueMapping = valueMapping;
}
@Override
public MappingModelExpressible<?> getExpressionType() {
return valueMapping;
}
public List<? extends Expression> getExpressions(){
return expressions;
}
@Override
public List<ColumnReference> getColumnReferences() {
// TODO: this operation is completely untypesafe
// since the List can totally contain
// Expressions which aren't ColumnReferences
return expressions.stream()
.map( expression -> (ColumnReference) expression ).toList();
}
@Override
public void accept(SqlAstWalker sqlTreeWalker) {
sqlTreeWalker.visitTuple( this );
}
@Override
public SqlTuple getSqlTuple() {
return this;
}
@Override
public DomainResult<?> createDomainResult(
String resultVariable,
DomainResultCreationState creationState) {
final SqmExpressible<?> expressible = (SqmExpressible<?>) valueMapping;
final int[] valuesArrayPositions = new int[expressions.size()];
for ( int i = 0; i < expressions.size(); i++ ) {
final Expression expression = expressions.get( i );
valuesArrayPositions[i] =
resolveSelection( creationState.getSqlAstCreationState(), expression )
.getValuesArrayPosition();
}
return new TupleResult<>( valuesArrayPositions, resultVariable, expressible.getExpressibleJavaType() );
}
private static SqlSelection resolveSelection(SqlAstCreationState creationState, Expression expression) {
return creationState.getSqlExpressionResolver()
.resolveSqlSelection(
expression,
expression.getExpressionType().getSingleJdbcMapping().getJdbcJavaType(),
null,
creationState.getCreationContext().getMappingMetamodel().getTypeConfiguration()
);
}
@Override
public void applySqlSelections(DomainResultCreationState creationState) {
throw new UnsupportedOperationException();
}
public static | SqlTuple |
java | elastic__elasticsearch | x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/BulkStaticMappingChallengeRestIT.java | {
"start": 684,
"end": 4219
} | class ____ extends BulkChallengeRestIT {
public BulkStaticMappingChallengeRestIT() {}
@Override
public void baselineMappings(XContentBuilder builder) throws IOException {
if (fullyDynamicMapping == false) {
builder.startObject()
.startObject("properties")
.startObject("@timestamp")
.field("type", "date")
.endObject()
.startObject("host.name")
.field("type", "keyword")
.field("ignore_above", randomIntBetween(1000, 1200))
.endObject()
.startObject("message")
.field("type", "keyword")
.field("ignore_above", randomIntBetween(1000, 1200))
.endObject()
.startObject("method")
.field("type", "keyword")
.field("ignore_above", randomIntBetween(1000, 1200))
.endObject()
.startObject("memory_usage_bytes")
.field("type", "long")
.field("ignore_malformed", randomBoolean())
.endObject()
.endObject()
.endObject();
} else {
// We want dynamic mapping, but we need host.name to be a keyword instead of text to support aggregations.
builder.startObject()
.startObject("properties")
.startObject("host.name")
.field("type", "keyword")
.field("ignore_above", randomIntBetween(1000, 1200))
.endObject()
.endObject()
.endObject();
}
}
@Override
public void contenderMappings(XContentBuilder builder) throws IOException {
builder.startObject();
builder.field("subobjects", false);
if (fullyDynamicMapping == false) {
builder.startObject("properties")
.startObject("@timestamp")
.field("type", "date")
.endObject()
.startObject("host.name")
.field("type", "keyword")
.field("ignore_above", randomIntBetween(1000, 1200))
.endObject()
.startObject("message")
.field("type", "keyword")
.field("ignore_above", randomIntBetween(1000, 1200))
.endObject()
.startObject("method")
.field("type", "keyword")
.field("ignore_above", randomIntBetween(1000, 1200))
.endObject()
.startObject("memory_usage_bytes")
.field("type", "long")
.field("ignore_malformed", randomBoolean())
.endObject()
.endObject();
}
builder.endObject();
}
@Override
protected XContentBuilder generateDocument(final Instant timestamp) throws IOException {
return XContentFactory.jsonBuilder()
.startObject()
.field("@timestamp", DateFormatter.forPattern(FormatNames.STRICT_DATE_OPTIONAL_TIME.getName()).format(timestamp))
.field("host.name", randomFrom("foo", "bar", "baz"))
.field("message", randomFrom("a message", "another message", "still another message", "one more message"))
.field("method", randomFrom("put", "post", "get"))
.field("memory_usage_bytes", randomLongBetween(1000, 2000))
.endObject();
}
}
| BulkStaticMappingChallengeRestIT |
java | spring-projects__spring-security | web/src/test/java/org/springframework/security/web/header/writers/frameoptions/XFrameOptionsHeaderWriterTests.java | {
"start": 1044,
"end": 2193
} | class ____ {
private MockHttpServletRequest request = new MockHttpServletRequest();
private MockHttpServletResponse response = new MockHttpServletResponse();
private static final String XFRAME_OPTIONS_HEADER = "X-Frame-Options";
@Test
public void writeHeadersWhenWhiteList() {
WhiteListedAllowFromStrategy whitelist = new WhiteListedAllowFromStrategy(Arrays.asList("example.com"));
XFrameOptionsHeaderWriter writer = new XFrameOptionsHeaderWriter(whitelist);
writer.writeHeaders(this.request, this.response);
assertThat(this.response.getHeaderValue(XFrameOptionsHeaderWriter.XFRAME_OPTIONS_HEADER)).isEqualTo("DENY");
}
@Test
public void writeHeaderWhenNotPresent() {
WhiteListedAllowFromStrategy whitelist = new WhiteListedAllowFromStrategy(
Collections.singletonList("example.com"));
XFrameOptionsHeaderWriter writer = new XFrameOptionsHeaderWriter(whitelist);
String value = new String("value");
this.response.setHeader(XFRAME_OPTIONS_HEADER, value);
writer.writeHeaders(this.request, this.response);
assertThat(this.response.getHeader(XFRAME_OPTIONS_HEADER)).isSameAs(value);
}
}
| XFrameOptionsHeaderWriterTests |
java | apache__camel | components/camel-telemetry/src/main/java/org/apache/camel/telemetry/decorators/CometdsSpanDecorator.java | {
"start": 858,
"end": 1000
} | class ____ extends CometdSpanDecorator {
@Override
public String getComponent() {
return "cometds";
}
}
| CometdsSpanDecorator |
java | micronaut-projects__micronaut-core | jackson-core/src/main/java/io/micronaut/jackson/core/env/JsonPropertySourceLoader.java | {
"start": 1319,
"end": 3347
} | class ____ extends AbstractPropertySourceLoader {
/**
* File extension for property source loader.
*/
public static final String FILE_EXTENSION = "json";
public JsonPropertySourceLoader() {
}
public JsonPropertySourceLoader(boolean logEnabled) {
super(logEnabled);
}
@Override
public Set<String> getExtensions() {
return Collections.singleton(FILE_EXTENSION);
}
@Override
protected void processInput(String name, InputStream input, Map<String, Object> finalMap) throws IOException {
Map<String, Object> map = readJsonAsMap(input);
processMap(finalMap, map, "");
}
/**
* @param input The input stream
* @return map representation of the json
* @throws IOException If the input stream doesn't exist
*/
@SuppressWarnings("unchecked")
protected Map<String, Object> readJsonAsMap(InputStream input) throws IOException {
return (Map<String, Object>) unwrap(readJsonAsObject(input));
}
private JsonNode readJsonAsObject(InputStream input) throws IOException {
try (JsonParser parser = new JsonFactory().createParser(input)) {
return JsonNodeTreeCodec.getInstance().readTree(parser);
}
}
private Object unwrap(JsonNode value) {
if (value.isNumber()) {
return value.getNumberValue();
} else if (value.isNull()) {
return null;
} else if (value.isBoolean()) {
return value.getBooleanValue();
} else if (value.isArray()) {
var unwrapped = new ArrayList<>();
value.values().forEach(v -> unwrapped.add(unwrap(v)));
return unwrapped;
} else if (value.isObject()) {
var unwrapped = new LinkedHashMap<String, Object>();
value.entries().forEach(e -> unwrapped.put(e.getKey(), unwrap(e.getValue())));
return unwrapped;
} else {
return value.getStringValue();
}
}
}
| JsonPropertySourceLoader |
java | apache__kafka | server/src/test/java/org/apache/kafka/server/KRaftClusterTest.java | {
"start": 13421,
"end": 15351
} | class ____ implements ClientQuotaCallback, Reconfigurable {
// Default constructor needed for reflection object creation
public DummyClientQuotaCallback() {
}
public static final String DUMMY_CLIENT_QUOTA_CALLBACK_VALUE_CONFIG_KEY = "dummy.client.quota.callback.value";
private int value = 0;
@Override
public Map<String, String> quotaMetricTags(ClientQuotaType quotaType, KafkaPrincipal principal, String clientId) {
return Map.of();
}
@Override
public Double quotaLimit(ClientQuotaType quotaType, Map<String, String> metricTags) {
return 1.0;
}
@Override
public void updateQuota(ClientQuotaType quotaType, ClientQuotaEntity quotaEntity, double newValue) {
}
@Override
public void removeQuota(ClientQuotaType quotaType, ClientQuotaEntity quotaEntity) {
}
@Override
public boolean quotaResetRequired(ClientQuotaType quotaType) {
return true;
}
@Override
public boolean updateClusterMetadata(Cluster cluster) {
return false;
}
@Override
public void close() {
}
@Override
public void configure(Map<String, ?> configs) {
Object newValue = configs.get(DUMMY_CLIENT_QUOTA_CALLBACK_VALUE_CONFIG_KEY);
if (newValue != null) {
value = Integer.parseInt(newValue.toString());
}
}
@Override
public Set<String> reconfigurableConfigs() {
return Set.of(DUMMY_CLIENT_QUOTA_CALLBACK_VALUE_CONFIG_KEY);
}
@Override
public void validateReconfiguration(Map<String, ?> configs) {
}
@Override
public void reconfigure(Map<String, ?> configs) {
configure(configs);
}
}
public static | DummyClientQuotaCallback |
java | quarkusio__quarkus | integration-tests/gradle/src/main/resources/basic-java-library-module/application/src/test/java/org/acme/ApplicationConfigResourceTest.java | {
"start": 210,
"end": 464
} | class ____ {
@Test
public void testAppConfigEndpoint() {
given()
.when().get("/app-config")
.then()
.statusCode(200)
.body(is("application:1.0.0-SNAPSHOT"));
}
} | ApplicationConfigResourceTest |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/ser/filter/IncludePropsForSerTest.java | {
"start": 890,
"end": 1007
} | class ____ extends HashMap<String, String> { }
//allow use of @JsonIncludeProperties for properties
static | MyMap |
java | quarkusio__quarkus | extensions/devui/deployment-spi/src/main/java/io/quarkus/devui/spi/buildtime/jsonrpc/RecordedJsonRpcMethod.java | {
"start": 253,
"end": 905
} | class ____ extends AbstractJsonRpcMethod {
private RuntimeValue runtimeValue;
public RecordedJsonRpcMethod() {
}
public RecordedJsonRpcMethod(String methodName,
String description,
EnumSet<Usage> usage,
boolean mcpEnabledByDefault,
RuntimeValue runtimeValue) {
super(methodName, description, usage, mcpEnabledByDefault);
this.runtimeValue = runtimeValue;
}
public RuntimeValue getRuntimeValue() {
return runtimeValue;
}
public void setRuntimeValue(RuntimeValue runtimeValue) {
this.runtimeValue = runtimeValue;
}
} | RecordedJsonRpcMethod |
java | google__gson | gson/src/main/java/com/google/gson/TypeAdapter.java | {
"start": 2291,
"end": 5223
} | class ____ a compact JSON value.
*
* <p>The {@link #read(JsonReader) read()} method must read exactly one value and {@link
* #write(JsonWriter,Object) write()} must write exactly one value. For primitive types this is
* means readers should make exactly one call to {@code nextBoolean()}, {@code nextDouble()}, {@code
* nextInt()}, {@code nextLong()}, {@code nextString()} or {@code nextNull()}. Writers should make
* exactly one call to one of {@code value()} or {@code nullValue()}. For arrays, type adapters
* should start with a call to {@code beginArray()}, convert all elements, and finish with a call to
* {@code endArray()}. For objects, they should start with {@code beginObject()}, convert the
* object, and finish with {@code endObject()}. Failing to convert a value or converting too many
* values may cause the application to crash.
*
* <p>Type adapters should be prepared to read null from the stream and write it to the stream.
* Alternatively, they should use {@link #nullSafe()} method while registering the type adapter with
* Gson. If your {@code Gson} instance has been configured to {@link GsonBuilder#serializeNulls()},
* these nulls will be written to the final document. Otherwise the value (and the corresponding
* name when writing to a JSON object) will be omitted automatically. In either case your type
* adapter must handle null.
*
* <p>Type adapters should be stateless and thread-safe, otherwise the thread-safety guarantees of
* {@link Gson} might not apply.
*
* <p>To use a custom type adapter with Gson, you must <i>register</i> it with a {@link
* GsonBuilder}:
*
* <pre>{@code
* GsonBuilder builder = new GsonBuilder();
* builder.registerTypeAdapter(Point.class, new PointAdapter());
* // if PointAdapter didn't check for nulls in its read/write methods, you should instead use
* // builder.registerTypeAdapter(Point.class, new PointAdapter().nullSafe());
* ...
* Gson gson = builder.create();
* }</pre>
*
* @since 2.1
*/
// non-Javadoc:
//
// <h2>JSON Conversion</h2>
// <p>A type adapter registered with Gson is automatically invoked while serializing
// or deserializing JSON. However, you can also use type adapters directly to serialize
// and deserialize JSON. Here is an example for deserialization: <pre>{@code
// String json = "{'origin':'0,0','points':['1,2','3,4']}";
// TypeAdapter<Graph> graphAdapter = gson.getAdapter(Graph.class);
// Graph graph = graphAdapter.fromJson(json);
// }</pre>
// And an example for serialization: <pre>{@code
// Graph graph = new Graph(...);
// TypeAdapter<Graph> graphAdapter = gson.getAdapter(Graph.class);
// String json = graphAdapter.toJson(graph);
// }</pre>
//
// <p>Type adapters are <strong>type-specific</strong>. For example, a {@code
// TypeAdapter<Date>} can convert {@code Date} instances to JSON and JSON to
// instances of {@code Date}, but cannot convert any other types.
//
public abstract | to |
java | spring-projects__spring-framework | spring-beans/src/main/java/org/springframework/beans/factory/xml/AbstractSimpleBeanDefinitionParser.java | {
"start": 1192,
"end": 1543
} | class ____ you want to create a single
* bean definition from a relatively simple custom XML element. The
* resulting {@code BeanDefinition} will be automatically
* registered with the relevant
* {@link org.springframework.beans.factory.support.BeanDefinitionRegistry}.
*
* <p>An example will hopefully make the use of this particular parser
* | when |
java | elastic__elasticsearch | modules/aggregations/src/main/java/org/elasticsearch/aggregations/metric/MatrixStatsResults.java | {
"start": 1079,
"end": 10147
} | class ____ implements Writeable {
/** object holding results - computes results in place */
protected final RunningStats results;
/** pearson product correlation coefficients */
protected final Map<String, HashMap<String, Double>> correlation;
/** Base ctor */
MatrixStatsResults() {
results = new RunningStats();
this.correlation = new HashMap<>();
}
/** creates and computes result from provided stats */
MatrixStatsResults(RunningStats stats) {
this.results = stats.clone();
this.correlation = new HashMap<>();
this.compute();
}
/** creates and computes the result from the provided stats, scaling as necessary given the sampling context */
MatrixStatsResults(RunningStats stats, SamplingContext samplingContext) {
this.results = stats.clone();
this.correlation = new HashMap<>();
this.compute();
// Note: it is important to scale counts AFTER compute as scaling before could introduce bias
this.results.docCount = samplingContext.scaleUp(this.results.docCount);
for (String field : this.results.counts.keySet()) {
this.results.counts.computeIfPresent(field, (k, v) -> samplingContext.scaleUp(v));
}
}
/** creates a results object from the given stream */
@SuppressWarnings("unchecked")
protected MatrixStatsResults(StreamInput in) {
try {
results = new RunningStats(in);
correlation = (Map<String, HashMap<String, Double>>) in.readGenericValue();
} catch (IOException e) {
throw new ElasticsearchException("Error trying to create multifield_stats results from stream input", e);
}
}
/** Marshalls MatrixStatsResults */
@Override
public void writeTo(StreamOutput out) throws IOException {
// marshall results
results.writeTo(out);
// marshall correlation
out.writeGenericValue(correlation);
}
/** return document count */
public final long getDocCount() {
return results.docCount;
}
/** return the field counts - not public, used for getProperty() */
protected Map<String, Long> getFieldCounts() {
return Collections.unmodifiableMap(results.counts);
}
/** return the fied count for the requested field */
public long getFieldCount(String field) {
if (results.counts.containsKey(field) == false) {
return 0;
}
return results.counts.get(field);
}
/** return the means - not public, used for getProperty() */
protected Map<String, Double> getMeans() {
return Collections.unmodifiableMap(results.means);
}
/** return the mean for the requested field */
public double getMean(String field) {
checkField(field, results.means);
return results.means.get(field);
}
/** return the variances - not public, used for getProperty() */
protected Map<String, Double> getVariances() {
return Collections.unmodifiableMap(results.variances);
}
/** return the variance for the requested field */
public double getVariance(String field) {
checkField(field, results.variances);
return results.variances.get(field);
}
/** return the skewness - not public, used for getProperty() */
protected Map<String, Double> getSkewness() {
return Collections.unmodifiableMap(results.skewness);
}
/** return the skewness for the requested field */
public double getSkewness(String field) {
checkField(field, results.skewness);
return results.skewness.get(field);
}
/** return the kurtosis */
protected Map<String, Double> getKurtosis() {
return Collections.unmodifiableMap(results.kurtosis);
}
/** return the kurtosis for the requested field */
public double getKurtosis(String field) {
checkField(field, results.kurtosis);
return results.kurtosis.get(field);
}
/** return the covariances as a map - not public, used for getProperty() */
protected Map<String, HashMap<String, Double>> getCovariances() {
return Collections.unmodifiableMap(results.covariances);
}
/** return the covariance between two fields */
public double getCovariance(String fieldX, String fieldY) {
if (fieldX.equals(fieldY)) {
checkField(fieldX, results.variances);
return results.variances.get(fieldX);
}
return getValFromUpperTriangularMatrix(results.covariances, fieldX, fieldY);
}
/** return the correlations as a map - not public, used for getProperty() */
protected Map<String, HashMap<String, Double>> getCorrelations() {
return Collections.unmodifiableMap(correlation);
}
/** return the correlation coefficient between two fields */
public Double getCorrelation(String fieldX, String fieldY) {
if (fieldX.equals(fieldY)) {
return 1.0;
}
return getValFromUpperTriangularMatrix(correlation, fieldX, fieldY);
}
/** return the value for two fields in an upper triangular matrix, regardless of row col location. */
static <M extends Map<String, Double>> double getValFromUpperTriangularMatrix(Map<String, M> map, String fieldX, String fieldY) {
// for the co-value to exist, one of the two (or both) fields has to be a row key
if (map.containsKey(fieldX) == false && map.containsKey(fieldY) == false) {
throw new IllegalArgumentException("neither field " + fieldX + " nor " + fieldY + " exist");
} else if (map.containsKey(fieldX)) {
// fieldX exists as a row key
if (map.get(fieldX).containsKey(fieldY)) {
// fieldY exists as a col key to fieldX
return map.get(fieldX).get(fieldY);
} else {
// otherwise fieldX is the col key to fieldY
return map.get(fieldY).get(fieldX);
}
} else if (map.containsKey(fieldY)) {
// fieldX did not exist as a row key, it must be a col key
return map.get(fieldY).get(fieldX);
}
throw new IllegalArgumentException("Coefficient not computed between fields: " + fieldX + " and " + fieldY);
}
private static void checkField(String field, Map<String, ?> map) {
if (field == null) {
throw new IllegalArgumentException("field name cannot be null");
}
if (map.containsKey(field) == false) {
throw new IllegalArgumentException("field " + field + " does not exist");
}
}
/** Computes final covariance, variance, and correlation */
private void compute() {
final double nM1 = results.docCount - 1D;
// compute final skewness and kurtosis
for (String fieldName : results.means.keySet()) {
final double var = results.variances.get(fieldName);
// update skewness
results.skewness.put(fieldName, Math.sqrt(results.docCount) * results.skewness.get(fieldName) / Math.pow(var, 1.5D));
// update kurtosis
results.kurtosis.put(fieldName, (double) results.docCount * results.kurtosis.get(fieldName) / (var * var));
// update variances
results.variances.put(fieldName, results.variances.get(fieldName) / nM1);
}
// compute final covariances and correlation
double cor;
for (Map.Entry<String, HashMap<String, Double>> row : results.covariances.entrySet()) {
final String rowName = row.getKey();
final HashMap<String, Double> covRow = row.getValue();
final HashMap<String, Double> corRow = new HashMap<>();
for (Map.Entry<String, Double> col : covRow.entrySet()) {
final String colName = col.getKey();
// update covariance
covRow.put(colName, covRow.get(colName) / nM1);
// update correlation
// if there is no variance in the data then correlation is NaN
if (results.variances.get(rowName) == 0d || results.variances.get(colName) == 0d) {
cor = Double.NaN;
} else {
final double corDen = Math.sqrt(results.variances.get(rowName)) * Math.sqrt(results.variances.get(colName));
cor = covRow.get(colName) / corDen;
}
corRow.put(colName, cor);
}
results.covariances.put(rowName, covRow);
correlation.put(rowName, corRow);
}
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
MatrixStatsResults that = (MatrixStatsResults) o;
return Objects.equals(results, that.results) && Objects.equals(correlation, that.correlation);
}
@Override
public int hashCode() {
return Objects.hash(results, correlation);
}
}
| MatrixStatsResults |
java | processing__processing4 | core/src/processing/event/KeyEvent.java | {
"start": 876,
"end": 1836
} | class ____ extends Event {
static public final int PRESS = 1;
static public final int RELEASE = 2;
static public final int TYPE = 3;
char key;
int keyCode;
boolean isAutoRepeat;
public KeyEvent(Object nativeObject,
long millis, int action, int modifiers,
char key, int keyCode) {
super(nativeObject, millis, action, modifiers);
this.flavor = KEY;
this.key = key;
this.keyCode = keyCode;
}
public KeyEvent(Object nativeObject,
long millis, int action, int modifiers,
char key, int keyCode, boolean isAutoRepeat) {
super(nativeObject, millis, action, modifiers);
this.flavor = KEY;
this.key = key;
this.keyCode = keyCode;
this.isAutoRepeat = isAutoRepeat;
}
public char getKey() {
return key;
}
public int getKeyCode() {
return keyCode;
}
public boolean isAutoRepeat() {
return isAutoRepeat;
}
} | KeyEvent |
java | google__dagger | dagger-compiler/main/java/dagger/internal/codegen/validation/AnyBindingMethodValidator.java | {
"start": 1612,
"end": 4390
} | class ____ implements ClearableCache {
private final ImmutableMap<XClassName, BindingMethodValidator> validators;
private final Map<XMethodElement, ValidationReport> reports = new HashMap<>();
@Inject
AnyBindingMethodValidator(ImmutableMap<XClassName, BindingMethodValidator> validators) {
this.validators = validators;
}
@Override
public void clearCache() {
reports.clear();
}
/** Returns the binding method annotations considered by this validator. */
public ImmutableSet<XClassName> methodAnnotations() {
return validators.keySet();
}
/**
* Returns {@code true} if {@code method} is annotated with at least one of {@link
* #methodAnnotations()}.
*/
public boolean isBindingMethod(XExecutableElement method) {
return hasAnyAnnotation(method, methodAnnotations());
}
/**
* Returns a validation report for a method.
*
* <ul>
* <li>Reports an error if {@code method} is annotated with more than one {@linkplain
* #methodAnnotations() binding method annotation}.
* <li>Validates {@code method} with the {@link BindingMethodValidator} for the single
* {@linkplain #methodAnnotations() binding method annotation}.
* </ul>
*
* @throws IllegalArgumentException if {@code method} is not annotated by any {@linkplain
* #methodAnnotations() binding method annotation}
*/
public ValidationReport validate(XMethodElement method) {
return reentrantComputeIfAbsent(reports, method, this::validateUncached);
}
/**
* Returns {@code true} if {@code method} was already {@linkplain #validate(XMethodElement)
* validated}.
*/
public boolean wasAlreadyValidated(XMethodElement method) {
return reports.containsKey(method);
}
private ValidationReport validateUncached(XMethodElement method) {
ValidationReport.Builder report = ValidationReport.about(method);
ImmutableSet<XClassName> bindingMethodAnnotations =
methodAnnotations().stream().filter(method::hasAnnotation).collect(toImmutableSet());
switch (bindingMethodAnnotations.size()) {
case 0:
throw new IllegalArgumentException(
String.format("%s has no binding method annotation", method));
case 1:
report.addSubreport(
validators.get(getOnlyElement(bindingMethodAnnotations)).validate(method));
break;
default:
report.addError(
String.format(
"%s is annotated with more than one of (%s)",
getSimpleName(method),
methodAnnotations().stream()
.map(XClassName::getCanonicalName)
.collect(joining(", "))),
method);
break;
}
return report.build();
}
}
| AnyBindingMethodValidator |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/benckmark/pool/Case_Concurrent_50.java | {
"start": 1018,
"end": 5114
} | class ____ extends TestCase {
private String jdbcUrl;
private String user;
private String password;
private String driverClass;
private int initialSize;
private int minIdle = 3;
private int maxIdle = 5;
private int maxActive = 10;
private String validationQuery = "SELECT 1";
private boolean testOnBorrow;
private long minEvictableIdleTimeMillis = 3000;
public final int LOOP_COUNT = 5;
public final int COUNT = 1000 * 10;
private final int THREAD_COUNT = 50;
protected void setUp() throws Exception {
jdbcUrl = "jdbc:fake:dragoon_v25masterdb";
user = "dragoon25";
password = "dragoon25";
driverClass = "com.alibaba.druid.mock.MockDriver";
}
public void test_0() throws Exception {
final DruidDataSource dataSource = new DruidDataSource();
dataSource.setInitialSize(initialSize);
dataSource.setMaxActive(maxActive);
dataSource.setMinIdle(minIdle);
dataSource.setMaxIdle(maxIdle);
dataSource.setPoolPreparedStatements(true);
dataSource.setDriverClassName(driverClass);
dataSource.setUrl(jdbcUrl);
dataSource.setPoolPreparedStatements(true);
dataSource.setMaxWait(6000);
dataSource.setUsername(user);
dataSource.setPassword(password);
dataSource.setValidationQuery(validationQuery);
dataSource.setTestOnBorrow(testOnBorrow);
dataSource.setMinEvictableIdleTimeMillis(minEvictableIdleTimeMillis);
for (int i = 0; i < LOOP_COUNT; ++i) {
p0(dataSource, "druid");
}
System.out.println();
}
public void test_1() throws Exception {
final BasicDataSource dataSource = new BasicDataSource();
dataSource.setInitialSize(initialSize);
dataSource.setMaxActive(maxActive);
dataSource.setMinIdle(minIdle);
dataSource.setMaxIdle(maxIdle);
dataSource.setPoolPreparedStatements(true);
dataSource.setDriverClassName(driverClass);
dataSource.setUrl(jdbcUrl);
dataSource.setPoolPreparedStatements(true);
dataSource.setUsername(user);
dataSource.setPassword(password);
dataSource.setValidationQuery(validationQuery);
dataSource.setTestOnBorrow(testOnBorrow);
dataSource.setMinEvictableIdleTimeMillis(minEvictableIdleTimeMillis);
for (int i = 0; i < LOOP_COUNT; ++i) {
p0(dataSource, "dbcp");
}
System.out.println();
}
private void p0(final DataSource dataSource, String name) throws Exception {
long startMillis = System.currentTimeMillis();
long startYGC = TestUtil.getYoungGC();
long startFullGC = TestUtil.getFullGC();
final CountDownLatch endLatch = new CountDownLatch(THREAD_COUNT);
for (int i = 0; i < THREAD_COUNT; ++i) {
Thread thread = new Thread() {
public void run() {
try {
for (int i = 0; i < COUNT; ++i) {
Connection conn = dataSource.getConnection();
Statement stmt = conn.createStatement();
ResultSet rs = stmt.executeQuery("SELECT 1");
Thread.sleep(0, 1000 * 100);
rs.close();
stmt.close();
conn.close();
}
} catch (Exception e) {
e.printStackTrace();
} finally {
endLatch.countDown();
}
}
};
thread.start();
}
endLatch.await();
long millis = System.currentTimeMillis() - startMillis;
long ygc = TestUtil.getYoungGC() - startYGC;
long fullGC = TestUtil.getFullGC() - startFullGC;
System.out.println(name + " millis : " + NumberFormat.getInstance().format(millis) + ", YGC " + ygc + " FGC "
+ fullGC);
}
}
| Case_Concurrent_50 |
java | FasterXML__jackson-core | src/test/java/tools/jackson/core/unittest/sym/ByteQuadsCanonicalizerTest.java | {
"start": 187,
"end": 604
} | class ____
{
@Test
void multiplyByFourFifths()
{
int i = 0;
for (; i >= 0; i += 7) {
int expected = (int) (i * 0.80);
int actual = ByteQuadsCanonicalizer.multiplyByFourFifths(i);
if (expected != actual) {
fail("Input for 80% of "+i+" differs: expected="+expected+", actual="+actual);
}
}
}
} | ByteQuadsCanonicalizerTest |
java | apache__logging-log4j2 | log4j-iostreams/src/test/java/org/apache/logging/log4j/io/LoggerPrintWriterTest.java | {
"start": 1041,
"end": 3838
} | class ____ extends AbstractLoggerWriterTest {
private PrintWriter print;
@Override
protected StringWriter createWriter() {
return new StringWriter();
}
@Override
protected Writer createWriterWrapper() {
this.print = IoBuilder.forLogger(getExtendedLogger())
.filter(this.wrapped)
.setLevel(LEVEL)
.buildPrintWriter();
return this.print;
}
@Test
public void testFormat() {
assertSame(this.print, this.print.format("[%s]", FIRST));
assertMessages();
this.print.println();
assertMessages("[" + FIRST + "]");
assertEquals("[" + FIRST + "]" + NEWLINE, this.wrapped.toString());
}
@Test
public void testPrint_boolean() {
this.print.print(true);
assertMessages();
this.print.println();
assertMessages("true");
assertEquals("true" + NEWLINE, this.wrapped.toString());
}
@Test
public void testPrint_char() {
for (final char c : FIRST.toCharArray()) {
this.print.print(c);
assertMessages();
}
this.print.println();
assertMessages(FIRST);
assertEquals(FIRST + NEWLINE, this.wrapped.toString());
}
@Test
public void testPrint_CharacterArray() {
this.print.print(FIRST.toCharArray());
assertMessages();
this.print.println();
assertMessages(FIRST);
assertEquals(FIRST + NEWLINE, this.wrapped.toString());
}
@Test
public void testPrint_int() {
this.print.print(12);
assertMessages();
this.print.println();
assertMessages("12");
assertEquals("12" + NEWLINE, this.wrapped.toString());
}
@Test
public void testPrint_long() {
this.print.print(12L);
assertMessages();
this.print.println();
assertMessages("12");
assertEquals("12" + NEWLINE, this.wrapped.toString());
}
@Test
public void testPrint_Object() {
this.print.print((Object) FIRST);
assertMessages();
this.print.println();
assertMessages(FIRST);
assertEquals(FIRST + NEWLINE, this.wrapped.toString());
}
@Test
public void testPrint_String() {
this.print.print(FIRST);
assertMessages();
this.print.println();
assertMessages(FIRST);
assertEquals(FIRST + NEWLINE, this.wrapped.toString());
}
@Test
public void testPrintf() {
assertSame(this.print, this.print.printf("<<<%s>>>", FIRST));
assertMessages();
this.print.println();
assertMessages("<<<" + FIRST + ">>>");
assertEquals("<<<" + FIRST + ">>>" + NEWLINE, this.wrapped.toString());
}
}
| LoggerPrintWriterTest |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/api/AbstractIterableAssert.java | {
"start": 121380,
"end": 121500
} | class ____ extends BaseClass {
* // No 'inSubType2' field
* String inSubType1 = "type1";
* }
*
* | SubType1 |
java | bumptech__glide | library/src/main/java/com/bumptech/glide/load/engine/cache/DiskCache.java | {
"start": 251,
"end": 283
} | interface ____ {
/** An | DiskCache |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/collection/internal/StandardBagSemantics.java | {
"start": 598,
"end": 1387
} | class ____<E> extends AbstractBagSemantics<E> {
/**
* Singleton access
*/
public static final StandardBagSemantics<?> INSTANCE = new StandardBagSemantics<>();
private StandardBagSemantics() {
}
@Override
public CollectionClassification getCollectionClassification() {
return CollectionClassification.BAG;
}
@Override
public PersistentCollection<E> instantiateWrapper(
Object key,
CollectionPersister collectionDescriptor,
SharedSessionContractImplementor session) {
return new PersistentBag<>( session );
}
@Override
public PersistentCollection<E> wrap(
Collection<E> rawCollection,
CollectionPersister collectionDescriptor,
SharedSessionContractImplementor session) {
return new PersistentBag<>( session, rawCollection );
}
}
| StandardBagSemantics |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/common/util/iterable/Iterables.java | {
"start": 800,
"end": 1211
} | class ____ {
/** Flattens the two level {@code Iterable} into a single {@code Iterable}. Note that this pre-caches the values from the outer {@code
* Iterable}, but not the values from the inner one. */
public static <T> Iterable<T> flatten(Iterable<? extends Iterable<T>> inputs) {
Objects.requireNonNull(inputs);
return new FlattenedIterables<>(inputs);
}
static | Iterables |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/common/state/v2/StateDescriptor.java | {
"start": 1680,
"end": 1871
} | class ____<T> implements Serializable {
private static final long serialVersionUID = 1L;
/** An enumeration of the types of supported states. */
@Internal
public | StateDescriptor |
java | spring-projects__spring-framework | spring-webmvc/src/test/java/org/springframework/web/servlet/tags/EvalTagTests.java | {
"start": 7761,
"end": 8313
} | class ____ {
public String method() {
return "foo";
}
@NumberFormat(style=Style.PERCENT)
public BigDecimal getFormattable() {
return new BigDecimal(".25");
}
public String html() {
return "<p>";
}
public String getBean() {
return "not the bean object";
}
public Object getNull() {
return null;
}
public String js() {
return "function foo() { alert(\"hi\") }";
}
public Map<String, Object> getMap() {
Map<String, Object> map = new HashMap<>();
map.put("key", "value");
return map;
}
}
}
| Bean |
java | spring-projects__spring-security | oauth2/oauth2-core/src/test/java/org/springframework/security/oauth2/core/TestOAuth2AccessTokens.java | {
"start": 838,
"end": 1341
} | class ____ {
private TestOAuth2AccessTokens() {
}
public static OAuth2AccessToken noScopes() {
return new OAuth2AccessToken(OAuth2AccessToken.TokenType.BEARER, "no-scopes", Instant.now(),
Instant.now().plus(Duration.ofDays(1)));
}
public static OAuth2AccessToken scopes(String... scopes) {
return new OAuth2AccessToken(OAuth2AccessToken.TokenType.BEARER, "scopes", Instant.now(),
Instant.now().plus(Duration.ofDays(1)), new HashSet<>(Arrays.asList(scopes)));
}
}
| TestOAuth2AccessTokens |
java | quarkusio__quarkus | extensions/funqy/funqy-amazon-lambda/runtime/src/main/java/io/quarkus/funqy/lambda/config/FunqyAmazonConfig.java | {
"start": 324,
"end": 513
} | interface ____ {
/**
* The advanced event handling config
*/
@WithName("advanced-event-handling")
AdvancedEventHandlingConfig advancedEventHandling();
}
| FunqyAmazonConfig |
java | apache__flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Session.java | {
"start": 1399,
"end": 1921
} | class ____ {
/**
* Creates a session window. The boundary of session windows are defined by intervals of
* inactivity, i.e., a session window is closes if no event appears for a defined gap period.
*
* @param gap specifies how long (as interval of milliseconds) to wait for new data before
* closing the session window.
* @return a partially defined session window
*/
public static SessionWithGap withGap(Expression gap) {
return new SessionWithGap(gap);
}
}
| Session |
java | quarkusio__quarkus | independent-projects/qute/generator/src/main/java/io/quarkus/qute/generator/ExtensionMethodGenerator.java | {
"start": 55420,
"end": 55501
} | enum ____ {
BASE,
NAME,
ATTR,
EVAL
}
}
| ParamKind |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/AnnotationIntrospector.java | {
"start": 45996,
"end": 46648
} | class ____ introspect
*/
public Object findValueInstantiator(MapperConfig<?> config, AnnotatedClass ac) {
return null;
}
/**
* Method for finding Builder object to use for constructing
* value instance and binding data (sort of combining value
* instantiators that can construct, and deserializers
* that can bind data).
*<p>
* Note that unlike accessors for some helper Objects, this
* method does not allow returning instances: the reason is
* that builders have state, and a separate instance needs
* to be created for each deserialization call.
*
* @param ac Annotated | to |
java | apache__hadoop | hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/retryReasonCategories/UnknownSocketExceptionRetryReason.java | {
"start": 1119,
"end": 1561
} | class ____ extends
RetryReasonCategory {
@Override
Boolean canCapture(final Exception ex,
final Integer statusCode,
final String serverErrorMessage) {
if (ex instanceof SocketException) {
return true;
}
return false;
}
@Override
String getAbbreviation(final Integer statusCode,
final String serverErrorMessage) {
return SOCKET_EXCEPTION_ABBREVIATION;
}
}
| UnknownSocketExceptionRetryReason |
java | google__guava | android/guava/src/com/google/common/collect/CollectSpliterators.java | {
"start": 5600,
"end": 11388
} | class ____ implements Spliterator<T>, Consumer<T> {
@Nullable T holder = null;
@Override
public void accept(@ParametricNullness T t) {
this.holder = t;
}
@Override
public boolean tryAdvance(Consumer<? super T> action) {
while (fromSpliterator.tryAdvance(this)) {
try {
// The cast is safe because tryAdvance puts a T into `holder`.
T next = uncheckedCastNullableTToT(holder);
if (predicate.test(next)) {
action.accept(next);
return true;
}
} finally {
holder = null;
}
}
return false;
}
@Override
public @Nullable Spliterator<T> trySplit() {
Spliterator<T> fromSplit = fromSpliterator.trySplit();
return (fromSplit == null) ? null : filter(fromSplit, predicate);
}
@Override
public long estimateSize() {
return fromSpliterator.estimateSize() / 2;
}
@Override
public @Nullable Comparator<? super T> getComparator() {
return fromSpliterator.getComparator();
}
@Override
public int characteristics() {
return fromSpliterator.characteristics()
& (Spliterator.DISTINCT
| Spliterator.NONNULL
| Spliterator.ORDERED
| Spliterator.SORTED);
}
}
return new Splitr();
}
/**
* Returns a {@code Spliterator} that iterates over the elements of the spliterators generated by
* applying {@code function} to the elements of {@code fromSpliterator}.
*/
static <InElementT extends @Nullable Object, OutElementT extends @Nullable Object>
Spliterator<OutElementT> flatMap(
Spliterator<InElementT> fromSpliterator,
Function<? super InElementT, @Nullable Spliterator<OutElementT>> function,
int topCharacteristics,
long topSize) {
checkArgument(
(topCharacteristics & Spliterator.SUBSIZED) == 0,
"flatMap does not support SUBSIZED characteristic");
checkArgument(
(topCharacteristics & Spliterator.SORTED) == 0,
"flatMap does not support SORTED characteristic");
checkNotNull(fromSpliterator);
checkNotNull(function);
return new FlatMapSpliteratorOfObject<>(
null, fromSpliterator, function, topCharacteristics, topSize);
}
/**
* Returns a {@code Spliterator.OfInt} that iterates over the elements of the spliterators
* generated by applying {@code function} to the elements of {@code fromSpliterator}. (If {@code
* function} returns {@code null} for an input, it is replaced with an empty stream.)
*/
static <InElementT extends @Nullable Object> Spliterator.OfInt flatMapToInt(
Spliterator<InElementT> fromSpliterator,
Function<? super InElementT, Spliterator.@Nullable OfInt> function,
int topCharacteristics,
long topSize) {
checkArgument(
(topCharacteristics & Spliterator.SUBSIZED) == 0,
"flatMap does not support SUBSIZED characteristic");
checkArgument(
(topCharacteristics & Spliterator.SORTED) == 0,
"flatMap does not support SORTED characteristic");
checkNotNull(fromSpliterator);
checkNotNull(function);
return new FlatMapSpliteratorOfInt<>(
null, fromSpliterator, function, topCharacteristics, topSize);
}
/**
* Returns a {@code Spliterator.OfLong} that iterates over the elements of the spliterators
* generated by applying {@code function} to the elements of {@code fromSpliterator}. (If {@code
* function} returns {@code null} for an input, it is replaced with an empty stream.)
*/
static <InElementT extends @Nullable Object> Spliterator.OfLong flatMapToLong(
Spliterator<InElementT> fromSpliterator,
Function<? super InElementT, Spliterator.@Nullable OfLong> function,
int topCharacteristics,
long topSize) {
checkArgument(
(topCharacteristics & Spliterator.SUBSIZED) == 0,
"flatMap does not support SUBSIZED characteristic");
checkArgument(
(topCharacteristics & Spliterator.SORTED) == 0,
"flatMap does not support SORTED characteristic");
checkNotNull(fromSpliterator);
checkNotNull(function);
return new FlatMapSpliteratorOfLong<>(
null, fromSpliterator, function, topCharacteristics, topSize);
}
/**
* Returns a {@code Spliterator.OfDouble} that iterates over the elements of the spliterators
* generated by applying {@code function} to the elements of {@code fromSpliterator}. (If {@code
* function} returns {@code null} for an input, it is replaced with an empty stream.)
*/
static <InElementT extends @Nullable Object> Spliterator.OfDouble flatMapToDouble(
Spliterator<InElementT> fromSpliterator,
Function<? super InElementT, Spliterator.@Nullable OfDouble> function,
int topCharacteristics,
long topSize) {
checkArgument(
(topCharacteristics & Spliterator.SUBSIZED) == 0,
"flatMap does not support SUBSIZED characteristic");
checkArgument(
(topCharacteristics & Spliterator.SORTED) == 0,
"flatMap does not support SORTED characteristic");
checkNotNull(fromSpliterator);
checkNotNull(function);
return new FlatMapSpliteratorOfDouble<>(
null, fromSpliterator, function, topCharacteristics, topSize);
}
/**
* Implements the {@link Stream#flatMap} operation on spliterators.
*
* @param <InElementT> the element type of the input spliterator
* @param <OutElementT> the element type of the output spliterators
* @param <OutSpliteratorT> the type of the output spliterators
*/
@IgnoreJRERequirement // see earlier comment about redundancy
abstract static | Splitr |
java | reactor__reactor-core | reactor-core/src/main/java/reactor/core/publisher/FluxRefCount.java | {
"start": 3585,
"end": 4420
} | class ____<T> implements Consumer<Disposable> {
final FluxRefCount<? extends T> parent;
long subscribers;
boolean terminated;
boolean connected;
volatile @Nullable Disposable disconnect;
// https://github.com/uber/NullAway/issues/1157
@SuppressWarnings({"rawtypes", "DataFlowIssue"})
static final AtomicReferenceFieldUpdater<RefCountMonitor, @Nullable Disposable> DISCONNECT =
AtomicReferenceFieldUpdater.newUpdater(RefCountMonitor.class, Disposable.class, "disconnect");
RefCountMonitor(FluxRefCount<? extends T> parent) {
this.parent = parent;
}
@Override
public void accept(Disposable r) {
OperatorDisposables.replace(DISCONNECT, this, r);
}
void innerCancelled() {
parent.cancel(this);
}
void upstreamFinished() {
parent.terminated(this);
}
}
static final | RefCountMonitor |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/internal/util/ExceptionHelperTest.java | {
"start": 930,
"end": 1772
} | class ____ extends RxJavaTest {
@Test
public void utilityClass() {
TestHelper.checkUtilityClass(ExceptionHelper.class);
}
@Test
public void addRace() {
for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
final AtomicReference<Throwable> error = new AtomicReference<>();
final TestException ex = new TestException();
Runnable r = new Runnable() {
@Override
public void run() {
assertTrue(ExceptionHelper.addThrowable(error, ex));
}
};
TestHelper.race(r, r);
}
}
@Test(expected = InternalError.class)
public void throwIfThrowable() throws Exception {
ExceptionHelper.<Exception>throwIfThrowable(new InternalError());
}
}
| ExceptionHelperTest |
java | google__error-prone | annotations/src/main/java/com/google/errorprone/annotations/ThreadSafe.java | {
"start": 2283,
"end": 2453
} | class ____ to make it
* immutable, and you should do that whenever possible (or at the least, make as much of the class
* be immutable). Otherwise, writing a thread-safe | is |
java | netty__netty | microbench/src/main/java/io/netty/microbench/util/AbstractMicrobenchmarkBase.java | {
"start": 1367,
"end": 1672
} | class ____ all JMH benchmarks.
*/
@Warmup(iterations = AbstractMicrobenchmarkBase.DEFAULT_WARMUP_ITERATIONS, time = 1, timeUnit = TimeUnit.SECONDS)
@Measurement(iterations = AbstractMicrobenchmarkBase.DEFAULT_MEASURE_ITERATIONS, time = 1, timeUnit = TimeUnit.SECONDS)
@State(Scope.Thread)
public abstract | for |
java | quarkusio__quarkus | extensions/panache/mongodb-panache/runtime/src/main/java/io/quarkus/mongodb/panache/PanacheMongoEntity.java | {
"start": 633,
"end": 982
} | class ____ extends PanacheMongoEntityBase {
/**
* The auto-generated ID field.
* This field is set by Mongo when this entity is persisted.
*
* @see #persist()
*/
public ObjectId id;
@Override
public String toString() {
return this.getClass().getSimpleName() + "<" + id + ">";
}
}
| PanacheMongoEntity |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/lucene/queries/DoubleRandomBinaryDocValuesRangeQueryTests.java | {
"start": 1653,
"end": 3385
} | class ____ extends AbstractRange<Double> {
double min;
double max;
DoubleTestRange(double min, double max) {
this.min = min;
this.max = max;
}
@Override
public Double getMin() {
return min;
}
@Override
protected void setMin(int dim, Object val) {
assert dim == 0;
double v = (Double) val;
if (min < v) {
max = v;
} else {
min = v;
}
}
@Override
public Double getMax() {
return max;
}
@Override
protected void setMax(int dim, Object val) {
assert dim == 0;
double v = (Double) val;
if (max > v) {
min = v;
} else {
max = v;
}
}
@Override
protected boolean isDisjoint(Range o) {
DoubleTestRange other = (DoubleTestRange) o;
return this.min > other.max || this.max < other.min;
}
@Override
protected boolean isWithin(Range o) {
DoubleTestRange other = (DoubleTestRange) o;
if ((this.min >= other.min && this.max <= other.max) == false) {
// not within:
return false;
}
return true;
}
@Override
protected boolean contains(Range o) {
DoubleTestRange other = (DoubleTestRange) o;
if ((this.min <= other.min && this.max >= other.max) == false) {
// not contains:
return false;
}
return true;
}
}
}
| DoubleTestRange |
java | elastic__elasticsearch | plugins/examples/painless-whitelist/src/main/java/org/elasticsearch/example/painlesswhitelist/ExamplePainlessAnnotation.java | {
"start": 530,
"end": 949
} | class ____ {
public static final String NAME = "example_annotation";
public int category;
public String message;
public ExamplePainlessAnnotation(int category, String message) {
this.category = category;
this.message = message;
}
public int getCategory() {
return category;
}
public String getMessage() {
return message;
}
}
| ExamplePainlessAnnotation |
java | micronaut-projects__micronaut-core | inject-java/src/test/groovy/io/micronaut/inject/qualifiers/compose/CompositionTest.java | {
"start": 808,
"end": 1175
} | class ____ {
@Test
@Issue("#609")
public void testComposition() {
try (final ApplicationContext context = ApplicationContext.run()) {
final int result = context.getBean(Thing.class).getNumber();
Assertions.assertEquals(3, result, "Should have resolved 3 candidates for annotation qualifier");
}
}
}
| CompositionTest |
java | spring-projects__spring-security | test/src/test/java/org/springframework/security/test/context/support/WithAnonymousUserTests.java | {
"start": 1780,
"end": 1878
} | class ____ {
}
@WithAnonymousUser(setupBefore = TestExecutionEvent.TEST_METHOD)
private | Annotated |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/configurers/saml2/Saml2LogoutConfigurerTests.java | {
"start": 33161,
"end": 33761
} | class ____ {
static ObjectPostProcessor<Object> objectPostProcessor;
@Bean
SecurityFilterChain web(HttpSecurity http) throws Exception {
// @formatter:off
http
.authorizeHttpRequests((authorize) -> authorize.anyRequest().authenticated())
.saml2Login(withDefaults())
.saml2Logout(withDefaults());
return http.build();
// @formatter:on
}
@Bean
static ObjectPostProcessor<Object> objectPostProcessor() {
return objectPostProcessor;
}
}
@Configuration
@EnableWebSecurity
@Import(Saml2LoginConfigBeans.class)
static | Saml2DefaultsWithObjectPostProcessorConfig |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/kstream/SessionWindowedSerializer.java | {
"start": 1452,
"end": 3532
} | class ____ a windowed record. Must implement the {@link Serde} interface.
*/
public static final String WINDOWED_INNER_SERIALIZER_CLASS = "windowed.inner.serializer.class";
private final Logger log = LoggerFactory.getLogger(SessionWindowedSerializer.class);
private Serializer<T> inner;
// Default constructor needed by Kafka
public SessionWindowedSerializer() {}
public SessionWindowedSerializer(final Serializer<T> inner) {
this.inner = inner;
}
@SuppressWarnings({"deprecation", "unchecked"})
@Override
public void configure(final Map<String, ?> configs, final boolean isKey) {
String serializerConfigKey = WINDOWED_INNER_SERIALIZER_CLASS;
String serializerConfigValue = (String) configs.get(WINDOWED_INNER_SERIALIZER_CLASS);
if (serializerConfigValue == null) {
final String windowedInnerClassSerdeConfig = (String) configs.get(StreamsConfig.WINDOWED_INNER_CLASS_SERDE);
if (windowedInnerClassSerdeConfig != null) {
serializerConfigKey = StreamsConfig.WINDOWED_INNER_CLASS_SERDE;
serializerConfigValue = windowedInnerClassSerdeConfig;
log.warn("Config {} is deprecated. Please use {} instead.",
StreamsConfig.WINDOWED_INNER_CLASS_SERDE, WINDOWED_INNER_SERIALIZER_CLASS);
}
}
Serde<T> windowedInnerSerializerClass = null;
if (serializerConfigValue != null) {
try {
windowedInnerSerializerClass = Utils.newInstance(serializerConfigValue, Serde.class);
} catch (final ClassNotFoundException e) {
throw new ConfigException(serializerConfigKey, serializerConfigValue,
"Serde class " + serializerConfigValue + " could not be found.");
}
}
if (inner != null && serializerConfigValue != null) {
if (!inner.getClass().getName().equals(windowedInnerSerializerClass.serializer().getClass().getName())) {
throw new IllegalArgumentException("Inner | of |
java | eclipse-vertx__vert.x | vertx-core/src/test/java/io/vertx/tests/vertx/VertxTest.java | {
"start": 2729,
"end": 3443
} | class ____ implements Closeable {
@Override
public void close(Completable<Void> completion) {
if (closedCount.incrementAndGet() == 1) {
throw new RuntimeException("Don't be afraid");
} else {
completion.succeed();
}
}
}
VertxInternal vertx = (VertxInternal) Vertx.vertx();
vertx.addCloseHook(new Hook());
vertx.addCloseHook(new Hook());
// Now undeploy
vertx
.close()
.onComplete(onSuccess(v -> {
assertEquals(2, closedCount.get());
testComplete();
}));
await();
}
@Test
public void testCloseHookFailure2() throws Exception {
AtomicInteger closedCount = new AtomicInteger();
| Hook |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_containsSequence_Test.java | {
"start": 969,
"end": 1330
} | class ____ extends LongArrayAssertBaseTest {
@Override
protected LongArrayAssert invoke_api_method() {
return assertions.containsSequence(6L, 8L);
}
@Override
protected void verify_internal_effects() {
verify(arrays).assertContainsSequence(getInfo(assertions), getActual(assertions), arrayOf(6L, 8L));
}
}
| LongArrayAssert_containsSequence_Test |
java | apache__camel | components/camel-influxdb2/src/main/java/org/apache/camel/component/influxdb2/enums/Operation.java | {
"start": 893,
"end": 1218
} | enum ____ {
INSERT("INSERT"),
PING("PING"),
;
private final String operation;
Operation(String operation) {
this.operation = operation;
}
public String getOperation() {
return operation;
}
@Override
public String toString() {
return operation;
}
}
| Operation |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/id/sequences/entities/Store.java | {
"start": 548,
"end": 780
} | class ____ implements Serializable {
private Long id;
@Id
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "SEQ_STORE")
public Long getId() {
return id;
}
public void setId(Long long1) {
id = long1;
}
}
| Store |
java | apache__flink | flink-core/src/test/java/org/apache/flink/api/java/typeutils/PojoTypeExtractionTest.java | {
"start": 5556,
"end": 6379
} | class ____ {
private Collection<String> users;
private boolean favorited;
public boolean isFavorited() {
return favorited;
}
public void setFavorited(boolean favorited) {
this.favorited = favorited;
}
public Collection<String> getUsers() {
return users;
}
public void setUsers(Collection<String> users) {
this.users = users;
}
}
@Test
void testPojoWithGenericFields() {
TypeInformation<?> typeForClass = TypeExtractor.createTypeInfo(PojoWithGenericFields.class);
assertThat(typeForClass).isInstanceOf(PojoTypeInfo.class);
}
// in this test, the location of the getters and setters is mixed across the type hierarchy.
public static | PojoWithGenericFields |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/io/support/PathMatchingResourcePatternResolverTests.java | {
"start": 22371,
"end": 22841
} | class ____ sometimes run within a
// GraalVM native image which cannot support Path#toFile.
//
// See: https://github.com/spring-projects/spring-framework/issues/29243
if (resource instanceof FileSystemResource fileSystemResource) {
return fileSystemResource.getPath();
}
try {
// Fall back to URL in case the resource came from a JAR
return resource.getURL().getPath();
}
catch (IOException ex) {
throw new UncheckedIOException(ex);
}
}
}
| is |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/dialect/function/json/JsonObjectFunction.java | {
"start": 784,
"end": 2671
} | class ____ extends AbstractSqmSelfRenderingFunctionDescriptor {
protected final boolean colonSyntax;
public JsonObjectFunction(TypeConfiguration typeConfiguration, boolean colonSyntax) {
super(
"json_object",
FunctionKind.NORMAL,
new JsonObjectArgumentsValidator(),
StandardFunctionReturnTypeResolvers.invariant(
typeConfiguration.getBasicTypeRegistry().resolve( String.class, SqlTypes.JSON )
),
null
);
this.colonSyntax = colonSyntax;
}
@Override
public void render(
SqlAppender sqlAppender,
List<? extends SqlAstNode> sqlAstArguments,
ReturnableType<?> returnType,
SqlAstTranslator<?> walker) {
sqlAppender.appendSql( "json_object" );
char separator = '(';
if ( sqlAstArguments.isEmpty() ) {
sqlAppender.appendSql( separator );
}
else {
final JsonNullBehavior nullBehavior;
final int argumentsCount;
if ( ( sqlAstArguments.size() & 1 ) == 1 ) {
nullBehavior = (JsonNullBehavior) sqlAstArguments.get( sqlAstArguments.size() - 1 );
argumentsCount = sqlAstArguments.size() - 1;
}
else {
nullBehavior = JsonNullBehavior.NULL;
argumentsCount = sqlAstArguments.size();
}
for ( int i = 0; i < argumentsCount; i += 2 ) {
sqlAppender.appendSql( separator );
final SqlAstNode key = sqlAstArguments.get( i );
final SqlAstNode value = sqlAstArguments.get( i + 1 );
key.accept( walker );
if ( colonSyntax ) {
sqlAppender.appendSql( ':' );
}
else {
sqlAppender.appendSql( " value " );
}
renderValue( sqlAppender, value, walker );
separator = ',';
}
if ( nullBehavior == JsonNullBehavior.ABSENT ) {
sqlAppender.appendSql( " absent on null" );
}
}
sqlAppender.appendSql( ')' );
}
protected void renderValue(SqlAppender sqlAppender, SqlAstNode value, SqlAstTranslator<?> walker) {
value.accept( walker );
}
}
| JsonObjectFunction |
java | elastic__elasticsearch | test/framework/src/main/java/org/elasticsearch/test/rest/RestActionTestCase.java | {
"start": 1527,
"end": 1703
} | class ____ Rest*ActionTests. Provides access to a {@link RestController}
* that can be used to register individual REST actions, and test request handling.
*/
public abstract | for |
java | elastic__elasticsearch | x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ClassificationEvaluationWithSecurityIT.java | {
"start": 939,
"end": 4477
} | class ____ extends ESRestTestCase {
private static final String BASIC_AUTH_VALUE_SUPER_USER = UsernamePasswordToken.basicAuthHeaderValue(
"x_pack_rest_user",
SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING
);
@Override
protected Settings restClientSettings() {
return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", BASIC_AUTH_VALUE_SUPER_USER).build();
}
private static void setupDataAccessRole(String index) throws IOException {
Request request = new Request("PUT", "/_security/role/test_data_access");
request.setJsonEntity(Strings.format("""
{ "indices" : [ { "names": ["%s"], "privileges": ["read"] } ]}
""", index));
client().performRequest(request);
}
private void setupUser(String user, List<String> roles) throws IOException {
String password = new String(SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING.getChars());
Request request = new Request("PUT", "/_security/user/" + user);
request.setJsonEntity(Strings.format("""
{ "password" : "%s", "roles" : [ %s ]}
""", password, roles.stream().map(unquoted -> "\"" + unquoted + "\"").collect(Collectors.joining(", "))));
client().performRequest(request);
}
public void testEvaluate_withSecurity() throws Exception {
String index = "test_data";
Request createDoc = new Request("POST", index + "/_doc");
createDoc.setJsonEntity("""
{
"is_outlier": 0.0,
"ml.outlier_score": 1.0
}""");
client().performRequest(createDoc);
Request refreshRequest = new Request("POST", index + "/_refresh");
client().performRequest(refreshRequest);
setupDataAccessRole(index);
setupUser("ml_admin", Collections.singletonList("machine_learning_admin"));
setupUser("ml_admin_plus_data", Arrays.asList("machine_learning_admin", "test_data_access"));
String mlAdmin = UsernamePasswordToken.basicAuthHeaderValue("ml_admin", SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING);
String mlAdminPlusData = UsernamePasswordToken.basicAuthHeaderValue(
"ml_admin_plus_data",
SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING
);
Request evaluateRequest = buildRegressionEval(index, mlAdmin, mlAdminPlusData);
client().performRequest(evaluateRequest);
Request failingRequest = buildRegressionEval(index, mlAdminPlusData, mlAdmin);
expectThrows(ResponseException.class, () -> client().performRequest(failingRequest));
}
private static Request buildRegressionEval(String index, String primaryHeader, String secondaryHeader) {
Request evaluateRequest = new Request("POST", "_ml/data_frame/_evaluate");
evaluateRequest.setJsonEntity(Strings.format("""
{
"index": "%s",
"evaluation": {
"regression": {
"actual_field": "is_outlier",
"predicted_field": "ml.outlier_score"
}
}
}
""", index));
RequestOptions.Builder options = evaluateRequest.getOptions().toBuilder();
options.addHeader("Authorization", primaryHeader);
options.addHeader("es-secondary-authorization", secondaryHeader);
evaluateRequest.setOptions(options);
return evaluateRequest;
}
}
| ClassificationEvaluationWithSecurityIT |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/geo/MultiPolygonTest.java | {
"start": 266,
"end": 2265
} | class ____
extends TestCase {
public void test_geo() throws Exception {
String str = "{\n" +
" \"type\": \"MultiPolygon\",\n" +
" \"coordinates\": [\n" +
" [\n" +
" [\n" +
" [102.0, 2.0],\n" +
" [103.0, 2.0],\n" +
" [103.0, 3.0],\n" +
" [102.0, 3.0],\n" +
" [102.0, 2.0]\n" +
" ]\n" +
" ],\n" +
" [\n" +
" [\n" +
" [100.0, 0.0],\n" +
" [101.0, 0.0],\n" +
" [101.0, 1.0],\n" +
" [100.0, 1.0],\n" +
" [100.0, 0.0]\n" +
" ],\n" +
" [\n" +
" [100.2, 0.2],\n" +
" [100.2, 0.8],\n" +
" [100.8, 0.8],\n" +
" [100.8, 0.2],\n" +
" [100.2, 0.2]\n" +
" ]\n" +
" ]\n" +
" ]\n" +
"}";
Geometry geometry = JSON.parseObject(str, Geometry.class);
assertEquals(MultiPolygon.class, geometry.getClass());
assertEquals(
"{\"type\":\"MultiPolygon\",\"coordinates\":[[[[102.0,2.0],[103.0,2.0],[103.0,3.0],[102.0,3.0],[102.0,2.0]]],[[[100.0,0.0],[101.0,0.0],[101.0,1.0],[100.0,1.0],[100.0,0.0]],[[100.2,0.2],[100.2,0.8],[100.8,0.8],[100.8,0.2],[100.2,0.2]]]]}"
, JSON.toJSONString(geometry));
String str2 = JSON.toJSONString(geometry);
assertEquals(str2, JSON.toJSONString(JSON.parseObject(str2, Geometry.class)));
}
}
| MultiPolygonTest |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/server/namenode/NotReplicatedYetException.java | {
"start": 1145,
"end": 1322
} | class ____ extends IOException {
private static final long serialVersionUID = 1L;
public NotReplicatedYetException(String msg) {
super(msg);
}
}
| NotReplicatedYetException |
java | apache__camel | components/camel-bindy/src/test/java/org/apache/camel/dataformat/bindy/csv/BindyComplexRegexSeparatorTest.java | {
"start": 1426,
"end": 2312
} | class ____ {
@DataField(pos = 1)
private String field1;
@DataField(pos = 2)
private String field2;
}
@Test
public void testUnmarshal() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
template.sendBody("direct:unmarshal", "header1,header2\n\"value1\",\"value,2\"");
MockEndpoint.assertIsSatisfied(context);
Example body = mock.getReceivedExchanges().get(0).getIn().getBody(Example.class);
assertEquals("value,2", body.field2);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:unmarshal").unmarshal().bindy(BindyType.Csv, Example.class).to("mock:result");
}
};
}
}
| Example |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/LockNotBeforeTry.java | {
"start": 2228,
"end": 8585
} | class ____ extends BugChecker implements MethodInvocationTreeMatcher {
private static final Matcher<ExpressionTree> LOCK =
instanceMethod().onDescendantOf("java.util.concurrent.locks.Lock").named("lock");
private static final Matcher<ExpressionTree> UNLOCK =
instanceMethod().onDescendantOf("java.util.concurrent.locks.Lock").named("unlock");
@Override
public Description matchMethodInvocation(MethodInvocationTree tree, VisitorState state) {
if (!LOCK.matches(tree, state)) {
return NO_MATCH;
}
Tree parent = state.getPath().getParentPath().getLeaf();
if (!(parent instanceof StatementTree)) {
return NO_MATCH;
}
Tree enclosing = state.getPath().getParentPath().getParentPath().getLeaf();
if (!(enclosing instanceof BlockTree block)) {
return NO_MATCH;
}
int index = block.getStatements().indexOf(parent);
if (index + 1 < block.getStatements().size()) {
StatementTree nextStatement = block.getStatements().get(index + 1);
if (nextStatement instanceof TryTree) {
return NO_MATCH;
}
}
return describe(tree, state.getPath().getParentPath(), state);
}
private Description describe(
MethodInvocationTree lockInvocation, TreePath statementPath, VisitorState state) {
Tree lockStatement = statementPath.getLeaf();
ExpressionTree lockee = getReceiver(lockInvocation);
if (lockee == null) {
return NO_MATCH;
}
TryTree enclosingTry = state.findEnclosing(TryTree.class);
if (enclosingTry != null && releases(enclosingTry, lockee, state)) {
Description.Builder description = buildDescription(lockInvocation);
if (enclosingTry.getBlock().getStatements().indexOf(lockStatement) == 0) {
description.addFix(
SuggestedFix.builder()
.replace(lockStatement, "")
.prefixWith(enclosingTry, state.getSourceForNode(lockStatement))
.build());
}
return description
.setMessage(
String.format(
"Prefer obtaining the lock for %s outside the try block. That way, if #lock"
+ " throws, the lock is not erroneously released.",
state.getSourceForNode(getReceiver(lockInvocation))))
.build();
}
Tree enclosing = state.getPath().getParentPath().getParentPath().getLeaf();
if (!(enclosing instanceof BlockTree block)) {
return NO_MATCH;
}
int index = block.getStatements().indexOf(lockStatement);
// Scan through the enclosing statements
for (StatementTree statement : Iterables.skip(block.getStatements(), index + 1)) {
// ... for a try/finally which releases this lock.
if (statement instanceof TryTree tryTree && releases(tryTree, lockee, state)) {
int start = getStartPosition(statement);
int end = getStartPosition(tryTree.getBlock().getStatements().getFirst());
SuggestedFix fix =
SuggestedFix.builder()
.replace(start, end, "")
.postfixWith(
lockStatement, state.getSourceCode().subSequence(start, end).toString())
.build();
return buildDescription(lockInvocation)
.addFix(fix)
.setMessage(
"Prefer locking *immediately* before the try block which releases the lock to"
+ " avoid the possibility of any intermediate statements throwing.")
.build();
}
// ... or an unlock at the same level.
if (statement instanceof ExpressionStatementTree expressionStatementTree) {
ExpressionTree expression = expressionStatementTree.getExpression();
if (acquires(expression, lockee, state)) {
return buildDescription(lockInvocation)
.setMessage(
String.format(
"Did you forget to release the lock on %s?",
state.getSourceForNode(getReceiver(lockInvocation))))
.build();
}
if (releases(expression, lockee, state)) {
SuggestedFix fix =
SuggestedFix.builder()
.postfixWith(lockStatement, "try {")
.prefixWith(statement, "} finally {")
.postfixWith(statement, "}")
.build();
return buildDescription(lockInvocation)
.addFix(fix)
.setMessage(
String.format(
"Prefer releasing the lock on %s inside a finally block.",
state.getSourceForNode(getReceiver(lockInvocation))))
.build();
}
}
}
return NO_MATCH;
}
private static boolean releases(TryTree tryTree, ExpressionTree lockee, VisitorState state) {
if (tryTree.getFinallyBlock() == null) {
return false;
}
// False if a different lock was released, true if 'lockee' was released, null otherwise.
Boolean released =
new TreeScanner<Boolean, Void>() {
@Override
public @Nullable Boolean reduce(Boolean r1, Boolean r2) {
return r1 == null ? r2 : (r2 == null ? null : r1 && r2);
}
@Override
public Boolean visitMethodInvocation(MethodInvocationTree node, Void unused) {
if (UNLOCK.matches(node, state)) {
return releases(node, lockee, state);
}
return super.visitMethodInvocation(node, null);
}
}.scan(tryTree.getFinallyBlock(), null);
return released == null ? false : released;
}
private static boolean releases(ExpressionTree node, ExpressionTree lockee, VisitorState state) {
if (!UNLOCK.matches(node, state)) {
return false;
}
ExpressionTree receiver = getReceiver(node);
return receiver != null
&& UNLOCK.matches(node, state)
&& state.getSourceForNode(receiver).equals(state.getSourceForNode(lockee));
}
private static boolean acquires(ExpressionTree node, ExpressionTree lockee, VisitorState state) {
if (!LOCK.matches(node, state)) {
return false;
}
ExpressionTree receiver = getReceiver(node);
return receiver != null
&& LOCK.matches(node, state)
&& state.getSourceForNode(receiver).equals(state.getSourceForNode(lockee));
}
}
| LockNotBeforeTry |
java | apache__logging-log4j2 | log4j-layout-template-json/src/main/java/org/apache/logging/log4j/layout/template/json/JsonTemplateLayout.java | {
"start": 13252,
"end": 20862
} | class ____ implements org.apache.logging.log4j.core.util.Builder<JsonTemplateLayout> {
@PluginConfiguration
private Configuration configuration;
@PluginBuilderAttribute
private Charset charset = JsonTemplateLayoutDefaults.getCharset();
@PluginBuilderAttribute
private boolean locationInfoEnabled = JsonTemplateLayoutDefaults.isLocationInfoEnabled();
@PluginBuilderAttribute
private boolean stackTraceEnabled = JsonTemplateLayoutDefaults.isStackTraceEnabled();
@PluginBuilderAttribute
private String eventTemplate = JsonTemplateLayoutDefaults.getEventTemplate();
@PluginBuilderAttribute
private String eventTemplateUri = JsonTemplateLayoutDefaults.getEventTemplateUri();
@PluginBuilderAttribute
private String eventTemplateRootObjectKey = JsonTemplateLayoutDefaults.getEventTemplateRootObjectKey();
@PluginElement("EventTemplateAdditionalField")
private EventTemplateAdditionalField[] eventTemplateAdditionalFields;
@PluginBuilderAttribute
private String stackTraceElementTemplate = JsonTemplateLayoutDefaults.getStackTraceElementTemplate();
@PluginBuilderAttribute
private String stackTraceElementTemplateUri = JsonTemplateLayoutDefaults.getStackTraceElementTemplateUri();
@PluginBuilderAttribute
private String eventDelimiter = JsonTemplateLayoutDefaults.getEventDelimiter();
@PluginBuilderAttribute
private boolean nullEventDelimiterEnabled = JsonTemplateLayoutDefaults.isNullEventDelimiterEnabled();
@PluginBuilderAttribute
private int maxStringLength = JsonTemplateLayoutDefaults.getMaxStringLength();
@PluginBuilderAttribute
private String truncatedStringSuffix = JsonTemplateLayoutDefaults.getTruncatedStringSuffix();
@PluginBuilderAttribute
private RecyclerFactory recyclerFactory = JsonTemplateLayoutDefaults.getRecyclerFactory();
private Builder() {
// Do nothing.
}
public Configuration getConfiguration() {
return configuration;
}
public Builder setConfiguration(final Configuration configuration) {
this.configuration = configuration;
return this;
}
public Charset getCharset() {
return charset;
}
public Builder setCharset(final Charset charset) {
this.charset = charset;
return this;
}
public boolean isLocationInfoEnabled() {
return locationInfoEnabled;
}
public Builder setLocationInfoEnabled(final boolean locationInfoEnabled) {
this.locationInfoEnabled = locationInfoEnabled;
return this;
}
public boolean isStackTraceEnabled() {
return stackTraceEnabled;
}
public Builder setStackTraceEnabled(final boolean stackTraceEnabled) {
this.stackTraceEnabled = stackTraceEnabled;
return this;
}
public String getEventTemplate() {
return eventTemplate;
}
public Builder setEventTemplate(final String eventTemplate) {
this.eventTemplate = eventTemplate;
return this;
}
public String getEventTemplateUri() {
return eventTemplateUri;
}
public Builder setEventTemplateUri(final String eventTemplateUri) {
this.eventTemplateUri = eventTemplateUri;
return this;
}
public String getEventTemplateRootObjectKey() {
return eventTemplateRootObjectKey;
}
public Builder setEventTemplateRootObjectKey(final String eventTemplateRootObjectKey) {
this.eventTemplateRootObjectKey = eventTemplateRootObjectKey;
return this;
}
public EventTemplateAdditionalField[] getEventTemplateAdditionalFields() {
return eventTemplateAdditionalFields;
}
public Builder setEventTemplateAdditionalFields(
final EventTemplateAdditionalField[] eventTemplateAdditionalFields) {
this.eventTemplateAdditionalFields = eventTemplateAdditionalFields;
return this;
}
public String getStackTraceElementTemplate() {
return stackTraceElementTemplate;
}
public Builder setStackTraceElementTemplate(final String stackTraceElementTemplate) {
this.stackTraceElementTemplate = stackTraceElementTemplate;
return this;
}
public String getStackTraceElementTemplateUri() {
return stackTraceElementTemplateUri;
}
public Builder setStackTraceElementTemplateUri(final String stackTraceElementTemplateUri) {
this.stackTraceElementTemplateUri = stackTraceElementTemplateUri;
return this;
}
public String getEventDelimiter() {
return eventDelimiter;
}
public Builder setEventDelimiter(final String eventDelimiter) {
this.eventDelimiter = eventDelimiter;
return this;
}
public boolean isNullEventDelimiterEnabled() {
return nullEventDelimiterEnabled;
}
public Builder setNullEventDelimiterEnabled(final boolean nullEventDelimiterEnabled) {
this.nullEventDelimiterEnabled = nullEventDelimiterEnabled;
return this;
}
public int getMaxStringLength() {
return maxStringLength;
}
public Builder setMaxStringLength(final int maxStringLength) {
this.maxStringLength = maxStringLength;
return this;
}
public String getTruncatedStringSuffix() {
return truncatedStringSuffix;
}
public Builder setTruncatedStringSuffix(final String truncatedStringSuffix) {
this.truncatedStringSuffix = truncatedStringSuffix;
return this;
}
public RecyclerFactory getRecyclerFactory() {
return recyclerFactory;
}
public Builder setRecyclerFactory(final RecyclerFactory recyclerFactory) {
this.recyclerFactory = recyclerFactory;
return this;
}
@Override
public JsonTemplateLayout build() {
validate();
return new JsonTemplateLayout(this);
}
private void validate() {
Objects.requireNonNull(configuration, "configuration");
if (Strings.isBlank(eventTemplate) && Strings.isBlank(eventTemplateUri)) {
throw new IllegalArgumentException("both eventTemplate and eventTemplateUri are blank");
}
if (stackTraceEnabled
&& Strings.isBlank(stackTraceElementTemplate)
&& Strings.isBlank(stackTraceElementTemplateUri)) {
throw new IllegalArgumentException(
"both stackTraceElementTemplate and stackTraceElementTemplateUri are blank");
}
if (maxStringLength <= 0) {
throw new IllegalArgumentException(
"was expecting a non-zero positive maxStringLength: " + maxStringLength);
}
Objects.requireNonNull(truncatedStringSuffix, "truncatedStringSuffix");
Objects.requireNonNull(recyclerFactory, "recyclerFactory");
}
}
@Plugin(name = "EventTemplateAdditionalField", category = Node.CATEGORY, printObject = true)
public static final | Builder |
java | hibernate__hibernate-orm | hibernate-scan-jandex/src/main/java/org/hibernate/archive/scan/spi/NonClassFileArchiveEntryHandler.java | {
"start": 520,
"end": 601
} | class ____...) entries within an archive
*
* @author Steve Ebersole
*/
public | file |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/context/ContextCustomizerFactories.java | {
"start": 3549,
"end": 4245
} | class ____ does <strong>not</strong> inherit factories from
* a superclass or enclosing class.
* <p>Can be set to {@link MergeMode#REPLACE_DEFAULTS REPLACE_DEFAULTS} to
* have locally declared factories replace the default factories.
* <p>The mode is ignored if factories are inherited from a superclass or
* enclosing class.
* <p>Defaults to {@link MergeMode#MERGE_WITH_DEFAULTS MERGE_WITH_DEFAULTS}.
* @see MergeMode
*/
MergeMode mergeMode() default MergeMode.MERGE_WITH_DEFAULTS;
/**
* Enumeration of <em>modes</em> that dictate whether explicitly declared
* factories are merged with the default factories when
* {@code @ContextCustomizerFactories} is declared on a | that |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/bug/Bug_for_uin57.java | {
"start": 3247,
"end": 3657
} | class ____ {
@JSONField(name = "dx")
private int x;
@JSONField(name = "dy")
private int y;
public int getX() {
return x;
}
public void setX(int x) {
this.x = x;
}
public int getY() {
return y;
}
public void setY(int y) {
this.y = y;
}
}
public static | Box |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/cluster/metadata/SingleNodeShutdownMetadataTests.java | {
"start": 564,
"end": 2461
} | class ____ extends ESTestCase {
public void testStatusComination() {
SingleNodeShutdownMetadata.Status status;
status = SingleNodeShutdownMetadata.Status.combine(
SingleNodeShutdownMetadata.Status.NOT_STARTED,
SingleNodeShutdownMetadata.Status.IN_PROGRESS,
SingleNodeShutdownMetadata.Status.STALLED
);
assertEquals(status, SingleNodeShutdownMetadata.Status.STALLED);
status = SingleNodeShutdownMetadata.Status.combine(
SingleNodeShutdownMetadata.Status.NOT_STARTED,
SingleNodeShutdownMetadata.Status.IN_PROGRESS,
SingleNodeShutdownMetadata.Status.NOT_STARTED
);
assertEquals(status, SingleNodeShutdownMetadata.Status.IN_PROGRESS);
status = SingleNodeShutdownMetadata.Status.combine(
SingleNodeShutdownMetadata.Status.NOT_STARTED,
SingleNodeShutdownMetadata.Status.NOT_STARTED,
SingleNodeShutdownMetadata.Status.NOT_STARTED
);
assertEquals(status, SingleNodeShutdownMetadata.Status.NOT_STARTED);
status = SingleNodeShutdownMetadata.Status.combine(
SingleNodeShutdownMetadata.Status.IN_PROGRESS,
SingleNodeShutdownMetadata.Status.IN_PROGRESS,
SingleNodeShutdownMetadata.Status.COMPLETE
);
assertEquals(status, SingleNodeShutdownMetadata.Status.IN_PROGRESS);
status = SingleNodeShutdownMetadata.Status.combine(
SingleNodeShutdownMetadata.Status.COMPLETE,
SingleNodeShutdownMetadata.Status.COMPLETE,
SingleNodeShutdownMetadata.Status.COMPLETE
);
assertEquals(status, SingleNodeShutdownMetadata.Status.COMPLETE);
status = SingleNodeShutdownMetadata.Status.combine();
assertEquals(status, SingleNodeShutdownMetadata.Status.COMPLETE);
}
}
| SingleNodeShutdownMetadataTests |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/model/process/internal/ScanningCoordinator.java | {
"start": 10598,
"end": 10977
} | class ____ package names
for ( PackageDescriptor packageDescriptor : scanResult.getLocatedPackages() ) {
managedResources.addAnnotatedPackageName( packageDescriptor.getName() );
unresolvedListedClassNames.remove( packageDescriptor.getName() );
}
for ( String unresolvedListedClassName : unresolvedListedClassNames ) {
// because the explicit list can contain both | and |
java | apache__dubbo | dubbo-plugin/dubbo-qos/src/main/java/org/apache/dubbo/qos/server/QosBindException.java | {
"start": 892,
"end": 1044
} | class ____ extends RuntimeException {
public QosBindException(String message, Throwable cause) {
super(message, cause);
}
}
| QosBindException |
java | apache__camel | components/camel-crypto-pgp/src/main/java/org/apache/camel/converter/crypto/PGPKeyAccessDataFormat.java | {
"start": 34341,
"end": 34877
} | class ____ {
private final InputStream decryptedData;
private final PGPPublicKeyEncryptedData pbe;
DecryptedDataAndPPublicKeyEncryptedData(InputStream decryptedData, PGPPublicKeyEncryptedData pbe) {
this.decryptedData = decryptedData;
this.pbe = pbe;
}
public InputStream getDecryptedData() {
return decryptedData;
}
public PGPPublicKeyEncryptedData getPbe() {
return pbe;
}
}
}
| DecryptedDataAndPPublicKeyEncryptedData |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/pool/vendor/InformixExceptionSorterTest.java | {
"start": 184,
"end": 2009
} | class ____ extends PoolTestCase {
public void test_informix() throws Exception {
InformixExceptionSorter sorter = new InformixExceptionSorter();
assertEquals(false, sorter.isExceptionFatal(new SQLException()));
assertEquals(true, sorter.isExceptionFatal(new SQLException("", "", -710)));
assertEquals(true, sorter.isExceptionFatal(new SQLException("", "", -79716)));
assertEquals(true, sorter.isExceptionFatal(new SQLException("", "", -79730)));
assertEquals(true, sorter.isExceptionFatal(new SQLException("", "", -79734)));
assertEquals(true, sorter.isExceptionFatal(new SQLException("", "", -79735)));
assertEquals(true, sorter.isExceptionFatal(new SQLException("", "", -79736)));
assertEquals(true, sorter.isExceptionFatal(new SQLException("", "", -79756)));
assertEquals(true, sorter.isExceptionFatal(new SQLException("", "", -79757)));
assertEquals(true, sorter.isExceptionFatal(new SQLException("", "", -79758)));
assertEquals(true, sorter.isExceptionFatal(new SQLException("", "", -79759)));
assertEquals(true, sorter.isExceptionFatal(new SQLException("", "", -79760)));
assertEquals(true, sorter.isExceptionFatal(new SQLException("", "", -79788)));
assertEquals(true, sorter.isExceptionFatal(new SQLException("", "", -79811)));
assertEquals(true, sorter.isExceptionFatal(new SQLException("", "", -79812)));
assertEquals(true, sorter.isExceptionFatal(new SQLException("", "", -79836)));
assertEquals(true, sorter.isExceptionFatal(new SQLException("", "", -79837)));
assertEquals(true, sorter.isExceptionFatal(new SQLException("", "", -79879)));
assertEquals(false, sorter.isExceptionFatal(new SQLException("", "", 100)));
}
}
| InformixExceptionSorterTest |
java | apache__rocketmq | test/src/test/java/org/apache/rocketmq/test/container/PopSlaveActingMasterIT.java | {
"start": 2451,
"end": 21714
} | class ____ extends ContainerIntegrationTestBase {
private static final String CONSUME_GROUP = PopSlaveActingMasterIT.class.getSimpleName() + "_Consumer";
private final static int MESSAGE_COUNT = 16;
private final Random random = new Random();
private static DefaultMQProducer producer;
private final static String MESSAGE_STRING = RandomStringUtils.random(1024);
private static final byte[] MESSAGE_BODY = MESSAGE_STRING.getBytes(StandardCharsets.UTF_8);
private final BrokerConfig brokerConfig = new BrokerConfig();
public PopSlaveActingMasterIT() {
}
void createTopic(String topic) {
createTopicTo(master1With3Replicas, topic, 1, 1);
createTopicTo(master2With3Replicas, topic, 1, 1);
createTopicTo(master3With3Replicas, topic, 1, 1);
}
@BeforeClass
public static void beforeClass() throws Throwable {
producer = createProducer(PopSlaveActingMasterIT.class.getSimpleName() + "_PRODUCER");
producer.setSendMsgTimeout(5000);
producer.start();
}
@AfterClass
public static void afterClass() throws Exception {
producer.shutdown();
}
@Test
public void testLocalActing_ackSlave() throws Exception {
String topic = PopSlaveActingMasterIT.class.getSimpleName() + random.nextInt(65535);
createTopic(topic);
String retryTopic = KeyBuilder.buildPopRetryTopic(topic, CONSUME_GROUP, brokerConfig.isEnableRetryTopicV2());
createTopic(retryTopic);
this.switchPop(topic);
producer.getDefaultMQProducerImpl().getmQClientFactory().updateTopicRouteInfoFromNameServer(topic);
MessageQueue messageQueue = new MessageQueue(topic, master1With3Replicas.getBrokerConfig().getBrokerName(), 0);
int sendSuccess = 0;
for (int i = 0; i < MESSAGE_COUNT; i++) {
Message msg = new Message(topic, MESSAGE_BODY);
SendResult sendResult = producer.send(msg, messageQueue);
if (sendResult.getSendStatus() == SendStatus.SEND_OK) {
sendSuccess++;
}
}
final int finalSendSuccess = sendSuccess;
await().atMost(Duration.ofMinutes(1)).until(() -> finalSendSuccess >= MESSAGE_COUNT);
isolateBroker(master1With3Replicas);
DefaultMQPushConsumer consumer = createPushConsumer(CONSUME_GROUP);
consumer.subscribe(topic, "*");
consumer.setConsumeFromWhere(ConsumeFromWhere.CONSUME_FROM_FIRST_OFFSET);
List<String> consumedMessages = new CopyOnWriteArrayList<>();
consumer.registerMessageListener((MessageListenerConcurrently) (msgs, context) -> {
msgs.forEach(msg -> {
consumedMessages.add(msg.getMsgId());
});
return ConsumeConcurrentlyStatus.CONSUME_SUCCESS;
});
consumer.setClientRebalance(false);
consumer.start();
await().atMost(Duration.ofMinutes(1)).until(() -> consumedMessages.size() >= MESSAGE_COUNT);
consumer.shutdown();
List<String> retryMsgList = new CopyOnWriteArrayList<>();
DefaultMQPushConsumer pushConsumer = createPushConsumer(CONSUME_GROUP);
pushConsumer.subscribe(retryTopic, "*");
pushConsumer.registerMessageListener((MessageListenerConcurrently) (msgs, context) -> {
for (MessageExt msg : msgs) {
retryMsgList.add(new String(msg.getBody()));
}
return ConsumeConcurrentlyStatus.CONSUME_SUCCESS;
});
pushConsumer.start();
Thread.sleep(10000L);
assertThat(retryMsgList.size()).isEqualTo(0);
cancelIsolatedBroker(master1With3Replicas);
awaitUntilSlaveOK();
pushConsumer.shutdown();
}
@Test
public void testLocalActing_notAckSlave() throws Exception {
String topic = PopSlaveActingMasterIT.class.getSimpleName() + random.nextInt(65535);
createTopic(topic);
String retryTopic = KeyBuilder.buildPopRetryTopic(topic, CONSUME_GROUP, brokerConfig.isEnableRetryTopicV2());
createTopic(retryTopic);
this.switchPop(topic);
producer.getDefaultMQProducerImpl().getmQClientFactory().updateTopicRouteInfoFromNameServer(topic);
Set<String> sendToIsolateMsgSet = new HashSet<>();
MessageQueue messageQueue = new MessageQueue(topic, master1With3Replicas.getBrokerConfig().getBrokerName(), 0);
int sendSuccess = 0;
for (int i = 0; i < MESSAGE_COUNT; i++) {
Message msg = new Message(topic, MESSAGE_BODY);
SendResult sendResult = producer.send(msg, messageQueue);
if (sendResult.getSendStatus() == SendStatus.SEND_OK) {
sendToIsolateMsgSet.add(new String(msg.getBody()));
sendSuccess++;
}
}
System.out.printf("send success %d%n", sendSuccess);
final int finalSendSuccess = sendSuccess;
await().atMost(Duration.ofMinutes(1)).until(() -> finalSendSuccess >= MESSAGE_COUNT);
isolateBroker(master1With3Replicas);
System.out.printf("isolate master1%n");
DefaultMQPushConsumer consumer = createPushConsumer(CONSUME_GROUP);
consumer.subscribe(topic, "*");
consumer.setConsumeFromWhere(ConsumeFromWhere.CONSUME_FROM_FIRST_OFFSET);
consumer.setPopInvisibleTime(5000L);
List<String> consumedMessages = new CopyOnWriteArrayList<>();
consumer.registerMessageListener((MessageListenerConcurrently) (msgs, context) -> {
msgs.forEach(msg -> {
msg.setReconsumeTimes(0);
consumedMessages.add(msg.getMsgId());
});
return ConsumeConcurrentlyStatus.RECONSUME_LATER;
});
consumer.setClientRebalance(false);
consumer.start();
await().atMost(Duration.ofMinutes(1)).until(() -> consumedMessages.size() >= MESSAGE_COUNT);
consumer.shutdown();
List<String> retryMsgList = new CopyOnWriteArrayList<>();
DefaultMQPushConsumer pushConsumer = createPushConsumer(CONSUME_GROUP);
pushConsumer.subscribe(retryTopic, "*");
pushConsumer.setConsumeFromWhere(ConsumeFromWhere.CONSUME_FROM_FIRST_OFFSET);
pushConsumer.registerMessageListener((MessageListenerConcurrently) (msgs, context) -> {
for (MessageExt msg : msgs) {
retryMsgList.add(new String(msg.getBody()));
}
return ConsumeConcurrentlyStatus.CONSUME_SUCCESS;
});
pushConsumer.start();
AtomicInteger failCnt = new AtomicInteger(0);
await().atMost(Duration.ofMinutes(3)).pollInterval(Duration.ofSeconds(10)).until(() -> {
if (retryMsgList.size() < MESSAGE_COUNT) {
return false;
}
for (String msgBodyString : retryMsgList) {
if (!sendToIsolateMsgSet.contains(msgBodyString)) {
return false;
}
}
return true;
});
cancelIsolatedBroker(master1With3Replicas);
awaitUntilSlaveOK();
pushConsumer.shutdown();
}
@Test
public void testRemoteActing_ackSlave() throws Exception {
String topic = PopSlaveActingMasterIT.class.getSimpleName() + random.nextInt(65535);
createTopic(topic);
String retryTopic = KeyBuilder.buildPopRetryTopic(topic, CONSUME_GROUP, brokerConfig.isEnableRetryTopicV2());
createTopic(retryTopic);
switchPop(topic);
producer.getDefaultMQProducerImpl().getmQClientFactory().updateTopicRouteInfoFromNameServer(topic);
MessageQueue messageQueue = new MessageQueue(topic, master1With3Replicas.getBrokerConfig().getBrokerName(), 0);
int sendSuccess = 0;
for (int i = 0; i < MESSAGE_COUNT; i++) {
Message msg = new Message(topic, MESSAGE_BODY);
SendResult sendResult = producer.send(msg, messageQueue);
if (sendResult.getSendStatus() == SendStatus.SEND_OK) {
sendSuccess++;
}
}
final int finalSendSuccess = sendSuccess;
await().atMost(Duration.ofMinutes(1)).until(() -> finalSendSuccess >= MESSAGE_COUNT);
isolateBroker(master1With3Replicas);
isolateBroker(master2With3Replicas);
brokerContainer2.removeBroker(new BrokerIdentity(
master2With3Replicas.getBrokerConfig().getBrokerClusterName(),
master2With3Replicas.getBrokerConfig().getBrokerName(),
master2With3Replicas.getBrokerConfig().getBrokerId()));
DefaultMQPushConsumer consumer = createPushConsumer(CONSUME_GROUP);
consumer.subscribe(topic, "*");
consumer.setConsumeFromWhere(ConsumeFromWhere.CONSUME_FROM_FIRST_OFFSET);
List<String> consumedMessages = new CopyOnWriteArrayList<>();
consumer.registerMessageListener((MessageListenerConcurrently) (msgs, context) -> {
msgs.forEach(msg -> {
consumedMessages.add(msg.getMsgId());
});
return ConsumeConcurrentlyStatus.CONSUME_SUCCESS;
});
consumer.setClientRebalance(false);
consumer.start();
await().atMost(Duration.ofMinutes(2)).until(() -> consumedMessages.size() >= MESSAGE_COUNT);
consumer.shutdown();
List<String> retryMsgList = new CopyOnWriteArrayList<>();
DefaultMQPushConsumer pushConsumer = createPushConsumer(CONSUME_GROUP);
pushConsumer.subscribe(retryTopic, "*");
pushConsumer.registerMessageListener((MessageListenerConcurrently) (msgs, context) -> {
for (MessageExt msg : msgs) {
retryMsgList.add(new String(msg.getBody()));
}
return ConsumeConcurrentlyStatus.CONSUME_SUCCESS;
});
pushConsumer.start();
Thread.sleep(10000);
assertThat(retryMsgList.size()).isEqualTo(0);
cancelIsolatedBroker(master1With3Replicas);
//Add back master
master2With3Replicas = brokerContainer2.addBroker(buildConfigContext(master2With3Replicas.getBrokerConfig(), master2With3Replicas.getMessageStoreConfig()));
master2With3Replicas.start();
cancelIsolatedBroker(master2With3Replicas);
awaitUntilSlaveOK();
Thread.sleep(10000);
assertThat(retryMsgList.size()).isEqualTo(0);
pushConsumer.shutdown();
}
@Test
public void testRemoteActing_notAckSlave_getFromLocal() throws Exception {
String topic = PopSlaveActingMasterIT.class.getSimpleName() + random.nextInt(65535);
createTopic(topic);
this.switchPop(topic);
String retryTopic = KeyBuilder.buildPopRetryTopic(topic, CONSUME_GROUP, brokerConfig.isEnableRetryTopicV2());
createTopic(retryTopic);
producer.getDefaultMQProducerImpl().getmQClientFactory().updateTopicRouteInfoFromNameServer(topic);
Set<String> sendToIsolateMsgSet = new HashSet<>();
MessageQueue messageQueue = new MessageQueue(topic, master1With3Replicas.getBrokerConfig().getBrokerName(), 0);
int sendSuccess = 0;
for (int i = 0; i < MESSAGE_COUNT; i++) {
Message msg = new Message(topic, MESSAGE_BODY);
SendResult sendResult = producer.send(msg, messageQueue);
if (sendResult.getSendStatus() == SendStatus.SEND_OK) {
sendToIsolateMsgSet.add(new String(msg.getBody()));
sendSuccess++;
}
}
final int finalSendSuccess = sendSuccess;
await().atMost(Duration.ofMinutes(1)).until(() -> finalSendSuccess >= MESSAGE_COUNT);
isolateBroker(master1With3Replicas);
isolateBroker(master2With3Replicas);
brokerContainer2.removeBroker(new BrokerIdentity(
master2With3Replicas.getBrokerConfig().getBrokerClusterName(),
master2With3Replicas.getBrokerConfig().getBrokerName(),
master2With3Replicas.getBrokerConfig().getBrokerId()));
DefaultMQPushConsumer consumer = createPushConsumer(CONSUME_GROUP);
consumer.subscribe(topic, "*");
consumer.setConsumeFromWhere(ConsumeFromWhere.CONSUME_FROM_FIRST_OFFSET);
List<String> consumedMessages = new CopyOnWriteArrayList<>();
consumer.registerMessageListener((MessageListenerConcurrently) (msgs, context) -> {
msgs.forEach(msg -> {
consumedMessages.add(msg.getMsgId());
});
return ConsumeConcurrentlyStatus.RECONSUME_LATER;
});
consumer.setClientRebalance(false);
consumer.start();
await().atMost(Duration.ofMinutes(3)).until(() -> consumedMessages.size() >= MESSAGE_COUNT);
consumer.shutdown();
List<String> retryMsgList = new CopyOnWriteArrayList<>();
DefaultMQPushConsumer pushConsumer = createPushConsumer(CONSUME_GROUP);
pushConsumer.subscribe(retryTopic, "*");
pushConsumer.registerMessageListener((MessageListenerConcurrently) (msgs, context) -> {
for (MessageExt msg : msgs) {
retryMsgList.add(new String(msg.getBody()));
}
return ConsumeConcurrentlyStatus.CONSUME_SUCCESS;
});
pushConsumer.start();
await().atMost(Duration.ofMinutes(1)).until(() -> {
if (retryMsgList.size() < MESSAGE_COUNT) {
return false;
}
for (String msgBodyString : retryMsgList) {
if (!sendToIsolateMsgSet.contains(msgBodyString)) {
return false;
}
}
return true;
});
cancelIsolatedBroker(master1With3Replicas);
//Add back master
master2With3Replicas = brokerContainer2.addBroker(buildConfigContext(master2With3Replicas.getBrokerConfig(), master2With3Replicas.getMessageStoreConfig()));
master2With3Replicas.start();
cancelIsolatedBroker(master2With3Replicas);
awaitUntilSlaveOK();
pushConsumer.shutdown();
}
@Test
public void testRemoteActing_notAckSlave_getFromRemote() throws Exception {
String topic = PopSlaveActingMasterIT.class.getSimpleName() + random.nextInt(65535);
createTopic(topic);
this.switchPop(topic);
String retryTopic = KeyBuilder.buildPopRetryTopic(topic, CONSUME_GROUP, brokerConfig.isEnableRetryTopicV2());
createTopic(retryTopic);
producer.getDefaultMQProducerImpl().getmQClientFactory().updateTopicRouteInfoFromNameServer(topic);
Set<String> sendToIsolateMsgSet = new HashSet<>();
MessageQueue messageQueue = new MessageQueue(topic, master1With3Replicas.getBrokerConfig().getBrokerName(), 0);
int sendSuccess = 0;
for (int i = 0; i < MESSAGE_COUNT; i++) {
Message msg = new Message(topic, MESSAGE_BODY);
SendResult sendResult = producer.send(msg, messageQueue);
if (sendResult.getSendStatus() == SendStatus.SEND_OK) {
sendToIsolateMsgSet.add(new String(msg.getBody()));
sendSuccess++;
}
}
final int finalSendSuccess = sendSuccess;
await().atMost(Duration.ofMinutes(1)).until(() -> finalSendSuccess >= MESSAGE_COUNT);
isolateBroker(master1With3Replicas);
isolateBroker(master2With3Replicas);
brokerContainer2.removeBroker(new BrokerIdentity(
master2With3Replicas.getBrokerConfig().getBrokerClusterName(),
master2With3Replicas.getBrokerConfig().getBrokerName(),
master2With3Replicas.getBrokerConfig().getBrokerId()));
BrokerController slave1InBrokerContainer3 = getSlaveFromContainerByName(brokerContainer3, master1With3Replicas.getBrokerConfig().getBrokerName());
isolateBroker(slave1InBrokerContainer3);
brokerContainer3.removeBroker(new BrokerIdentity(
slave1InBrokerContainer3.getBrokerConfig().getBrokerClusterName(),
slave1InBrokerContainer3.getBrokerConfig().getBrokerName(),
slave1InBrokerContainer3.getBrokerConfig().getBrokerId()));
DefaultMQPushConsumer consumer = createPushConsumer(CONSUME_GROUP);
consumer.subscribe(topic, "*");
consumer.setConsumeFromWhere(ConsumeFromWhere.CONSUME_FROM_FIRST_OFFSET);
List<String> consumedMessages = new CopyOnWriteArrayList<>();
consumer.registerMessageListener((MessageListenerConcurrently) (msgs, context) -> {
msgs.forEach(msg -> {
consumedMessages.add(msg.getMsgId());
});
return ConsumeConcurrentlyStatus.RECONSUME_LATER;
});
consumer.setClientRebalance(false);
consumer.start();
await().atMost(Duration.ofMinutes(1)).until(() -> consumedMessages.size() >= MESSAGE_COUNT);
consumer.shutdown();
List<String> retryMsgList = new CopyOnWriteArrayList<>();
DefaultMQPushConsumer pushConsumer = createPushConsumer(CONSUME_GROUP);
pushConsumer.subscribe(retryTopic, "*");
pushConsumer.registerMessageListener((MessageListenerConcurrently) (msgs, context) -> {
for (MessageExt msg : msgs) {
retryMsgList.add(new String(msg.getBody()));
}
return ConsumeConcurrentlyStatus.CONSUME_SUCCESS;
});
pushConsumer.start();
Thread.sleep(10000);
await().atMost(Duration.ofMinutes(1)).until(() -> {
if (retryMsgList.size() < MESSAGE_COUNT) {
return false;
}
for (String msgBodyString : retryMsgList) {
if (!sendToIsolateMsgSet.contains(msgBodyString)) {
return false;
}
}
return true;
});
cancelIsolatedBroker(master1With3Replicas);
//Add back master
master2With3Replicas = brokerContainer2.addBroker(buildConfigContext(master2With3Replicas.getBrokerConfig(), master2With3Replicas.getMessageStoreConfig()));
master2With3Replicas.start();
cancelIsolatedBroker(master2With3Replicas);
//Add back slave1 to container3
slave1InBrokerContainer3 = brokerContainer3.addBroker(buildConfigContext(slave1InBrokerContainer3.getBrokerConfig(), slave1InBrokerContainer3.getMessageStoreConfig()));
slave1InBrokerContainer3.start();
cancelIsolatedBroker(slave1InBrokerContainer3);
awaitUntilSlaveOK();
pushConsumer.shutdown();
}
private void switchPop(String topic) throws Exception {
for (BrokerContainer brokerContainer : brokerContainerList) {
for (InnerBrokerController master : brokerContainer.getMasterBrokers()) {
String brokerAddr = master.getBrokerAddr();
defaultMQAdminExt.setMessageRequestMode(brokerAddr, topic, CONSUME_GROUP, MessageRequestMode.POP, 8, 60_000);
}
for (InnerSalveBrokerController slave : brokerContainer.getSlaveBrokers()) {
defaultMQAdminExt.setMessageRequestMode(slave.getBrokerAddr(), topic, CONSUME_GROUP, MessageRequestMode.POP, 8, 60_000);
}
}
}
}
| PopSlaveActingMasterIT |
java | apache__flink | flink-filesystems/flink-s3-fs-base/src/main/java/org/apache/flink/fs/s3/common/token/DynamicTemporaryAWSCredentialsProvider.java | {
"start": 1733,
"end": 2912
} | class ____ implements AWSCredentialsProvider {
public static final String NAME = DynamicTemporaryAWSCredentialsProvider.class.getName();
public static final String COMPONENT = "Dynamic session credentials for Flink";
private static final Logger LOG =
LoggerFactory.getLogger(DynamicTemporaryAWSCredentialsProvider.class);
public DynamicTemporaryAWSCredentialsProvider() {}
public DynamicTemporaryAWSCredentialsProvider(URI uri, Configuration conf) {}
@Override
public AWSCredentials getCredentials() throws SdkBaseException {
Credentials credentials = AbstractS3DelegationTokenReceiver.getCredentials();
if (credentials == null) {
throw new NoAwsCredentialsException(COMPONENT);
}
LOG.debug("Providing session credentials");
return new BasicSessionCredentials(
credentials.getAccessKeyId(),
credentials.getSecretAccessKey(),
credentials.getSessionToken());
}
@Override
public void refresh() {
// Intentionally blank. Credentials are updated by S3DelegationTokenReceiver
}
}
| DynamicTemporaryAWSCredentialsProvider |
java | apache__hadoop | hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/server/dns/RecordCreatorFactory.java | {
"start": 3499,
"end": 4021
} | class ____ implements RecordCreator<CNAMERecord, Name> {
/**
* Creates a CNAME record creator.
*/
public CNAMERecordCreator() {
}
/**
* Creates a DNS CNAME record.
*
* @param name the record name.
* @param target the record target/value.
* @return an A record.
*/
@Override public CNAMERecord create(Name name, Name target) {
return new CNAMERecord(name, DClass.IN, ttl, target);
}
}
/**
* A TXT Record creator.
*/
static | CNAMERecordCreator |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/lazy/proxy/inlinedirtychecking/DirtyCheckPrivateUnMappedCollectionTest.java | {
"start": 2633,
"end": 3311
} | class ____ {
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
private Integer id;
private String name;
@Convert(converter = TagAttributeConverter.class)
private List<Tag> tags = new ArrayList<>();
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public List<Tag> getTags() {
return tags;
}
public void setTags(List<Tag> tags) {
this.tags = tags;
}
public void addTag(Tag tag) {
this.tags.add( tag );
}
}
@Entity(name = "Measurement")
public static | AbstractMeasurement |
java | quarkusio__quarkus | extensions/resteasy-classic/resteasy/deployment/src/test/java/io/quarkus/resteasy/test/security/ProactiveAuthHttpPolicyCustomForbiddenExHandlerTest.java | {
"start": 2593,
"end": 3302
} | class ____ {
public static final String CUSTOM_FORBIDDEN_EXCEPTION_HANDLER = CustomForbiddenFailureHandler.class.getName();
public void init(@Observes Router router) {
router.route().failureHandler(new Handler<RoutingContext>() {
@Override
public void handle(RoutingContext event) {
if (event.failure() instanceof ForbiddenException) {
event.response().setStatusCode(FORBIDDEN.getStatusCode()).end(CUSTOM_FORBIDDEN_EXCEPTION_HANDLER);
} else {
event.next();
}
}
});
}
}
}
| CustomForbiddenFailureHandler |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/impl/IOStatisticsBinding.java | {
"start": 2148,
"end": 22169
} | class ____ {
/** Pattern used for each entry. */
public static final String ENTRY_PATTERN = "(%s=%s)";
/** String to return when a source is null. */
@VisibleForTesting
public static final String NULL_SOURCE = "()";
private IOStatisticsBinding() {
}
/**
* Create IOStatistics from a storage statistics instance.
*
* This will be updated as the storage statistics change.
* @param storageStatistics source data.
* @return an IO statistics source.
*/
public static IOStatistics fromStorageStatistics(
StorageStatistics storageStatistics) {
DynamicIOStatisticsBuilder builder = dynamicIOStatistics();
Iterator<StorageStatistics.LongStatistic> it = storageStatistics
.getLongStatistics();
while (it.hasNext()) {
StorageStatistics.LongStatistic next = it.next();
builder.withLongFunctionCounter(next.getName(),
k -> storageStatistics.getLong(k));
}
return builder.build();
}
/**
* Create a builder for dynamic IO Statistics.
* @return a builder to be completed.
*/
public static DynamicIOStatisticsBuilder dynamicIOStatistics() {
return new DynamicIOStatisticsBuilder();
}
/**
* Get the shared instance of the immutable empty statistics
* object.
* @return an empty statistics object.
*/
public static IOStatistics emptyStatistics() {
return EmptyIOStatistics.getInstance();
}
/**
* Get the shared instance of the immutable empty statistics
* store.
* @return an empty statistics object.
*/
public static IOStatisticsStore emptyStatisticsStore() {
return EmptyIOStatisticsStore.getInstance();
}
/**
* Take an IOStatistics instance and wrap it in a source.
* @param statistics statistics.
* @return a source which will return the values
*/
public static IOStatisticsSource wrap(IOStatistics statistics) {
return new SourceWrappedStatistics(statistics);
}
/**
* Create a builder for an {@link IOStatisticsStore}.
*
* @return a builder instance.
*/
public static IOStatisticsStoreBuilder iostatisticsStore() {
return new IOStatisticsStoreBuilderImpl();
}
/**
* Convert an entry to the string format used in logging.
*
* @param entry entry to evaluate
* @param <E> entry type
* @return formatted string
*/
public static <E> String entryToString(
final Map.Entry<String, E> entry) {
return entryToString(entry.getKey(), entry.getValue());
}
/**
* Convert entry values to the string format used in logging.
*
* @param <E> type of values.
* @param name statistic name
* @param value stat value
* @return formatted string
*/
public static <E> String entryToString(
final String name, final E value) {
return String.format(
ENTRY_PATTERN,
name,
value);
}
/**
* Copy into the dest map all the source entries.
* The destination is cleared first.
* @param <E> entry type
* @param dest destination of the copy
* @param source source
* @param copyFn function to copy entries
* @return the destination.
*/
private static <E> Map<String, E> copyMap(
Map<String, E> dest,
Map<String, E> source,
Function<E, E> copyFn) {
// we have to clone the values so that they aren't
// bound to the original values
dest.clear();
source.forEach((key, current) ->
dest.put(key, copyFn.apply(current)));
return dest;
}
/**
* A passthrough copy operation suitable for immutable
* types, including numbers.
*
* @param <E> type of values.
* @param src source object
* @return the source object
*/
public static <E extends Serializable> E passthroughFn(E src) {
return src;
}
/**
* Take a snapshot of a supplied map, where the copy option simply
* uses the existing value.
*
* For this to be safe, the map must refer to immutable objects.
* @param source source map
* @param <E> type of values.
* @return a new map referencing the same values.
*/
public static <E extends Serializable> Map<String, E> snapshotMap(
Map<String, E> source) {
return snapshotMap(source,
IOStatisticsBinding::passthroughFn);
}
/**
* Take a snapshot of a supplied map, using the copy function
* to replicate the source values.
* @param source source map
* @param copyFn function to copy the value
* @param <E> type of values.
* @return a concurrent hash map referencing the same values.
*/
public static <E extends Serializable>
ConcurrentHashMap<String, E> snapshotMap(
Map<String, E> source,
Function<E, E> copyFn) {
ConcurrentHashMap<String, E> dest = new ConcurrentHashMap<>();
copyMap(dest, source, copyFn);
return dest;
}
/**
* Aggregate two maps so that the destination.
* @param <E> type of values
* @param dest destination map.
* @param other other map
* @param aggregateFn function to aggregate the values.
* @param copyFn function to copy the value
*/
public static <E> void aggregateMaps(
Map<String, E> dest,
Map<String, E> other,
BiFunction<E, E, E> aggregateFn,
Function<E, E> copyFn) {
// scan through the other hand map; copy
// any values not in the left map,
// aggregate those for which there is already
// an entry
other.entrySet().forEach(entry -> {
String key = entry.getKey();
E rVal = entry.getValue();
E lVal = dest.get(key);
if (lVal == null) {
dest.put(key, copyFn.apply(rVal));
} else {
dest.put(key, aggregateFn.apply(lVal, rVal));
}
});
}
/**
* Aggregate two counters.
* @param l left value
* @param r right value
* @return the aggregate value
*/
public static Long aggregateCounters(Long l, Long r) {
return Math.max(l, 0) + Math.max(r, 0);
}
/**
* Add two gauges.
* @param l left value
* @param r right value
* @return aggregate value
*/
public static Long aggregateGauges(Long l, Long r) {
return l + r;
}
/**
* Aggregate two minimum values.
* @param l left
* @param r right
* @return the new minimum.
*/
public static Long aggregateMinimums(Long l, Long r) {
if (l == MIN_UNSET_VALUE) {
return r;
} else if (r == MIN_UNSET_VALUE) {
return l;
} else {
return Math.min(l, r);
}
}
/**
* Aggregate two maximum values.
* @param l left
* @param r right
* @return the new minimum.
*/
public static Long aggregateMaximums(Long l, Long r) {
if (l == MIN_UNSET_VALUE) {
return r;
} else if (r == MIN_UNSET_VALUE) {
return l;
} else {
return Math.max(l, r);
}
}
/**
* Aggregate the mean statistics.
* This returns a new instance.
* @param l left value
* @param r right value
* @return aggregate value
*/
public static MeanStatistic aggregateMeanStatistics(
MeanStatistic l, MeanStatistic r) {
MeanStatistic res = l.copy();
res.add(r);
return res;
}
/**
* Update a maximum value tracked in an atomic long.
* This is thread safe -it uses compareAndSet to ensure
* that Thread T1 whose sample is greater than the current
* value never overwrites an update from thread T2 whose
* sample was also higher -and which completed first.
* @param dest destination for all changes.
* @param sample sample to update.
*/
public static void maybeUpdateMaximum(AtomicLong dest, long sample) {
boolean done;
do {
long current = dest.get();
if (sample > current) {
done = dest.compareAndSet(current, sample);
} else {
done = true;
}
} while (!done);
}
/**
* Update a maximum value tracked in an atomic long.
* This is thread safe -it uses compareAndSet to ensure
* that Thread T1 whose sample is greater than the current
* value never overwrites an update from thread T2 whose
* sample was also higher -and which completed first.
* @param dest destination for all changes.
* @param sample sample to update.
*/
public static void maybeUpdateMinimum(AtomicLong dest, long sample) {
boolean done;
do {
long current = dest.get();
if (current == MIN_UNSET_VALUE || sample < current) {
done = dest.compareAndSet(current, sample);
} else {
done = true;
}
} while (!done);
}
/**
* Given an IOException raising function/lambda expression,
* return a new one which wraps the inner and tracks
* the duration of the operation, including whether
* it passes/fails.
* @param factory factory of duration trackers
* @param statistic statistic key
* @param inputFn input function
* @param <A> type of argument to the input function.
* @param <B> return type.
* @return a new function which tracks duration and failure.
*/
public static <A, B> FunctionRaisingIOE<A, B> trackFunctionDuration(
@Nullable DurationTrackerFactory factory,
String statistic,
FunctionRaisingIOE<A, B> inputFn) {
return (x) -> {
// create the tracker outside try-with-resources so
// that failures can be set in the catcher.
DurationTracker tracker = createTracker(factory, statistic);
try {
// exec the input function and return its value
return inputFn.apply(x);
} catch (IOException | RuntimeException e) {
// input function failed: note it
tracker.failed();
// and rethrow
throw e;
} finally {
// update the tracker.
// this is called after the catch() call would have
// set the failed flag.
tracker.close();
}
};
}
/**
* Given a java function/lambda expression,
* return a new one which wraps the inner and tracks
* the duration of the operation, including whether
* it passes/fails.
* @param factory factory of duration trackers
* @param statistic statistic key
* @param inputFn input function
* @param <A> type of argument to the input function.
* @param <B> return type.
* @return a new function which tracks duration and failure.
*/
public static <A, B> Function<A, B> trackJavaFunctionDuration(
@Nullable DurationTrackerFactory factory,
String statistic,
Function<A, B> inputFn) {
return (x) -> {
// create the tracker outside try-with-resources so
// that failures can be set in the catcher.
DurationTracker tracker = createTracker(factory, statistic);
try {
// exec the input function and return its value
return inputFn.apply(x);
} catch (RuntimeException e) {
// input function failed: note it
tracker.failed();
// and rethrow
throw e;
} finally {
// update the tracker.
// this is called after the catch() call would have
// set the failed flag.
tracker.close();
}
};
}
/**
* Given an IOException raising callable/lambda expression,
* execute it and update the relevant statistic.
* @param factory factory of duration trackers
* @param statistic statistic key
* @param input input callable.
* @param <B> return type.
* @return the result of the operation.
* @throws IOException raised on errors performing I/O.
*/
public static <B> B trackDuration(
DurationTrackerFactory factory,
String statistic,
CallableRaisingIOE<B> input) throws IOException {
return trackDurationOfOperation(factory, statistic, input).apply();
}
/**
* Given an IOException raising callable/lambda expression,
* execute it and update the relevant statistic.
* @param factory factory of duration trackers
* @param statistic statistic key
* @param input input callable.
* @throws IOException IO failure.
*/
public static void trackDurationOfInvocation(
DurationTrackerFactory factory,
String statistic,
InvocationRaisingIOE input) throws IOException {
measureDurationOfInvocation(factory, statistic, input);
}
/**
* Given an IOException raising callable/lambda expression,
* execute it and update the relevant statistic,
* returning the measured duration.
*
* {@link #trackDurationOfInvocation(DurationTrackerFactory, String, InvocationRaisingIOE)}
* with the duration returned for logging etc.; added as a new
* method to avoid linking problems with any code calling the existing
* method.
*
* @param factory factory of duration trackers
* @param statistic statistic key
* @param input input callable.
* @return the duration of the operation, as measured by the duration tracker.
* @throws IOException IO failure.
*/
public static Duration measureDurationOfInvocation(
DurationTrackerFactory factory,
String statistic,
InvocationRaisingIOE input) throws IOException {
// create the tracker outside try-with-resources so
// that failures can be set in the catcher.
DurationTracker tracker = createTracker(factory, statistic);
try {
// exec the input function and return its value
input.apply();
} catch (IOException | RuntimeException e) {
// input function failed: note it
tracker.failed();
// and rethrow
throw e;
} finally {
// update the tracker.
// this is called after the catch() call would have
// set the failed flag.
tracker.close();
}
return tracker.asDuration();
}
/**
* Given an IOException raising callable/lambda expression,
* return a new one which wraps the inner and tracks
* the duration of the operation, including whether
* it passes/fails.
* @param factory factory of duration trackers
* @param statistic statistic key
* @param input input callable.
* @param <B> return type.
* @return a new callable which tracks duration and failure.
*/
public static <B> CallableRaisingIOE<B> trackDurationOfOperation(
@Nullable DurationTrackerFactory factory,
String statistic,
CallableRaisingIOE<B> input) {
return () -> {
// create the tracker outside try-with-resources so
// that failures can be set in the catcher.
DurationTracker tracker = createTracker(factory, statistic);
return invokeTrackingDuration(tracker, input);
};
}
/**
* Given an IOException raising callable/lambda expression,
* execute it, updating the tracker on success/failure.
* @param tracker duration tracker.
* @param input input callable.
* @param <B> return type.
* @return the result of the invocation
* @throws IOException on failure.
*/
public static <B> B invokeTrackingDuration(
final DurationTracker tracker,
final CallableRaisingIOE<B> input)
throws IOException {
try {
// exec the input function and return its value
return input.apply();
} catch (IOException | RuntimeException e) {
// input function failed: note it
tracker.failed();
// and rethrow
throw e;
} finally {
// update the tracker.
// this is called after the catch() call would have
// set the failed flag.
tracker.close();
}
}
/**
* Given an IOException raising Consumer,
* return a new one which wraps the inner and tracks
* the duration of the operation, including whether
* it passes/fails.
* @param factory factory of duration trackers
* @param statistic statistic key
* @param input input callable.
* @param <B> return type.
* @return a new consumer which tracks duration and failure.
*/
public static <B> ConsumerRaisingIOE<B> trackDurationConsumer(
@Nullable DurationTrackerFactory factory,
String statistic,
ConsumerRaisingIOE<B> input) {
return (B t) -> {
// create the tracker outside try-with-resources so
// that failures can be set in the catcher.
DurationTracker tracker = createTracker(factory, statistic);
try {
// exec the input function and return its value
input.accept(t);
} catch (IOException | RuntimeException e) {
// input function failed: note it
tracker.failed();
// and rethrow
throw e;
} finally {
// update the tracker.
// this is called after the catch() call would have
// set the failed flag.
tracker.close();
}
};
}
/**
* Given a callable/lambda expression,
* return a new one which wraps the inner and tracks
* the duration of the operation, including whether
* it passes/fails.
* @param factory factory of duration trackers
* @param statistic statistic key
* @param input input callable.
* @param <B> return type.
* @return a new callable which tracks duration and failure.
*/
public static <B> Callable<B> trackDurationOfCallable(
@Nullable DurationTrackerFactory factory,
String statistic,
Callable<B> input) {
return () -> {
// create the tracker outside try-with-resources so
// that failures can be set in the catcher.
DurationTracker tracker = createTracker(factory, statistic);
try {
// exec the input function and return its value
return input.call();
} catch (RuntimeException e) {
// input function failed: note it
tracker.failed();
// and rethrow
throw e;
} finally {
// update the tracker.
// this is called after any catch() call will have
// set the failed flag.
tracker.close();
}
};
}
/**
* Given a Java supplier, evaluate it while
* tracking the duration of the operation and success/failure.
* @param factory factory of duration trackers
* @param statistic statistic key
* @param input input callable.
* @param <B> return type.
* @return the output of the supplier.
*/
public static <B> B trackDurationOfSupplier(
@Nullable DurationTrackerFactory factory,
String statistic,
Supplier<B> input) {
// create the tracker outside try-with-resources so
// that failures can be set in the catcher.
DurationTracker tracker = createTracker(factory, statistic);
try {
// exec the input function and return its value
return input.get();
} catch (RuntimeException e) {
// input function failed: note it
tracker.failed();
// and rethrow
throw e;
} finally {
// update the tracker.
// this is called after any catch() call will have
// set the failed flag.
tracker.close();
}
}
/**
* Create the tracker. If the factory is null, a stub
* tracker is returned.
* @param factory tracker factory
* @param statistic statistic to track
* @return a duration tracker.
*/
public static DurationTracker createTracker(
@Nullable final DurationTrackerFactory factory,
final String statistic) {
return factory != null
? factory.trackDuration(statistic)
: STUB_DURATION_TRACKER;
}
/**
* Create a DurationTrackerFactory which aggregates the tracking
* of two other factories.
* @param first first tracker factory
* @param second second tracker factory
* @return a factory
*/
public static DurationTrackerFactory pairedTrackerFactory(
final DurationTrackerFactory first,
final DurationTrackerFactory second) {
return new PairedDurationTrackerFactory(first, second);
}
/**
* Publish the IOStatistics as a set of storage statistics.
* This is dynamic.
* @param name storage statistics name.
* @param scheme FS scheme; may be null.
* @param source IOStatistics source.
* @return a dynamic storage statistics object.
*/
public static StorageStatistics publishAsStorageStatistics(
String name, String scheme, IOStatistics source) {
return new StorageStatisticsFromIOStatistics(name, scheme, source);
}
}
| IOStatisticsBinding |
java | alibaba__nacos | common/src/main/java/com/alibaba/nacos/common/notify/NotifyCenter.java | {
"start": 12652,
"end": 13517
} | class ____ type of the event type.
* @param factory publisher factory.
* @param queueMaxSize the publisher's queue max size.
*/
public static EventPublisher registerToPublisher(final Class<? extends Event> eventType,
final EventPublisherFactory factory, final int queueMaxSize) {
if (ClassUtils.isAssignableFrom(SlowEvent.class, eventType)) {
return INSTANCE.sharePublisher;
}
final String topic = ClassUtils.getCanonicalName(eventType);
synchronized (NotifyCenter.class) {
// MapUtils.computeIfAbsent is a unsafe method.
MapUtil.computeIfAbsent(INSTANCE.publisherMap, topic, factory, eventType, queueMaxSize);
}
return INSTANCE.publisherMap.get(topic);
}
/**
* Register publisher.
*
* @param eventType | Instances |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/PreemptionContractPBImpl.java | {
"start": 1707,
"end": 7273
} | class ____ extends PreemptionContract {
PreemptionContractProto proto = PreemptionContractProto.getDefaultInstance();
PreemptionContractProto.Builder builder = null;
boolean viaProto = false;
private Set<PreemptionContainer> containers;
private List<PreemptionResourceRequest> resources;
public PreemptionContractPBImpl() {
builder = PreemptionContractProto.newBuilder();
}
public PreemptionContractPBImpl(PreemptionContractProto proto) {
this.proto = proto;
viaProto = true;
}
public synchronized PreemptionContractProto getProto() {
mergeLocalToProto();
proto = viaProto ? proto : builder.build();
viaProto = true;
return proto;
}
@Override
public int hashCode() {
return getProto().hashCode();
}
@Override
public boolean equals(Object other) {
if (other == null)
return false;
if (other.getClass().isAssignableFrom(this.getClass())) {
return this.getProto().equals(this.getClass().cast(other).getProto());
}
return false;
}
@Override
public String toString() {
return TextFormat.shortDebugString(getProto());
}
private void mergeLocalToProto() {
if (viaProto)
maybeInitBuilder();
mergeLocalToBuilder();
proto = builder.build();
viaProto = true;
}
private void mergeLocalToBuilder() {
if (this.resources != null) {
addResourcesToProto();
}
if (this.containers != null) {
addContainersToProto();
}
}
private void maybeInitBuilder() {
if (viaProto || builder == null) {
builder = PreemptionContractProto.newBuilder(proto);
}
viaProto = false;
}
@Override
public synchronized Set<PreemptionContainer> getContainers() {
initPreemptionContainers();
return containers;
}
@Override
public synchronized void setContainers(
final Set<PreemptionContainer> containers) {
if (null == containers) {
builder.clearContainer();
}
this.containers = containers;
}
@Override
public synchronized List<PreemptionResourceRequest> getResourceRequest() {
initPreemptionResourceRequests();
return resources;
}
@Override
public synchronized void setResourceRequest(
final List<PreemptionResourceRequest> req) {
if (null == resources) {
builder.clearResource();
}
this.resources = req;
}
private void initPreemptionResourceRequests() {
if (resources != null) {
return;
}
PreemptionContractProtoOrBuilder p = viaProto ? proto : builder;
List<PreemptionResourceRequestProto> list = p.getResourceList();
resources = new ArrayList<PreemptionResourceRequest>();
for (PreemptionResourceRequestProto rr : list) {
resources.add(convertFromProtoFormat(rr));
}
}
private void addResourcesToProto() {
maybeInitBuilder();
builder.clearResource();
if (null == resources) {
return;
}
Iterable<PreemptionResourceRequestProto> iterable =
new Iterable<PreemptionResourceRequestProto>() {
@Override
public Iterator<PreemptionResourceRequestProto> iterator() {
return new Iterator<PreemptionResourceRequestProto>() {
Iterator<PreemptionResourceRequest> iter = resources.iterator();
@Override
public boolean hasNext() {
return iter.hasNext();
}
@Override
public PreemptionResourceRequestProto next() {
return convertToProtoFormat(iter.next());
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
};
builder.addAllResource(iterable);
}
private void initPreemptionContainers() {
if (containers != null) {
return;
}
PreemptionContractProtoOrBuilder p = viaProto ? proto : builder;
List<PreemptionContainerProto> list = p.getContainerList();
containers = new HashSet<PreemptionContainer>();
for (PreemptionContainerProto c : list) {
containers.add(convertFromProtoFormat(c));
}
}
private void addContainersToProto() {
maybeInitBuilder();
builder.clearContainer();
if (null == containers) {
return;
}
Iterable<PreemptionContainerProto> iterable =
new Iterable<PreemptionContainerProto>() {
@Override
public Iterator<PreemptionContainerProto> iterator() {
return new Iterator<PreemptionContainerProto>() {
Iterator<PreemptionContainer> iter = containers.iterator();
@Override
public boolean hasNext() {
return iter.hasNext();
}
@Override
public PreemptionContainerProto next() {
return convertToProtoFormat(iter.next());
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
};
builder.addAllContainer(iterable);
}
private PreemptionContainerPBImpl convertFromProtoFormat(PreemptionContainerProto p) {
return new PreemptionContainerPBImpl(p);
}
private PreemptionContainerProto convertToProtoFormat(PreemptionContainer t) {
return ((PreemptionContainerPBImpl)t).getProto();
}
private PreemptionResourceRequestPBImpl convertFromProtoFormat(PreemptionResourceRequestProto p) {
return new PreemptionResourceRequestPBImpl(p);
}
private PreemptionResourceRequestProto convertToProtoFormat(PreemptionResourceRequest t) {
return ((PreemptionResourceRequestPBImpl)t).getProto();
}
}
| PreemptionContractPBImpl |
java | elastic__elasticsearch | x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java | {
"start": 34321,
"end": 46472
} | class ____ {
private final Map<IndexMetadata, Long> additionalIndices;
private final DataStream updatedDataStream;
private SingleForecast(Map<IndexMetadata, Long> additionalIndices, DataStream updatedDataStream) {
this.additionalIndices = additionalIndices;
this.updatedDataStream = updatedDataStream;
}
public void applyRouting(RoutingTable.Builder routing) {
additionalIndices.keySet().forEach(indexMetadata -> routing.addAsNew(indexMetadata));
}
public void applyMetadata(Metadata.Builder metadataBuilder) {
@FixForMultiProject
final ProjectId projectId = ProjectId.DEFAULT;
ProjectMetadata.Builder projectBuilder = metadataBuilder.getProject(projectId);
if (projectBuilder == null) {
projectBuilder = ProjectMetadata.builder(projectId);
metadataBuilder.put(projectBuilder);
}
applyProjectMetadata(projectBuilder);
}
public void applyProjectMetadata(ProjectMetadata.Builder projectBuilder) {
additionalIndices.keySet().forEach(imd -> projectBuilder.put(imd, false));
projectBuilder.put(updatedDataStream);
}
public void applySize(Map<String, Long> builder, RoutingTable updatedRoutingTable) {
for (Map.Entry<IndexMetadata, Long> entry : additionalIndices.entrySet()) {
List<ShardRouting> shardRoutings = updatedRoutingTable.allShards(entry.getKey().getIndex().getName());
long size = entry.getValue() / shardRoutings.size();
shardRoutings.forEach(s -> builder.put(ClusterInfo.shardIdentifierFromRouting(s), size));
}
}
}
public AllocationState forecast(long forecastWindow, long now) {
if (forecastWindow == 0) {
return this;
}
// for now we only look at data-streams. We might want to also detect alias based time-based indices.
DataStreamMetadata dataStreamMetadata = state.metadata().getProject().custom(DataStreamMetadata.TYPE);
if (dataStreamMetadata == null) {
return this;
}
List<SingleForecast> singleForecasts = dataStreamMetadata.dataStreams()
.keySet()
.stream()
.map(state.metadata().getProject().getIndicesLookup()::get)
.map(DataStream.class::cast)
.map(ds -> forecast(state.metadata(), ds, forecastWindow, now))
.filter(Objects::nonNull)
.toList();
if (singleForecasts.isEmpty()) {
return this;
}
Metadata.Builder metadataBuilder = Metadata.builder(state.metadata());
RoutingTable.Builder routingTableBuilder = RoutingTable.builder(shardRoutingRoleStrategy, state.routingTable());
Map<String, Long> sizeBuilder = new HashMap<>();
singleForecasts.forEach(p -> p.applyMetadata(metadataBuilder));
singleForecasts.forEach(p -> p.applyRouting(routingTableBuilder));
RoutingTable routingTable = routingTableBuilder.build();
singleForecasts.forEach(p -> p.applySize(sizeBuilder, routingTable));
ClusterState forecastClusterState = ClusterState.builder(state).metadata(metadataBuilder).routingTable(routingTable).build();
ClusterInfo forecastInfo = new ExtendedClusterInfo(Collections.unmodifiableMap(sizeBuilder), AllocationState.this.info);
return new AllocationState(
forecastClusterState,
allocationDeciders,
shardRoutingRoleStrategy,
diskThresholdSettings,
forecastInfo,
shardSizeInfo,
nodes,
roles
);
}
private SingleForecast forecast(Metadata metadata, DataStream stream, long forecastWindow, long now) {
List<Index> indices = stream.getIndices();
if (dataStreamAllocatedToNodes(metadata, indices) == false) return null;
long minCreationDate = Long.MAX_VALUE;
long totalSize = 0;
int count = 0;
while (count < indices.size()) {
++count;
IndexMetadata indexMetadata = metadata.getProject().index(indices.get(indices.size() - count));
long creationDate = indexMetadata.getCreationDate();
if (creationDate < 0) {
return null;
}
minCreationDate = Math.min(minCreationDate, creationDate);
totalSize += state.getRoutingTable().allShards(indexMetadata.getIndex().getName()).stream().mapToLong(this::sizeOf).sum();
// we terminate loop after collecting data to ensure we consider at least the forecast window (and likely some more).
if (creationDate <= now - forecastWindow) {
break;
}
}
if (totalSize == 0) {
return null;
}
// round up
long avgSizeCeil = (totalSize - 1) / count + 1;
long actualWindow = now - minCreationDate;
if (actualWindow == 0) {
return null;
}
// rather than simulate rollover, we copy the index meta data and do minimal adjustments.
long scaledTotalSize;
int numberNewIndices;
if (actualWindow > forecastWindow) {
scaledTotalSize = BigInteger.valueOf(totalSize)
.multiply(BigInteger.valueOf(forecastWindow))
.divide(BigInteger.valueOf(actualWindow))
.longValueExact();
// round up
numberNewIndices = (int) Math.min((scaledTotalSize - 1) / avgSizeCeil + 1, indices.size());
if (scaledTotalSize == 0) {
return null;
}
} else {
numberNewIndices = count;
scaledTotalSize = totalSize;
}
IndexMetadata writeIndex = metadata.getProject().index(stream.getWriteIndex());
Map<IndexMetadata, Long> newIndices = new HashMap<>();
for (int i = 0; i < numberNewIndices; ++i) {
final String uuid = UUIDs.randomBase64UUID();
final Tuple<String, Long> rolledDataStreamInfo = stream.unsafeNextWriteIndexAndGeneration(
state.metadata().getProject(),
stream.getDataComponent()
);
stream = stream.unsafeRollover(
new Index(rolledDataStreamInfo.v1(), uuid),
rolledDataStreamInfo.v2(),
null,
stream.getAutoShardingEvent()
);
// this unintentionally copies the in-sync allocation ids too. This has the fortunate effect of these indices
// not being regarded new by the disk threshold decider, thereby respecting the low watermark threshold even for primaries.
// This is highly desirable so fixing this to clear the in-sync allocation ids will require a more elaborate solution,
// ensuring at least that when replicas are involved, we still respect the low watermark. This is therefore left as is
// for now with the intention to fix in a follow-up.
IndexMetadata newIndex = IndexMetadata.builder(writeIndex)
.index(stream.getWriteIndex().getName())
.settings(Settings.builder().put(writeIndex.getSettings()).put(IndexMetadata.SETTING_INDEX_UUID, uuid))
.build();
long size = Math.min(avgSizeCeil, scaledTotalSize - (avgSizeCeil * i));
assert size > 0;
newIndices.put(newIndex, size);
}
return new SingleForecast(newIndices, stream);
}
/**
* Check that at least one shard is on the set of nodes. If they are all unallocated, we do not want to make any prediction to not
* hit the wrong policy.
* @param indices the indices of the data stream, in original order from data stream meta.
* @return true if the first allocated index is allocated only to the set of nodes.
*/
private boolean dataStreamAllocatedToNodes(Metadata metadata, List<Index> indices) {
for (int i = 0; i < indices.size(); ++i) {
IndexMetadata indexMetadata = metadata.getProject().index(indices.get(indices.size() - i - 1));
Set<Boolean> inNodes = state.getRoutingTable()
.allShards(indexMetadata.getIndex().getName())
.stream()
.map(ShardRouting::currentNodeId)
.filter(Objects::nonNull)
.map(nodeIds::contains)
.collect(Collectors.toSet());
if (inNodes.contains(false)) {
return false;
}
if (inNodes.contains(true)) {
return true;
}
}
return false;
}
// for tests
ClusterState state() {
return state;
}
ClusterInfo info() {
return info;
}
private static ClusterState removeNodeLockFilters(ClusterState state) {
ClusterState.Builder builder = ClusterState.builder(state);
builder.metadata(removeNodeLockFilters(state.metadata()));
return builder.build();
}
private static Metadata removeNodeLockFilters(Metadata metadata) {
@FixForMultiProject
final ProjectMetadata updatedProject = removeNodeLockFilters(metadata.getProject());
return Metadata.builder(metadata).put(updatedProject).build();
}
private static ProjectMetadata removeNodeLockFilters(ProjectMetadata project) {
ProjectMetadata.Builder builder = ProjectMetadata.builder(project);
project.stream()
.filter(AllocationState::isNodeLocked)
.map(AllocationState::removeNodeLockFilters)
.forEach(imd -> builder.put(imd, false));
return builder.build();
}
private static IndexMetadata removeNodeLockFilters(IndexMetadata indexMetadata) {
Settings settings = indexMetadata.getSettings();
settings = removeNodeLockFilters(settings, REMOVE_NODE_LOCKED_FILTER_INITIAL, indexMetadata.getInitialRecoveryFilters());
settings = removeNodeLockFilters(settings, REMOVE_NODE_LOCKED_FILTER_REQUIRE, indexMetadata.requireFilters());
settings = removeNodeLockFilters(settings, REMOVE_NODE_LOCKED_FILTER_INCLUDE, indexMetadata.includeFilters());
return IndexMetadata.builder(indexMetadata).settings(settings).build();
}
private static Settings removeNodeLockFilters(Settings settings, Predicate<String> predicate, DiscoveryNodeFilters filters) {
// only filter if it is a single node filter - otherwise removing it risks narrowing legal nodes for OR filters.
if (filters != null && filters.isSingleNodeFilter()) {
return settings.filter(predicate);
} else {
return settings;
}
}
private static boolean isNodeLocked(IndexMetadata indexMetadata) {
return isNodeLocked(indexMetadata.requireFilters())
|| isNodeLocked(indexMetadata.includeFilters())
|| isNodeLocked(indexMetadata.getInitialRecoveryFilters());
}
private static boolean isNodeLocked(DiscoveryNodeFilters filters) {
return filters != null && filters.isSingleNodeFilter();
}
private static | SingleForecast |
java | apache__camel | core/camel-support/src/main/java/org/apache/camel/support/ExpressionAdapter.java | {
"start": 1294,
"end": 2190
} | class ____ extends ExpressionSupport {
private TypeConverter converter;
@Override
public void init(CamelContext context) {
super.init(context);
this.converter = context.getTypeConverter();
}
@Override
protected String assertionFailureMessage(Exchange exchange) {
return toString();
}
@Override
public <T> T evaluate(Exchange exchange, Class<T> type) {
Object value = evaluate(exchange);
if (Object.class == type) {
// do not use type converter if type is Object (optimize)
return (T) value;
}
if (converter != null) {
// optimized to use converter from init
return converter.convertTo(type, exchange, value);
} else {
return exchange.getContext().getTypeConverter().convertTo(type, exchange, value);
}
}
}
| ExpressionAdapter |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/submitted/extends_with_constructor/Teacher.java | {
"start": 744,
"end": 1232
} | class ____ {
private int id;
private String name;
private List<StudentConstructor> students;
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public List<StudentConstructor> getStudents() {
return students;
}
public void setStudents(List<StudentConstructor> students) {
this.students = students;
}
}
| Teacher |
java | junit-team__junit5 | junit-platform-commons/src/main/java/org/junit/platform/commons/util/ReflectionUtils.java | {
"start": 57991,
"end": 59568
} | interface ____ which to find the methods; never {@code null}
* @param predicate the method filter; never {@code null}
* @return an immutable list of all such methods found; never {@code null}
* @see HierarchyTraversalMode#TOP_DOWN
* @see #findMethods(Class, Predicate, HierarchyTraversalMode)
*/
public static List<Method> findMethods(Class<?> clazz, Predicate<Method> predicate) {
return findMethods(clazz, predicate, TOP_DOWN);
}
/**
* @see org.junit.platform.commons.support.ReflectionSupport#findMethods(Class, Predicate, org.junit.platform.commons.support.HierarchyTraversalMode)
*/
public static List<Method> findMethods(Class<?> clazz, Predicate<Method> predicate,
HierarchyTraversalMode traversalMode) {
return streamMethods(clazz, predicate, traversalMode).toList();
}
/**
* @since 1.10
* @see org.junit.platform.commons.support.ReflectionSupport#streamMethods(Class, Predicate, org.junit.platform.commons.support.HierarchyTraversalMode)
*/
@API(status = INTERNAL, since = "1.10")
public static Stream<Method> streamMethods(Class<?> clazz, Predicate<Method> predicate,
HierarchyTraversalMode traversalMode) {
Preconditions.notNull(clazz, "Class must not be null");
Preconditions.notNull(predicate, "Predicate must not be null");
Preconditions.notNull(traversalMode, "HierarchyTraversalMode must not be null");
// @formatter:off
return findAllMethodsInHierarchy(clazz, traversalMode).stream()
.filter(predicate)
.distinct();
// @formatter:on
}
/**
* Find all non-synthetic methods in the superclass and | in |
java | apache__rocketmq | remoting/src/main/java/org/apache/rocketmq/remoting/protocol/body/AclInfo.java | {
"start": 2471,
"end": 4163
} | class ____ {
private String resource;
private List<String> actions;
private List<String> sourceIps;
private String decision;
public static PolicyEntryInfo of(String resource, List<String> actions, List<String> sourceIps,
String decision) {
PolicyEntryInfo policyEntryInfo = new PolicyEntryInfo();
policyEntryInfo.setResource(resource);
policyEntryInfo.setActions(actions);
policyEntryInfo.setSourceIps(sourceIps);
policyEntryInfo.setDecision(decision);
return policyEntryInfo;
}
public String getResource() {
return resource;
}
public void setResource(String resource) {
this.resource = resource;
}
public List<String> getActions() {
return actions;
}
public void setActions(List<String> actions) {
this.actions = actions;
}
public List<String> getSourceIps() {
return sourceIps;
}
public void setSourceIps(List<String> sourceIps) {
this.sourceIps = sourceIps;
}
public String getDecision() {
return decision;
}
public void setDecision(String decision) {
this.decision = decision;
}
}
public String getSubject() {
return subject;
}
public void setSubject(String subject) {
this.subject = subject;
}
public List<PolicyInfo> getPolicies() {
return policies;
}
public void setPolicies(List<PolicyInfo> policies) {
this.policies = policies;
}
}
| PolicyEntryInfo |
java | quarkusio__quarkus | extensions/grpc/runtime/src/test/java/io/quarkus/grpc/runtime/ClientAndServerCallsTest.java | {
"start": 7894,
"end": 8679
} | class ____ {
FailingService service = new FailingService();
Uni<String> propagateFailure(String s) {
return ClientCalls.oneToOne(s, (i, o) -> ServerCalls.oneToOne(i, o, null, service::propagateFailure));
}
Uni<String> immediateFailure(String s) {
return ClientCalls.oneToOne(s, (i, o) -> ServerCalls.oneToOne(i, o, null, service::immediateFailure));
}
Uni<String> illegalArgumentException(String s) {
return ClientCalls.oneToOne(s, (i, o) -> ServerCalls.oneToOne(i, o, null, service::illegalArgumentException));
}
Uni<String> npe(String s) {
return ClientCalls.oneToOne(s, (i, o) -> ServerCalls.oneToOne(i, o, null, service::npe));
}
}
}
| FailingServiceClient |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/configurers/FormLoginConfigurerTests.java | {
"start": 24900,
"end": 25682
} | class ____ {
@Bean
SecurityFilterChain filterChain(HttpSecurity http) throws Exception {
// @formatter:off
http
.authorizeHttpRequests((authorize) -> authorize
.anyRequest().authenticated()
)
.formLogin((formLogin) -> formLogin
.loginProcessingUrl("/loginCheck")
.loginPage("/login")
.defaultSuccessUrl("/", true)
.permitAll()
)
.logout((logout) -> logout
.logoutSuccessUrl("/login")
.logoutUrl("/logout")
.deleteCookies("JSESSIONID")
);
// @formatter:on
return http.build();
}
@Bean
UserDetailsService userDetailsService() {
return new InMemoryUserDetailsManager(PasswordEncodedUser.user());
}
}
@Configuration
@EnableWebSecurity
static | FormLoginLoginProcessingUrlInLambdaConfig |
java | quarkusio__quarkus | test-framework/junit5-internal/src/main/java/io/quarkus/test/ExportUtil.java | {
"start": 625,
"end": 3698
} | class ____ {
static final String APPLICATION_PROPERTIES = "application.properties";
private ExportUtil() {
}
static void exportToQuarkusDeploymentPath(JavaArchive archive) throws IOException {
String exportPath = System.getProperty("quarkus.deploymentExportPath");
if (exportPath == null) {
return;
}
File exportDir = new File(exportPath);
if (exportDir.exists()) {
if (!exportDir.isDirectory()) {
throw new IllegalStateException("Export path is not a directory: " + exportPath);
}
try (Stream<Path> stream = Files.walk(exportDir.toPath())) {
stream.sorted(Comparator.reverseOrder()).map(Path::toFile)
.forEach(File::delete);
}
} else if (!exportDir.mkdirs()) {
throw new IllegalStateException("Export path could not be created: " + exportPath);
}
File exportFile = new File(exportDir, archive.getName());
archive.as(ZipExporter.class).exportTo(exportFile);
}
static void mergeCustomApplicationProperties(JavaArchive archive, Properties customApplicationProperties)
throws IOException {
Node applicationProperties = archive.get(APPLICATION_PROPERTIES);
if (applicationProperties != null) {
// Merge the existing "application.properties" asset and overriden config properties
// Overriden properties take precedence
Properties mergedProperties = new Properties();
Asset asset = applicationProperties.getAsset();
if (asset instanceof StringAsset strAsset) {
mergedProperties.load(new StringReader(strAsset.getSource()));
} else {
try (InputStream in = asset.openStream()) {
mergedProperties.load(in);
}
}
customApplicationProperties.forEach(mergedProperties::put);
if (Boolean.parseBoolean(System.getProperty("quarkus.test.log-merged-properties"))) {
System.out.println("Merged config properties:\n"
+ mergedProperties.keySet().stream().map(Object::toString).collect(Collectors.joining("\n")));
} else {
System.out.println(
"NOTE: overrideConfigKey() and application.properties were merged; use quarkus.test.log-merged-properties=true to list the specific values");
}
deleteApplicationProperties(archive);
archive.add(new PropertiesAsset(mergedProperties), APPLICATION_PROPERTIES);
} else {
archive.add(new PropertiesAsset(customApplicationProperties), APPLICATION_PROPERTIES);
}
}
static void deleteApplicationProperties(JavaArchive archive) {
// MemoryMapArchiveBase#addAsset(ArchivePath,Asset) does not overwrite the existing node correctly
// https://github.com/shrinkwrap/shrinkwrap/issues/179
archive.delete(APPLICATION_PROPERTIES);
}
}
| ExportUtil |
java | google__guice | core/test/com/google/inject/spi/InjectorSpiTest.java | {
"start": 1526,
"end": 2320
} | class
____ = injector.getExistingBinding(Key.get(Foo.class));
assertNotNull(binding);
assertEquals(Foo.class, binding.getKey().getTypeLiteral().getRawType());
// 2) Provider<Foo> class (should already exist, because Baz @Injects it).
// the assertTrue is a bit stricter than necessary, but makes sure this works for pre-existing
// Provider bindings
assertTrue(injector.getAllBindings().containsKey(Key.get(new TypeLiteral<Provider<Foo>>() {})));
binding = injector.getExistingBinding(Key.get(new TypeLiteral<Provider<Foo>>() {}));
assertNotNull(binding);
assertEquals(Provider.class, binding.getKey().getTypeLiteral().getRawType());
assertEquals(Foo.class, ((Provider) binding.getProvider().get()).get().getClass());
// 3) non-Provider Baz. | binding |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/aggregate/window/processors/AbstractSyncStateWindowAggProcessor.java | {
"start": 4032,
"end": 5034
} | class ____ implements Supplier<Boolean>, Serializable {
private static final long serialVersionUID = 1L;
private final int indexOfCountStar;
private WindowIsEmptySupplier(int indexOfCountStar, WindowAssigner assigner) {
if (assigner instanceof SliceAssigners.HoppingSliceAssigner) {
checkArgument(
indexOfCountStar >= 0,
"Hopping window requires a COUNT(*) in the aggregate functions.");
}
this.indexOfCountStar = indexOfCountStar;
}
@Override
public Boolean get() {
if (indexOfCountStar < 0) {
return false;
}
try {
RowData acc = aggregator.getAccumulators();
return acc == null || acc.getLong(indexOfCountStar) == 0;
} catch (Exception e) {
throw new RuntimeException(e.getMessage(), e);
}
}
}
}
| WindowIsEmptySupplier |
java | quarkusio__quarkus | extensions/vertx-http/runtime/src/main/java/io/quarkus/vertx/http/runtime/devmode/ConfigDescription.java | {
"start": 119,
"end": 3517
} | class ____ implements Comparable<ConfigDescription> {
private String name;
private String description;
private String defaultValue;
private ConfigValue configValue;
private boolean autoFromDevServices = false;
private String typeName;
private List<String> allowedValues;
private String configPhase;
private boolean wildcardEntry = false;
public ConfigDescription() {
}
public ConfigDescription(String name, boolean wildcardEntry) {
this.name = name;
this.wildcardEntry = wildcardEntry;
}
public ConfigDescription(final String name, final String description, final String defaultValue,
final boolean autoFromDevServices, String typeName, List<String> allowedValues, String configPhase) {
this.name = name;
this.description = description;
this.defaultValue = defaultValue;
this.autoFromDevServices = autoFromDevServices;
this.typeName = typeName;
this.allowedValues = allowedValues;
this.configPhase = configPhase;
}
public ConfigDescription(
final String name,
final String description,
final String defaultValue,
final ConfigValue configValue) {
this.name = name;
this.description = description;
this.defaultValue = defaultValue;
this.configValue = configValue;
}
public String getName() {
return name;
}
public void setName(final String name) {
this.name = name;
}
public String getDescription() {
return description;
}
public void setDescription(final String description) {
this.description = description;
}
public String getDefaultValue() {
return defaultValue;
}
public void setDefaultValue(final String defaultValue) {
this.defaultValue = defaultValue;
}
public ConfigValue getConfigValue() {
return configValue;
}
public void setConfigValue(final ConfigValue configValue) {
this.configValue = configValue;
}
public boolean isAutoFromDevServices() {
return autoFromDevServices;
}
public void setAutoFromDevServices(boolean autoFromDevServices) {
this.autoFromDevServices = autoFromDevServices;
}
public String getTypeName() {
return typeName;
}
public void setTypeName(String typeName) {
this.typeName = typeName;
}
public List<String> getAllowedValues() {
return allowedValues;
}
public void setAllowedValues(List<String> allowedValues) {
this.allowedValues = allowedValues;
}
public String getConfigPhase() {
return configPhase;
}
public void setConfigPhase(String configPhase) {
this.configPhase = configPhase;
}
public boolean isWildcardEntry() {
return wildcardEntry;
}
public ConfigDescription setWildcardEntry(boolean wildcardEntry) {
this.wildcardEntry = wildcardEntry;
return this;
}
@Override
public int compareTo(ConfigDescription o) {
int ordinal = Integer.compare(o.configValue.getConfigSourceOrdinal(), this.configValue.getConfigSourceOrdinal());
if (ordinal == 0) {
return this.configValue.getName().compareTo(o.configValue.getName());
}
return ordinal;
}
}
| ConfigDescription |
java | spring-projects__spring-security | ldap/src/test/java/org/springframework/security/ldap/SpringSecurityAuthenticationSourceTests.java | {
"start": 1824,
"end": 4460
} | class ____ {
@BeforeEach
@AfterEach
public void clearContext() {
SecurityContextHolder.clearContext();
}
@Test
public void principalAndCredentialsAreEmptyWithNoAuthentication() {
AuthenticationSource source = new SpringSecurityAuthenticationSource();
assertThat(source.getPrincipal()).isEqualTo("");
assertThat(source.getCredentials()).isEqualTo("");
}
@Test
public void principalIsEmptyForAnonymousUser() {
AuthenticationSource source = new SpringSecurityAuthenticationSource();
SecurityContextHolder.getContext()
.setAuthentication(
new AnonymousAuthenticationToken("key", "anonUser", AuthorityUtils.createAuthorityList("ignored")));
assertThat(source.getPrincipal()).isEqualTo("");
}
@Test
public void getPrincipalRejectsNonLdapUserDetailsObject() {
AuthenticationSource source = new SpringSecurityAuthenticationSource();
SecurityContextHolder.getContext().setAuthentication(new TestingAuthenticationToken(new Object(), "password"));
assertThatIllegalArgumentException().isThrownBy(source::getPrincipal);
}
@Test
public void expectedCredentialsAreReturned() {
AuthenticationSource source = new SpringSecurityAuthenticationSource();
SecurityContextHolder.getContext().setAuthentication(new TestingAuthenticationToken(new Object(), "password"));
assertThat(source.getCredentials()).isEqualTo("password");
}
@Test
public void expectedPrincipalIsReturned() {
LdapUserDetailsImpl.Essence user = new LdapUserDetailsImpl.Essence();
user.setUsername("joe");
user.setDn(LdapNameBuilder.newInstance("uid=joe,ou=users").build());
AuthenticationSource source = new SpringSecurityAuthenticationSource();
SecurityContextHolder.getContext()
.setAuthentication(new TestingAuthenticationToken(user.createUserDetails(), null));
assertThat(source.getPrincipal()).isEqualTo("uid=joe,ou=users");
}
@Test
public void getPrincipalWhenCustomSecurityContextHolderStrategyThenExpectedPrincipalIsReturned() {
LdapUserDetailsImpl.Essence user = new LdapUserDetailsImpl.Essence();
user.setUsername("joe");
user.setDn(LdapNameBuilder.newInstance("uid=joe,ou=users").build());
SecurityContextHolderStrategy strategy = mock(SecurityContextHolderStrategy.class);
given(strategy.getContext())
.willReturn(new SecurityContextImpl(new TestingAuthenticationToken(user.createUserDetails(), null)));
SpringSecurityAuthenticationSource source = new SpringSecurityAuthenticationSource();
source.setSecurityContextHolderStrategy(strategy);
assertThat(source.getPrincipal()).isEqualTo("uid=joe,ou=users");
verify(strategy).getContext();
}
}
| SpringSecurityAuthenticationSourceTests |
java | spring-projects__spring-security | oauth2/oauth2-resource-server/src/main/java/org/springframework/security/oauth2/server/resource/authentication/AbstractOAuth2TokenAuthenticationToken.java | {
"start": 1866,
"end": 2118
} | class ____<T extends OAuth2Token>
extends AbstractAuthenticationToken {
private static final long serialVersionUID = 620L;
private Object principal;
private Object credentials;
private T token;
/**
* Sub- | AbstractOAuth2TokenAuthenticationToken |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/query/sqm/tree/domain/SqmCorrelatedJoin.java | {
"start": 232,
"end": 313
} | interface ____<L,R> extends SqmCorrelation<L, R>, SqmJoin<L, R> {
}
| SqmCorrelatedJoin |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/reflection/MetaClassTest.java | {
"start": 1115,
"end": 1906
} | class ____ {
@Test
void shouldTestDataTypeOfGenericMethod() {
ReflectorFactory reflectorFactory = new DefaultReflectorFactory();
MetaClass meta = MetaClass.forClass(GenericConcrete.class, reflectorFactory);
assertEquals(Long.class, meta.getGetterType("id"));
assertEquals(Long.class, meta.getSetterType("id"));
}
@Test
void shouldThrowReflectionExceptionGetGetterType() {
try {
ReflectorFactory reflectorFactory = new DefaultReflectorFactory();
MetaClass meta = MetaClass.forClass(RichType.class, reflectorFactory);
meta.getGetterType("aString");
fail("should have thrown ReflectionException");
} catch (ReflectionException expected) {
assertEquals(
"There is no getter for property named \'aString\' in \' | MetaClassTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/pack/defaultpar/Mouse.java | {
"start": 267,
"end": 528
} | class ____ {
private Integer id;
private String name;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
| Mouse |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/logaggregation/tracker/NMLogAggregationStatusTracker.java | {
"start": 7781,
"end": 9058
} | class ____ extends TimerTask {
@Override
public void run() {
rollLogAggregationStatus();
}
}
private void rollLogAggregationStatus() {
// When we call rollLogAggregationStatus, basically fetch all
// cached log aggregation status and delete the out-of-timeout period
// log aggregation status, we should block the rollLogAggregationStatus
// calls as well as pullCachedLogAggregationReports call. So, the
// writeLocker is used here.
this.writeLocker.lock();
try {
long currentTimeStamp = System.currentTimeMillis();
LOG.info("Rolling over the cached log aggregation status.");
Iterator<Entry<ApplicationId, AppLogAggregationStatusForRMRecovery>> it
= recoveryStatuses.entrySet().iterator();
while (it.hasNext()) {
Entry<ApplicationId, AppLogAggregationStatusForRMRecovery> tracker =
it.next();
// the application has finished.
if (nmContext.getApplications().get(tracker.getKey()) == null) {
if (currentTimeStamp - tracker.getValue().getLastModifiedTime()
> rollingInterval) {
it.remove();
}
}
}
} finally {
this.writeLocker.unlock();
}
}
private static | LogAggregationStatusRoller |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDiskError.java | {
"start": 9461,
"end": 11231
} | class ____ return true whenever isEnabled is
// called on blockScanner
BlockScanner mockScanner = Mockito.mock(BlockScanner.class);
Mockito.when(mockScanner.isEnabled()).thenReturn(true);
dn0.setBlockScanner(mockScanner);
Path filePath = new Path("test.dat");
FSDataOutputStream out = fs.create(filePath, (short) 1);
out.write(1);
out.hflush();
out.close();
// Corrupt the metadata file. Insert all 0's in the type and
// bytesPerChecksum files of the metadata header.
ExtendedBlock block = DFSTestUtil.getFirstBlock(fs, filePath);
File metadataFile = cluster.getBlockMetadataFile(0, block);
RandomAccessFile raFile = new RandomAccessFile(metadataFile, "rw");
raFile.seek(2);
raFile.writeByte(0);
raFile.writeInt(0);
raFile.close();
String datanodeId0 = dn0.getDatanodeUuid();
LocatedBlock lb = DFSTestUtil.getAllBlocks(fs, filePath).get(0);
String storageId = lb.getStorageIDs()[0];
cluster.startDataNodes(conf, 1, true, null, null);
DataNode dn1 = null;
for (int i = 0; i < cluster.getDataNodes().size(); i++) {
if (!cluster.getDataNodes().get(i).equals(datanodeId0)) {
dn1 = cluster.getDataNodes().get(i);
break;
}
}
DatanodeDescriptor dnd1 =
NameNodeAdapter.getDatanode(cluster.getNamesystem(),
dn1.getDatanodeId());
dn0.transferBlock(block, new DatanodeInfo[]{dnd1},
new StorageType[]{StorageType.DISK}, new String[0]);
// Sleep for 1 second so the DataTrasnfer daemon can start transfer.
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
// Do nothing
}
Mockito.verify(mockScanner).markSuspectBlock(Mockito.eq(storageId),
Mockito.eq(block));
}
}
| and |
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/batch/ValuesBatchRestoreTest.java | {
"start": 1222,
"end": 1509
} | class ____ extends BatchRestoreTestBase {
public ValuesBatchRestoreTest() {
super(BatchExecValues.class);
}
@Override
public List<TableTestProgram> programs() {
return Collections.singletonList(ValuesTestPrograms.VALUES_TEST);
}
}
| ValuesBatchRestoreTest |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/context/util/TestContextResourceUtils.java | {
"start": 3565,
"end": 6961
} | class ____ which the paths are associated
* @param preservePlaceholders {@code true} if placeholders should be preserved
* @param paths the paths to be converted
* @return a new array of converted resource paths
* @since 5.2
* @see #convertToResources
* @see ResourceUtils#CLASSPATH_URL_PREFIX
* @see ResourceUtils#FILE_URL_PREFIX
*/
public static String[] convertToClasspathResourcePaths(Class<?> clazz, boolean preservePlaceholders, String... paths) {
String[] convertedPaths = new String[paths.length];
for (int i = 0; i < paths.length; i++) {
String path = paths[i];
// Absolute path
if (path.startsWith(SLASH)) {
convertedPaths[i] = ResourceUtils.CLASSPATH_URL_PREFIX + path;
}
// Relative path
else if (!ResourcePatternUtils.isUrl(path)) {
convertedPaths[i] = ResourceUtils.CLASSPATH_URL_PREFIX + SLASH +
ClassUtils.classPackageAsResourcePath(clazz) + SLASH + path;
}
// URL
else {
convertedPaths[i] = path;
}
if (!(preservePlaceholders && PLACEHOLDER_PATTERN.matcher(convertedPaths[i]).matches())) {
convertedPaths[i] = StringUtils.cleanPath(convertedPaths[i]);
}
}
return convertedPaths;
}
/**
* Convert the supplied paths to an array of {@link Resource} handles using
* the given {@link ResourceLoader}.
* @param resourceLoader the {@code ResourceLoader} to use to convert the paths
* @param paths the paths to be converted
* @return a new array of resources
* @see #convertToResourceList(ResourceLoader, String...)
* @see #convertToClasspathResourcePaths
*/
public static Resource[] convertToResources(ResourceLoader resourceLoader, String... paths) {
return stream(resourceLoader, paths).toArray(Resource[]::new);
}
/**
* Convert the supplied paths to a list of {@link Resource} handles using
* the given {@link ResourceLoader}.
* @param resourceLoader the {@code ResourceLoader} to use to convert the paths
* @param paths the paths to be converted
* @return a new, mutable list of resources
* @since 4.2
* @see #convertToResources(ResourceLoader, String...)
* @see #convertToClasspathResourcePaths
*/
public static List<Resource> convertToResourceList(ResourceLoader resourceLoader, String... paths) {
return stream(resourceLoader, paths).collect(Collectors.toCollection(ArrayList::new));
}
/**
* Convert the supplied paths to a list of {@link Resource} handles using the given
* {@link ResourceLoader} and {@link Environment}.
* @param resourceLoader the {@code ResourceLoader} to use to convert the paths
* @param environment the {@code Environment} to use to resolve property placeholders
* in the paths
* @param paths the paths to be converted
* @return a new, mutable list of resources
* @since 6.2
* @see #convertToResources(ResourceLoader, String...)
* @see #convertToClasspathResourcePaths
* @see Environment#resolveRequiredPlaceholders(String)
*/
public static List<Resource> convertToResourceList(
ResourceLoader resourceLoader, Environment environment, String... paths) {
return Arrays.stream(paths)
.map(environment::resolveRequiredPlaceholders)
.map(resourceLoader::getResource)
.collect(Collectors.toCollection(ArrayList::new));
}
private static Stream<Resource> stream(ResourceLoader resourceLoader, String... paths) {
return Arrays.stream(paths).map(resourceLoader::getResource);
}
}
| with |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/transport/LinkedProjectConfigService.java | {
"start": 703,
"end": 849
} | interface ____ {
/**
* Interface for providing a {@link LinkedProjectConfigService} instance via SPI.
*/
| LinkedProjectConfigService |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/model/source/internal/hbm/PluralAttributeElementSourceBasicImpl.java | {
"start": 815,
"end": 3476
/**
 * Source descriptor for the basic {@code <element/>} of a {@code hbm.xml} plural
 * attribute (collection) mapping.  Captures the element's Hibernate type and its
 * column/formula definitions, adapting the JAXB-bound XML element into the
 * boot-model source contracts.
 */
class ____
		extends AbstractHbmSourceNode
		implements PluralAttributeElementSourceBasic, RelationalValueSourceContainer {
	// The owning collection source; used to expose the attribute path of the element.
	private final PluralAttributeSource pluralAttributeSource;
	// Explicit type info ("type" attribute / nested <type/>) taken from the XML element.
	private final HibernateTypeSourceImpl typeSource;
	// Column/formula sources interpreted once, eagerly, in the constructor.
	private final List<RelationalValueSource> valueSources;
	/**
	 * @param sourceMappingDocument the mapping document being processed
	 * @param pluralAttributeSource the collection this element belongs to
	 * @param jaxbElement the JAXB representation of the {@code <element/>} XML
	 */
	public PluralAttributeElementSourceBasicImpl(
			MappingDocument sourceMappingDocument,
			PluralAttributeSource pluralAttributeSource,
			final JaxbHbmBasicCollectionElementType jaxbElement) {
		super( sourceMappingDocument );
		this.pluralAttributeSource = pluralAttributeSource;
		this.typeSource = new HibernateTypeSourceImpl( jaxbElement );
		// Delegate column/formula interpretation to the shared helper; the anonymous
		// adapter below simply surfaces the relevant attributes of the JAXB element.
		this.valueSources = RelationalValueSourceHelper.buildValueSources(
				sourceMappingDocument(),
				null,
				new RelationalValueSourceHelper.AbstractColumnsAndFormulasSource() {
					@Override
					public XmlElementMetadata getSourceType() {
						return XmlElementMetadata.ELEMENT;
					}
					@Override
					public String getSourceName() {
						// Collection elements have no name of their own.
						return null;
					}
					@Override
					public String getColumnAttribute() {
						return jaxbElement.getColumnAttribute();
					}
					@Override
					public String getFormulaAttribute() {
						return jaxbElement.getFormulaAttribute();
					}
					@Override
					public List getColumnOrFormulaElements() {
						return jaxbElement.getColumnOrFormula();
					}
					@Override
					public Boolean isNullable() {
						// XML expresses "not-null"; invert to answer "nullable".
						return !jaxbElement.isNotNull();
					}
					@Override
					public boolean isUnique() {
						return jaxbElement.isUnique();
					}
					@Override
					public SizeSource getSizeSource() {
						return Helper.interpretSizeSource(
								jaxbElement.getLength(),
								jaxbElement.getScale(),
								jaxbElement.getPrecision()
						);
					}
				}
		);
	}
	@Override
	public PluralAttributeElementNature getNature() {
		return PluralAttributeElementNature.BASIC;
	}
	@Override
	public List<RelationalValueSource> getRelationalValueSources() {
		return valueSources;
	}
	// Basic collection elements are always written on insert/update and are
	// nullable unless a column says otherwise.
	@Override
	public boolean areValuesIncludedInInsertByDefault() {
		return true;
	}
	@Override
	public boolean areValuesIncludedInUpdateByDefault() {
		return true;
	}
	@Override
	public boolean areValuesNullableByDefault() {
		return true;
	}
	@Override
	public HibernateTypeSourceImpl getExplicitHibernateTypeSource() {
		return typeSource;
	}
	@Override
	public AttributePath getAttributePath() {
		// The element shares the path of its owning collection attribute.
		return pluralAttributeSource.getAttributePath();
	}
	@Override
	public boolean isCollectionElement() {
		return true;
	}
	@Override
	public MetadataBuildingContext getBuildingContext() {
		return metadataBuildingContext();
	}
}
| PluralAttributeElementSourceBasicImpl |
java | spring-projects__spring-framework | spring-tx/src/main/java/org/springframework/transaction/event/TransactionalApplicationListenerSynchronization.java | {
"start": 4443,
"end": 5668
} | class ____<AE extends ApplicationEvent>
extends TransactionalApplicationListenerSynchronization<AE>
implements org.springframework.transaction.reactive.TransactionSynchronization {
public ReactiveSynchronization(AE event, TransactionalApplicationListener<AE> listener,
List<TransactionalApplicationListener.SynchronizationCallback> callbacks) {
super(event, listener, callbacks);
}
@Override
public Mono<Void> beforeCommit(boolean readOnly) {
if (getTransactionPhase() == TransactionPhase.BEFORE_COMMIT) {
return Mono.fromRunnable(this::processEventWithCallbacks);
}
return Mono.empty();
}
@Override
public Mono<Void> afterCompletion(int status) {
TransactionPhase phase = getTransactionPhase();
if (phase == TransactionPhase.AFTER_COMMIT && status == STATUS_COMMITTED) {
return Mono.fromRunnable(this::processEventWithCallbacks);
}
else if (phase == TransactionPhase.AFTER_ROLLBACK && status == STATUS_ROLLED_BACK) {
return Mono.fromRunnable(this::processEventWithCallbacks);
}
else if (phase == TransactionPhase.AFTER_COMPLETION) {
return Mono.fromRunnable(this::processEventWithCallbacks);
}
return Mono.empty();
}
}
}
| ReactiveSynchronization |
java | micronaut-projects__micronaut-core | management/src/main/java/io/micronaut/management/endpoint/loggers/LoggerConfiguration.java | {
"start": 862,
"end": 2512
} | class ____ {
private static final String CONFIGURED_LEVEL = "configuredLevel";
private static final String EFFECTIVE_LEVEL = "effectiveLevel";
private final String name;
private final io.micronaut.logging.LogLevel configuredLevel;
private final io.micronaut.logging.LogLevel effectiveLevel;
/**
* @param name the logger name
* @param configuredLevel the configured {@link io.micronaut.logging.LogLevel}
* @param effectiveLevel the effective {@link io.micronaut.logging.LogLevel}
*/
public LoggerConfiguration(String name,
io.micronaut.logging.LogLevel configuredLevel,
io.micronaut.logging.LogLevel effectiveLevel) {
this.name = name;
this.configuredLevel = configuredLevel;
this.effectiveLevel = effectiveLevel;
}
/**
* @return the logger name
*/
public String getName() {
return name;
}
/**
* @return the configured {@link io.micronaut.logging.LogLevel}
*/
public io.micronaut.logging.LogLevel configuredLevel() {
return configuredLevel;
}
/**
* @return the effective {@link io.micronaut.logging.LogLevel}
*/
public io.micronaut.logging.LogLevel effectiveLevel() {
return effectiveLevel;
}
/**
* @return a Map of data to emit (less the name)
*/
public Map<String, Object> getData() {
Map<String, Object> data = new LinkedHashMap<>(2);
data.put(CONFIGURED_LEVEL, configuredLevel());
data.put(EFFECTIVE_LEVEL, effectiveLevel());
return data;
}
}
| LoggerConfiguration |
java | apache__camel | components/camel-mongodb/src/test/java/org/apache/camel/component/mongodb/integration/MongoDbCredentialsFromUriConnectionIT.java | {
"start": 1319,
"end": 5777
/**
 * Integration test that re-runs the standard MongoDB operations suite while passing
 * credentials (and optionally an authSource) directly in the endpoint URI instead of
 * via a pre-configured client bean.
 */
class ____ extends MongoDbOperationsIT {
    // Second user, created in the test database itself so that authSource can be exercised.
    protected static final String AUTH_SOURCE_USER = "auth-source-user";
    protected static final String AUTH_SOURCE_PASSWORD = "auth-source-password";
    @Override
    public void doPreSetup() throws Exception {
        // create user in db
        super.doPreSetup();
        // Default user (admin database) plus a user scoped to the test db for authSource.
        createAuthorizationUser();
        createAuthorizationUser(dbName, AUTH_SOURCE_USER, AUTH_SOURCE_PASSWORD);
    }
    @BeforeEach
    void checkDocuments() {
        // Each test assumes an empty collection; skip rather than fail if leftovers exist.
        Assumptions.assumeTrue(0 == testCollection.countDocuments(), "The collection should have no documents");
    }
    @Test
    public void testCountOperationAuthUser() {
        // Count through the authSource endpoint: empty collection first...
        Object result = template.requestBody("direct:testAuthSource", "irrelevantBody");
        assertTrue(result instanceof Long, "Result is not of type Long");
        assertEquals(0L, result, "Test collection should not contain any records");
        // Insert a record and test that the endpoint now returns 1
        testCollection.insertOne(Document.parse("{a:60}"));
        result = template.requestBody("direct:testAuthSource", "irrelevantBody");
        assertTrue(result instanceof Long, "Result is not of type Long");
        assertEquals(1L, result, "Test collection should contain 1 record");
        // Clean up so subsequent tests start from an empty collection again.
        testCollection.deleteOne(new Document());
    }
    @Override
    protected RouteBuilder createRouteBuilder() {
        return new RouteBuilder() {
            public void configure() {
                // Three URI flavors: anonymous, user/password, and user/password + authSource.
                String uriHostnameOnly = String.format("mongodb:mongo?hosts=%s&", service.getConnectionAddress());
                //connecting with credentials for created user
                String uriWithCredentials = String.format("%susername=%s&password=%s&", uriHostnameOnly, USER, PASSWORD);
                String uriWithAuthSource = String.format(
                        "%susername=%s&password=%s&authSource=%s&",
                        uriHostnameOnly, AUTH_SOURCE_USER, AUTH_SOURCE_PASSWORD, dbName);
                from("direct:count").to(
                        uriHostnameOnly + "database={{mongodb.testDb}}&collection={{mongodb.testCollection}}&operation=count&dynamicity=true");
                from("direct:insert")
                        .to(uriWithCredentials
                            + "database={{mongodb.testDb}}&collection={{mongodb.testCollection}}&operation=insert");
                from("direct:testStoreOidOnInsert")
                        .to(uriHostnameOnly
                            + "database={{mongodb.testDb}}&collection={{mongodb.testCollection}}&operation=insert")
                        .setBody()
                        .header(MongoDbConstants.OID);
                from("direct:save")
                        .to(uriWithCredentials
                            + "database={{mongodb.testDb}}&collection={{mongodb.testCollection}}&operation=save");
                from("direct:testStoreOidOnSave")
                        .to(uriWithCredentials
                            + "database={{mongodb.testDb}}&collection={{mongodb.testCollection}}&operation=save")
                        .setBody()
                        .header(MongoDbConstants.OID);
                from("direct:update")
                        .to(uriWithCredentials
                            + "database={{mongodb.testDb}}&collection={{mongodb.testCollection}}&operation=update");
                from("direct:remove")
                        .to(uriWithCredentials
                            + "database={{mongodb.testDb}}&collection={{mongodb.testCollection}}&operation=remove");
                from("direct:aggregate").to(
                        uriHostnameOnly + "database={{mongodb.testDb}}&collection={{mongodb.testCollection}}&operation=aggregate");
                from("direct:getDbStats").to(uriWithCredentials + "database={{mongodb.testDb}}&operation=getDbStats");
                from("direct:getColStats").to(
                        uriWithCredentials + "database={{mongodb.testDb}}&collection={{mongodb.testCollection}}&operation=getColStats");
                from("direct:command").to(uriWithCredentials + "database={{mongodb.testDb}}&operation=command");
                // Route used by testCountOperationAuthUser above.
                from("direct:testAuthSource")
                        .to(uriWithAuthSource
                            + "database={{mongodb.testDb}}&collection={{mongodb.testCollection}}&operation=count&dynamicity=true");
            }
        };
    }
}
| MongoDbCredentialsFromUriConnectionIT |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/sql/parser/ExportAndParameterizedVisitor4db2TestCase.java | {
"start": 987,
"end": 2601
} | class ____ extends TestCase {
public void testParameterizedVisitor() {
Object[][] sqlAndExpectedCases = {
{"select XMLSERIALIZE(content fld1 as varchar(2000) ) fld1 from test_tab1 where name='1' ", 1},
{"select XMLSERIALIZE(fld1 as varchar(2000) ) fld1 from test_tab1 where name='1' ", 1},
{"select fld as b from test_tab1 where name='1' ", 1},
};
final DbType[] dbTypes = { DbType.db2 };
for (DbType dbType : dbTypes) {
System.out.println("dbType:" + dbType);
for (Object[] arr : sqlAndExpectedCases) {
final String sql = (String) arr[0];
StringBuilder out = new StringBuilder();
final SQLStatementParser parser = SQLParserUtils.createSQLStatementParser(sql, dbType);
final ParameterizedVisitor pVisitor = (ParameterizedVisitor) ExportParameterVisitorUtils.createExportParameterVisitor(out, dbType);
final SQLStatement parseStatement = parser.parseStatement();
parseStatement.accept(pVisitor);
final ExportParameterVisitor vistor2 = (ExportParameterVisitor) pVisitor;
System.out.println("before:" + sql);
System.out.println("after:" + out);
System.out.println("size:" + vistor2.getParameters());
final int expectedSize = arr.length > 1 ? (Integer) arr[1] : 0;
Assert.assertEquals(expectedSize, vistor2.getParameters().size());
}
}
}
}
| ExportAndParameterizedVisitor4db2TestCase |
java | apache__camel | components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/processor/SpringTryCatchMustHaveExceptionConfiguredTest.java | {
"start": 1232,
"end": 2241
} | class ____ extends ContextTestSupport {
@Override
protected CamelContext createCamelContext() throws Exception {
try {
createSpringCamelContext(this,
"org/apache/camel/spring/processor/SpringTryCatchMustHaveExceptionConfiguredTest.xml");
fail("Should have thrown exception");
} catch (Exception e) {
FailedToCreateRouteException ftcre = assertIsInstanceOf(FailedToCreateRouteException.class, e);
IllegalArgumentException iae = assertIsInstanceOf(IllegalArgumentException.class, ftcre.getCause());
assertEquals("At least one Exception must be configured to catch", iae.getMessage());
}
// return a working context instead, to let this test pass
return createSpringCamelContext(this, "org/apache/camel/spring/processor/convertBody.xml");
}
@Test
public void testTryCatchMustHaveExceptionConfigured() {
// noop
}
}
| SpringTryCatchMustHaveExceptionConfiguredTest |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.