language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/SequenceFileAsBinaryOutputFormat.java | {
"start": 1838,
"end": 2134
} | class ____ extends org.apache.hadoop.mapreduce
.lib.output.SequenceFileAsBinaryOutputFormat.WritableValueBytes {
public WritableValueBytes() {
super();
}
public WritableValueBytes(BytesWritable value) {
super(value);
}
}
/**
* Set the key | WritableValueBytes |
java | apache__flink | flink-yarn/src/test/java/org/apache/flink/yarn/YarnTestUtils.java | {
"start": 1312,
"end": 1752
} | class ____ {
static boolean isHadoopVersionGreaterThanOrEquals(final int major, final int minor) {
final String[] splitVersion = VersionInfo.getVersion().split("\\.");
final int[] versions = Arrays.stream(splitVersion).mapToInt(Integer::parseInt).toArray();
return versions[0] >= major && versions[1] >= minor;
}
private YarnTestUtils() {
throw new UnsupportedOperationException("This | YarnTestUtils |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/AdminAuthorizedServlet.java | {
"start": 1116,
"end": 1577
} | class ____ extends DefaultServlet {
private static final long serialVersionUID = 1L;
@Override
protected void doGet(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
// Do the authorization
if (HttpServer2.hasAdministratorAccess(getServletContext(), request,
response)) {
// Authorization is done. Just call super.
super.doGet(request, response);
}
}
}
| AdminAuthorizedServlet |
java | spring-projects__spring-boot | module/spring-boot-actuator/src/test/java/org/springframework/boot/actuate/context/properties/ConfigurationPropertiesReportEndpointSerializationTests.java | {
"start": 18717,
"end": 18928
} | class ____ {
@Bean
// gh-11037
@ConfigurationProperties("cycle")
Cycle cycle() {
return new Cycle();
}
}
@Configuration(proxyBeanMethods = false)
@EnableConfigurationProperties
static | CycleConfig |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/parser/error/ParseErrorTest_21.java | {
"start": 2298,
"end": 2382
} | class ____ {
public Map<String, Child> value;
}
public static | Model |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/dynamic/ReactiveTypeAdapters.java | {
"start": 7565,
"end": 7929
} | enum ____ implements Function<Publisher<?>, Single<?>> {
INSTANCE;
@Override
public Single<?> apply(Publisher<?> source) {
return RxReactiveStreams.toSingle(source);
}
}
/**
* An adapter {@link Function} to adopt a {@link Publisher} to {@link Completable}.
*/
public | PublisherToRxJava1SingleAdapter |
java | apache__camel | components/camel-pqc/src/test/java/org/apache/camel/component/pqc/PQCSignatureSLHDSATest.java | {
"start": 1694,
"end": 3705
} | class ____ extends CamelTestSupport {
@EndpointInject("mock:sign")
protected MockEndpoint resultSign;
@EndpointInject("mock:verify")
protected MockEndpoint resultVerify;
@Produce("direct:sign")
protected ProducerTemplate templateSign;
public PQCSignatureSLHDSATest() throws NoSuchAlgorithmException {
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:sign").to("pqc:sign?operation=sign").to("mock:sign").to("pqc:verify?operation=verify")
.to("mock:verify");
}
};
}
@BeforeAll
public static void startup() throws Exception {
Security.addProvider(new BouncyCastleProvider());
}
@Test
void testSignAndVerify() throws Exception {
resultSign.expectedMessageCount(1);
resultVerify.expectedMessageCount(1);
templateSign.sendBody("Hello");
resultSign.assertIsSatisfied();
resultVerify.assertIsSatisfied();
assertTrue(resultVerify.getExchanges().get(0).getMessage().getHeader(PQCConstants.VERIFY, Boolean.class));
}
@BindToRegistry("Keypair")
public KeyPair setKeyPair() throws NoSuchAlgorithmException, NoSuchProviderException, InvalidAlgorithmParameterException {
KeyPairGenerator kpGen = KeyPairGenerator.getInstance(PQCSignatureAlgorithms.SLHDSA.getAlgorithm(),
PQCSignatureAlgorithms.SLHDSA.getBcProvider());
kpGen.initialize(SLHDSAParameterSpec.slh_dsa_sha2_128s);
KeyPair kp = kpGen.generateKeyPair();
return kp;
}
@BindToRegistry("Signer")
public Signature getSigner() throws NoSuchAlgorithmException, NoSuchProviderException {
Signature mlDsa = Signature.getInstance(PQCSignatureAlgorithms.SLHDSA.getAlgorithm(),
PQCSignatureAlgorithms.SLHDSA.getBcProvider());
return mlDsa;
}
}
| PQCSignatureSLHDSATest |
java | elastic__elasticsearch | x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/request/completion/GoogleVertexAiUnifiedChatCompletionRequestTests.java | {
"start": 5073,
"end": 5553
} | class ____ extends GoogleVertexAiUnifiedChatCompletionRequest {
GoogleVertexAiUnifiedChatCompletionWithoutAuthRequest(UnifiedChatInput unifiedChatInput, GoogleVertexAiChatCompletionModel model) {
super(unifiedChatInput, model);
}
@Override
public void decorateWithAuth(HttpPost httpPost) {
httpPost.setHeader(HttpHeaders.AUTHORIZATION, AUTH_HEADER_VALUE);
}
}
}
| GoogleVertexAiUnifiedChatCompletionWithoutAuthRequest |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/maybe/MaybeFlatMapIterableFlowable.java | {
"start": 1949,
"end": 8197
} | class ____<T, R>
extends BasicIntQueueSubscription<R>
implements MaybeObserver<T> {
private static final long serialVersionUID = -8938804753851907758L;
final Subscriber<? super R> downstream;
final Function<? super T, ? extends Iterable<? extends R>> mapper;
final AtomicLong requested;
Disposable upstream;
volatile Iterator<? extends R> it;
volatile boolean cancelled;
boolean outputFused;
FlatMapIterableObserver(Subscriber<? super R> actual,
Function<? super T, ? extends Iterable<? extends R>> mapper) {
this.downstream = actual;
this.mapper = mapper;
this.requested = new AtomicLong();
}
@Override
public void onSubscribe(Disposable d) {
if (DisposableHelper.validate(this.upstream, d)) {
this.upstream = d;
downstream.onSubscribe(this);
}
}
@Override
public void onSuccess(T value) {
Iterator<? extends R> iterator;
boolean has;
try {
iterator = mapper.apply(value).iterator();
has = iterator.hasNext();
} catch (Throwable ex) {
Exceptions.throwIfFatal(ex);
downstream.onError(ex);
return;
}
if (!has) {
downstream.onComplete();
return;
}
this.it = iterator;
drain();
}
@Override
public void onError(Throwable e) {
upstream = DisposableHelper.DISPOSED;
downstream.onError(e);
}
@Override
public void onComplete() {
downstream.onComplete();
}
@Override
public void request(long n) {
if (SubscriptionHelper.validate(n)) {
BackpressureHelper.add(requested, n);
drain();
}
}
@Override
public void cancel() {
cancelled = true;
upstream.dispose();
upstream = DisposableHelper.DISPOSED;
}
void fastPath(Subscriber<? super R> a, Iterator<? extends R> iterator) {
for (;;) {
if (cancelled) {
return;
}
R v;
try {
v = iterator.next();
} catch (Throwable ex) {
Exceptions.throwIfFatal(ex);
a.onError(ex);
return;
}
a.onNext(v);
if (cancelled) {
return;
}
boolean b;
try {
b = iterator.hasNext();
} catch (Throwable ex) {
Exceptions.throwIfFatal(ex);
a.onError(ex);
return;
}
if (!b) {
a.onComplete();
return;
}
}
}
void drain() {
if (getAndIncrement() != 0) {
return;
}
Subscriber<? super R> a = downstream;
Iterator<? extends R> iterator = this.it;
if (outputFused && iterator != null) {
a.onNext(null);
a.onComplete();
return;
}
int missed = 1;
for (;;) {
if (iterator != null) {
long r = requested.get();
if (r == Long.MAX_VALUE) {
fastPath(a, iterator);
return;
}
long e = 0L;
while (e != r) {
if (cancelled) {
return;
}
R v;
try {
v = Objects.requireNonNull(iterator.next(), "The iterator returned a null value");
} catch (Throwable ex) {
Exceptions.throwIfFatal(ex);
a.onError(ex);
return;
}
a.onNext(v);
if (cancelled) {
return;
}
e++;
boolean b;
try {
b = iterator.hasNext();
} catch (Throwable ex) {
Exceptions.throwIfFatal(ex);
a.onError(ex);
return;
}
if (!b) {
a.onComplete();
return;
}
}
if (e != 0L) {
BackpressureHelper.produced(requested, e);
}
}
missed = addAndGet(-missed);
if (missed == 0) {
break;
}
if (iterator == null) {
iterator = it;
}
}
}
@Override
public int requestFusion(int mode) {
if ((mode & ASYNC) != 0) {
outputFused = true;
return ASYNC;
}
return NONE;
}
@Override
public void clear() {
it = null;
}
@Override
public boolean isEmpty() {
return it == null;
}
@Nullable
@Override
public R poll() {
Iterator<? extends R> iterator = it;
if (iterator != null) {
R v = Objects.requireNonNull(iterator.next(), "The iterator returned a null value");
if (!iterator.hasNext()) {
it = null;
}
return v;
}
return null;
}
}
}
| FlatMapIterableObserver |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/audit/ITestAuditManager.java | {
"start": 2436,
"end": 4312
} | class ____ extends AbstractS3ACostTest {
@Override
public Configuration createConfiguration() {
Configuration conf = super.createConfiguration();
resetAuditOptions(conf);
enableLoggingAuditor(conf);
conf.set(AUDIT_EXECUTION_INTERCEPTORS,
SimpleAWSExecutionInterceptor.CLASS);
conf.set(AUDIT_REQUEST_HANDLERS, "not-valid-class");
return conf;
}
/**
* Get the FS IOStatistics.
* @return the FS live IOSTats.
*/
private IOStatistics iostats() {
return getFileSystem().getIOStatistics();
}
/**
* Verify that operations outside a span are rejected
* by ensuring that the thread is outside a span, create
* a write operation helper, then
* reject it.
*/
@Test
public void testInvokeOutOfSpanRejected() throws Throwable {
describe("Operations against S3 will be rejected outside of a span");
final S3AFileSystem fs = getFileSystem();
requireOutOfSpanOperationsRejected(fs);
final long failures0 = lookupCounterStatistic(iostats(),
AUDIT_FAILURE.getSymbol());
final long exec0 = lookupCounterStatistic(iostats(),
AUDIT_REQUEST_EXECUTION.getSymbol());
// API call
// create and close a span, so the FS is not in a span.
fs.createSpan("span", null, null).close();
// this will be out of span
final WriteOperationHelper writer
= fs.getWriteOperationHelper();
// which can be verified
Assertions.assertThat(writer.getAuditSpan())
.matches(s -> !s.isValidSpan(), "Span is not valid");
// an S3 API call will fail and be mapped to access denial.
final AccessDeniedException ex = intercept(
AccessDeniedException.class, UNAUDITED_OPERATION, () ->
writer.listMultipartUploads("/"));
// verify the type of the inner cause, throwing the outer ex
// if it is null or a different | ITestAuditManager |
java | elastic__elasticsearch | x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupRequestTranslator.java | {
"start": 2063,
"end": 19557
} | class ____ {
/**
* Translates a non-rollup aggregation tree into a rollup-enabled agg tree. For example, the
* source aggregation may look like this:
*
* <pre>{@code
* POST /foo/_rollup_search
* {
* "aggregations": {
* "the_histo": {
* "date_histogram" : {
* "field" : "ts",
* "calendar_interval" : "1d"
* },
* "aggs": {
* "the_max": {
* "max": {
* "field": "foo"
* }
* }
* }
* }
* }
* }
* }</pre>
*
* Which is then translated into an aggregation looking like this:
*
* <pre>{@code
* POST /rolled_foo/_search
* {
* "aggregations" : {
* "filter_histo" : {
* "filter" : {
* "bool" : {
* "must" : [
* { "term" : { "_rollup.version" : 1 } },
* { "term": { "ts.date_histogram.interval" : "1d" } }
* ]
* }
* },
* "aggregations" : {
* "the_histo" : {
* "date_histogram" : {
* "field" : "ts.date_histogram.timestamp",
* "calendar_interval" : "1d"
* },
* "aggregations" : {
* "the_histo._count" : {
* "sum" : { "field" : "ts.date_histogram._count" }
* },
* "the_max" : {
* "max" : { "field" : "foo.max.value" }
* }
* }
* }
* }
* }
* }
* }
* }</pre>
*
* The various conventions that are applied during the translation are elucidated in the comments of the
* relevant method below.
*
* @param source The source aggregation to translate into rollup-enabled version
* @param registry Registry containing the various aggregations so that we can easily
* deserialize into a stream for cloning
* @return Returns the fully translated aggregation tree. Note that it returns a list instead
* of a single AggBuilder, since some aggregations (e.g. avg) may result in two
* translated aggs (sum + count)
*/
public static List<AggregationBuilder> translateAggregation(AggregationBuilder source, NamedWriteableRegistry registry) {
if (source.getWriteableName().equals(DateHistogramAggregationBuilder.NAME)) {
return translateDateHistogram((DateHistogramAggregationBuilder) source, registry);
} else if (source.getWriteableName().equals(HistogramAggregationBuilder.NAME)) {
return translateHistogram((HistogramAggregationBuilder) source, registry);
} else if (RollupField.SUPPORTED_METRICS.contains(source.getWriteableName())) {
return translateVSLeaf((ValuesSourceAggregationBuilder.LeafOnly) source, registry);
} else if (source.getWriteableName().equals(TermsAggregationBuilder.NAME)) {
return translateTerms((TermsAggregationBuilder) source, registry);
} else {
throw new IllegalArgumentException(
"Unable to translate aggregation tree into Rollup. Aggregation ["
+ source.getName()
+ "] is of type ["
+ source.getClass().getSimpleName()
+ "] which is "
+ "currently unsupported."
);
}
}
/**
* Translate a normal date_histogram into one that follows the rollup conventions.
* Notably, it adds a Sum metric to calculate the doc_count in each bucket.
*
* E.g. this date_histogram:
*
* <pre>{@code
* POST /foo/_rollup_search
* {
* "aggregations": {
* "the_histo": {
* "date_histogram" : {
* "field" : "ts",
* "calendar_interval" : "day"
* }
* }
* }
* }
* }</pre>
*
* is translated into:
*
* <pre>{@code
* POST /rolled_foo/_search
* {
* "aggregations" : {
* "the_histo" : {
* "date_histogram" : {
* "field" : "ts.date_histogram.timestamp",
* "interval" : "day"
* },
* "aggregations" : {
* "the_histo._count" : {
* "sum" : { "field" : "ts.date_histogram._count" }
* }
* }
* }
* }
* }
* }</pre>
*
* The conventions are:
* <ul>
* <li>Named: same as the source histogram</li>
* <li>Field: `{timestamp field}.date_histogram.timestamp`</li>
* <li>Add a SumAggregation to each bucket:</li>
* <li>
* <ul>
* <li>Named: `{parent histogram name}._count`</li>
* <li>Field: `{timestamp field}.date_histogram._count`</li>
* </ul>
* </li>
* </ul>
*
*/
private static List<AggregationBuilder> translateDateHistogram(
DateHistogramAggregationBuilder source,
NamedWriteableRegistry registry
) {
return translateVSAggBuilder(source, registry, () -> {
DateHistogramAggregationBuilder rolledDateHisto = new DateHistogramAggregationBuilder(source.getName());
if (source.getCalendarInterval() != null) {
rolledDateHisto.calendarInterval(source.getCalendarInterval());
} else if (source.getFixedInterval() != null) {
rolledDateHisto.fixedInterval(source.getFixedInterval());
}
ZoneId timeZone = source.timeZone() == null ? DateHistogramGroupConfig.DEFAULT_ZONEID_TIMEZONE : source.timeZone();
rolledDateHisto.timeZone(timeZone);
rolledDateHisto.offset(source.offset());
if (source.extendedBounds() != null) {
rolledDateHisto.extendedBounds(source.extendedBounds());
}
if (Strings.isNullOrEmpty(source.format()) == false) {
rolledDateHisto.format(source.format());
}
rolledDateHisto.keyed(source.keyed());
rolledDateHisto.minDocCount(source.minDocCount());
rolledDateHisto.order(source.order());
rolledDateHisto.field(RollupField.formatFieldName(source, RollupField.TIMESTAMP));
if (source.getMetadata() != null) {
rolledDateHisto.setMetadata(source.getMetadata());
}
return rolledDateHisto;
});
}
/**
* Translate a normal histogram into one that follows the rollup conventions.
* Notably, it adds a Sum metric to calculate the doc_count in each bucket.
*
* Conventions are identical to a date_histogram (excepting date-specific details), so see
* {@link #translateDateHistogram(DateHistogramAggregationBuilder, NamedWriteableRegistry)} for
* a complete list of conventions, examples, etc
*/
private static List<AggregationBuilder> translateHistogram(HistogramAggregationBuilder source, NamedWriteableRegistry registry) {
return translateVSAggBuilder(source, registry, () -> {
HistogramAggregationBuilder rolledHisto = new HistogramAggregationBuilder(source.getName());
rolledHisto.interval(source.interval());
rolledHisto.offset(source.offset());
if (Double.isFinite(source.minBound()) && Double.isFinite(source.maxBound())) {
rolledHisto.extendedBounds(source.minBound(), source.maxBound());
}
rolledHisto.keyed(source.keyed());
rolledHisto.minDocCount(source.minDocCount());
rolledHisto.order(source.order());
rolledHisto.field(RollupField.formatFieldName(source, RollupField.VALUE));
if (source.getMetadata() != null) {
rolledHisto.setMetadata(source.getMetadata());
}
return rolledHisto;
});
}
/**
* Translate a normal terms agg into one that follows the rollup conventions.
* Notably, it adds metadata to the terms, and a Sum metric to calculate the doc_count
* in each bucket.
*
* E.g. this terms agg:
*
* <pre>{@code
* POST /foo/_rollup_search
* {
* "aggregations": {
* "the_terms": {
* "terms" : {
* "field" : "foo"
* }
* }
* }
* }
* }</pre>
*
* is translated into:
*
* <pre>{@code
* POST /rolled_foo/_search
* {
* "aggregations" : {
* "the_terms" : {
* "terms" : {
* "field" : "foo.terms.value"
* },
* "aggregations" : {
* "the_terms._count" : {
* "sum" : { "field" : "foo.terms._count" }
* }
* }
* }
* }
* }
* }</pre>
*
* The conventions are:
* <ul>
* <li>Named: same as the source terms agg</li>
* <li>Field: `{field name}.terms.value`</li>
* <li>Add a SumAggregation to each bucket:</li>
* <li>
* <ul>
* <li>Named: `{parent terms name}._count`</li>
* <li>Field: `{field name}.terms._count`</li>
* </ul>
* </li>
* </ul>
*
*/
private static List<AggregationBuilder> translateTerms(TermsAggregationBuilder source, NamedWriteableRegistry registry) {
return translateVSAggBuilder(source, registry, () -> {
TermsAggregationBuilder rolledTerms = new TermsAggregationBuilder(source.getName());
if (source.userValueTypeHint() != null) {
rolledTerms.userValueTypeHint(source.userValueTypeHint());
}
rolledTerms.field(RollupField.formatFieldName(source, RollupField.VALUE));
rolledTerms.includeExclude(source.includeExclude());
if (source.collectMode() != null) {
rolledTerms.collectMode(source.collectMode());
}
rolledTerms.minDocCount(source.minDocCount());
rolledTerms.executionHint(source.executionHint());
if (source.order() != null) {
rolledTerms.order(source.order());
}
rolledTerms.shardMinDocCount(source.shardMinDocCount());
if (source.shardSize() > 0) {
rolledTerms.shardSize(source.shardSize());
}
rolledTerms.showTermDocCountError(source.showTermDocCountError());
rolledTerms.size(source.size());
return rolledTerms;
});
}
/**
* The generic method that does most of the actual heavy-lifting when translating a multi-bucket
* ValueSourceBuilder. This method is called by all the agg-specific methods (e.g. translateDateHistogram())
*
* @param source The source aggregation that we wish to translate
* @param registry Named registry for serializing leaf metrics. Not actually used by this method,
* but is passed downwards for leaf usage
* @param factory A factory closure that generates a new shallow clone of the `source`. E.g. if `source` is
* a date_histogram, the factory will take return a new DateHistogramAggBUilder with matching
* parameters. It is not a deep clone however; the returned object won't have children
* set.
* @param <T> The type of ValueSourceAggBuilder that we are working with
* @return the translated multi-bucket ValueSourceAggBuilder
*/
private static <T extends ValuesSourceAggregationBuilder<T>> List<AggregationBuilder> translateVSAggBuilder(
T source,
NamedWriteableRegistry registry,
Supplier<T> factory
) {
T rolled = factory.get();
// Translate all subaggs and add to the newly translated agg
// NOTE: using for loop instead of stream because compiler explodes with a bug :/
for (AggregationBuilder subAgg : source.getSubAggregations()) {
List<AggregationBuilder> translated = translateAggregation(subAgg, registry);
for (AggregationBuilder t : translated) {
rolled.subAggregation(t);
}
}
// Count is derived from a sum, e.g.
// "my_date_histo._count": { "sum": { "field": "foo.date_histogram._count" } } }
rolled.subAggregation(
new SumAggregationBuilder(RollupField.formatCountAggName(source.getName())).field(
RollupField.formatFieldName(source, RollupField.COUNT_FIELD)
)
);
return Collections.singletonList(rolled);
}
/**
* Translates leaf aggs (min/max/sum/etc) into their rollup version. For simple aggs like `min`,
* this is nearly a 1:1 copy. The source is deserialized into a new object, and the field is adjusted
* according to convention. E.g. for a `min` agg:
*
* <pre>{@code
* {
* "the_min":{ "min" : { "field" : "some_field" } }
* }
* }</pre>
*
* the translation would be:
*
* <pre>{@code
* {
* "the_min":{ "min" : { "field" : "some_field.min.value" }}
* }
* }</pre>
*
* However, for `avg` metrics (and potentially others in the future), the agg is translated into
* a sum + sum aggs; one for count and one for sum. When unrolling these will be combined back into
* a single avg. Note that we also have to rename the avg agg name to distinguish it from empty
* buckets. E.g. for an `avg` agg:
*
* <pre>{@code
* {
* "the_avg":{ "avg" : { "field" : "some_field" }}
* }
* }</pre>
*
* the translation would be:
*
* <pre>{@code
* [
* {
* "the_avg.value": {
* "sum" : { "field" : "some_field.avg.value" }}
* },
* {
* "the_avg._count": { "sum" : { "field" : "some_field.avg._count" }}
* }
* ]
* }</pre>
*
* The conventions are:
* <ul>
* <li>Agg type: same as source agg</li>
* <li>Named: same as the source agg</li>
* <li>Field: `{agg_type}.{field_name}.value`</li>
* </ul>
*
* IF the agg is an AvgAgg, the following additional conventions are added:
* <ul>
* <li>Agg type: becomes SumAgg, instead of AvgAgg</li>
* <li>Named: {source name}.value</li>
* <li>Additionally, an extra SumAgg is added:</li>
* <li>
* <ul>
* <li>Named: `{source name}._count`</li>
* <li>Field: `{field name}.{agg type}._count`</li>
* </ul>
* </li>
* </ul>
*
*
* @param metric The source leaf aggregation we wish to translate
* @param registry A registry of NamedWriteable's so we can do a simple serialize/deserialize for
* most of the leafs to easily clone them
* @return The translated leaf aggregation
*/
private static List<AggregationBuilder> translateVSLeaf(
ValuesSourceAggregationBuilder.LeafOnly<?> metric,
NamedWriteableRegistry registry
) {
List<AggregationBuilder> rolledMetrics;
// If it's an avg, we have to manually convert it into sum + sum aggs
if (metric instanceof AvgAggregationBuilder) {
rolledMetrics = new ArrayList<>(2);
// Avg metric is translated into a SumAgg, e.g.
// Note: we change the agg name to prevent conflicts with empty buckets
// "the_avg.value" : { "field" : "some_field.avg.value" }}
SumAggregationBuilder value = new SumAggregationBuilder(RollupField.formatValueAggName(metric.getName()));
value.field(RollupField.formatFieldName(metric, RollupField.VALUE));
rolledMetrics.add(value);
// Count is derived from a sum, e.g.
// "the_avg._count": { "sum" : { "field" : "some_field.avg._count" }}
rolledMetrics.add(
new SumAggregationBuilder(RollupField.formatCountAggName(metric.getName())).field(
RollupField.formatFieldName(metric, RollupField.COUNT_FIELD)
)
);
return rolledMetrics;
}
// Otherwise, we can cheat and serialize/deserialze into a temp stream as an easy way to clone
// leaf metrics, since they don't have any sub-aggs
try (BytesStreamOutput output = new BytesStreamOutput()) {
try {
output.writeString(metric.getType());
metric.writeTo(output);
try (
StreamInput stream = output.bytes().streamInput();
NamedWriteableAwareStreamInput in = new NamedWriteableAwareStreamInput(stream, registry)
) {
ValuesSourceAggregationBuilder<?> serialized = ((ValuesSourceAggregationBuilder) in.readNamedWriteable(
AggregationBuilder.class
)).field(RollupField.formatFieldName(metric, RollupField.VALUE));
return Collections.singletonList(serialized);
}
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
}
| RollupRequestTranslator |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ClassNewInstanceTest.java | {
"start": 17730,
"end": 18241
} | class ____ {
void f(Exception e) {
try {
getClass().getDeclaredConstructor().newInstance();
} catch (InstantiationException e1) {
// one
} catch (IllegalAccessException e1) {
// two
} catch (ReflectiveOperationException e1) {
throw new LinkageError(e1.getMessage(), e1);
}
}
}
""")
.doTest();
}
}
| Test |
java | apache__dubbo | dubbo-cluster/src/main/java/org/apache/dubbo/rpc/cluster/router/tag/model/TagRouterRule.java | {
"start": 1644,
"end": 5486
} | class ____ extends AbstractRouterRule {
private List<Tag> tags;
private final Map<String, Set<String>> addressToTagnames = new HashMap<>();
private final Map<String, Set<String>> tagnameToAddresses = new HashMap<>();
@SuppressWarnings("unchecked")
public static TagRouterRule parseFromMap(Map<String, Object> map) {
TagRouterRule tagRouterRule = new TagRouterRule();
tagRouterRule.parseFromMap0(map);
Object tags = map.get(TAGS_KEY);
if (tags != null && List.class.isAssignableFrom(tags.getClass())) {
tagRouterRule.setTags(((List<Map<String, Object>>) tags)
.stream()
.map(objMap -> Tag.parseFromMap(objMap, tagRouterRule.getVersion()))
.collect(Collectors.toList()));
}
return tagRouterRule;
}
public void init(TagStateRouter<?> router) {
if (!isValid()) {
return;
}
BitList<? extends Invoker<?>> invokers = router.getInvokers();
// for tags with 'addresses` field set and 'match' field not set
tags.stream()
.filter(tag -> CollectionUtils.isNotEmpty(tag.getAddresses()))
.forEach(tag -> {
tagnameToAddresses.put(tag.getName(), new HashSet<>(tag.getAddresses()));
tag.getAddresses().forEach(addr -> {
Set<String> tagNames = addressToTagnames.computeIfAbsent(addr, k -> new HashSet<>());
tagNames.add(tag.getName());
});
});
if (this.getVersion() != null && this.getVersion().startsWith(RULE_VERSION_V30)) {
// for tags with 'match` field set and 'addresses' field not set
if (CollectionUtils.isNotEmpty(invokers)) {
tags.stream()
.filter(tag -> CollectionUtils.isEmpty(tag.getAddresses()))
.forEach(tag -> {
Set<String> addresses = new HashSet<>();
List<ParamMatch> paramMatchers = tag.getMatch();
invokers.forEach(invoker -> {
boolean isMatch = true;
for (ParamMatch matcher : paramMatchers) {
if (!matcher.isMatch(invoker.getUrl().getOriginalParameter(matcher.getKey()))) {
isMatch = false;
break;
}
}
if (isMatch) {
addresses.add(invoker.getUrl().getAddress());
}
});
if (CollectionUtils.isNotEmpty(addresses)) { // null means tag not set
tagnameToAddresses.put(tag.getName(), addresses);
}
});
}
}
}
public Set<String> getAddresses() {
return tagnameToAddresses.entrySet().stream()
.filter(entry -> CollectionUtils.isNotEmpty(entry.getValue()))
.flatMap(entry -> entry.getValue().stream())
.collect(Collectors.toSet());
}
public List<String> getTagNames() {
return tags.stream().map(Tag::getName).collect(Collectors.toList());
}
public Map<String, Set<String>> getAddressToTagnames() {
return addressToTagnames;
}
public Map<String, Set<String>> getTagnameToAddresses() {
return tagnameToAddresses;
}
public List<Tag> getTags() {
return tags;
}
public void setTags(List<Tag> tags) {
this.tags = tags;
}
}
| TagRouterRule |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/submitted/choosewhen/User.java | {
"start": 706,
"end": 1030
} | class ____ {
private String name;
private String status;
// getters/setters
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getStatus() {
return status;
}
public void setStatus(String status) {
this.status = status;
}
}
| User |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/atomic/boolean_/AtomicBooleanAssert_overridingErrorMessage_Test.java | {
"start": 959,
"end": 2671
} | class ____ {
@Test
void should_honor_custom_error_message_set_with_withFailMessage() {
// GIVEN
String error = "ssss";
// WHEN
ThrowingCallable code = () -> assertThat(new AtomicBoolean(true)).withFailMessage(error)
.isFalse();
// THEN
assertThatAssertionErrorIsThrownBy(code).withMessageContaining(error);
}
@Test
void should_honor_custom_error_message_set_with_overridingErrorMessage() {
// GIVEN
String error = "ssss";
// WHEN
ThrowingCallable code = () -> assertThat(new AtomicBoolean(true)).overridingErrorMessage(error)
.isFalse();
// THEN
assertThatAssertionErrorIsThrownBy(code).withMessageContaining(error);
}
@Test
void should_honor_custom_error_message_set_with_withFailMessage_using_supplier() {
// GIVEN
String error = "ssss";
// WHEN
ThrowingCallable code = () -> assertThat(new AtomicBoolean(true)).withFailMessage(() -> error)
.isFalse();
// THEN
assertThatAssertionErrorIsThrownBy(code).withMessageContaining(error);
}
@Test
void should_honor_custom_error_message_set_with_overridingErrorMessage_using_supplier() {
// GIVEN
String error = "ssss";
// WHEN
ThrowingCallable code = () -> assertThat(new AtomicBoolean(true)).overridingErrorMessage(() -> error)
.isFalse();
// THEN
assertThatAssertionErrorIsThrownBy(code).withMessageContaining(error);
}
}
| AtomicBooleanAssert_overridingErrorMessage_Test |
java | playframework__playframework | documentation/manual/working/javaGuide/main/i18n/code/javaguide/i18n/JavaI18N.java | {
"start": 1825,
"end": 2701
} | class ____ extends MockJavaAction {
private final MessagesApi messagesApi;
DefaultLangController(JavaHandlerComponents javaHandlerComponents, MessagesApi messagesApi) {
super(javaHandlerComponents);
this.messagesApi = messagesApi;
}
// #default-lang-render
public Result index(Http.Request request) {
Messages messages = this.messagesApi.preferred(request);
return ok(hellotemplate.render(messages));
}
// #default-lang-render
}
@Test
public void checkDefaultScalaHello() {
Result result =
MockJavaActionHelper.call(
new DefaultScalaLangController(
instanceOf(JavaHandlerComponents.class), instanceOf(MessagesApi.class)),
fakeRequest("GET", "/"),
mat);
assertThat(contentAsString(result)).contains("hello");
}
public static | DefaultLangController |
java | google__guava | android/guava-testlib/src/com/google/common/collect/testing/SampleElements.java | {
"start": 2405,
"end": 2638
} | class ____ extends SampleElements<AnEnum> {
public Enums() {
// elements aren't sorted, to better test SortedSet iteration ordering
super(AnEnum.B, AnEnum.A, AnEnum.C, AnEnum.D, AnEnum.E);
}
}
public static | Enums |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/formatstring/FormatStringTest.java | {
"start": 1344,
"end": 5101
} | class ____ {",
" void f() {",
" // BUG: Diagnostic contains: " + expected,
" " + formatString,
" }",
"}")
.doTest();
}
@Test
public void duplicateFormatFlags() throws Exception {
testFormat("duplicate format flags: +", "String.format(\"e = %++10.4f\", Math.E);");
}
@Test
public void formatFlagsConversionMismatch() throws Exception {
testFormat(
"format specifier '%b' is not compatible with the given flag(s): #",
"String.format(\"%#b\", Math.E);");
}
@Test
public void illegalFormatCodePoint() throws Exception {
testFormat("invalid Unicode code point: 110000", "String.format(\"%c\", 0x110000);");
}
@Test
public void illegalFormatConversion() throws Exception {
testFormat(
"illegal format conversion: 'java.lang.String' cannot be formatted using '%f'",
"String.format(\"%f\", \"abcd\");");
}
@Test
public void illegalFormatFlags() throws Exception {
testFormat("illegal format flags: -0", "String.format(\"%-010d\", 5);");
}
@Test
public void illegalFormatPrecision() throws Exception {
testFormat("illegal format precision: 1", "String.format(\"%.1c\", 'c');");
}
@Test
public void illegalFormatWidth() throws Exception {
testFormat("illegal format width: 1", "String.format(\"%1n\");");
}
@Test
public void missingFormatArgument() throws Exception {
testFormat("missing argument for format specifier '%<s'", "String.format(\"%<s\", \"test\");");
}
@Test
public void missingFormatWidth() throws Exception {
testFormat("missing format width: %-f", "String.format(\"e = %-f\", Math.E);");
}
@Test
public void unknownFormatConversion() throws Exception {
testFormat("unknown format conversion: 'r'", "String.format(\"%r\", \"hello\");");
}
@Test
public void cStyleLongConversion() throws Exception {
testFormat("use %d to format integral types", "String.format(\"%l\", 42);");
}
@Test
public void cStyleLongConversion2() throws Exception {
testFormat("use %d to format integral types", "String.format(\"%ld\", 42);");
}
@Test
public void cStyleLongConversion3() throws Exception {
testFormat("use %d to format integral types", "String.format(\"%lld\", 42);");
}
@Test
public void cStyleLongConversion4() throws Exception {
testFormat("%f, %g or %e to format floating point types", "String.format(\"%lf\", 42);");
}
@Test
public void cStyleLongConversion5() throws Exception {
testFormat("%f, %g or %e to format floating point types", "String.format(\"%llf\", 42);");
}
@Test
public void conditionalExpression() throws Exception {
testFormat(
"missing argument for format specifier '%s'", "String.format(true ? \"\" : \"%s\");");
}
@Test
public void conditionalExpression2() throws Exception {
testFormat(
"missing argument for format specifier '%s'", "String.format(true ? \"%s\" : \"\");");
}
@Test
public void conditionalExpression3() throws Exception {
testFormat(
"extra format arguments: used 1, provided 2",
"String.format(true ? \"%s\" : \"%s\", 1, 2);");
}
@Test
public void conditionalExpression4() throws Exception {
testFormat(
"extra format arguments: used 1, provided 2",
"String.format(true ? \"%s\" : \"%s\", 1, 2);");
}
@Test
public void conditionalExpression5() throws Exception {
testFormat(
"missing argument for format specifier '%s'",
"String.format(true ? \"%s\" : true ? \"%s\" : \"\");");
}
@Test
public void missingArguments() {
compilationHelper
.addSourceLines(
"Test.java",
"""
| Test |
java | apache__thrift | lib/java/src/test/java/org/apache/thrift/server/TestNonblockingServer.java | {
"start": 1481,
"end": 4427
} | class ____ extends ServerTestBase {
private Thread serverThread;
private TServer server;
private static final int NUM_QUERIES = 1000;
protected TServer getServer(
TProcessor processor,
TNonblockingServerSocket socket,
TProtocolFactory protoFactory,
TTransportFactory factory) {
final Args args = new Args(socket).processor(processor).protocolFactory(protoFactory);
if (factory != null) {
args.transportFactory(factory);
}
return new TNonblockingServer(args);
}
@Override
public void startServer(
final TProcessor processor,
final TProtocolFactory protoFactory,
final TTransportFactory factory)
throws Exception {
serverThread =
new Thread() {
public void run() {
try {
// Transport
TNonblockingServerSocket tServerSocket =
new TNonblockingServerSocket(
new TNonblockingServerSocket.NonblockingAbstractServerSocketArgs()
.port(PORT));
server = getServer(processor, tServerSocket, protoFactory, factory);
// Run it
System.out.println("Starting the server on port " + PORT + "...");
server.serve();
} catch (Exception e) {
e.printStackTrace();
fail();
}
}
};
serverThread.start();
Thread.sleep(1000);
}
@Override
public void stopServer() throws Exception {
server.stop();
try {
serverThread.join();
} catch (InterruptedException e) {
}
}
@Override
public TTransport getClientTransport(TTransport underlyingTransport) throws Exception {
return new TFramedTransport(underlyingTransport);
}
@Test
public void testCleanupAllSelectionKeys() throws Exception {
for (TProtocolFactory protoFactory : getProtocols()) {
TestHandler handler = new TestHandler();
ThriftTest.Processor processor = new ThriftTest.Processor(handler);
startServer(processor, protoFactory);
TSocket socket = new TSocket(HOST, PORT);
socket.setTimeout(SOCKET_TIMEOUT);
TTransport transport = getClientTransport(socket);
TProtocol protocol = protoFactory.getProtocol(transport);
ThriftTest.Client testClient = new ThriftTest.Client(protocol);
open(transport);
for (int i = 0; i < NUM_QUERIES; ++i) {
testClient.testI32(1);
}
server.stop();
for (int i = 0; i < NUM_QUERIES; ++i) {
try {
testClient.testI32(1);
} catch (TTransportException e) {
System.err.println(e);
e.printStackTrace();
if (e.getCause() instanceof java.net.SocketTimeoutException) {
fail("timed out when it should have thrown another kind of error!");
}
}
}
transport.close();
stopServer();
}
}
}
| TestNonblockingServer |
java | google__guice | extensions/throwingproviders/test/com/google/inject/throwingproviders/CheckedProviderTest.java | {
"start": 49839,
"end": 49931
} | interface ____<T> extends CheckedProvider<String> {}
private static | WrongThrowingProviderType |
java | quarkusio__quarkus | extensions/resteasy-classic/resteasy/runtime/src/main/java/io/quarkus/resteasy/runtime/standalone/VertxOutput.java | {
"start": 163,
"end": 325
} | interface ____ {
void write(ByteBuf data, boolean last) throws IOException;
CompletionStage<Void> writeNonBlocking(ByteBuf data, boolean last);
}
| VertxOutput |
java | spring-projects__spring-framework | spring-web/src/test/java/org/springframework/http/converter/DefaultHttpMessageConvertersTests.java | {
"start": 4522,
"end": 9397
} | class ____ {
@Test
void defaultConverters() {
var converters = HttpMessageConverters.forClient().registerDefaults().build();
assertThat(converters).hasExactlyElementsOfTypes(ByteArrayHttpMessageConverter.class,
StringHttpMessageConverter.class, ResourceHttpMessageConverter.class,
AllEncompassingFormHttpMessageConverter.class, KotlinSerializationJsonHttpMessageConverter.class,
JacksonJsonHttpMessageConverter.class, JacksonSmileHttpMessageConverter.class,
KotlinSerializationCborHttpMessageConverter.class, JacksonCborHttpMessageConverter.class,
JacksonYamlHttpMessageConverter.class, JacksonXmlHttpMessageConverter.class,
KotlinSerializationProtobufHttpMessageConverter.class, AtomFeedHttpMessageConverter.class,
RssChannelHttpMessageConverter.class);
}
@Test
void multipartConverterContainsOtherConverters() {
var converters = HttpMessageConverters.forClient().registerDefaults().build();
var multipartConverter = findMessageConverter(AllEncompassingFormHttpMessageConverter.class, converters);
assertThat(multipartConverter.getPartConverters()).hasExactlyElementsOfTypes(
ByteArrayHttpMessageConverter.class, StringHttpMessageConverter.class,
ResourceHttpMessageConverter.class, KotlinSerializationJsonHttpMessageConverter.class,
JacksonJsonHttpMessageConverter.class, JacksonSmileHttpMessageConverter.class,
KotlinSerializationCborHttpMessageConverter.class, JacksonCborHttpMessageConverter.class,
JacksonYamlHttpMessageConverter.class, JacksonXmlHttpMessageConverter.class,
KotlinSerializationProtobufHttpMessageConverter.class, AtomFeedHttpMessageConverter.class,
RssChannelHttpMessageConverter.class);
}
@Test
void registerCustomMessageConverter() {
var converters = HttpMessageConverters.forClient()
.addCustomConverter(new CustomHttpMessageConverter()).build();
assertThat(converters).hasExactlyElementsOfTypes(CustomHttpMessageConverter.class, AllEncompassingFormHttpMessageConverter.class);
}
@Test
void registerCustomMessageConverterAheadOfDefaults() {
var converters = HttpMessageConverters.forClient().registerDefaults()
.addCustomConverter(new CustomHttpMessageConverter()).build();
assertThat(converters).hasExactlyElementsOfTypes(
CustomHttpMessageConverter.class, ByteArrayHttpMessageConverter.class,
StringHttpMessageConverter.class, ResourceHttpMessageConverter.class,
AllEncompassingFormHttpMessageConverter.class, KotlinSerializationJsonHttpMessageConverter.class,
JacksonJsonHttpMessageConverter.class, JacksonSmileHttpMessageConverter.class,
KotlinSerializationCborHttpMessageConverter.class, JacksonCborHttpMessageConverter.class,
JacksonYamlHttpMessageConverter.class, JacksonXmlHttpMessageConverter.class,
KotlinSerializationProtobufHttpMessageConverter.class, AtomFeedHttpMessageConverter.class,
RssChannelHttpMessageConverter.class);
}
@Test
void registerCustomConverterInMultipartConverter() {
var converters = HttpMessageConverters.forClient().registerDefaults()
.addCustomConverter(new CustomHttpMessageConverter()).build();
var multipartConverter = findMessageConverter(AllEncompassingFormHttpMessageConverter.class, converters);
assertThat(multipartConverter.getPartConverters()).hasAtLeastOneElementOfType(CustomHttpMessageConverter.class);
}
@Test
void shouldNotConfigureOverridesWhenDefaultOff() {
var stringConverter = new StringHttpMessageConverter();
var converters = HttpMessageConverters.forClient().withStringConverter(stringConverter).build();
assertThat(converters).isEmpty();
}
@Test
void shouldUseSpecificConverter() {
var jacksonConverter = new JacksonJsonHttpMessageConverter();
var converters = HttpMessageConverters.forClient().registerDefaults()
.withJsonConverter(jacksonConverter).build();
var customConverter = findMessageConverter(JacksonJsonHttpMessageConverter.class, converters);
assertThat(customConverter).isEqualTo(jacksonConverter);
}
@Test
void shouldOverrideStringConverters() {
var stringConverter = new StringHttpMessageConverter();
var converters = HttpMessageConverters.forClient().registerDefaults()
.withStringConverter(stringConverter).build();
var actualConverter = findMessageConverter(StringHttpMessageConverter.class, converters);
assertThat(actualConverter).isEqualTo(stringConverter);
}
@Test
void shouldConfigureConverter() {
var customConverter = new CustomHttpMessageConverter();
HttpMessageConverters.forClient()
.addCustomConverter(customConverter)
.configureMessageConverters(converter -> {
if (converter instanceof CustomHttpMessageConverter custom) {
custom.processed = true;
}
}).build();
assertThat(customConverter.processed).isTrue();
}
}
@Nested
| ClientConvertersTests |
java | netty__netty | codec-http/src/main/java/io/netty/handler/codec/http/websocketx/extensions/WebSocketExtensionDecoder.java | {
"start": 832,
"end": 949
} | class ____ <tt>io.netty.handler.codec.http.websocketx.extensions.WebSocketExtension</tt> decoder.
*/
public abstract | for |
java | quarkusio__quarkus | extensions/smallrye-graphql-client/deployment/src/test/java/io/quarkus/smallrye/graphql/client/deployment/TypesafeGraphQLClientMapTest.java | {
"start": 713,
"end": 1147
} | class ____ {
@RegisterExtension
static QuarkusUnitTest test = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(MapClientApi.class, MapApi.class, Foo.class, ComplexToComplexMapWrapper.class)
.addAsManifestResource(EmptyAsset.INSTANCE, "beans.xml"));
@Inject
private MapClientApi client;
@GraphQLApi
public static | TypesafeGraphQLClientMapTest |
java | FasterXML__jackson-core | src/main/java/tools/jackson/core/exc/StreamWriteException.java | {
"start": 192,
"end": 1148
} | class ____
extends JacksonException
{
private final static long serialVersionUID = 3L;
public StreamWriteException(JsonGenerator g, Throwable rootCause) {
super(g, rootCause);
}
public StreamWriteException(JsonGenerator g, String msg) {
super(g, msg);
}
public StreamWriteException(JsonGenerator g, String msg, Throwable rootCause) {
super(g, msg, rootCause);
}
/**
* Fluent method that may be used to assign originating {@link JsonGenerator},
* to be accessed using {@link #processor()}.
*
* @param g Generator to assign
*
* @return This exception instance (to allow call chaining)
*/
public StreamWriteException withGenerator(JsonGenerator g) {
_processor = g;
return this;
}
// Overridden for co-variance
@Override
public JsonGenerator processor() {
return (JsonGenerator) _processor;
}
}
| StreamWriteException |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java | {
"start": 74070,
"end": 79004
} | class ____ {
final static String BLOCK_NAME = "TFile.index";
private ByteArray firstKey;
private final ArrayList<TFileIndexEntry> index;
private final ArrayList<Long> recordNumIndex;
private final BytesComparator comparator;
private long sum = 0;
/**
* For reading from file.
*
* @throws IOException
*/
public TFileIndex(int entryCount, DataInput in, BytesComparator comparator)
throws IOException {
index = new ArrayList<TFileIndexEntry>(entryCount);
recordNumIndex = new ArrayList<Long>(entryCount);
int size = Utils.readVInt(in); // size for the first key entry.
if (size > 0) {
byte[] buffer = new byte[size];
in.readFully(buffer);
DataInputStream firstKeyInputStream =
new DataInputStream(new ByteArrayInputStream(buffer, 0, size));
int firstKeyLength = Utils.readVInt(firstKeyInputStream);
firstKey = new ByteArray(new byte[firstKeyLength]);
firstKeyInputStream.readFully(firstKey.buffer());
for (int i = 0; i < entryCount; i++) {
size = Utils.readVInt(in);
if (buffer.length < size) {
buffer = new byte[size];
}
in.readFully(buffer, 0, size);
TFileIndexEntry idx =
new TFileIndexEntry(new DataInputStream(new ByteArrayInputStream(
buffer, 0, size)));
index.add(idx);
sum += idx.entries();
recordNumIndex.add(sum);
}
} else {
if (entryCount != 0) {
throw new RuntimeException("Internal error");
}
}
this.comparator = comparator;
}
/**
* @param key
* input key.
* @return the ID of the first block that contains key >= input key. Or -1
* if no such block exists.
*/
public int lowerBound(RawComparable key) {
if (comparator == null) {
throw new RuntimeException("Cannot search in unsorted TFile");
}
if (firstKey == null) {
return -1; // not found
}
int ret = Utils.lowerBound(index, key, comparator);
if (ret == index.size()) {
return -1;
}
return ret;
}
/**
* @param key
* input key.
* @return the ID of the first block that contains key > input key. Or -1
* if no such block exists.
*/
public int upperBound(RawComparable key) {
if (comparator == null) {
throw new RuntimeException("Cannot search in unsorted TFile");
}
if (firstKey == null) {
return -1; // not found
}
int ret = Utils.upperBound(index, key, comparator);
if (ret == index.size()) {
return -1;
}
return ret;
}
/**
* For writing to file.
*/
public TFileIndex(BytesComparator comparator) {
index = new ArrayList<TFileIndexEntry>();
recordNumIndex = new ArrayList<Long>();
this.comparator = comparator;
}
public RawComparable getFirstKey() {
return firstKey;
}
public Reader.Location getLocationByRecordNum(long recNum) {
int idx = Utils.upperBound(recordNumIndex, recNum);
long lastRecNum = (idx == 0)? 0: recordNumIndex.get(idx-1);
return new Reader.Location(idx, recNum-lastRecNum);
}
public long getRecordNumByLocation(Reader.Location location) {
int blkIndex = location.getBlockIndex();
long lastRecNum = (blkIndex == 0) ? 0: recordNumIndex.get(blkIndex-1);
return lastRecNum + location.getRecordIndex();
}
public void setFirstKey(byte[] key, int offset, int length) {
firstKey = new ByteArray(new byte[length]);
System.arraycopy(key, offset, firstKey.buffer(), 0, length);
}
public RawComparable getLastKey() {
if (index.size() == 0) {
return null;
}
return new ByteArray(index.get(index.size() - 1).buffer());
}
public void addEntry(TFileIndexEntry keyEntry) {
index.add(keyEntry);
sum += keyEntry.entries();
recordNumIndex.add(sum);
}
public TFileIndexEntry getEntry(int bid) {
return index.get(bid);
}
public void write(DataOutput out) throws IOException {
if (firstKey == null) {
Utils.writeVInt(out, 0);
return;
}
DataOutputBuffer dob = new DataOutputBuffer();
Utils.writeVInt(dob, firstKey.size());
dob.write(firstKey.buffer());
Utils.writeVInt(out, dob.size());
out.write(dob.getData(), 0, dob.getLength());
for (TFileIndexEntry entry : index) {
dob.reset();
entry.write(dob);
Utils.writeVInt(out, dob.getLength());
out.write(dob.getData(), 0, dob.getLength());
}
}
}
/**
* TFile Data Index entry. We should try to make the memory footprint of each
* index entry as small as possible.
*/
static final | TFileIndex |
java | apache__dubbo | dubbo-cluster/src/main/java/org/apache/dubbo/rpc/cluster/router/RouterResult.java | {
"start": 879,
"end": 1748
} | class ____<T> {
private final boolean needContinueRoute;
private final List<T> result;
private final String message;
public RouterResult(List<T> result) {
this.needContinueRoute = true;
this.result = result;
this.message = null;
}
public RouterResult(List<T> result, String message) {
this.needContinueRoute = true;
this.result = result;
this.message = message;
}
public RouterResult(boolean needContinueRoute, List<T> result, String message) {
this.needContinueRoute = needContinueRoute;
this.result = result;
this.message = message;
}
public boolean isNeedContinueRoute() {
return needContinueRoute;
}
public List<T> getResult() {
return result;
}
public String getMessage() {
return message;
}
}
| RouterResult |
java | micronaut-projects__micronaut-core | core/src/main/java/io/micronaut/core/util/clhm/ConcurrentLinkedHashMap.java | {
"start": 54965,
"end": 55290
} | class ____<T> {
private T object;
ObjectHolder() {
}
public T getObject() {
return object;
}
public void setObject(T object) {
this.object = object;
}
}
/** Adds the node to the page replacement policy. */
private final | ObjectHolder |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/validation/InvalidPostConstructTest.java | {
"start": 1339,
"end": 1486
} | class ____ {
@PostConstruct
public Uni<Void> invalid() {
return Uni.createFrom().nullItem();
}
}
}
| InvalidBean |
java | google__guava | android/guava-testlib/src/com/google/common/collect/testing/features/ListFeature.java | {
"start": 1964,
"end": 2068
} | interface ____ {
ListFeature[] value() default {};
ListFeature[] absent() default {};
}
}
| Require |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/context/properties/scan/valid/ConfigurationPropertiesScanConfiguration.java | {
"start": 1366,
"end": 1567
} | class ____ {
}
@ConfigurationPropertiesScan(basePackages = "org.springframework.boot.context.properties.scan.valid.a",
basePackageClasses = BScanConfiguration.class)
public static | TestConfiguration |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/aggfunctions/MinAggFunction.java | {
"start": 5742,
"end": 5977
} | class ____ extends MinAggFunction {
@Override
public DataType getResultType() {
return DataTypes.STRING();
}
}
/** Built-in Date Min aggregate function. */
public static | StringMinAggFunction |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/io/network/partition/hybrid/tiered/file/SegmentPartitionFileWriterTest.java | {
"start": 2004,
"end": 5268
} | class ____ {
@TempDir java.nio.file.Path tempFolder;
@Test
void testWrite() throws IOException {
TieredStoragePartitionId partitionId =
TieredStorageIdMappingUtils.convertId(new ResultPartitionID());
int numSubpartitions = 5;
int numSegments = 10;
int numBuffersPerSegment = 10;
int bufferSizeBytes = 3;
Path tieredStorageDir = Path.fromLocalFile(tempFolder.toFile());
SegmentPartitionFileWriter partitionFileWriter =
new SegmentPartitionFileWriter(tieredStorageDir.getPath(), numSubpartitions);
// Prepare the buffers to be written
List<PartitionFileWriter.SubpartitionBufferContext> subpartitionBuffers =
generateBuffersToWrite(
numSubpartitions, numSegments, numBuffersPerSegment, bufferSizeBytes);
// Write the file
partitionFileWriter.write(partitionId, subpartitionBuffers);
partitionFileWriter.release();
// Check the written files
checkWrittenSegmentFiles(
partitionId,
numSubpartitions,
numSegments,
numBuffersPerSegment,
bufferSizeBytes,
tieredStorageDir);
}
private static void checkWrittenSegmentFiles(
TieredStoragePartitionId partitionId,
int numSubpartitions,
int numSegments,
int numBuffersPerSegment,
int bufferSizeBytes,
Path tieredStorageDir)
throws IOException {
FileSystem fs = tieredStorageDir.getFileSystem();
FileStatus[] partitionDirs = fs.listStatus(tieredStorageDir);
assertThat(partitionDirs).hasSize(1);
assertThat(partitionDirs[0].getPath().getName())
.isEqualTo(TieredStorageIdMappingUtils.convertId(partitionId).toString());
FileStatus[] subpartitionDirs = fs.listStatus(partitionDirs[0].getPath());
assertThat(subpartitionDirs).hasSize(numSubpartitions);
int expectedSegmentFileBytes =
(BufferReaderWriterUtil.HEADER_LENGTH + bufferSizeBytes) * numBuffersPerSegment;
for (int i = 0; i < numSubpartitions; i++) {
Path subpartitionDir = subpartitionDirs[i].getPath();
for (int j = 0; j < numSegments; j++) {
Path segmentFile = new Path(subpartitionDir, SEGMENT_FILE_PREFIX + j);
byte[] bytesRead =
Files.readAllBytes(new java.io.File(segmentFile.getPath()).toPath());
// Check the segment file
assertThat(bytesRead).hasSize(expectedSegmentFileBytes);
}
Path segmentFinishDir = new Path(subpartitionDir, SEGMENT_FINISH_DIR_NAME);
assertThat(fs.exists(segmentFinishDir)).isTrue();
// Check the segment-finish file
FileStatus[] segmentFinishFiles = fs.listStatus(segmentFinishDir);
assertThat(segmentFinishFiles).hasSize(1);
FileStatus segmentFinishFile = segmentFinishFiles[0];
assertThat(segmentFinishFile.getPath().getName())
.isEqualTo(String.valueOf(numSegments - 1));
}
}
}
| SegmentPartitionFileWriterTest |
java | apache__camel | components/camel-jetty/src/test/java/org/apache/camel/component/jetty/rest/RestJettyCustomContentTypeTest.java | {
"start": 1178,
"end": 3145
} | class ____ extends BaseJettyTest {
@Test
public void testBlob() {
Exchange out = template.request("http://localhost:" + getPort() + "/users/blob", new Processor() {
@Override
public void process(Exchange exchange) {
}
});
assertEquals("application/foobar", out.getMessage().getHeader(Exchange.CONTENT_TYPE));
assertEquals("Some foobar stuff goes here", out.getMessage().getBody(String.class));
}
@Test
public void testJSon() {
Exchange out = template.request("http://localhost:" + getPort() + "/users/lives", new Processor() {
@Override
public void process(Exchange exchange) {
}
});
assertEquals("application/json", out.getMessage().getHeader(Exchange.CONTENT_TYPE));
assertEquals("{\"iso\":\"EN\",\"country\":\"England\"}", out.getMessage().getBody(String.class));
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
// enable json binding
restConfiguration().component("jetty").host("localhost").port(getPort()).bindingMode(RestBindingMode.json);
rest("/users/").consumes("application/json").produces("application/json").get("blob").to("direct:blob")
.get("lives").to("direct:lives");
from("direct:blob")
// but send back non json data
.setHeader(Exchange.CONTENT_TYPE, constant("application/foobar")).transform()
.constant("Some foobar stuff goes here");
CountryPojo country = new CountryPojo();
country.setIso("EN");
country.setCountry("England");
from("direct:lives").transform().constant(country);
}
};
}
}
| RestJettyCustomContentTypeTest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/ResponseInfo.java | {
"start": 1170,
"end": 1247
} | class ____ implements Iterable<ResponseInfo.Item> {
public static | ResponseInfo |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/filter/wall/mysql/MySqlWallTest66.java | {
"start": 893,
"end": 2047
} | class ____ extends TestCase {
public void test_true() throws Exception {
WallProvider provider = new MySqlWallProvider();
provider.getConfig().setSchemaCheck(true);
assertTrue(provider.checkValid(//
"SELECT LOGFILE_GROUP_NAME, FILE_NAME, TOTAL_EXTENTS, INITIAL_SIZE, ENGINE, EXTRA " +
"FROM INFORMATION_SCHEMA.FILES WHERE FILE_TYPE = 'UNDO LOG' AND FILE_NAME IS NOT NULL " +
"AND LOGFILE_GROUP_NAME IN (" +
" SELECT DISTINCT LOGFILE_GROUP_NAME FROM INFORMATION_SCHEMA.FILES " +
" WHERE FILE_TYPE = 'DATAFILE' " +
" AND TABLESPACE_NAME IN (" +
" SELECT DISTINCT TABLESPACE_NAME FROM INFORMATION_SCHEMA.PARTITIONS " +
" WHERE TABLE_SCHEMA IN ('stat'))" +
") " +
"GROUP BY LOGFILE_GROUP_NAME, FILE_NAME, ENGINE " +
"ORDER BY LOGFILE_GROUP_NAME"));
assertEquals(2, provider.getTableStats().size());
}
}
| MySqlWallTest66 |
java | quarkusio__quarkus | extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/AwsElasticBlockStoreVolumeConfig.java | {
"start": 121,
"end": 518
} | interface ____ {
/**
* The name of the disk to mount.
*/
String volumeId();
/**
* The partition.
*/
OptionalInt partition();
/**
* Filesystem type.
*/
@WithDefault("ext4")
String fsType();
/**
* Whether the volumeName is read only or not.
*/
@WithDefault("false")
boolean readOnly();
}
| AwsElasticBlockStoreVolumeConfig |
java | elastic__elasticsearch | libs/gpu-codec/src/main/java/org/elasticsearch/gpu/codec/DatasetUtilsImpl.java | {
"start": 749,
"end": 5624
} | class ____ implements DatasetUtils {
private static final DatasetUtils INSTANCE = new DatasetUtilsImpl();
private static final MethodHandle createDataset$mh = CuVSProvider.provider().newNativeMatrixBuilder();
private static final MethodHandle createDatasetWithStrides$mh = CuVSProvider.provider().newNativeMatrixBuilderWithStrides();
static DatasetUtils getInstance() {
return INSTANCE;
}
static CuVSMatrix fromMemorySegment(MemorySegment memorySegment, int size, int dimensions, CuVSMatrix.DataType dataType) {
try {
return (CuVSMatrix) createDataset$mh.invokeExact(memorySegment, size, dimensions, dataType);
} catch (Throwable e) {
if (e instanceof Error err) {
throw err;
} else if (e instanceof RuntimeException re) {
throw re;
} else {
throw new RuntimeException(e);
}
}
}
static CuVSMatrix fromMemorySegment(
MemorySegment memorySegment,
int size,
int dimensions,
int rowStride,
int columnStride,
CuVSMatrix.DataType dataType
) {
try {
return (CuVSMatrix) createDatasetWithStrides$mh.invokeExact(memorySegment, size, dimensions, rowStride, columnStride, dataType);
} catch (Throwable e) {
if (e instanceof Error err) {
throw err;
} else if (e instanceof RuntimeException re) {
throw re;
} else {
throw new RuntimeException(e);
}
}
}
private DatasetUtilsImpl() {}
@Override
public CuVSMatrix fromInput(MemorySegmentAccessInput input, int numVectors, int dims, CuVSMatrix.DataType dataType) throws IOException {
if (numVectors < 0 || dims < 0) {
throwIllegalArgumentException(numVectors, dims);
}
return createCuVSMatrix(input, 0L, input.length(), numVectors, dims, dataType);
}
@Override
public CuVSMatrix fromInput(
MemorySegmentAccessInput input,
int numVectors,
int dims,
int rowStride,
int columnStride,
CuVSMatrix.DataType dataType
) throws IOException {
if (numVectors < 0 || dims < 0) {
throwIllegalArgumentException(numVectors, dims);
}
return createCuVSMatrix(input, 0L, input.length(), numVectors, dims, rowStride, columnStride, dataType);
}
@Override
public CuVSMatrix fromSlice(MemorySegmentAccessInput input, long pos, long len, int numVectors, int dims, CuVSMatrix.DataType dataType)
throws IOException {
if (pos < 0 || len < 0) {
throw new IllegalArgumentException("pos and len must be positive");
}
return createCuVSMatrix(input, pos, len, numVectors, dims, dataType);
}
private static CuVSMatrix createCuVSMatrix(
MemorySegmentAccessInput input,
long pos,
long len,
int numVectors,
int dims,
CuVSMatrix.DataType dataType
) throws IOException {
MemorySegment ms = input.segmentSliceOrNull(pos, len);
assert ms != null; // TODO: this can be null if larger than 16GB or ...
final int byteSize = dataType == CuVSMatrix.DataType.FLOAT ? Float.BYTES : Byte.BYTES;
if (((long) numVectors * dims * byteSize) > ms.byteSize()) {
throwIllegalArgumentException(ms, numVectors, dims);
}
return fromMemorySegment(ms, numVectors, dims, dataType);
}
private static CuVSMatrix createCuVSMatrix(
MemorySegmentAccessInput input,
long pos,
long len,
int numVectors,
int dims,
int rowStride,
int columnStride,
CuVSMatrix.DataType dataType
) throws IOException {
MemorySegment ms = input.segmentSliceOrNull(pos, len);
assert ms != null;
final int byteSize = dataType == CuVSMatrix.DataType.FLOAT ? Float.BYTES : Byte.BYTES;
if (((long) numVectors * rowStride * byteSize) > ms.byteSize()) {
throwIllegalArgumentException(ms, numVectors, dims);
}
return fromMemorySegment(ms, numVectors, dims, rowStride, columnStride, dataType);
}
static void throwIllegalArgumentException(MemorySegment ms, int numVectors, int dims) {
var s = "segment of size [" + ms.byteSize() + "] too small for expected " + numVectors + " float vectors of " + dims + " dims";
throw new IllegalArgumentException(s);
}
static void throwIllegalArgumentException(int numVectors, int dims) {
String s;
if (numVectors < 0) {
s = "negative number of vectors: " + numVectors;
} else {
s = "negative vector dims: " + dims;
}
throw new IllegalArgumentException(s);
}
}
| DatasetUtilsImpl |
java | elastic__elasticsearch | modules/lang-painless/src/test/java/org/elasticsearch/painless/XorTests.java | {
"start": 560,
"end": 15698
} | class ____ extends ScriptTestCase {
public void testBasics() throws Exception {
assertEquals(9 ^ 3, exec("return 9 ^ 3;"));
assertEquals(9L ^ 3, exec("return 9L ^ 3;"));
assertEquals(9 ^ 3L, exec("return 9 ^ 3L;"));
assertEquals(10, exec("short x = 9; char y = 3; return x ^ y;"));
}
public void testInt() throws Exception {
assertEquals(5 ^ 12, exec("int x = 5; int y = 12; return x ^ y;"));
assertEquals(5 ^ -12, exec("int x = 5; int y = -12; return x ^ y;"));
assertEquals(7 ^ 15 ^ 3, exec("int x = 7; int y = 15; int z = 3; return x ^ y ^ z;"));
}
public void testIntConst() throws Exception {
assertEquals(5 ^ 12, exec("return 5 ^ 12;"));
assertEquals(5 ^ -12, exec("return 5 ^ -12;"));
assertEquals(7 ^ 15 ^ 3, exec("return 7 ^ 15 ^ 3;"));
}
public void testLong() throws Exception {
assertEquals(5L ^ 12L, exec("long x = 5; long y = 12; return x ^ y;"));
assertEquals(5L ^ -12L, exec("long x = 5; long y = -12; return x ^ y;"));
assertEquals(7L ^ 15L ^ 3L, exec("long x = 7; long y = 15; long z = 3; return x ^ y ^ z;"));
}
public void testLongConst() throws Exception {
assertEquals(5L ^ 12L, exec("return 5L ^ 12L;"));
assertEquals(5L ^ -12L, exec("return 5L ^ -12L;"));
assertEquals(7L ^ 15L ^ 3L, exec("return 7L ^ 15L ^ 3L;"));
}
public void testBool() throws Exception {
assertEquals(false, exec("boolean x = true; boolean y = true; return x ^ y;"));
assertEquals(true, exec("boolean x = true; boolean y = false; return x ^ y;"));
assertEquals(true, exec("boolean x = false; boolean y = true; return x ^ y;"));
assertEquals(false, exec("boolean x = false; boolean y = false; return x ^ y;"));
}
public void testBoolConst() throws Exception {
assertEquals(false, exec("return true ^ true;"));
assertEquals(true, exec("return true ^ false;"));
assertEquals(true, exec("return false ^ true;"));
assertEquals(false, exec("return false ^ false;"));
}
public void testIllegal() throws Exception {
expectScriptThrows(ClassCastException.class, () -> { exec("float x = (float)4; int y = 1; return x ^ y"); });
expectScriptThrows(ClassCastException.class, () -> { exec("double x = (double)4; int y = 1; return x ^ y"); });
}
public void testDef() {
expectScriptThrows(ClassCastException.class, () -> { exec("def x = (float)4; def y = (byte)1; return x ^ y"); });
expectScriptThrows(ClassCastException.class, () -> { exec("def x = (double)4; def y = (byte)1; return x ^ y"); });
assertEquals(5, exec("def x = (byte)4; def y = (byte)1; return x ^ y"));
assertEquals(5, exec("def x = (short)4; def y = (byte)1; return x ^ y"));
assertEquals(5, exec("def x = (char)4; def y = (byte)1; return x ^ y"));
assertEquals(5, exec("def x = (int)4; def y = (byte)1; return x ^ y"));
assertEquals(5L, exec("def x = (long)4; def y = (byte)1; return x ^ y"));
assertEquals(5, exec("def x = (byte)4; def y = (short)1; return x ^ y"));
assertEquals(5, exec("def x = (short)4; def y = (short)1; return x ^ y"));
assertEquals(5, exec("def x = (char)4; def y = (short)1; return x ^ y"));
assertEquals(5, exec("def x = (int)4; def y = (short)1; return x ^ y"));
assertEquals(5L, exec("def x = (long)4; def y = (short)1; return x ^ y"));
assertEquals(5, exec("def x = (byte)4; def y = (char)1; return x ^ y"));
assertEquals(5, exec("def x = (short)4; def y = (char)1; return x ^ y"));
assertEquals(5, exec("def x = (char)4; def y = (char)1; return x ^ y"));
assertEquals(5, exec("def x = (int)4; def y = (char)1; return x ^ y"));
assertEquals(5L, exec("def x = (long)4; def y = (char)1; return x ^ y"));
assertEquals(5, exec("def x = (byte)4; def y = (int)1; return x ^ y"));
assertEquals(5, exec("def x = (short)4; def y = (int)1; return x ^ y"));
assertEquals(5, exec("def x = (char)4; def y = (int)1; return x ^ y"));
assertEquals(5, exec("def x = (int)4; def y = (int)1; return x ^ y"));
assertEquals(5L, exec("def x = (long)4; def y = (int)1; return x ^ y"));
assertEquals(5L, exec("def x = (byte)4; def y = (long)1; return x ^ y"));
assertEquals(5L, exec("def x = (short)4; def y = (long)1; return x ^ y"));
assertEquals(5L, exec("def x = (char)4; def y = (long)1; return x ^ y"));
assertEquals(5L, exec("def x = (int)4; def y = (long)1; return x ^ y"));
assertEquals(5L, exec("def x = (long)4; def y = (long)1; return x ^ y"));
assertEquals(5, exec("def x = (byte)4; def y = (byte)1; return x ^ y"));
assertEquals(5, exec("def x = (short)4; def y = (short)1; return x ^ y"));
assertEquals(5, exec("def x = (char)4; def y = (char)1; return x ^ y"));
assertEquals(5, exec("def x = (int)4; def y = (int)1; return x ^ y"));
assertEquals(5L, exec("def x = (long)4; def y = (long)1; return x ^ y"));
assertEquals(false, exec("def x = true; def y = true; return x ^ y"));
assertEquals(true, exec("def x = true; def y = false; return x ^ y"));
assertEquals(true, exec("def x = false; def y = true; return x ^ y"));
assertEquals(false, exec("def x = false; def y = false; return x ^ y"));
}
public void testDefTypedLHS() {
expectScriptThrows(ClassCastException.class, () -> { exec("float x = (float)4; def y = (byte)1; return x ^ y"); });
expectScriptThrows(ClassCastException.class, () -> { exec("double x = (double)4; def y = (byte)1; return x ^ y"); });
assertEquals(5, exec("def x = (byte)4; def y = (byte)1; return x ^ y"));
assertEquals(5, exec("def x = (short)4; def y = (byte)1; return x ^ y"));
assertEquals(5, exec("def x = (char)4; def y = (byte)1; return x ^ y"));
assertEquals(5, exec("def x = (int)4; def y = (byte)1; return x ^ y"));
assertEquals(5L, exec("def x = (long)4; def y = (byte)1; return x ^ y"));
assertEquals(5, exec("def x = (byte)4; def y = (short)1; return x ^ y"));
assertEquals(5, exec("def x = (short)4; def y = (short)1; return x ^ y"));
assertEquals(5, exec("def x = (char)4; def y = (short)1; return x ^ y"));
assertEquals(5, exec("def x = (int)4; def y = (short)1; return x ^ y"));
assertEquals(5L, exec("def x = (long)4; def y = (short)1; return x ^ y"));
assertEquals(5, exec("def x = (byte)4; def y = (char)1; return x ^ y"));
assertEquals(5, exec("def x = (short)4; def y = (char)1; return x ^ y"));
assertEquals(5, exec("def x = (char)4; def y = (char)1; return x ^ y"));
assertEquals(5, exec("def x = (int)4; def y = (char)1; return x ^ y"));
assertEquals(5L, exec("def x = (long)4; def y = (char)1; return x ^ y"));
assertEquals(5, exec("def x = (byte)4; def y = (int)1; return x ^ y"));
assertEquals(5, exec("def x = (short)4; def y = (int)1; return x ^ y"));
assertEquals(5, exec("def x = (char)4; def y = (int)1; return x ^ y"));
assertEquals(5, exec("def x = (int)4; def y = (int)1; return x ^ y"));
assertEquals(5L, exec("def x = (long)4; def y = (int)1; return x ^ y"));
assertEquals(5L, exec("def x = (byte)4; def y = (long)1; return x ^ y"));
assertEquals(5L, exec("def x = (short)4; def y = (long)1; return x ^ y"));
assertEquals(5L, exec("def x = (char)4; def y = (long)1; return x ^ y"));
assertEquals(5L, exec("def x = (int)4; def y = (long)1; return x ^ y"));
assertEquals(5L, exec("def x = (long)4; def y = (long)1; return x ^ y"));
assertEquals(5, exec("def x = (byte)4; def y = (byte)1; return x ^ y"));
assertEquals(5, exec("def x = (short)4; def y = (short)1; return x ^ y"));
assertEquals(5, exec("def x = (char)4; def y = (char)1; return x ^ y"));
assertEquals(5, exec("def x = (int)4; def y = (int)1; return x ^ y"));
assertEquals(5L, exec("def x = (long)4; def y = (long)1; return x ^ y"));
assertEquals(false, exec("def x = true; def y = true; return x ^ y"));
assertEquals(true, exec("def x = true; def y = false; return x ^ y"));
assertEquals(true, exec("def x = false; def y = true; return x ^ y"));
assertEquals(false, exec("def x = false; def y = false; return x ^ y"));
}
public void testDefTypedRHS() {
expectScriptThrows(ClassCastException.class, () -> { exec("def x = (float)4; byte y = (byte)1; return x ^ y"); });
expectScriptThrows(ClassCastException.class, () -> { exec("def x = (double)4; byte y = (byte)1; return x ^ y"); });
assertEquals(5, exec("def x = (byte)4; byte y = (byte)1; return x ^ y"));
assertEquals(5, exec("def x = (short)4; byte y = (byte)1; return x ^ y"));
assertEquals(5, exec("def x = (char)4; byte y = (byte)1; return x ^ y"));
assertEquals(5, exec("def x = (int)4; byte y = (byte)1; return x ^ y"));
assertEquals(5L, exec("def x = (long)4; byte y = (byte)1; return x ^ y"));
assertEquals(5, exec("def x = (byte)4; short y = (short)1; return x ^ y"));
assertEquals(5, exec("def x = (short)4; short y = (short)1; return x ^ y"));
assertEquals(5, exec("def x = (char)4; short y = (short)1; return x ^ y"));
assertEquals(5, exec("def x = (int)4; short y = (short)1; return x ^ y"));
assertEquals(5L, exec("def x = (long)4; short y = (short)1; return x ^ y"));
assertEquals(5, exec("def x = (byte)4; char y = (char)1; return x ^ y"));
assertEquals(5, exec("def x = (short)4; char y = (char)1; return x ^ y"));
assertEquals(5, exec("def x = (char)4; char y = (char)1; return x ^ y"));
assertEquals(5, exec("def x = (int)4; char y = (char)1; return x ^ y"));
assertEquals(5L, exec("def x = (long)4; char y = (char)1; return x ^ y"));
assertEquals(5, exec("def x = (byte)4; int y = (int)1; return x ^ y"));
assertEquals(5, exec("def x = (short)4; int y = (int)1; return x ^ y"));
assertEquals(5, exec("def x = (char)4; int y = (int)1; return x ^ y"));
assertEquals(5, exec("def x = (int)4; int y = (int)1; return x ^ y"));
assertEquals(5L, exec("def x = (long)4; int y = (int)1; return x ^ y"));
assertEquals(5L, exec("def x = (byte)4; long y = (long)1; return x ^ y"));
assertEquals(5L, exec("def x = (short)4; long y = (long)1; return x ^ y"));
assertEquals(5L, exec("def x = (char)4; long y = (long)1; return x ^ y"));
assertEquals(5L, exec("def x = (int)4; long y = (long)1; return x ^ y"));
assertEquals(5L, exec("def x = (long)4; long y = (long)1; return x ^ y"));
assertEquals(5, exec("def x = (byte)4; byte y = (byte)1; return x ^ y"));
assertEquals(5, exec("def x = (short)4; short y = (short)1; return x ^ y"));
assertEquals(5, exec("def x = (char)4; char y = (char)1; return x ^ y"));
assertEquals(5, exec("def x = (int)4; int y = (int)1; return x ^ y"));
assertEquals(5L, exec("def x = (long)4; long y = (long)1; return x ^ y"));
assertEquals(false, exec("def x = true; boolean y = true; return x ^ y"));
assertEquals(true, exec("def x = true; boolean y = false; return x ^ y"));
assertEquals(true, exec("def x = false; boolean y = true; return x ^ y"));
assertEquals(false, exec("def x = false; boolean y = false; return x ^ y"));
}
public void testCompoundAssignment() {
// boolean
assertEquals(false, exec("boolean x = true; x ^= true; return x;"));
assertEquals(true, exec("boolean x = true; x ^= false; return x;"));
assertEquals(true, exec("boolean x = false; x ^= true; return x;"));
assertEquals(false, exec("boolean x = false; x ^= false; return x;"));
assertEquals(false, exec("boolean[] x = new boolean[1]; x[0] = true; x[0] ^= true; return x[0];"));
assertEquals(true, exec("boolean[] x = new boolean[1]; x[0] = true; x[0] ^= false; return x[0];"));
assertEquals(true, exec("boolean[] x = new boolean[1]; x[0] = false; x[0] ^= true; return x[0];"));
assertEquals(false, exec("boolean[] x = new boolean[1]; x[0] = false; x[0] ^= false; return x[0];"));
// byte
assertEquals((byte) (13 ^ 14), exec("byte x = 13; x ^= 14; return x;"));
// short
assertEquals((short) (13 ^ 14), exec("short x = 13; x ^= 14; return x;"));
// char
assertEquals((char) (13 ^ 14), exec("char x = 13; x ^= 14; return x;"));
// int
assertEquals(13 ^ 14, exec("int x = 13; x ^= 14; return x;"));
// long
assertEquals((long) (13 ^ 14), exec("long x = 13L; x ^= 14; return x;"));
}
public void testBogusCompoundAssignment() {
expectScriptThrows(ClassCastException.class, () -> { exec("float x = 4; int y = 1; x ^= y"); });
expectScriptThrows(ClassCastException.class, () -> { exec("double x = 4; int y = 1; x ^= y"); });
expectScriptThrows(ClassCastException.class, () -> { exec("int x = 4; float y = 1; x ^= y"); });
expectScriptThrows(ClassCastException.class, () -> { exec("int x = 4; double y = 1; x ^= y"); });
}
public void testCompoundAssignmentDef() {
// boolean
assertEquals(false, exec("def x = true; x ^= true; return x;"));
assertEquals(true, exec("def x = true; x ^= false; return x;"));
assertEquals(true, exec("def x = false; x ^= true; return x;"));
assertEquals(false, exec("def x = false; x ^= false; return x;"));
assertEquals(false, exec("def[] x = new def[1]; x[0] = true; x[0] ^= true; return x[0];"));
assertEquals(true, exec("def[] x = new def[1]; x[0] = true; x[0] ^= false; return x[0];"));
assertEquals(true, exec("def[] x = new def[1]; x[0] = false; x[0] ^= true; return x[0];"));
assertEquals(false, exec("def[] x = new def[1]; x[0] = false; x[0] ^= false; return x[0];"));
// byte
assertEquals((byte) (13 ^ 14), exec("def x = (byte)13; x ^= 14; return x;"));
// short
assertEquals((short) (13 ^ 14), exec("def x = (short)13; x ^= 14; return x;"));
// char
assertEquals((char) (13 ^ 14), exec("def x = (char)13; x ^= 14; return x;"));
// int
assertEquals(13 ^ 14, exec("def x = 13; x ^= 14; return x;"));
// long
assertEquals((long) (13 ^ 14), exec("def x = 13L; x ^= 14; return x;"));
}
public void testDefBogusCompoundAssignment() {
expectScriptThrows(ClassCastException.class, () -> { exec("def x = 4F; int y = 1; x ^= y"); });
expectScriptThrows(ClassCastException.class, () -> { exec("def x = 4D; int y = 1; x ^= y"); });
expectScriptThrows(ClassCastException.class, () -> { exec("int x = 4; def y = (float)1; x ^= y"); });
expectScriptThrows(ClassCastException.class, () -> { exec("int x = 4; def y = (double)1; x ^= y"); });
}
}
| XorTests |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/plugins/PluginIntrospectorTests.java | {
"start": 2960,
"end": 3215
} | class ____ extends Plugin implements RobRepositoryPlugin {}
assertThat(pluginIntrospector.interfaces(FooRepositoryPlugin.class), contains("RepositoryPlugin"));
}
public void testInterfacesPluginExtends() {
abstract | FooRepositoryPlugin |
java | google__dagger | javatests/dagger/internal/codegen/ConflictingEntryPointsTest.java | {
"start": 7388,
"end": 7971
} | interface ____ {",
" @BindsInstance Builder foo(Object foo);",
" TestComponent build();",
" }",
"}");
CompilerTests.daggerCompiler(base1, base2, component)
.withProcessingOptions(compilerMode.processorOptions())
.compile(subject -> subject.hasErrorCount(0));
}
@Test
public void sameQualifiedKey() {
Source base1 =
CompilerTests.javaSource(
"test.Base1", //
"package test;",
"",
"import javax.inject.Named;",
"",
" | Builder |
java | google__guava | android/guava/src/com/google/common/util/concurrent/ListenableScheduledFuture.java | {
"start": 794,
"end": 944
} | interface ____ implement both {@link ListenableFuture} and {@link ScheduledFuture}.
*
* @author Anthony Zana
* @since 15.0
*/
@GwtCompatible
public | to |
java | google__dagger | javatests/dagger/functional/tck/EngineModule.java | {
"start": 1056,
"end": 1197
} | class ____ we prefer to leave unmodified.
V8Engine engine = new V8Engine();
injector.injectMembers(engine);
return engine;
}
}
| that |
java | apache__camel | components/camel-bean/src/test/java/org/apache/camel/component/bean/BeanInfoTest.java | {
"start": 4750,
"end": 4925
} | class ____ {
@Handler
public void myMethod() {
}
public String myOtherMethod() {
return "";
}
}
public static | MyClass |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/SpatialCentroidTests.java | {
"start": 4319,
"end": 5721
} | class ____ extends BaseMatcher<BytesRef> {
private final double x;
private final double y;
private final Matcher<Double> mx;
private final Matcher<Double> my;
private TestCentroidMatcher(double x, double y, double error) {
this.x = x;
this.y = y;
this.mx = closeTo(x, error);
this.my = closeTo(y, error);
}
@Override
public boolean matches(Object item) {
if (item instanceof BytesRef wkb) {
var point = (Point) WellKnownBinary.fromWKB(GeometryValidator.NOOP, false, wkb.bytes, wkb.offset, wkb.length);
return mx.matches(point.getX()) && my.matches(point.getY());
}
return false;
}
@Override
public void describeMismatch(Object item, Description description) {
if (item instanceof BytesRef wkb) {
var point = (Point) WellKnownBinary.fromWKB(GeometryValidator.NOOP, false, wkb.bytes, wkb.offset, wkb.length);
description.appendText("was ").appendValue(point);
} else {
description.appendText("was ").appendValue(item);
}
}
@Override
public void describeTo(Description description) {
description.appendValue(" POINT (" + x + " " + y + ")");
}
}
}
| TestCentroidMatcher |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/impl/ExpressionAdapterTest.java | {
"start": 1254,
"end": 2385
} | class ____ extends ExpressionAdapter {
@Override
protected String assertionFailureMessage(Exchange exchange) {
return "foo";
}
@Override
@SuppressWarnings("unchecked")
public <T> T evaluate(Exchange exchange, Class<T> type) {
String in = exchange.getIn().getBody(String.class);
if ("Kaboom".equals(in)) {
return null;
}
return (T) in;
}
}
@Test
public void testExpressionAdapter() {
MyExpression my = new MyExpression();
Exchange e = new DefaultExchange(context);
e.getIn().setBody("bar");
my.assertMatches("bar", e);
}
@Test
public void testExpressionAdapterFail() {
MyExpression my = new MyExpression();
Exchange e = new DefaultExchange(context);
e.getIn().setBody("Kaboom");
AssertionError ae = assertThrows(AssertionError.class,
() -> my.assertMatches("damn", e),
"Should have thrown exception");
assertTrue(ae.getMessage().contains("foo"));
}
}
| MyExpression |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsLogging.java | {
"start": 1789,
"end": 9809
} | class ____ {
private static final Logger LOG =
LoggerFactory.getLogger(IOStatisticsLogging.class);
private IOStatisticsLogging() {
}
/**
* Extract the statistics from a source object -or ""
* if it is not an instance of {@link IOStatistics},
* {@link IOStatisticsSource} or the retrieved
* statistics are null.
* <p>
* Exceptions are caught and downgraded to debug logging.
* @param source source of statistics.
* @return a string for logging.
*/
public static String ioStatisticsSourceToString(@Nullable Object source) {
try {
return ioStatisticsToString(retrieveIOStatistics(source));
} catch (RuntimeException e) {
LOG.debug("Ignoring", e);
return "";
}
}
/**
* Convert IOStatistics to a string form.
* @param statistics A statistics instance.
* @return string value or the empty string if null
*/
public static String ioStatisticsToString(
@Nullable final IOStatistics statistics) {
if (statistics != null) {
StringBuilder sb = new StringBuilder();
mapToString(sb, "counters", statistics.counters(), " ");
mapToString(sb, "gauges", statistics.gauges(), " ");
mapToString(sb, "minimums", statistics.minimums(), " ");
mapToString(sb, "maximums", statistics.maximums(), " ");
mapToString(sb, "means", statistics.meanStatistics(), " ");
return sb.toString();
} else {
return "";
}
}
/**
* Convert IOStatistics to a string form, with all the metrics sorted
* and empty value stripped.
* This is more expensive than the simple conversion, so should only
* be used for logging/output where it's known/highly likely that the
* caller wants to see the values. Not for debug logging.
* @param statistics A statistics instance.
* @return string value or the empty string if null
*/
public static String ioStatisticsToPrettyString(
@Nullable final IOStatistics statistics) {
if (statistics != null) {
StringBuilder sb = new StringBuilder();
mapToSortedString(sb, "counters", statistics.counters(),
p -> p == 0);
mapToSortedString(sb, "\ngauges", statistics.gauges(),
p -> p == 0);
mapToSortedString(sb, "\nminimums", statistics.minimums(),
p -> p < 0);
mapToSortedString(sb, "\nmaximums", statistics.maximums(),
p -> p < 0);
mapToSortedString(sb, "\nmeans", statistics.meanStatistics(),
MeanStatistic::isEmpty);
return sb.toString();
} else {
return "";
}
}
/**
* Given a map, add its entryset to the string.
* The entries are only sorted if the source entryset
* iterator is sorted, such as from a TreeMap.
* @param sb string buffer to append to
* @param type type (for output)
* @param map map to evaluate
* @param separator separator
* @param <E> type of values of the map
*/
private static <E> void mapToString(StringBuilder sb,
final String type,
final Map<String, E> map,
final String separator) {
int count = 0;
sb.append(type);
sb.append("=(");
for (Map.Entry<String, E> entry : map.entrySet()) {
if (count > 0) {
sb.append(separator);
}
count++;
sb.append(IOStatisticsBinding.entryToString(
entry.getKey(), entry.getValue()));
}
sb.append(");\n");
}
/**
* Given a map, produce a string with all the values, sorted.
* Needs to create a treemap and insert all the entries.
* @param sb string buffer to append to
* @param type type (for output)
* @param map map to evaluate
* @param <E> type of values of the map
*/
private static <E> void mapToSortedString(StringBuilder sb,
final String type,
final Map<String, E> map,
final Predicate<E> isEmpty) {
mapToString(sb, type, sortedMap(map, isEmpty), "\n");
}
/**
* Create a sorted (tree) map from an unsorted map.
* This incurs the cost of creating a map and that
* of inserting every object into the tree.
* @param source source map
* @param <E> value type
* @return a treemap with all the entries.
*/
private static <E> Map<String, E> sortedMap(
final Map<String, E> source,
final Predicate<E> isEmpty) {
Map<String, E> tm = new TreeMap<>();
for (Map.Entry<String, E> entry : source.entrySet()) {
if (!isEmpty.test(entry.getValue())) {
tm.put(entry.getKey(), entry.getValue());
}
}
return tm;
}
/**
* On demand stringifier of an IOStatisticsSource instance.
* <p>
* Whenever this object's toString() method is called, it evaluates the
* statistics.
* <p>
* This is designed to affordable to use in log statements.
* @param source source of statistics -may be null.
* @return an object whose toString() operation returns the current values.
*/
public static Object demandStringifyIOStatisticsSource(
@Nullable IOStatisticsSource source) {
return new SourceToString(source);
}
/**
* On demand stringifier of an IOStatistics instance.
* <p>
* Whenever this object's toString() method is called, it evaluates the
* statistics.
* <p>
* This is for use in log statements where for the cost of creation
* of this entry is low; it is affordable to use in log statements.
* @param statistics statistics to stringify -may be null.
* @return an object whose toString() operation returns the current values.
*/
public static Object demandStringifyIOStatistics(
@Nullable IOStatistics statistics) {
return new StatisticsToString(statistics);
}
/**
* Extract any statistics from the source and log at debug, if
* the log is set to log at debug.
* No-op if logging is not at debug or the source is null/of
* the wrong type/doesn't provide statistics.
* @param log log to log to
* @param message message for log -this must contain "{}" for the
* statistics report to actually get logged.
* @param source source object
*/
public static void logIOStatisticsAtDebug(
Logger log,
String message,
Object source) {
if (log.isDebugEnabled()) {
// robust extract and convert to string
String stats = ioStatisticsSourceToString(source);
if (!stats.isEmpty()) {
log.debug(message, stats);
}
}
}
/**
* Extract any statistics from the source and log to
* this class's log at debug, if
* the log is set to log at debug.
* No-op if logging is not at debug or the source is null/of
* the wrong type/doesn't provide statistics.
* @param message message for log -this must contain "{}" for the
* statistics report to actually get logged.
* @param source source object
*/
public static void logIOStatisticsAtDebug(
String message,
Object source) {
logIOStatisticsAtDebug(LOG, message, source);
}
/**
* A method to log IOStatistics from a source at different levels.
*
* @param log Logger for logging.
* @param level LOG level.
* @param source Source to LOG.
*/
public static void logIOStatisticsAtLevel(Logger log, String level,
Object source) {
IOStatistics stats = retrieveIOStatistics(source);
if (stats != null) {
switch (level.toLowerCase(Locale.US)) {
case IOSTATISTICS_LOGGING_LEVEL_INFO:
LOG.info("IOStatistics: {}", ioStatisticsToPrettyString(stats));
break;
case IOSTATISTICS_LOGGING_LEVEL_ERROR:
LOG.error("IOStatistics: {}", ioStatisticsToPrettyString(stats));
break;
case IOSTATISTICS_LOGGING_LEVEL_WARN:
LOG.warn("IOStatistics: {}", ioStatisticsToPrettyString(stats));
break;
default:
logIOStatisticsAtDebug(log, "IOStatistics: {}", source);
}
}
}
/**
* On demand stringifier.
* <p>
* Whenever this object's toString() method is called, it
* retrieves the latest statistics instance and re-evaluates it.
*/
private static final | IOStatisticsLogging |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/embeddables/generics/GenericEmbeddedIdentifierMappedSuperclassTest.java | {
"start": 11960,
"end": 12409
} | class ____ extends AccessReport<Group, GroupReport> {
@Override
@EmbeddedId
@AssociationOverrides( @AssociationOverride( name = "entity", joinColumns = @JoinColumn( name = "group_id" ) ) )
public EmbeddableKey<Group, GroupReport> getId() {
return super.getId();
}
@Override
public void setId(EmbeddableKey<Group, GroupReport> key) {
super.setId( key );
}
}
@Entity( name = "GroupAccessReport" )
public static | GroupAccessReport |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/localtime/LocalTimeAssert_hasSecond_Test.java | {
"start": 1054,
"end": 1936
} | class ____ {
@Test
void should_pass_if_actual_is_in_given_second() {
// GIVEN
LocalTime actual = LocalTime.of(23, 59, 59);
// WHEN/THEN
then(actual).hasSecond(59);
}
@Test
void should_fail_if_actual_is_not_in_given_second() {
// GIVEN
LocalTime actual = LocalTime.of(23, 59, 59);
int expectedSecond = 58;
// WHEN
var assertionError = expectAssertionError(() -> assertThat(actual).hasSecond(expectedSecond));
// THEN
then(assertionError).hasMessage(shouldHaveDateField(actual, "second", expectedSecond).create());
}
@Test
void should_fail_if_actual_is_null() {
// GIVEN
LocalTime actual = null;
// WHEN
var assertionError = expectAssertionError(() -> assertThat(actual).hasHour(LocalTime.now().getHour()));
// THEN
then(assertionError).hasMessage(actualIsNull());
}
}
| LocalTimeAssert_hasSecond_Test |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/blob/BlobCachePutTest.java | {
"start": 3324,
"end": 3661
} | class ____ {
@TempDir private java.nio.file.Path tempDir;
private final Random rnd = new Random();
// --- concurrency tests for utility methods which could fail during the put operation ---
/** Checked thread that calls {@link TransientBlobCache#getStorageLocation(JobID, BlobKey)}. */
private static | BlobCachePutTest |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Invoker.java | {
"start": 3110,
"end": 19768
} | class ____ {
private static final Logger LOG = LoggerFactory.getLogger(Invoker.class);
/**
* Retry policy to use.
*/
private final RetryPolicy retryPolicy;
/**
* Default retry handler.
*/
private final Retried retryCallback;
/**
* Instantiate.
* @param retryPolicy retry policy for all operations.
* @param retryCallback standard retry policy
*/
public Invoker(
RetryPolicy retryPolicy,
Retried retryCallback) {
this.retryPolicy = retryPolicy;
this.retryCallback = retryCallback;
}
public RetryPolicy getRetryPolicy() {
return retryPolicy;
}
public Retried getRetryCallback() {
return retryCallback;
}
/**
* Execute a function, translating any exception into an IOException.
* @param action action to execute (used in error messages)
* @param path path of work (used in error messages)
* @param operation operation to execute
* @param <T> type of return value
* @return the result of the function call
* @throws IOException any IOE raised, or translated exception
*/
@Retries.OnceTranslated
public static <T> T once(String action, String path,
CallableRaisingIOE<T> operation)
throws IOException {
try (DurationInfo ignored = new DurationInfo(LOG, false, "%s", action)) {
return operation.apply();
} catch (SdkException e) {
throw S3AUtils.translateException(action, path, e);
}
}
/**
* Execute a function, translating any exception into an IOException.
* The supplied duration tracker instance is updated with success/failure.
* @param action action to execute (used in error messages)
* @param path path of work (used in error messages)
* @param tracker tracker to update
* @param operation operation to execute
* @param <T> type of return value
* @return the result of the function call
* @throws IOException any IOE raised, or translated exception
*/
@Retries.OnceTranslated
public static <T> T onceTrackingDuration(
final String action,
final String path,
final DurationTracker tracker,
final CallableRaisingIOE<T> operation)
throws IOException {
try {
return invokeTrackingDuration(tracker, operation);
} catch (SdkException e) {
throw S3AUtils.translateException(action, path, e);
}
}
/**
* Execute an operation with no result.
* @param action action to execute (used in error messages)
* @param path path of work (used in error messages)
* @param operation operation to execute
* @throws IOException any IOE raised, or translated exception
*/
@Retries.OnceTranslated
public static void once(String action, String path,
InvocationRaisingIOE operation) throws IOException {
once(action, path,
() -> {
operation.apply();
return null;
});
}
/**
*
* Wait for a future, translating SdkException into an IOException.
* @param action action to execute (used in error messages)
* @param path path of work (used in error messages)
* @param future future to await for
* @param <T> type of return value
* @return the result of the function call
* @throws IOException any IOE raised, or translated exception
* @throws RuntimeException any other runtime exception
*/
@Retries.OnceTranslated
public static <T> T onceInTheFuture(String action,
String path,
final Future<T> future)
throws IOException {
try (DurationInfo ignored = new DurationInfo(LOG, false, "%s", action)) {
return FutureIO.awaitFuture(future);
} catch (SdkException e) {
throw S3AUtils.translateException(action, path, e);
}
}
/**
* Execute an operation and ignore all raised IOExceptions; log at INFO;
* full stack only at DEBUG.
* @param log log to use.
* @param action action to include in log
* @param path optional path to include in log
* @param operation operation to execute
* @param <T> type of operation
*/
public static <T> void ignoreIOExceptions(
Logger log,
String action,
String path,
CallableRaisingIOE<T> operation) {
try {
once(action, path, operation);
} catch (IOException e) {
String description = toDescription(action, path);
String error = e.toString();
log.info("{}: {}", description, error);
log.debug("{}", description, e);
}
}
/**
* Execute an operation and ignore all raised IOExceptions; log at INFO;
* full stack only at DEBUG.
* @param log log to use.
* @param action action to include in log
* @param path optional path to include in log
* @param operation operation to execute
*/
public static void ignoreIOExceptions(
Logger log,
String action,
String path,
InvocationRaisingIOE operation) {
ignoreIOExceptions(log, action, path,
() -> {
operation.apply();
return null;
});
}
/**
* Execute a void operation with retry processing.
* @param action action to execute (used in error messages)
* @param path path of work (used in error messages)
* @param idempotent does the operation have semantics
* which mean that it can be retried even if was already executed?
* @param retrying callback on retries
* @param operation operation to execute
* @throws IOException any IOE raised, or translated exception
*/
@Retries.RetryTranslated
public void retry(String action,
String path,
boolean idempotent,
Retried retrying,
InvocationRaisingIOE operation)
throws IOException {
retry(action, path, idempotent, retrying,
() -> {
operation.apply();
return null;
});
}
/**
* Execute a void operation with retry processing when doRetry=true, else
* just once.
* @param doRetry true if retries should be performed
* @param action action to execute (used in error messages)
* @param path path of work (used in error messages)
* @param idempotent does the operation have semantics
* which mean that it can be retried even if was already executed?
* @param retrying callback on retries
* @param operation operation to execute
* @throws IOException any IOE raised, or translated exception
*/
@Retries.RetryTranslated
public void maybeRetry(boolean doRetry,
String action,
String path,
boolean idempotent,
Retried retrying,
InvocationRaisingIOE operation)
throws IOException {
maybeRetry(doRetry, action, path, idempotent, retrying,
() -> {
operation.apply();
return null;
});
}
/**
* Execute a void operation with the default retry callback invoked.
* @param action action to execute (used in error messages)
* @param path path of work (used in error messages)
* @param idempotent does the operation have semantics
* which mean that it can be retried even if was already executed?
* @param operation operation to execute
* @throws IOException any IOE raised, or translated exception
*/
@Retries.RetryTranslated
public void retry(String action,
String path,
boolean idempotent,
InvocationRaisingIOE operation)
throws IOException {
retry(action, path, idempotent, retryCallback, operation);
}
/**
 * Execute a void operation with the default retry callback invoked when
 * doRetry=true, else just once.
 * @param doRetry true if retries should be performed
 * @param action action to execute (used in error messages)
 * @param path path of work (used in error messages)
 * @param idempotent does the operation have semantics
 * which mean that it can be retried even if was already executed?
 * @param operation operation to execute
 * @throws IOException any IOE raised, or translated exception
 */
@Retries.RetryTranslated
public void maybeRetry(
    boolean doRetry,
    String action,
    String path,
    boolean idempotent,
    InvocationRaisingIOE operation)
    throws IOException {
  // delegate with this invoker's default retry callback
  maybeRetry(doRetry, action, path, idempotent, retryCallback, operation);
}
/**
 * Execute a function with the default retry callback invoked.
 * @param action action to execute (used in error messages)
 * @param path path of work (used in error messages); may be null
 * @param idempotent does the operation have semantics
 * which mean that it can be retried even if was already executed?
 * @param operation operation to execute
 * @param <T> type of return value
 * @return the result of the call
 * @throws IOException any IOE raised, or translated exception
 */
@Retries.RetryTranslated
public <T> T retry(String action,
    @Nullable String path,
    boolean idempotent,
    CallableRaisingIOE<T> operation)
    throws IOException {
  // delegate with this invoker's default retry callback
  return retry(action, path, idempotent, retryCallback, operation);
}
/**
 * Execute a function with retry processing.
 * The operation is wrapped in {@link #once(String, String, CallableRaisingIOE)}
 * so SDK exceptions are translated before any retry decision is made.
 * @param <T> type of return value
 * @param action action to execute (used in error messages)
 * @param path path of work (used in error messages); may be null
 * @param idempotent does the operation have semantics
 * which mean that it can be retried even if was already executed?
 * @param retrying callback invoked on each retry
 * @param operation operation to execute
 * @return the result of the call
 * @throws IOException any IOE raised, or translated exception
 */
@Retries.RetryTranslated
public <T> T retry(
    String action,
    @Nullable String path,
    boolean idempotent,
    Retried retrying,
    CallableRaisingIOE<T> operation)
    throws IOException {
  final String description = toDescription(action, path);
  // translate exceptions inside the retried call
  final CallableRaisingIOE<T> translatedOnce =
      () -> once(action, path, operation);
  return retryUntranslated(description, idempotent, retrying, translatedOnce);
}
/**
 * Execute a function, retrying on failure only when {@code doRetry} is true;
 * otherwise the operation is invoked exactly once via
 * {@link #once(String, String, CallableRaisingIOE)}.
 * @param <T> type of return value
 * @param doRetry true if retries should be performed
 * @param action action to execute (used in error messages)
 * @param path path of work (used in error messages); may be null
 * @param idempotent does the operation have semantics
 * which mean that it can be retried even if was already executed?
 * @param retrying callback invoked on each retry
 * @param operation operation to execute
 * @return the result of the call
 * @throws IOException any IOE raised, or translated exception
 */
@Retries.RetryTranslated
public <T> T maybeRetry(
    boolean doRetry,
    String action,
    @Nullable String path,
    boolean idempotent,
    Retried retrying,
    CallableRaisingIOE<T> operation)
    throws IOException {
  // fast path: no retry requested, single translated invocation
  if (!doRetry) {
    return once(action, path, operation);
  }
  return retryUntranslated(
      toDescription(action, path),
      idempotent,
      retrying,
      () -> once(action, path, operation));
}
/**
 * Execute a function with retry processing, no exception translation,
 * and the default retry callback.
 * @param text description for the catching callback
 * @param idempotent does the operation have semantics
 * which mean that it can be retried even if was already executed?
 * @param operation operation to execute
 * @param <T> type of return value
 * @return the result of the call
 * @throws IOException any IOE raised
 * @throws RuntimeException any Runtime exception raised
 */
@Retries.RetryRaw
public <T> T retryUntranslated(
    String text,
    boolean idempotent,
    CallableRaisingIOE<T> operation) throws IOException {
  // delegate with this invoker's default retry callback
  return retryUntranslated(text, idempotent,
      retryCallback, operation);
}
/**
 * Execute a function with retry processing: AWS SDK Exceptions
 * are <i>not</i> translated.
 * This is the method which the others eventually invoke.
 * @param <T> type of return value
 * @param text text to include in messages
 * @param idempotent does the operation have semantics
 * which mean that it can be retried even if was already executed?
 * @param retrying callback on retries
 * @param operation operation to execute
 * @return the result of the call
 * @throws IOException any IOE raised
 * @throws SdkException any AWS exception raised
 * @throws RuntimeException these are never caught and retried
 */
@Retries.RetryRaw
public <T> T retryUntranslated(
    String text,
    boolean idempotent,
    Retried retrying,
    CallableRaisingIOE<T> operation) throws IOException {

  Preconditions.checkArgument(retrying != null, "null retrying argument");
  int retryCount = 0;
  Exception caught;
  RetryPolicy.RetryAction retryAction;
  boolean shouldRetry;
  do {
    try {
      if (retryCount > 0) {
        LOG.debug("{} retry #{}", text, retryCount);
      }
      // execute the operation, returning if successful
      return operation.apply();
    } catch (IOException | SdkException e) {
      // only IOE and SDK exceptions enter the retry loop;
      // other RuntimeExceptions propagate immediately
      caught = e;
    }
    // you only get here if the operation didn't complete
    // normally, hence caught != null
    LOG.debug("{} ; {}, ", text, caught.toString());
    LOG.trace("", caught);

    // translate the exception into an IOE for the retry policy,
    // which only understands IOExceptions
    IOException translated;
    if (caught instanceof IOException) {
      translated = (IOException) caught;
    } else {
      translated = S3AUtils.translateException(text, "/",
          (SdkException) caught);
    }

    try {
      // decide action based on operation, invocation count, etc
      retryAction = retryPolicy.shouldRetry(translated, retryCount, 0,
          idempotent);
      // is it a retry operation?
      shouldRetry = retryAction.action.equals(
          RetryPolicy.RetryAction.RETRY.action);
      if (shouldRetry) {
        // notify the callback
        retrying.onFailure(text, translated, retryCount, idempotent);
        // then sleep for the policy delay
        Thread.sleep(retryAction.delayMillis);
      }
      // increment the retry count
      retryCount++;
    } catch (InterruptedException e) {
      // sleep was interrupted
      // change the exception
      caught = new InterruptedIOException("Interrupted");
      caught.initCause(e);
      // no retry
      shouldRetry = false;
      // and re-interrupt the thread
      Thread.currentThread().interrupt();
    } catch (Exception e) {
      // the retry policy raised an exception
      // log that something happened
      LOG.warn("{}: exception in retry processing", text, e);
      // and fail the execution with the last execution exception.
      shouldRetry = false;
    }
  } while (shouldRetry);

  // retries exhausted (or retry disallowed): rethrow the last failure,
  // preserving its original (untranslated) type
  if (caught instanceof IOException) {
    throw (IOException) caught;
  } else {
    throw (SdkException) caught;
  }
}
/**
 * Execute an operation; any exception raised is simply caught and
 * logged at debug.
 * @param action action to execute
 * @param path path (for exception construction)
 * @param operation operation
 */
public static void quietly(String action,
    String path,
    InvocationRaisingIOE operation) {
  try {
    once(action, path, operation);
  } catch (Exception e) {
    // best-effort semantics: failures are logged, never propagated
    LOG.debug("Action {} failed", action, e);
  }
}
/**
 * Evaluate an operation; any exception raised is caught and
 * logged at debug, yielding an empty result.
 * @param <T> type to return
 * @param action action to execute
 * @param path path (for exception construction)
 * @param operation operation
 * @return the result of a successful operation; empty on failure
 */
public static <T> Optional<T> quietlyEval(String action,
    String path,
    CallableRaisingIOE<T> operation) {
  try {
    T result = once(action, path, operation);
    // Optional.of rejects null, so a null result is treated as a
    // failure by the catch block below and mapped to empty
    return Optional.of(result);
  } catch (Exception e) {
    LOG.debug("Action {} failed", action, e);
    return Optional.empty();
  }
}
/**
 * Build a human-readable description of an action and its path
 * for use in log and error messages.
 * @param action action
 * @param path path (may be null or empty)
 * @return string for logs
 */
private static String toDescription(String action, @Nullable String path) {
  // only mention the path when one was actually supplied
  if (StringUtils.isNotEmpty(path)) {
    return action + " on " + path;
  }
  return action;
}
/**
* Callback for retry and notification operations.
* Even if the | Invoker |
java | apache__camel | components/camel-flink/src/main/java/org/apache/camel/component/flink/DataStreamFlinkProducer.java | {
"start": 1456,
"end": 7810
} | class ____ extends DefaultProducer {
private static final Logger LOG = LoggerFactory.getLogger(DataStreamFlinkProducer.class);

// Guards one-time environment configuration. Volatile so the unsynchronized
// fast-path read in process() observes the write made under the lock
// (double-checked locking).
private volatile boolean environmentConfigured = false;

public DataStreamFlinkProducer(FlinkEndpoint endpoint) {
    super(endpoint);
}
@Override
public void process(Exchange exchange) throws Exception {
    DataStream ds = resolveDataStream(exchange);

    // Configure the execution environment on first use, once a DataStream
    // is available. Double-checked locking: the volatile flag makes the
    // unsynchronized read safe and configuration runs at most once.
    if (!environmentConfigured && ds != null) {
        synchronized (this) {
            if (!environmentConfigured) {
                configureStreamExecutionEnvironment(ds);
                environmentConfigured = true;
            }
        }
    }

    DataStreamCallback dataStreamCallback = resolveDataStreamCallback(exchange);
    Object body = exchange.getIn().getBody();

    // a List body is unpacked into individual payload arguments for the callback
    Object result = body instanceof List
            ? dataStreamCallback.onDataStream(ds, ((List) body).toArray(new Object[0]))
            : dataStreamCallback.onDataStream(ds, body);

    collectResults(exchange, result);
}
@Override
public FlinkEndpoint getEndpoint() {
    // covariant narrowing so internal callers avoid repeated casts
    return (FlinkEndpoint) super.getEndpoint();
}
/**
 * Stores the callback result on the exchange. A DataStream result is also
 * exposed via the {@code FLINK_DATASTREAM_HEADER} header; collect mode is
 * rejected for DataStreams.
 */
protected void collectResults(Exchange exchange, Object result) {
    if (!(result instanceof DataStream)) {
        // plain result: just set it as the message body
        exchange.getIn().setBody(result);
        return;
    }
    if (getEndpoint().isCollect()) {
        throw new IllegalArgumentException("collect mode not supported for Flink DataStreams.");
    }
    exchange.getIn().setBody(result);
    exchange.getIn().setHeader(FlinkConstants.FLINK_DATASTREAM_HEADER, result);
}
/**
 * Resolves the DataStream to operate on: the message header takes
 * precedence over the endpoint-configured stream.
 *
 * @throws IllegalArgumentException when neither source provides a stream
 */
protected DataStream resolveDataStream(Exchange exchange) {
    Object fromHeader = exchange.getIn().getHeader(FlinkConstants.FLINK_DATASTREAM_HEADER);
    if (fromHeader != null) {
        return (DataStream) fromHeader;
    }
    DataStream fromEndpoint = getEndpoint().getDataStream();
    if (fromEndpoint != null) {
        return fromEndpoint;
    }
    throw new IllegalArgumentException("No DataStream defined");
}
/**
 * Resolves the callback to apply: the message header takes precedence
 * over the endpoint-configured callback.
 *
 * @throws IllegalArgumentException when neither source provides a callback
 */
protected DataStreamCallback resolveDataStreamCallback(Exchange exchange) {
    Object fromHeader = exchange.getIn().getHeader(FlinkConstants.FLINK_DATASTREAM_CALLBACK_HEADER);
    if (fromHeader != null) {
        return (DataStreamCallback) fromHeader;
    }
    DataStreamCallback fromEndpoint = getEndpoint().getDataStreamCallback();
    if (fromEndpoint != null) {
        return fromEndpoint;
    }
    throw new IllegalArgumentException("Cannot resolve DataStream callback.");
}
/**
 * Configures the StreamExecutionEnvironment with the settings from the endpoint.
 * This includes execution mode, checkpointing, parallelism, and other advanced
 * options. Each concern is applied by a dedicated helper; unset endpoint
 * options are left at the environment's defaults.
 *
 * @param dataStream the DataStream to configure the environment for
 */
protected void configureStreamExecutionEnvironment(DataStream dataStream) {
    if (dataStream == null) {
        LOG.debug("No DataStream provided, skipping environment configuration");
        return;
    }
    StreamExecutionEnvironment env = dataStream.getExecutionEnvironment();
    configureExecutionMode(env);
    configureParallelism(env);
    configureCheckpointing(env);
    LOG.debug("StreamExecutionEnvironment configuration completed");
}

/** Applies the endpoint's runtime execution mode (STREAMING, BATCH, AUTOMATIC), if set. */
private void configureExecutionMode(StreamExecutionEnvironment env) {
    if (getEndpoint().getExecutionMode() == null) {
        return;
    }
    try {
        RuntimeExecutionMode mode = RuntimeExecutionMode.valueOf(getEndpoint().getExecutionMode());
        env.setRuntimeMode(mode);
        LOG.info("Set Flink runtime execution mode to: {}", mode);
    } catch (IllegalArgumentException e) {
        // invalid value: warn and keep the environment's default mode
        LOG.warn("Invalid execution mode '{}'. Valid values are: STREAMING, BATCH, AUTOMATIC",
                getEndpoint().getExecutionMode());
    }
}

/** Applies parallelism and max parallelism, if configured on the endpoint. */
private void configureParallelism(StreamExecutionEnvironment env) {
    if (getEndpoint().getParallelism() != null) {
        env.setParallelism(getEndpoint().getParallelism());
        LOG.info("Set Flink parallelism to: {}", getEndpoint().getParallelism());
    }
    if (getEndpoint().getMaxParallelism() != null) {
        env.setMaxParallelism(getEndpoint().getMaxParallelism());
        LOG.info("Set Flink max parallelism to: {}", getEndpoint().getMaxParallelism());
    }
}

/** Enables and tunes checkpointing when a positive interval is configured. */
private void configureCheckpointing(StreamExecutionEnvironment env) {
    if (getEndpoint().getCheckpointInterval() == null || getEndpoint().getCheckpointInterval() <= 0) {
        return;
    }
    env.enableCheckpointing(getEndpoint().getCheckpointInterval());
    LOG.info("Enabled checkpointing with interval: {} ms", getEndpoint().getCheckpointInterval());

    if (getEndpoint().getCheckpointingMode() != null) {
        try {
            CheckpointingMode mode = CheckpointingMode.valueOf(getEndpoint().getCheckpointingMode());
            env.getCheckpointConfig().setCheckpointingMode(mode);
            LOG.info("Set checkpointing mode to: {}", mode);
        } catch (IllegalArgumentException e) {
            // invalid value: warn and keep the checkpoint config's default mode
            LOG.warn("Invalid checkpointing mode '{}'. Valid values are: EXACTLY_ONCE, AT_LEAST_ONCE",
                    getEndpoint().getCheckpointingMode());
        }
    }
    if (getEndpoint().getCheckpointTimeout() != null) {
        env.getCheckpointConfig().setCheckpointTimeout(getEndpoint().getCheckpointTimeout());
        LOG.info("Set checkpoint timeout to: {} ms", getEndpoint().getCheckpointTimeout());
    }
    if (getEndpoint().getMinPauseBetweenCheckpoints() != null) {
        env.getCheckpointConfig()
                .setMinPauseBetweenCheckpoints(getEndpoint().getMinPauseBetweenCheckpoints());
        LOG.info("Set min pause between checkpoints to: {} ms",
                getEndpoint().getMinPauseBetweenCheckpoints());
    }
}
}
| DataStreamFlinkProducer |
java | apache__camel | components/camel-chunk/src/main/java/org/apache/camel/component/chunk/ChunkComponent.java | {
"start": 1070,
"end": 3266
} | class ____ extends DefaultComponent {
// component-level defaults, copied onto every endpoint created by this component
@Metadata(defaultValue = "true", description = "Sets whether to use resource content cache or not")
private boolean contentCache = true;
@Metadata
private boolean allowTemplateFromHeader;
@Metadata
private boolean allowContextMapAll;

public ChunkComponent() {
}
@Override
protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
    ChunkEndpoint endpoint = new ChunkEndpoint(uri, this, remaining);
    // seed the endpoint with component-level defaults first...
    endpoint.setAllowTemplateFromHeader(allowTemplateFromHeader);
    endpoint.setAllowContextMapAll(allowContextMapAll);
    endpoint.setContentCache(contentCache);
    // ...then apply URI parameters so per-endpoint options override them
    setProperties(endpoint, parameters);
    return endpoint;
}
/**
 * Whether dynamic templates may be supplied via message header.
 */
public boolean isAllowTemplateFromHeader() {
    return allowTemplateFromHeader;
}

/**
 * Whether to allow to use resource template from header or not (default false).
 *
 * Enabling this allows to specify dynamic templates via message header. However this can be seen as a potential
 * security vulnerability if the header is coming from a malicious user, so use this with care.
 */
public void setAllowTemplateFromHeader(boolean allowTemplateFromHeader) {
    this.allowTemplateFromHeader = allowTemplateFromHeader;
}

/**
 * Whether the template context map exposes full Exchange/CamelContext access.
 */
public boolean isAllowContextMapAll() {
    return allowContextMapAll;
}

/**
 * Sets whether the context map should allow access to all details. By default only the message body and headers can
 * be accessed. This option can be enabled for full access to the current Exchange and CamelContext. Doing so impose
 * a potential security risk as this opens access to the full power of CamelContext API.
 */
public void setAllowContextMapAll(boolean allowContextMapAll) {
    this.allowContextMapAll = allowContextMapAll;
}

/**
 * Whether resource content caching is enabled (default true).
 */
public boolean isContentCache() {
    return contentCache;
}

/**
 * Sets whether to use resource content cache or not
 */
public void setContentCache(boolean contentCache) {
    this.contentCache = contentCache;
}
}
| ChunkComponent |
java | spring-projects__spring-framework | spring-beans/src/main/java/org/springframework/beans/factory/support/DefaultListableBeanFactory.java | {
"start": 5738,
"end": 7080
} | class ____ extends AbstractAutowireCapableBeanFactory
implements ConfigurableListableBeanFactory, BeanDefinitionRegistry, Serializable {
/**
* System property that instructs Spring to enforce strict locking during bean creation,
* rather than the mix of strict and lenient locking that 6.2 applies by default. Setting
* this flag to "true" restores 6.1.x style locking in the entire pre-instantiation phase.
* <p>By default, the factory infers strict locking from the encountered thread names:
* If additional threads have names that match the thread prefix of the main bootstrap thread,
* they are considered external (multiple external bootstrap threads calling into the factory)
* and therefore have strict locking applied to them. This inference can be turned off through
* explicitly setting this flag to "false" rather than leaving it unspecified.
* @since 6.2.6
* @see #preInstantiateSingletons()
*/
public static final String STRICT_LOCKING_PROPERTY_NAME = "spring.locking.strict";
private static @Nullable Class<?> jakartaInjectProviderClass;
static {
try {
jakartaInjectProviderClass =
ClassUtils.forName("jakarta.inject.Provider", DefaultListableBeanFactory.class.getClassLoader());
}
catch (ClassNotFoundException ex) {
// JSR-330 API not available - Provider | DefaultListableBeanFactory |
java | elastic__elasticsearch | x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java | {
"start": 4292,
"end": 4592
} | class ____ extends AbstractShapeGeometryFieldMapper<Geometry> {
public static final String CONTENT_TYPE = "geo_shape";
private static Builder builder(FieldMapper in) {
return ((GeoShapeWithDocValuesFieldMapper) in).builder;
}
public static final | GeoShapeWithDocValuesFieldMapper |
java | elastic__elasticsearch | plugins/examples/security-authorization-engine/src/main/java/org/elasticsearch/example/AuthorizationEnginePlugin.java | {
"start": 793,
"end": 868
} | class ____ extends Plugin implements ActionPlugin {
}
| AuthorizationEnginePlugin |
java | lettuce-io__lettuce-core | src/test/java/io/lettuce/core/commands/SortCommandIntegrationTests.java | {
"start": 1631,
"end": 5465
} | class ____ extends TestSupport {
private final RedisCommands<String, String> redis;

@Inject
protected SortCommandIntegrationTests(RedisCommands<String, String> redis) {
    this.redis = redis;
}

@BeforeEach
void setUp() {
    // start every test from an empty keyspace
    this.redis.flushall();
}
// SORT with default (numeric ascending) and explicit ASC ordering.
@Test
void sort() {
    redis.rpush(key, "3", "2", "1");
    assertThat(redis.sort(key)).isEqualTo(list("1", "2", "3"));
    assertThat(redis.sort(key, asc())).isEqualTo(list("1", "2", "3"));
}

// SORT delivered through a streaming channel; the returned count must
// match the number of streamed elements.
@Test
void sortStreaming() {
    redis.rpush(key, "3", "2", "1");

    ListStreamingAdapter<String> streamingAdapter = new ListStreamingAdapter<>();
    Long count = redis.sort(streamingAdapter, key);

    assertThat(count.longValue()).isEqualTo(3);
    assertThat(streamingAdapter.getList()).isEqualTo(list("1", "2", "3"));
    streamingAdapter.getList().clear();

    count = redis.sort(streamingAdapter, key, desc());
    assertThat(count.longValue()).isEqualTo(3);
    assertThat(streamingAdapter.getList()).isEqualTo(list("3", "2", "1"));
}

// SORT ALPHA: lexicographic ordering of non-numeric elements.
@Test
void sortAlpha() {
    redis.rpush(key, "A", "B", "C");
    assertThat(redis.sort(key, alpha().desc())).isEqualTo(list("C", "B", "A"));
}

// SORT BY: order by external weight keys instead of the element values.
@Test
void sortBy() {
    redis.rpush(key, "foo", "bar", "baz");
    redis.set("weight_foo", "8");
    redis.set("weight_bar", "4");
    redis.set("weight_baz", "2");
    assertThat(redis.sort(key, by("weight_*"))).isEqualTo(list("baz", "bar", "foo"));
}

@Test
void sortDesc() {
    redis.rpush(key, "1", "2", "3");
    assertThat(redis.sort(key, desc())).isEqualTo(list("3", "2", "1"));
}

// SORT GET: return values of external keys referenced by the sorted elements.
@Test
void sortGet() {
    redis.rpush(key, "1", "2");
    redis.set("obj_1", "foo");
    redis.set("obj_2", "bar");
    assertThat(redis.sort(key, get("obj_*"))).isEqualTo(list("foo", "bar"));
}

// SORT LIMIT offset/count pagination over the sorted result.
@Test
void sortLimit() {
    redis.rpush(key, "3", "2", "1");
    assertThat(redis.sort(key, limit(1, 2))).isEqualTo(list("2", "3"));
}

// SORT ... STORE: persist the sorted result into a destination list.
@Test
void sortStore() {
    redis.rpush("one", "1", "2", "3");
    assertThat(redis.sortStore("one", desc(), "two")).isEqualTo(3);
    assertThat(redis.lrange("two", 0, -1)).isEqualTo(list("3", "2", "1"));
}
// SORT_RO variants: read-only form of SORT (usable on replicas);
// guarded so they only run against servers that support the command.
@Test
@EnabledOnCommand("SORT_RO")
void sortReadOnly() {
    redis.rpush(key, "3", "2", "1");
    assertThat(redis.sortReadOnly(key)).isEqualTo(list("1", "2", "3"));
    assertThat(redis.sortReadOnly(key, asc())).isEqualTo(list("1", "2", "3"));
}

@Test
@EnabledOnCommand("SORT_RO")
void sortReadOnlyStreaming() {
    redis.rpush(key, "3", "2", "1");

    ListStreamingAdapter<String> streamingAdapter = new ListStreamingAdapter<>();
    Long count = redis.sortReadOnly(streamingAdapter, key);

    assertThat(count.longValue()).isEqualTo(3);
    assertThat(streamingAdapter.getList()).isEqualTo(list("1", "2", "3"));
    streamingAdapter.getList().clear();

    count = redis.sortReadOnly(streamingAdapter, key, desc());
    assertThat(count.longValue()).isEqualTo(3);
    assertThat(streamingAdapter.getList()).isEqualTo(list("3", "2", "1"));
}

@Test
@EnabledOnCommand("SORT_RO")
void sortReadOnlyAlpha() {
    redis.rpush(key, "A", "B", "C");
    assertThat(redis.sortReadOnly(key, alpha().desc())).isEqualTo(list("C", "B", "A"));
}

@Test
@EnabledOnCommand("SORT_RO")
void sortReadOnlyBy() {
    redis.rpush(key, "foo", "bar", "baz");
    redis.set("weight_foo", "8");
    redis.set("weight_bar", "4");
    redis.set("weight_baz", "2");
    assertThat(redis.sortReadOnly(key, by("weight_*"))).isEqualTo(list("baz", "bar", "foo"));
}
}
| SortCommandIntegrationTests |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/health/node/UpdateHealthInfoCacheActionTests.java | {
"start": 1930,
"end": 6213
} | class ____ extends ESTestCase {
// shared across all tests in the class; created/destroyed in
// beforeClass/afterClass because thread pools are expensive
private static ThreadPool threadPool;

// per-test fixtures, rebuilt in setUp and closed in tearDown
private ClusterService clusterService;
private TransportService transportService;
private DiscoveryNode localNode;
private DiscoveryNode[] allNodes;

@BeforeClass
public static void beforeClass() {
    threadPool = new TestThreadPool("UpdateHealthInfoCacheAction");
}

@Before
@Override
public void setUp() throws Exception {
    super.setUp();
    clusterService = createClusterService(threadPool);
    // capturing transport: no real network, requests are recorded
    CapturingTransport transport = new CapturingTransport();
    transportService = transport.createTransportService(
        clusterService.getSettings(),
        threadPool,
        TransportService.NOOP_TRANSPORT_INTERCEPTOR,
        x -> clusterService.localNode(),
        null,
        Collections.emptySet()
    );
    transportService.start();
    transportService.acceptIncomingRequests();
    localNode = DiscoveryNodeUtils.builder("local_node")
        .roles(Set.of(DiscoveryNodeRole.MASTER_ROLE, DiscoveryNodeRole.DATA_ROLE))
        .build();
    allNodes = new DiscoveryNode[] { localNode };
}

@After
public void tearDown() throws Exception {
    super.tearDown();
    clusterService.close();
    transportService.close();
}

@AfterClass
public static void afterClass() {
    ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS);
    threadPool = null;
}
// Executes the transport action against a single-node cluster state and
// verifies both the acknowledged response and that the node's disk health
// ends up in the cache.
public void testAction() throws ExecutionException, InterruptedException {
    DiskHealthInfo diskHealthInfo = new DiskHealthInfo(HealthStatus.GREEN, null);
    Request request = new Request.Builder().nodeId(localNode.getId()).diskHealthInfo(diskHealthInfo).build();
    PlainActionFuture<AcknowledgedResponse> listener = new PlainActionFuture<>();
    setState(clusterService, ClusterStateCreationUtils.state(localNode, localNode, localNode, allNodes));
    HealthInfoCache healthInfoCache = HealthInfoCache.create(clusterService);
    final AcknowledgedResponse expectedResponse = AcknowledgedResponse.of(true);
    ActionTestUtils.execute(
        new UpdateHealthInfoCacheAction.TransportAction(
            transportService,
            clusterService,
            threadPool,
            new ActionFilters(Set.of()),
            healthInfoCache
        ),
        null,
        request,
        listener
    );
    AcknowledgedResponse actualResponse = listener.get();
    assertThat(actualResponse, equalTo(expectedResponse));
    assertThat(healthInfoCache.getHealthInfo().diskInfoByNode().get(localNode.getId()), equalTo(diskHealthInfo));
}

// Round-trips the request through serialization; mutateRequest supplies
// the "not equal" counterexamples required by the equals/hashCode checker.
public void testRequestSerialization() {
    // We start off with an "empty" request (i.e. only nodeId set), and let #mutateRequest change one of the fields at a time.
    Request request = new Request.Builder().nodeId(randomAlphaOfLength(10)).build();
    EqualsHashCodeTestUtils.checkEqualsAndHashCode(
        request,
        serializedRequest -> copyWriteable(serializedRequest, writableRegistry(), Request::new),
        this::mutateRequest
    );
}
// Returns a copy of the request with exactly one randomly-chosen field
// changed, guaranteeing inequality with the input.
private Request mutateRequest(Request request) {
    String nodeId = request.getNodeId();
    DiskHealthInfo diskHealthInfo = request.getDiskHealthInfo();
    var dslHealthInfo = request.getDslHealthInfo();
    var repoHealthInfo = request.getRepositoriesHealthInfo();
    // cases 0..3 cover all four fields; the default arm guards against the
    // selector range changing (randomInt's bound appears inclusive here —
    // NOTE(review): confirm against ESTestCase#randomInt)
    switch (randomInt(3)) {
        case 0 -> nodeId = randomAlphaOfLength(10);
        case 1 -> diskHealthInfo = randomValueOtherThan(diskHealthInfo, HealthInfoTests::randomDiskHealthInfo);
        case 2 -> dslHealthInfo = randomValueOtherThan(dslHealthInfo, HealthInfoTests::randomDslHealthInfo);
        case 3 -> repoHealthInfo = randomValueOtherThan(repoHealthInfo, HealthInfoTests::randomRepoHealthInfo);
        default -> throw new IllegalStateException();
    }
    return new Request.Builder().nodeId(nodeId)
        .diskHealthInfo(diskHealthInfo)
        .dslHealthInfo(dslHealthInfo)
        .repositoriesHealthInfo(repoHealthInfo)
        .build();
}
}
| UpdateHealthInfoCacheActionTests |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-client/deployment/src/main/java/io/quarkus/rest/client/reactive/deployment/MicroProfileRestClientEnricher.java | {
"start": 67859,
"end": 68190
} | class ____ extends Node {
Verbatim(String value) {
    super(value);
}

/** Debug representation; renders a null value as the literal "null". */
@Override
public String toString() {
    return String.format("Verbatim{value='%s'}", value);
}
}
static | Verbatim |
java | elastic__elasticsearch | modules/lang-painless/src/main/java/org/elasticsearch/painless/phase/DefaultIRTreeToASMBytesPhase.java | {
"start": 60968,
"end": 77738
} | interface ____
// which is resolved and replace at runtime
methodWriter.push((String) null);
if (irDefInterfaceReferenceNode.hasCondition(IRCInstanceCapture.class)) {
Variable capturedThis = writeScope.getInternalVariable("this");
methodWriter.visitVarInsn(CLASS_TYPE.getOpcode(Opcodes.ILOAD), capturedThis.getSlot());
}
List<String> captureNames = irDefInterfaceReferenceNode.getDecorationValue(IRDCaptureNames.class);
boolean captureBox = irDefInterfaceReferenceNode.hasCondition(IRCCaptureBox.class);
if (captureNames != null) {
for (String captureName : captureNames) {
Variable captureVariable = writeScope.getVariable(captureName);
methodWriter.visitVarInsn(captureVariable.getAsmType().getOpcode(Opcodes.ILOAD), captureVariable.getSlot());
if (captureBox) {
methodWriter.box(captureVariable.getAsmType());
captureBox = false;
}
}
}
}
@Override
public void visitTypedInterfaceReference(TypedInterfaceReferenceNode irTypedInterfaceReferenceNode, WriteScope writeScope) {
    MethodWriter methodWriter = writeScope.getMethodWriter();
    methodWriter.writeDebugInfo(irTypedInterfaceReferenceNode.getLocation());

    // when the reference captures the enclosing script instance, load
    // `this` first so it becomes the leading captured argument
    if (irTypedInterfaceReferenceNode.hasCondition(IRCInstanceCapture.class)) {
        Variable capturedThis = writeScope.getInternalVariable("this");
        methodWriter.visitVarInsn(CLASS_TYPE.getOpcode(Opcodes.ILOAD), capturedThis.getSlot());
    }

    List<String> captureNames = irTypedInterfaceReferenceNode.getDecorationValue(IRDCaptureNames.class);
    boolean captureBox = irTypedInterfaceReferenceNode.hasCondition(IRCCaptureBox.class);

    if (captureNames != null) {
        for (String captureName : captureNames) {
            Variable captureVariable = writeScope.getVariable(captureName);
            methodWriter.visitVarInsn(captureVariable.getAsmType().getOpcode(Opcodes.ILOAD), captureVariable.getSlot());
            // only the first captured value is boxed when IRCCaptureBox is set;
            // the flag is cleared after one use
            if (captureBox) {
                methodWriter.box(captureVariable.getAsmType());
                captureBox = false;
            }
        }
    }

    methodWriter.invokeLambdaCall(irTypedInterfaceReferenceNode.getDecorationValue(IRDReference.class));
}

@Override
public void visitTypedCaptureReference(TypedCaptureReferenceNode irTypedCaptureReferenceNode, WriteScope writeScope) {
    MethodWriter methodWriter = writeScope.getMethodWriter();
    methodWriter.writeDebugInfo(irTypedCaptureReferenceNode.getLocation());

    String methodName = irTypedCaptureReferenceNode.getDecorationValue(IRDName.class);
    // a capture reference has exactly one captured receiver: the first name
    Variable captured = writeScope.getVariable(irTypedCaptureReferenceNode.getDecorationValue(IRDCaptureNames.class).get(0));
    Class<?> expressionType = irTypedCaptureReferenceNode.getDecorationValue(IRDExpressionType.class);
    String expressionCanonicalTypeName = irTypedCaptureReferenceNode.getDecorationString(IRDExpressionType.class);

    methodWriter.visitVarInsn(captured.getAsmType().getOpcode(Opcodes.ILOAD), captured.getSlot());
    if (irTypedCaptureReferenceNode.hasCondition(IRCCaptureBox.class)) {
        methodWriter.box(captured.getAsmType());
    }
    // resolve the method reference dynamically via the REFERENCE bootstrap
    Type methodType = Type.getMethodType(MethodWriter.getType(expressionType), captured.getAsmType());
    methodWriter.invokeDefCall(methodName, methodType, DefBootstrap.REFERENCE, expressionCanonicalTypeName);
}
@Override
public void visitStatic(StaticNode irStaticNode, WriteScope writeScope) {
    // do nothing: a static "receiver" contributes no bytecode of its own
}

@Override
public void visitLoadVariable(LoadVariableNode irLoadVariableNode, WriteScope writeScope) {
    MethodWriter methodWriter = writeScope.getMethodWriter();
    Variable variable = writeScope.getVariable(irLoadVariableNode.getDecorationValue(IRDName.class));
    // getOpcode(ILOAD) maps to the type-appropriate load instruction
    methodWriter.visitVarInsn(variable.getAsmType().getOpcode(Opcodes.ILOAD), variable.getSlot());
}

@Override
public void visitNullSafeSub(NullSafeSubNode irNullSafeSubNode, WriteScope writeScope) {
    MethodWriter methodWriter = writeScope.getMethodWriter();
    methodWriter.writeDebugInfo(irNullSafeSubNode.getLocation());

    Label end = new Label();
    // duplicate the receiver: one copy is consumed by the null check,
    // the other is the operand of the child expression
    methodWriter.dup();
    // on a null receiver, skip the child and leave null on the stack
    methodWriter.ifNull(end);
    visit(irNullSafeSubNode.getChildNode(), writeScope);
    methodWriter.mark(end);
}
@Override
public void visitLoadDotArrayLengthNode(LoadDotArrayLengthNode irLoadDotArrayLengthNode, WriteScope writeScope) {
    MethodWriter methodWriter = writeScope.getMethodWriter();
    methodWriter.writeDebugInfo(irLoadDotArrayLengthNode.getLocation());
    // consumes the array reference on the stack, pushes its length
    methodWriter.arrayLength();
}

@Override
public void visitLoadDotDef(LoadDotDefNode irLoadDotDefNode, WriteScope writeScope) {
    MethodWriter methodWriter = writeScope.getMethodWriter();
    methodWriter.writeDebugInfo(irLoadDotDefNode.getLocation());
    // dynamic field/property load on a def receiver, resolved at runtime
    // through the LOAD bootstrap
    Type methodType = Type.getMethodType(
        MethodWriter.getType(irLoadDotDefNode.getDecorationValue(IRDExpressionType.class)),
        MethodWriter.getType(def.class)
    );
    methodWriter.invokeDefCall(irLoadDotDefNode.getDecorationValue(IRDValue.class), methodType, DefBootstrap.LOAD);
}

@Override
public void visitLoadDot(LoadDotNode irLoadDotNode, WriteScope writeScope) {
    MethodWriter methodWriter = writeScope.getMethodWriter();
    methodWriter.writeDebugInfo(irLoadDotNode.getLocation());

    PainlessField painlessField = irLoadDotNode.getDecorationValue(IRDField.class);
    boolean isStatic = Modifier.isStatic(painlessField.javaField().getModifiers());
    Type asmOwnerType = Type.getType(painlessField.javaField().getDeclaringClass());
    String fieldName = painlessField.javaField().getName();
    Type asmFieldType = MethodWriter.getType(painlessField.typeParameter());

    // GETSTATIC for static fields, GETFIELD (receiver on stack) otherwise
    if (isStatic) {
        methodWriter.getStatic(asmOwnerType, fieldName, asmFieldType);
    } else {
        methodWriter.getField(asmOwnerType, fieldName, asmFieldType);
    }
}
@Override
public void visitLoadDotShortcut(LoadDotShortcutNode irDotSubShortcutNode, WriteScope writeScope) {
    MethodWriter methodWriter = writeScope.getMethodWriter();
    methodWriter.writeDebugInfo(irDotSubShortcutNode.getLocation());
    invokeShortcutGetter(methodWriter, irDotSubShortcutNode.getDecorationValue(IRDMethod.class));
}

@Override
public void visitLoadListShortcut(LoadListShortcutNode irLoadListShortcutNode, WriteScope writeScope) {
    MethodWriter methodWriter = writeScope.getMethodWriter();
    methodWriter.writeDebugInfo(irLoadListShortcutNode.getLocation());
    invokeShortcutGetter(methodWriter, irLoadListShortcutNode.getDecorationValue(IRDMethod.class));
}

@Override
public void visitLoadMapShortcut(LoadMapShortcutNode irLoadMapShortcutNode, WriteScope writeScope) {
    MethodWriter methodWriter = writeScope.getMethodWriter();
    methodWriter.writeDebugInfo(irLoadMapShortcutNode.getLocation());
    invokeShortcutGetter(methodWriter, irLoadMapShortcutNode.getDecorationValue(IRDMethod.class));
}

/**
 * Shared emission for the dot/list/map shortcut getters: invokes the getter
 * and, when the whitelisted return type differs from the raw Java return
 * type, inserts a checkcast to the whitelisted type.
 */
private static void invokeShortcutGetter(MethodWriter methodWriter, PainlessMethod getterPainlessMethod) {
    methodWriter.invokeMethodCall(getterPainlessMethod);

    if (getterPainlessMethod.returnType() != getterPainlessMethod.javaMethod().getReturnType()) {
        methodWriter.checkCast(MethodWriter.getType(getterPainlessMethod.returnType()));
    }
}
@Override
public void visitLoadFieldMember(LoadFieldMemberNode irLoadFieldMemberNode, WriteScope writeScope) {
    MethodWriter methodWriter = writeScope.getMethodWriter();
    methodWriter.writeDebugInfo(irLoadFieldMemberNode.getLocation());

    boolean isStatic = irLoadFieldMemberNode.hasCondition(IRCStatic.class);
    String memberFieldName = irLoadFieldMemberNode.getDecorationValue(IRDName.class);
    Type asmMemberFieldType = MethodWriter.getType(irLoadFieldMemberNode.getDecorationValue(IRDExpressionType.class));

    if (isStatic) {
        methodWriter.getStatic(CLASS_TYPE, memberFieldName, asmMemberFieldType);
    } else {
        // instance member: load `this` before reading the field
        methodWriter.loadThis();
        methodWriter.getField(CLASS_TYPE, memberFieldName, asmMemberFieldType);
    }
}

@Override
public void visitLoadBraceDef(LoadBraceDefNode irLoadBraceDefNode, WriteScope writeScope) {
    MethodWriter methodWriter = writeScope.getMethodWriter();
    methodWriter.writeDebugInfo(irLoadBraceDefNode.getLocation());
    // dynamic indexed load on a def receiver via the ARRAY_LOAD bootstrap
    Type methodType = Type.getMethodType(
        MethodWriter.getType(irLoadBraceDefNode.getDecorationValue(IRDExpressionType.class)),
        MethodWriter.getType(def.class),
        MethodWriter.getType(irLoadBraceDefNode.getDecorationValue(IRDIndexType.class))
    );
    methodWriter.invokeDefCall("arrayLoad", methodType, DefBootstrap.ARRAY_LOAD);
}

@Override
public void visitLoadBrace(LoadBraceNode irLoadBraceNode, WriteScope writeScope) {
    MethodWriter methodWriter = writeScope.getMethodWriter();
    methodWriter.writeDebugInfo(irLoadBraceNode.getLocation());
    // statically-typed array load; expects array ref and index on the stack
    methodWriter.arrayLoad(MethodWriter.getType(irLoadBraceNode.getDecorationValue(IRDExpressionType.class)));
}

@Override
public void visitStoreVariable(StoreVariableNode irStoreVariableNode, WriteScope writeScope) {
    MethodWriter methodWriter = writeScope.getMethodWriter();

    // evaluate the value to store first...
    visit(irStoreVariableNode.getChildNode(), writeScope);

    // ...then store it into the local variable's slot with the
    // type-appropriate store instruction
    Variable variable = writeScope.getVariable(irStoreVariableNode.getDecorationValue(IRDName.class));
    methodWriter.visitVarInsn(variable.getAsmType().getOpcode(Opcodes.ISTORE), variable.getSlot());
}
@Override
public void visitStoreDotDef(StoreDotDefNode irStoreDotDefNode, WriteScope writeScope) {
MethodWriter methodWriter = writeScope.getMethodWriter();
visit(irStoreDotDefNode.getChildNode(), writeScope);
methodWriter.writeDebugInfo(irStoreDotDefNode.getLocation());
Type methodType = Type.getMethodType(
MethodWriter.getType(void.class),
MethodWriter.getType(def.class),
MethodWriter.getType(irStoreDotDefNode.getDecorationValue(IRDStoreType.class))
);
methodWriter.invokeDefCall(irStoreDotDefNode.getDecorationValue(IRDValue.class), methodType, DefBootstrap.STORE);
}
@Override
public void visitStoreDot(StoreDotNode irStoreDotNode, WriteScope writeScope) {
MethodWriter methodWriter = writeScope.getMethodWriter();
visit(irStoreDotNode.getChildNode(), writeScope);
methodWriter.writeDebugInfo(irStoreDotNode.getLocation());
PainlessField painlessField = irStoreDotNode.getDecorationValue(IRDField.class);
boolean isStatic = Modifier.isStatic(painlessField.javaField().getModifiers());
Type asmOwnerType = Type.getType(painlessField.javaField().getDeclaringClass());
String fieldName = painlessField.javaField().getName();
Type asmFieldType = MethodWriter.getType(painlessField.typeParameter());
if (isStatic) {
methodWriter.putStatic(asmOwnerType, fieldName, asmFieldType);
} else {
methodWriter.putField(asmOwnerType, fieldName, asmFieldType);
}
}
@Override
public void visitStoreDotShortcut(StoreDotShortcutNode irDotSubShortcutNode, WriteScope writeScope) {
MethodWriter methodWriter = writeScope.getMethodWriter();
visit(irDotSubShortcutNode.getChildNode(), writeScope);
methodWriter.writeDebugInfo(irDotSubShortcutNode.getLocation());
methodWriter.invokeMethodCall(irDotSubShortcutNode.getDecorationValue(IRDMethod.class));
methodWriter.writePop(MethodWriter.getType(irDotSubShortcutNode.getDecorationValue(IRDMethod.class).returnType()).getSize());
}
@Override
public void visitStoreListShortcut(StoreListShortcutNode irStoreListShortcutNode, WriteScope writeScope) {
MethodWriter methodWriter = writeScope.getMethodWriter();
visit(irStoreListShortcutNode.getChildNode(), writeScope);
methodWriter.writeDebugInfo(irStoreListShortcutNode.getLocation());
methodWriter.invokeMethodCall(irStoreListShortcutNode.getDecorationValue(IRDMethod.class));
methodWriter.writePop(MethodWriter.getType(irStoreListShortcutNode.getDecorationValue(IRDMethod.class).returnType()).getSize());
}
@Override
public void visitStoreMapShortcut(StoreMapShortcutNode irStoreMapShortcutNode, WriteScope writeScope) {
MethodWriter methodWriter = writeScope.getMethodWriter();
visit(irStoreMapShortcutNode.getChildNode(), writeScope);
methodWriter.writeDebugInfo(irStoreMapShortcutNode.getLocation());
methodWriter.invokeMethodCall(irStoreMapShortcutNode.getDecorationValue(IRDMethod.class));
methodWriter.writePop(MethodWriter.getType(irStoreMapShortcutNode.getDecorationValue(IRDMethod.class).returnType()).getSize());
}
@Override
public void visitStoreFieldMember(StoreFieldMemberNode irStoreFieldMemberNode, WriteScope writeScope) {
MethodWriter methodWriter = writeScope.getMethodWriter();
if (irStoreFieldMemberNode.hasCondition(IRCStatic.class) == false) {
methodWriter.loadThis();
}
visit(irStoreFieldMemberNode.getChildNode(), writeScope);
methodWriter.writeDebugInfo(irStoreFieldMemberNode.getLocation());
boolean isStatic = irStoreFieldMemberNode.hasCondition(IRCStatic.class);
String memberFieldName = irStoreFieldMemberNode.getDecorationValue(IRDName.class);
Type asmMemberFieldType = MethodWriter.getType(irStoreFieldMemberNode.getDecorationValue(IRDStoreType.class));
if (isStatic) {
methodWriter.putStatic(CLASS_TYPE, memberFieldName, asmMemberFieldType);
} else {
methodWriter.loadThis();
methodWriter.putField(CLASS_TYPE, memberFieldName, asmMemberFieldType);
}
}
@Override
public void visitStoreBraceDef(StoreBraceDefNode irStoreBraceDefNode, WriteScope writeScope) {
MethodWriter methodWriter = writeScope.getMethodWriter();
visit(irStoreBraceDefNode.getChildNode(), writeScope);
methodWriter.writeDebugInfo(irStoreBraceDefNode.getLocation());
Type methodType = Type.getMethodType(
MethodWriter.getType(void.class),
MethodWriter.getType(def.class),
MethodWriter.getType(irStoreBraceDefNode.getDecorationValue(IRDIndexType.class)),
MethodWriter.getType(irStoreBraceDefNode.getDecorationValue(IRDStoreType.class))
);
methodWriter.invokeDefCall("arrayStore", methodType, DefBootstrap.ARRAY_STORE);
}
@Override
public void visitStoreBrace(StoreBraceNode irStoreBraceNode, WriteScope writeScope) {
MethodWriter methodWriter = writeScope.getMethodWriter();
visit(irStoreBraceNode.getChildNode(), writeScope);
methodWriter.writeDebugInfo(irStoreBraceNode.getLocation());
methodWriter.arrayStore(MethodWriter.getType(irStoreBraceNode.getDecorationValue(IRDStoreType.class)));
}
@Override
public void visitInvokeCallDef(InvokeCallDefNode irInvokeCallDefNode, WriteScope writeScope) {
MethodWriter methodWriter = writeScope.getMethodWriter();
methodWriter.writeDebugInfo(irInvokeCallDefNode.getLocation());
// its possible to have unknown functional interfaces
// as arguments that require captures; the set of
// captures with call arguments is ambiguous so
// additional information is encoded to indicate
// which are values are arguments and which are captures
StringBuilder defCallRecipe = new StringBuilder();
List<Object> boostrapArguments = new ArrayList<>();
List<Class<?>> typeParameters = new ArrayList<>();
int capturedCount = 0;
// add an Object | receiver |
java | alibaba__nacos | common/src/main/java/com/alibaba/nacos/common/packagescan/util/AbstractObjectUtils.java | {
"start": 22808,
"end": 33608
} | class ____
*/
public static String nullSafeClassName(Object obj) {
return (obj != null ? obj.getClass().getName() : NULL_STRING);
}
/**
* Return a String representation of the specified Object.
*
* <p>Builds a String representation of the contents in case of an array.
* Returns a {@code "null"} String if {@code obj} is {@code null}.
*
* @param obj the object to build a String representation for
* @return a String representation of {@code obj}
*/
public static String nullSafeToString(Object obj) {
if (obj == null) {
return NULL_STRING;
}
if (obj instanceof String) {
return (String) obj;
}
if (obj instanceof Object[]) {
return nullSafeToString((Object[]) obj);
}
if (obj instanceof boolean[]) {
return nullSafeToString((boolean[]) obj);
}
if (obj instanceof byte[]) {
return nullSafeToString((byte[]) obj);
}
if (obj instanceof char[]) {
return nullSafeToString((char[]) obj);
}
if (obj instanceof double[]) {
return nullSafeToString((double[]) obj);
}
if (obj instanceof float[]) {
return nullSafeToString((float[]) obj);
}
if (obj instanceof int[]) {
return nullSafeToString((int[]) obj);
}
if (obj instanceof long[]) {
return nullSafeToString((long[]) obj);
}
if (obj instanceof short[]) {
return nullSafeToString((short[]) obj);
}
String str = obj.toString();
return (str != null ? str : EMPTY_STRING);
}
/**
* Return a String representation of the contents of the specified array.
*
* <p>The String representation consists of a list of the array's elements,
* enclosed in curly braces ({@code "{}"}). Adjacent elements are separated
* by the characters {@code ", "} (a comma followed by a space).
* Returns a {@code "null"} String if {@code array} is {@code null}.
*
* @param array the array to build a String representation for
* @return a String representation of {@code array}
*/
public static String nullSafeToString(Object[] array) {
if (array == null) {
return NULL_STRING;
}
int length = array.length;
if (length == 0) {
return EMPTY_ARRAY;
}
StringJoiner stringJoiner = new StringJoiner(ARRAY_ELEMENT_SEPARATOR, ARRAY_START, ARRAY_END);
for (Object o : array) {
stringJoiner.add(String.valueOf(o));
}
return stringJoiner.toString();
}
/**
* Return a String representation of the contents of the specified array.
*
* <p>The String representation consists of a list of the array's elements,
* enclosed in curly braces ({@code "{}"}). Adjacent elements are separated
* by the characters {@code ", "} (a comma followed by a space).
* Returns a {@code "null"} String if {@code array} is {@code null}.
*
* @param array the array to build a String representation for
* @return a String representation of {@code array}
*/
public static String nullSafeToString(boolean[] array) {
if (array == null) {
return NULL_STRING;
}
int length = array.length;
if (length == 0) {
return EMPTY_ARRAY;
}
StringJoiner stringJoiner = new StringJoiner(ARRAY_ELEMENT_SEPARATOR, ARRAY_START, ARRAY_END);
for (boolean b : array) {
stringJoiner.add(String.valueOf(b));
}
return stringJoiner.toString();
}
/**
* Return a String representation of the contents of the specified array.
*
* <p>The String representation consists of a list of the array's elements,
* enclosed in curly braces ({@code "{}"}). Adjacent elements are separated
* by the characters {@code ", "} (a comma followed by a space).
* Returns a {@code "null"} String if {@code array} is {@code null}.
*
* @param array the array to build a String representation for
* @return a String representation of {@code array}
*/
public static String nullSafeToString(byte[] array) {
if (array == null) {
return NULL_STRING;
}
int length = array.length;
if (length == 0) {
return EMPTY_ARRAY;
}
StringJoiner stringJoiner = new StringJoiner(ARRAY_ELEMENT_SEPARATOR, ARRAY_START, ARRAY_END);
for (byte b : array) {
stringJoiner.add(String.valueOf(b));
}
return stringJoiner.toString();
}
/**
* Return a String representation of the contents of the specified array.
*
* <p>The String representation consists of a list of the array's elements,
* enclosed in curly braces ({@code "{}"}). Adjacent elements are separated
* by the characters {@code ", "} (a comma followed by a space).
* Returns a {@code "null"} String if {@code array} is {@code null}.
*
* @param array the array to build a String representation for
* @return a String representation of {@code array}
*/
public static String nullSafeToString(char[] array) {
if (array == null) {
return NULL_STRING;
}
int length = array.length;
if (length == 0) {
return EMPTY_ARRAY;
}
StringJoiner stringJoiner = new StringJoiner(ARRAY_ELEMENT_SEPARATOR, ARRAY_START, ARRAY_END);
for (char c : array) {
stringJoiner.add('\'' + String.valueOf(c) + '\'');
}
return stringJoiner.toString();
}
/**
* Return a String representation of the contents of the specified array.
*
* <p>The String representation consists of a list of the array's elements,
* enclosed in curly braces ({@code "{}"}). Adjacent elements are separated
* by the characters {@code ", "} (a comma followed by a space).
* Returns a {@code "null"} String if {@code array} is {@code null}.
*
* @param array the array to build a String representation for
* @return a String representation of {@code array}
*/
public static String nullSafeToString(double[] array) {
if (array == null) {
return NULL_STRING;
}
int length = array.length;
if (length == 0) {
return EMPTY_ARRAY;
}
StringJoiner stringJoiner = new StringJoiner(ARRAY_ELEMENT_SEPARATOR, ARRAY_START, ARRAY_END);
for (double d : array) {
stringJoiner.add(String.valueOf(d));
}
return stringJoiner.toString();
}
/**
* Return a String representation of the contents of the specified array.
*
* <p>The String representation consists of a list of the array's elements,
* enclosed in curly braces ({@code "{}"}). Adjacent elements are separated
* by the characters {@code ", "} (a comma followed by a space).
* Returns a {@code "null"} String if {@code array} is {@code null}.
*
* @param array the array to build a String representation for
* @return a String representation of {@code array}
*/
public static String nullSafeToString(float[] array) {
if (array == null) {
return NULL_STRING;
}
int length = array.length;
if (length == 0) {
return EMPTY_ARRAY;
}
StringJoiner stringJoiner = new StringJoiner(ARRAY_ELEMENT_SEPARATOR, ARRAY_START, ARRAY_END);
for (float f : array) {
stringJoiner.add(String.valueOf(f));
}
return stringJoiner.toString();
}
/**
* Return a String representation of the contents of the specified array.
*
* <p>The String representation consists of a list of the array's elements,
* enclosed in curly braces ({@code "{}"}). Adjacent elements are separated
* by the characters {@code ", "} (a comma followed by a space).
* Returns a {@code "null"} String if {@code array} is {@code null}.
*
* @param array the array to build a String representation for
* @return a String representation of {@code array}
*/
public static String nullSafeToString(int[] array) {
if (array == null) {
return NULL_STRING;
}
int length = array.length;
if (length == 0) {
return EMPTY_ARRAY;
}
StringJoiner stringJoiner = new StringJoiner(ARRAY_ELEMENT_SEPARATOR, ARRAY_START, ARRAY_END);
for (int i : array) {
stringJoiner.add(String.valueOf(i));
}
return stringJoiner.toString();
}
/**
* Return a String representation of the contents of the specified array.
*
* <p>The String representation consists of a list of the array's elements,
* enclosed in curly braces ({@code "{}"}). Adjacent elements are separated
* by the characters {@code ", "} (a comma followed by a space).
* Returns a {@code "null"} String if {@code array} is {@code null}.
*
* @param array the array to build a String representation for
* @return a String representation of {@code array}
*/
public static String nullSafeToString(long[] array) {
if (array == null) {
return NULL_STRING;
}
int length = array.length;
if (length == 0) {
return EMPTY_ARRAY;
}
StringJoiner stringJoiner = new StringJoiner(ARRAY_ELEMENT_SEPARATOR, ARRAY_START, ARRAY_END);
for (long l : array) {
stringJoiner.add(String.valueOf(l));
}
return stringJoiner.toString();
}
/**
* Return a String representation of the contents of the specified array.
*
* <p>The String representation consists of a list of the array's elements,
* enclosed in curly braces ({@code "{}"}). Adjacent elements are separated
* by the characters {@code ", "} (a comma followed by a space).
* Returns a {@code "null"} String if {@code array} is {@code null}.
*
* @param array the array to build a String representation for
* @return a String representation of {@code array}
*/
public static String nullSafeToString(short[] array) {
if (array == null) {
return NULL_STRING;
}
int length = array.length;
if (length == 0) {
return EMPTY_ARRAY;
}
StringJoiner stringJoiner = new StringJoiner(ARRAY_ELEMENT_SEPARATOR, ARRAY_START, ARRAY_END);
for (short s : array) {
stringJoiner.add(String.valueOf(s));
}
return stringJoiner.toString();
}
}
| name |
java | elastic__elasticsearch | x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/expression/function/scalar/string/IndexOfFunctionProcessorTests.java | {
"start": 947,
"end": 5888
} | class ____ extends ESTestCase {
public void testIndexOfFunctionWithValidInputInsensitive() {
assertEquals(5, insensitiveIndexOf("foobarbar", "r", null));
assertEquals(5, insensitiveIndexOf("foobaRbar", "r", null));
assertEquals(0, insensitiveIndexOf("foobar", "Foo", null));
assertNull(insensitiveIndexOf("foo", "foobar", null));
assertEquals(0, insensitiveIndexOf("foo", "foo", null));
assertEquals(1, insensitiveIndexOf("foo", "oO", null));
assertEquals(0, insensitiveIndexOf("foo", "FOo", null));
assertNull(insensitiveIndexOf("", "bar", 1));
assertEquals(5, insensitiveIndexOf("foobarbar", "R", 5));
assertEquals(2, insensitiveIndexOf("foobar", "O", 2));
assertNull(insensitiveIndexOf("foobar", "O", 3));
assertEquals(6, insensitiveIndexOf("foobarbaz", "ba", 4));
assertNull(insensitiveIndexOf(null, "bar", 2));
assertNull(insensitiveIndexOf(null, "bar", 2));
assertNull(insensitiveIndexOf("foo", null, 3));
assertNull(insensitiveIndexOf(null, null, 4));
assertEquals(0, insensitiveIndexOf("bar", "bar", null));
assertEquals(0, new IndexOf(EMPTY, l('f'), l('f'), null, false).makePipe().asProcessor().process(null));
}
private Object insensitiveIndexOf(String left, String right, Integer optional) {
return indexOf(true, left, right, optional);
}
public void testIndexOfFunctionWithValidInputSensitive() {
assertEquals(5, sensitiveIndexOf("foobarbar", "r", null));
assertEquals(8, sensitiveIndexOf("foobaRbar", "r", null));
assertEquals(4, sensitiveIndexOf("foobARbar", "AR", null));
assertEquals(0, sensitiveIndexOf("foobar", "foo", null));
assertNull(sensitiveIndexOf("foo", "foobar", null));
assertEquals(0, sensitiveIndexOf("foo", "foo", null));
assertNull(sensitiveIndexOf("foo", "oO", null));
assertNull(sensitiveIndexOf("foo", "FOo", null));
assertNull(sensitiveIndexOf("", "bar", 1));
assertNull(sensitiveIndexOf("foobarbar", "R", 5));
assertNull(sensitiveIndexOf("foobar", "O", 2));
assertNull(sensitiveIndexOf("foobar", "O", 3));
assertEquals(6, sensitiveIndexOf("foobarbaz", "ba", 4));
assertNull(sensitiveIndexOf(null, "bar", 2));
assertNull(sensitiveIndexOf(null, "bar", 2));
assertNull(sensitiveIndexOf("foo", null, 3));
assertNull(sensitiveIndexOf(null, null, 4));
assertEquals(0, sensitiveIndexOf("bar", "bar", null));
assertEquals(0, new IndexOf(EMPTY, l('f'), l('f'), null, true).makePipe().asProcessor().process(null));
}
private Object sensitiveIndexOf(String left, String right, Integer optional) {
return indexOf(false, left, right, optional);
}
protected Object indexOf(boolean caseInsensitive, String left, String right, Integer optional) {
return new IndexOf(EMPTY, l(left), l(right), l(optional), caseInsensitive).makePipe().asProcessor().process(null);
}
protected Object indexOfUntyped(Object left, Object right, Object optional) {
return new IndexOf(EMPTY, l(left), l(right), l(optional), randomBoolean()).makePipe().asProcessor().process(null);
}
public void testIndexOfFunctionInputsValidation() {
QlIllegalArgumentException siae = expectThrows(QlIllegalArgumentException.class, () -> indexOfUntyped(5, "foo", null));
assertEquals("A string/char is required; received [5]", siae.getMessage());
siae = expectThrows(QlIllegalArgumentException.class, () -> indexOfUntyped("bar", false, 2));
assertEquals("A string/char is required; received [false]", siae.getMessage());
siae = expectThrows(QlIllegalArgumentException.class, () -> indexOfUntyped("bar", "a", "1"));
assertEquals("A number is required; received [1]", siae.getMessage());
}
public void testIndexOfFunctionWithRandomInvalidDataType() {
Configuration config = randomConfiguration();
Literal stringLiteral = randomValueOtherThanMany(v -> v.dataType() == KEYWORD, () -> LiteralTests.randomLiteral());
QlIllegalArgumentException siae = expectThrows(QlIllegalArgumentException.class, () -> indexOfUntyped(stringLiteral, "foo", 1));
assertThat(siae.getMessage(), startsWith("A string/char is required; received"));
siae = expectThrows(QlIllegalArgumentException.class, () -> indexOfUntyped("foo", stringLiteral, 2));
assertThat(siae.getMessage(), startsWith("A string/char is required; received"));
Literal numericLiteral = randomValueOtherThanMany(v -> v.dataType().isNumeric(), () -> LiteralTests.randomLiteral());
siae = expectThrows(QlIllegalArgumentException.class, () -> indexOfUntyped("foo", "o", numericLiteral));
assertThat(siae.getMessage(), startsWith("A number is required; received"));
}
}
| IndexOfFunctionProcessorTests |
java | jhy__jsoup | src/test/java/org/jsoup/safety/CleanerTest.java | {
"start": 682,
"end": 23963
} | class ____ {
@Test public void simpleBehaviourTest() {
String h = "<div><p class=foo><a href='http://evil.com'>Hello <b id=bar>there</b>!</a></div>";
String cleanHtml = Jsoup.clean(h, Safelist.simpleText());
assertEquals("Hello <b>there</b>!", TextUtil.stripNewlines(cleanHtml));
}
@Test public void simpleBehaviourTest2() {
String h = "Hello <b>there</b>!";
String cleanHtml = Jsoup.clean(h, Safelist.simpleText());
assertEquals("Hello <b>there</b>!", TextUtil.stripNewlines(cleanHtml));
}
@Test public void basicBehaviourTest() {
String h = "<div><p><a href='javascript:sendAllMoney()'>Dodgy</a> <A HREF='HTTP://nice.com'>Nice</a></p><blockquote>Hello</blockquote>";
String cleanHtml = Jsoup.clean(h, Safelist.basic());
assertEquals("<p><a rel=\"nofollow\">Dodgy</a> <a href=\"http://nice.com\" rel=\"nofollow\">Nice</a></p><blockquote>Hello</blockquote>",
TextUtil.stripNewlines(cleanHtml));
}
@Test public void basicWithImagesTest() {
String h = "<div><p><img src='http://example.com/' alt=Image></p><p><img src='ftp://ftp.example.com'></p></div>";
String cleanHtml = Jsoup.clean(h, Safelist.basicWithImages());
assertEquals("<p><img src=\"http://example.com/\" alt=\"Image\"></p><p><img></p>", TextUtil.stripNewlines(cleanHtml));
}
@Test public void testRelaxed() {
String h = "<h1>Head</h1><table><tr><td>One<td>Two</td></tr></table>";
String cleanHtml = Jsoup.clean(h, Safelist.relaxed());
assertEquals("<h1>Head</h1><table><tbody><tr><td>One</td><td>Two</td></tr></tbody></table>", TextUtil.stripNewlines(cleanHtml));
}
@Test public void testRemoveTags() {
String h = "<div><p><A HREF='HTTP://nice.com'>Nice</a></p><blockquote>Hello</blockquote>";
String cleanHtml = Jsoup.clean(h, Safelist.basic().removeTags("a"));
assertEquals("<p>Nice</p><blockquote>Hello</blockquote>", TextUtil.stripNewlines(cleanHtml));
}
@Test public void testRemoveAttributes() {
String h = "<div><p>Nice</p><blockquote cite='http://example.com/quotations'>Hello</blockquote>";
String cleanHtml = Jsoup.clean(h, Safelist.basic().removeAttributes("blockquote", "cite"));
assertEquals("<p>Nice</p><blockquote>Hello</blockquote>", TextUtil.stripNewlines(cleanHtml));
}
@Test void allAttributes() {
String h = "<div class=foo data=true><p class=bar>Text</p></div><blockquote cite='https://example.com'>Foo";
Safelist safelist = Safelist.relaxed();
safelist.addAttributes(":all", "class");
safelist.addAttributes("div", "data");
String clean1 = Jsoup.clean(h, safelist);
assertEquals("<div class=\"foo\" data=\"true\"><p class=\"bar\">Text</p></div><blockquote cite=\"https://example.com\">Foo</blockquote>", TextUtil.stripNewlines(clean1));
safelist.removeAttributes(":all", "class", "cite");
String clean2 = Jsoup.clean(h, safelist);
assertEquals("<div data=\"true\"><p>Text</p></div><blockquote>Foo</blockquote>", TextUtil.stripNewlines(clean2));
}
@Test void removeProtocols() {
String h = "<a href='any://example.com'>Link</a>";
Safelist safelist = Safelist.relaxed();
String clean1 = Jsoup.clean(h, safelist);
assertEquals("<a>Link</a>", clean1);
safelist.removeProtocols("a", "href", "ftp", "http", "https", "mailto");
String clean2 = Jsoup.clean(h, safelist); // all removed means any will work
assertEquals("<a href=\"any://example.com\">Link</a>", clean2);
}
@Test public void testRemoveEnforcedAttributes() {
String h = "<div><p><A HREF='HTTP://nice.com'>Nice</a></p><blockquote>Hello</blockquote>";
String cleanHtml = Jsoup.clean(h, Safelist.basic().removeEnforcedAttribute("a", "rel"));
assertEquals("<p><a href=\"http://nice.com\">Nice</a></p><blockquote>Hello</blockquote>",
TextUtil.stripNewlines(cleanHtml));
}
@Test public void testRemoveProtocols() {
String h = "<p>Contact me <a href='mailto:info@example.com'>here</a></p>";
String cleanHtml = Jsoup.clean(h, Safelist.basic().removeProtocols("a", "href", "ftp", "mailto"));
assertEquals("<p>Contact me <a rel=\"nofollow\">here</a></p>",
TextUtil.stripNewlines(cleanHtml));
}
@MultiLocaleTest
public void safeListedProtocolShouldBeRetained(Locale locale) {
Locale.setDefault(locale);
Safelist safelist = Safelist.none()
.addTags("a")
.addAttributes("a", "href")
.addProtocols("a", "href", "something");
String cleanHtml = Jsoup.clean("<a href=\"SOMETHING://x\"></a>", safelist);
assertEquals("<a href=\"SOMETHING://x\"></a>", TextUtil.stripNewlines(cleanHtml));
}
@Test public void testDropComments() {
String h = "<p>Hello<!-- no --></p>";
String cleanHtml = Jsoup.clean(h, Safelist.relaxed());
assertEquals("<p>Hello</p>", cleanHtml);
}
@Test public void testDropXmlProc() {
String h = "<?import namespace=\"xss\"><p>Hello</p>";
String cleanHtml = Jsoup.clean(h, Safelist.relaxed());
assertEquals("<p>Hello</p>", cleanHtml);
}
@Test public void testDropScript() {
String h = "<SCRIPT SRC=//ha.ckers.org/.j><SCRIPT>alert(/XSS/.source)</SCRIPT>";
String cleanHtml = Jsoup.clean(h, Safelist.relaxed());
assertEquals("", cleanHtml);
}
@Test public void testDropImageScript() {
String h = "<IMG SRC=\"javascript:alert('XSS')\">";
String cleanHtml = Jsoup.clean(h, Safelist.relaxed());
assertEquals("<img>", cleanHtml);
}
@Test public void testCleanJavascriptHref() {
String h = "<A HREF=\"javascript:document.location='http://www.google.com/'\">XSS</A>";
String cleanHtml = Jsoup.clean(h, Safelist.relaxed());
assertEquals("<a>XSS</a>", cleanHtml);
}
@Test public void testCleanAnchorProtocol() {
String validAnchor = "<a href=\"#valid\">Valid anchor</a>";
String invalidAnchor = "<a href=\"#anchor with spaces\">Invalid anchor</a>";
// A Safelist that does not allow anchors will strip them out.
String cleanHtml = Jsoup.clean(validAnchor, Safelist.relaxed());
assertEquals("<a>Valid anchor</a>", cleanHtml);
cleanHtml = Jsoup.clean(invalidAnchor, Safelist.relaxed());
assertEquals("<a>Invalid anchor</a>", cleanHtml);
// A Safelist that allows them will keep them.
Safelist relaxedWithAnchor = Safelist.relaxed().addProtocols("a", "href", "#");
cleanHtml = Jsoup.clean(validAnchor, relaxedWithAnchor);
assertEquals(validAnchor, cleanHtml);
// An invalid anchor is never valid.
cleanHtml = Jsoup.clean(invalidAnchor, relaxedWithAnchor);
assertEquals("<a>Invalid anchor</a>", cleanHtml);
}
@Test public void testDropsUnknownTags() {
String h = "<p><custom foo=true>Test</custom></p>";
String cleanHtml = Jsoup.clean(h, Safelist.relaxed());
assertEquals("<p>Test</p>", cleanHtml);
}
@Test public void testHandlesEmptyAttributes() {
String h = "<img alt=\"\" src= unknown=''>";
String cleanHtml = Jsoup.clean(h, Safelist.basicWithImages());
assertEquals("<img alt=\"\">", cleanHtml);
}
@Test public void testIsValidBodyHtml() {
String ok = "<p>Test <b><a href='http://example.com/' rel='nofollow'>OK</a></b></p>";
String ok1 = "<p>Test <b><a href='http://example.com/'>OK</a></b></p>"; // missing enforced is OK because still needs run thru cleaner
String nok1 = "<p><script></script>Not <b>OK</b></p>";
String nok2 = "<p align=right>Test Not <b>OK</b></p>";
String nok3 = "<!-- comment --><p>Not OK</p>"; // comments and the like will be cleaned
String nok4 = "<html><head>Foo</head><body><b>OK</b></body></html>"; // not body html
String nok5 = "<p>Test <b><a href='http://example.com/' rel='nofollowme'>OK</a></b></p>";
String nok6 = "<p>Test <b><a href='http://example.com/'>OK</b></p>"; // missing close tag
String nok7 = "</div>What";
assertTrue(Jsoup.isValid(ok, Safelist.basic()));
assertTrue(Jsoup.isValid(ok1, Safelist.basic()));
assertFalse(Jsoup.isValid(nok1, Safelist.basic()));
assertFalse(Jsoup.isValid(nok2, Safelist.basic()));
assertFalse(Jsoup.isValid(nok3, Safelist.basic()));
assertFalse(Jsoup.isValid(nok4, Safelist.basic()));
assertFalse(Jsoup.isValid(nok5, Safelist.basic()));
assertFalse(Jsoup.isValid(nok6, Safelist.basic()));
assertFalse(Jsoup.isValid(ok, Safelist.none()));
assertFalse(Jsoup.isValid(nok7, Safelist.basic()));
}
@Test public void testIsValidDocument() {
String ok = "<html><head></head><body><p>Hello</p></body><html>";
String nok = "<html><head><script>woops</script><title>Hello</title></head><body><p>Hello</p></body><html>";
Safelist relaxed = Safelist.relaxed();
Cleaner cleaner = new Cleaner(relaxed);
Document okDoc = Jsoup.parse(ok);
assertTrue(cleaner.isValid(okDoc));
assertFalse(cleaner.isValid(Jsoup.parse(nok)));
assertFalse(new Cleaner(Safelist.none()).isValid(okDoc));
}
@Test public void resolvesRelativeLinks() {
String html = "<a href='/foo'>Link</a><img src='/bar'>";
String clean = Jsoup.clean(html, "http://example.com/", Safelist.basicWithImages());
assertEquals("<a href=\"http://example.com/foo\">Link</a><img src=\"http://example.com/bar\">", clean);
}
@Test public void preservesRelativeLinksIfConfigured() {
String html = "<a href='/foo'>Link</a><img src='/bar'> <img src='javascript:alert()'>";
String clean = Jsoup.clean(html, "http://example.com/", Safelist.basicWithImages().preserveRelativeLinks(true));
assertEquals("<a href=\"/foo\">Link</a><img src=\"/bar\"> <img>", clean);
}
@Test public void dropsUnresolvableRelativeLinks() { // when not preserving
String html = "<a href='/foo'>Link</a>";
String clean = Jsoup.clean(html, Safelist.basic());
assertEquals("<a rel=\"nofollow\">Link</a>", clean);
}
@Test void dropsJavascriptWhenRelativeLinks() {
String html ="<a href='javascript:alert()'>One</a>";
Safelist safelist = Safelist.basic().preserveRelativeLinks(true);
assertEquals("<a rel=\"nofollow\">One</a>", Jsoup.clean(html, safelist));
assertFalse(Jsoup.isValid(html, safelist));
}
@Test void dropsConcealedJavascriptProtocolWhenRelativesLinksEnabled() {
Safelist safelist = Safelist.basic().preserveRelativeLinks(true);
String html = "<a href=\"
ja	va	script
:alert(1)\">Link</a>";
String clean = Jsoup.clean(html, "https://", safelist);
assertEquals("<a rel=\"nofollow\">Link</a>", clean);
assertFalse(Jsoup.isValid(html, safelist));
String colon = "<a href=\"ja	va	script:alert(1)\">Link</a>";
String cleanColon = Jsoup.clean(colon, "https://", safelist);
assertEquals("<a rel=\"nofollow\">Link</a>", cleanColon);
assertFalse(Jsoup.isValid(colon, safelist));
}
@Test void dropsConcealedJavascriptProtocolWhenRelativesLinksDisabled() {
Safelist safelist = Safelist.basic().preserveRelativeLinks(false);
String html = "<a href=\"ja	vas
cript:alert(1)\">Link</a>";
String clean = Jsoup.clean(html, "https://", safelist);
assertEquals("<a rel=\"nofollow\">Link</a>", clean);
assertFalse(Jsoup.isValid(html, safelist));
}
@Test public void handlesCustomProtocols() {
String html = "<img src='cid:12345' /> <img src='data:gzzt' />";
String dropped = Jsoup.clean(html, Safelist.basicWithImages());
assertEquals("<img> <img>", dropped);
String preserved = Jsoup.clean(html, Safelist.basicWithImages().addProtocols("img", "src", "cid", "data"));
assertEquals("<img src=\"cid:12345\"> <img src=\"data:gzzt\">", preserved);
}
@Test public void handlesAllPseudoTag() {
String html = "<p class='foo' src='bar'><a class='qux'>link</a></p>";
Safelist safelist = new Safelist()
.addAttributes(":all", "class")
.addAttributes("p", "style")
.addTags("p", "a");
String clean = Jsoup.clean(html, safelist);
assertEquals("<p class=\"foo\"><a class=\"qux\">link</a></p>", clean);
}
@Test public void addsTagOnAttributesIfNotSet() {
String html = "<p class='foo' src='bar'>One</p>";
Safelist safelist = new Safelist()
.addAttributes("p", "class");
// ^^ safelist does not have explicit tag add for p, inferred from add attributes.
String clean = Jsoup.clean(html, safelist);
assertEquals("<p class=\"foo\">One</p>", clean);
}
@Test public void supplyOutputSettings() {
// test that one can override the default document output settings
Document.OutputSettings os = new Document.OutputSettings();
os.prettyPrint(false);
os.escapeMode(Entities.EscapeMode.extended);
os.charset("ascii");
String html = "<div><p>ℬ</p></div>";
String customOut = Jsoup.clean(html, "http://foo.com/", Safelist.relaxed(), os);
String defaultOut = Jsoup.clean(html, "http://foo.com/", Safelist.relaxed());
assertNotSame(defaultOut, customOut);
assertEquals("<div><p>ℬ</p></div>", customOut); // entities now prefers shorted names if aliased
assertEquals("<div>\n" +
" <p>ℬ</p>\n" +
"</div>", defaultOut);
os.charset("ASCII");
os.escapeMode(Entities.EscapeMode.base);
String customOut2 = Jsoup.clean(html, "http://foo.com/", Safelist.relaxed(), os);
assertEquals("<div><p>ℬ</p></div>", customOut2);
}
@Test public void handlesFramesets() {
String dirty = "<html><head><script></script><noscript></noscript></head><frameset><frame src=\"foo\" /><frame src=\"foo\" /></frameset></html>";
String clean = Jsoup.clean(dirty, Safelist.basic());
assertEquals("", clean); // nothing good can come out of that
Document dirtyDoc = Jsoup.parse(dirty);
Document cleanDoc = new Cleaner(Safelist.basic()).clean(dirtyDoc);
assertNotNull(cleanDoc);
assertEquals(0, cleanDoc.body().childNodeSize());
}
@Test public void cleansInternationalText() {
assertEquals("привет", Jsoup.clean("привет", Safelist.none()));
}
@Test
public void testScriptTagInSafeList() {
Safelist safelist = Safelist.relaxed();
safelist.addTags( "script" );
assertTrue( Jsoup.isValid("Hello<script>alert('Doh')</script>World !", safelist) );
}
@Test
public void bailsIfRemovingProtocolThatsNotSet() {
assertThrows(IllegalArgumentException.class, () -> {
// a case that came up on the email list
Safelist w = Safelist.none();
// note no add tag, and removing protocol without adding first
w.addAttributes("a", "href");
w.removeProtocols("a", "href", "javascript"); // with no protocols enforced, this was a noop. Now validates.
});
}
@Test public void handlesControlCharactersAfterTagName() {
String html = "<a/\06>";
String clean = Jsoup.clean(html, Safelist.basic());
assertEquals("<a rel=\"nofollow\"></a>", clean);
}
@Test public void handlesAttributesWithNoValue() {
// https://github.com/jhy/jsoup/issues/973
String clean = Jsoup.clean("<a href>Clean</a>", Safelist.basic());
assertEquals("<a rel=\"nofollow\">Clean</a>", clean);
}
@Test public void handlesNoHrefAttribute() {
String dirty = "<a>One</a> <a href>Two</a>";
Safelist relaxedWithAnchor = Safelist.relaxed().addProtocols("a", "href", "#");
String clean = Jsoup.clean(dirty, relaxedWithAnchor);
assertEquals("<a>One</a> <a>Two</a>", clean);
}
@Test public void handlesNestedQuotesInAttribute() {
// https://github.com/jhy/jsoup/issues/1243 - no repro
String orig = "<div style=\"font-family: 'Calibri'\">Will (not) fail</div>";
Safelist allow = Safelist.relaxed()
.addAttributes("div", "style");
String clean = Jsoup.clean(orig, allow);
boolean isValid = Jsoup.isValid(orig, allow);
assertEquals(orig, TextUtil.stripNewlines(clean)); // only difference is pretty print wrap & indent
assertTrue(isValid);
}
@Test public void copiesOutputSettings() {
Document orig = Jsoup.parse("<p>test<br></p>");
orig.outputSettings().syntax(Document.OutputSettings.Syntax.xml);
orig.outputSettings().escapeMode(Entities.EscapeMode.xhtml);
Safelist safelist = Safelist.none().addTags("p", "br");
Document result = new Cleaner(safelist).clean(orig);
assertEquals(Document.OutputSettings.Syntax.xml, result.outputSettings().syntax());
assertEquals("<p>test\n <br /></p>", result.body().html());
}
@Test void preservesSourcePositionViaUserData() {
Document orig = Jsoup.parse("<script>xss</script>\n <p id=1>Hello</p>", Parser.htmlParser().setTrackPosition(true));
Element p = orig.expectFirst("p");
Range origRange = p.sourceRange();
assertEquals("2,2:22-2,10:30", origRange.toString());
assertEquals("1,1:0-1,1:0", orig.sourceRange().toString());
assertEquals("2,19:39-2,19:39", orig.endSourceRange().toString());
Range.AttributeRange attributeRange = p.attributes().sourceRange("id");
assertEquals("2,5:25-2,7:27=2,8:28-2,9:29", attributeRange.toString());
Document clean = new Cleaner(Safelist.relaxed().addAttributes("p", "id")).clean(orig);
Element cleanP = clean.expectFirst("p");
assertEquals("1", cleanP.id());
Range cleanRange = cleanP.sourceRange();
assertEquals(origRange, cleanRange);
assertEquals(p.endSourceRange(), cleanP.endSourceRange());
assertEquals(attributeRange, cleanP.attributes().sourceRange("id"));
}
@ParameterizedTest @ValueSource(booleans = {true, false})
void cleansCaseSensitiveElements(boolean preserveCase) {
// https://github.com/jhy/jsoup/issues/2049
String html = "<svg><feMerge baseFrequency=2><feMergeNode kernelMatrix=1 /><feMergeNode><clipPath /></feMergeNode><feMergeNode />";
String[] tags = {"svg", "feMerge", "feMergeNode", "clipPath"};
String[] attrs = {"kernelMatrix", "baseFrequency"};
if (!preserveCase) {
tags = Arrays.stream(tags).map(String::toLowerCase).toArray(String[]::new);
attrs = Arrays.stream(attrs).map(String::toLowerCase).toArray(String[]::new);
}
Safelist safelist = Safelist.none().addTags(tags).addAttributes(":all", attrs);
String clean = Jsoup.clean(html, safelist);
String expected = "<svg>\n" +
" <feMerge baseFrequency=\"2\">\n" +
" <feMergeNode kernelMatrix=\"1\" />\n" +
" <feMergeNode>\n" +
" <clipPath />\n" +
" </feMergeNode>\n" +
" <feMergeNode />\n" +
" </feMerge>\n" +
"</svg>";
assertEquals(expected, clean);
}
@Test void nofollowOnlyOnExternalLinks() {
// We want to add nofollow to external links, but not to for relative links or those on the same site
String html = "<a href='http://external.com/'>One</a> <a href='/relative/'>Two</a> <a href='../other/'>Three</a> <a href='http://example.com/bar'>Four</a>";
Safelist basic = Safelist.basic().preserveRelativeLinks(true);
String clean = Jsoup.clean(html, "http://example.com/", basic);
assertEquals("<a href=\"http://external.com/\" rel=\"nofollow\">One</a> <a href=\"/relative/\">Two</a> <a href=\"../other/\">Three</a> <a href=\"http://example.com/bar\">Four</a>", clean);
// If we don't pass in a base URI, still want to preserve the relative links.
String clean2 = Jsoup.clean(html, basic);
assertEquals("<a href=\"http://external.com/\" rel=\"nofollow\">One</a> <a href=\"/relative/\">Two</a> <a href=\"../other/\">Three</a> <a href=\"http://example.com/bar\" rel=\"nofollow\">Four</a>", clean2);
// Four gets nofollowed because we didn't specify the base URI, so must assume it is external
// Want it to be valid with relative links (and no base uri required / provided):
assertTrue(Jsoup.isValid(html, basic));
// test that it works in safelist.relaxed as well, which doesn't by default have rel=nofollow
Safelist relaxed = Safelist.relaxed().preserveRelativeLinks(true).addEnforcedAttribute("a", "rel", "nofollow");
String clean3 = Jsoup.clean(html, "http://example.com/", relaxed);
assertEquals("<a href=\"http://external.com/\" rel=\"nofollow\">One</a> <a href=\"/relative/\">Two</a> <a href=\"../other/\">Three</a> <a href=\"http://example.com/bar\">Four</a>", clean3);
assertTrue(Jsoup.isValid(html, relaxed));
String clean4 = Jsoup.clean(html, relaxed);
assertEquals("<a href=\"http://external.com/\" rel=\"nofollow\">One</a> <a href=\"/relative/\">Two</a> <a href=\"../other/\">Three</a> <a href=\"http://example.com/bar\" rel=\"nofollow\">Four</a>", clean4);
}
@Test void discardsSvgScriptData() {
// https://github.com/jhy/jsoup/issues/2320
Safelist svgOk = Safelist.none().addTags("svg");
String cleaned = Jsoup.clean("<svg><script> a < b </script></svg>", svgOk);
assertEquals("<svg></svg>", cleaned);
}
@Test void canSupplyConfiguredTagset() {
// https://github.com/jhy/jsoup/issues/2326
// by default, iframe is data
String input = "<iframe>content is <data></iframe>";
Safelist safelist = Safelist.relaxed().addTags("iframe");
String clean = Jsoup.clean(input, safelist);
assertEquals("<iframe>content is <data></iframe>", clean);
Document doc = Jsoup.parse(input);
assertEquals("", doc.text()); // data is not text
// can change to text
TagSet tags = TagSet.Html();
Tag iframe = tags.valueOf("iframe", Parser.NamespaceHtml);
iframe.clear(Tag.Data).set(Tag.RcData);
Document doc2 = Jsoup.parse(input, Parser.htmlParser().tagSet(tags));
assertEquals("content is <data>", doc2.text());
assertEquals("<iframe>content is <data></iframe>", doc2.body().html());
// text nodes are escaped
assertEquals("<iframe>content is <data></iframe>", doc2.body().html());
// can use cleaner with updated tagset
Cleaner cleaner = new Cleaner(Safelist.relaxed());
String clean2 = cleaner.clean(doc2).body().html();
assertEquals("content is <data>", clean2);
}
}
| CleanerTest |
java | quarkusio__quarkus | extensions/netty/deployment/src/main/java/io/quarkus/netty/deployment/NettyProcessor.java | {
"start": 1975,
"end": 10728
} | class ____ {
private static final Logger log = Logger.getLogger(NettyProcessor.class);
private static final int DEFAULT_NETTY_ALLOCATOR_MAX_ORDER = 3;
static {
InternalLoggerFactory.setDefaultFactory(new JBossNettyLoggerFactory());
}
@BuildStep
public NativeImageSystemPropertyBuildItem limitMem() {
//in native mode we limit the size of the epoll array
//if the array overflows the selector just moves the overflow to a map
return new NativeImageSystemPropertyBuildItem("sun.nio.ch.maxUpdateArraySize", "100");
}
@BuildStep
public SystemPropertyBuildItem limitArenaSize(NettyBuildTimeConfig config,
List<MinNettyAllocatorMaxOrderBuildItem> minMaxOrderBuildItems) {
String maxOrder = calculateMaxOrder(config.allocatorMaxOrder(), minMaxOrderBuildItems, true);
//in native mode we limit the size of the epoll array
//if the array overflows the selector just moves the overflow to a map
return new SystemPropertyBuildItem("io.netty.allocator.maxOrder", maxOrder);
}
@BuildStep
public GeneratedRuntimeSystemPropertyBuildItem setNettyMachineId() {
// we set the io.netty.machineId system property so to prevent potential
// slowness when generating/inferring the default machine id in io.netty.channel.DefaultChannelId
// implementation, which iterates over the NetworkInterfaces to determine the "best" machine id
return new GeneratedRuntimeSystemPropertyBuildItem("io.netty.machineId", MachineIdGenerator.class);
}
@BuildStep
public SystemPropertyBuildItem disableFinalizers() {
return new SystemPropertyBuildItem("io.netty.allocator.disableCacheFinalizersForFastThreadLocalThreads", "true");
}
@BuildStep
NativeImageConfigBuildItem build(
NettyBuildTimeConfig config,
BuildProducer<ReflectiveClassBuildItem> reflectiveClass,
BuildProducer<ReflectiveMethodBuildItem> reflectiveMethods,
BuildProducer<ReflectiveFieldBuildItem> reflectiveFields,
List<MinNettyAllocatorMaxOrderBuildItem> minMaxOrderBuildItems) {
reflectiveMethods.produce(
new ReflectiveMethodBuildItem("Reflectively accessed through PlatformDependent0's static initializer",
"jdk.internal.misc.Unsafe", "getUnsafe", new String[0]));
// in JDK >= 21 the constructor has `long, long` signature
reflectiveMethods.produce(
new ReflectiveMethodBuildItem("Reflectively accessed through PlatformDependent0's static initializer",
"java.nio.DirectByteBuffer", "<init>", new String[] { long.class.getName(), long.class.getName() }));
// in JDK < 21 the constructor has `long, int` signature
reflectiveMethods.produce(
new ReflectiveMethodBuildItem("Reflectively accessed through PlatformDependent0's static initializer",
"java.nio.DirectByteBuffer", "<init>", new String[] { long.class.getName(), int.class.getName() }));
reflectiveFields.produce(
new ReflectiveFieldBuildItem("Reflectively accessed through PlatformDependent0's static initializer",
"java.nio.Bits", "UNALIGNED"));
reflectiveFields.produce(
new ReflectiveFieldBuildItem("Reflectively accessed through PlatformDependent0's static initializer",
"java.nio.Bits", "MAX_MEMORY"));
reflectiveClass.produce(ReflectiveClassBuildItem.builder("io.netty.channel.socket.nio.NioSocketChannel")
.build());
reflectiveClass
.produce(ReflectiveClassBuildItem.builder("io.netty.channel.socket.nio.NioServerSocketChannel")
.build());
reflectiveClass.produce(ReflectiveClassBuildItem.builder("io.netty.channel.socket.nio.NioDatagramChannel")
.build());
reflectiveClass
.produce(ReflectiveClassBuildItem.builder("java.util.LinkedHashMap").build());
reflectiveClass.produce(ReflectiveClassBuildItem.builder("sun.nio.ch.SelectorImpl").methods().fields().build());
String maxOrder = calculateMaxOrder(config.allocatorMaxOrder(), minMaxOrderBuildItems, false);
NativeImageConfigBuildItem.Builder builder = NativeImageConfigBuildItem.builder()
// Use small chunks to avoid a lot of wasted space. Default is 16mb * arenas (derived from core count)
// Since buffers are cached to threads, the malloc overhead is temporary anyway
.addNativeImageSystemProperty("io.netty.allocator.maxOrder", maxOrder)
// Runtime initialize to respect io.netty.handler.ssl.conscrypt.useBufferAllocator
.addRuntimeInitializedClass("io.netty.handler.ssl.ConscryptAlpnSslEngine")
// Runtime initialize due to the use of tcnative in the static initializers?
.addRuntimeInitializedClass("io.netty.handler.ssl.ReferenceCountedOpenSslEngine")
// Runtime initialize to respect run-time provided values of the following properties:
// - io.netty.handler.ssl.openssl.bioNonApplicationBufferSize
// - io.netty.handler.ssl.openssl.useTasks
// - jdk.tls.client.enableSessionTicketExtension
// - io.netty.handler.ssl.openssl.sessionCacheServer
// - io.netty.handler.ssl.openssl.sessionCacheClient
// - jdk.tls.ephemeralDHKeySize
.addRuntimeInitializedClass("io.netty.handler.ssl.ReferenceCountedOpenSslContext")
// .addRuntimeInitializedClass("io.netty.handler.ssl.ReferenceCountedOpenSslClientContext")
// Runtime initialize to respect run-time provided values of the following properties:
// - keystore.type
// - ssl.KeyManagerFactory.algorithm
// - ssl.TrustManagerFactory.algorithm
.addRuntimeInitializedClass("io.netty.handler.ssl.JdkSslServerContext")
// .addRuntimeInitializedClass("io.netty.handler.ssl.JdkSslClientContext")
// Runtime initialize to prevent embedding SecureRandom instances in the native image
.addRuntimeInitializedClass("io.netty.handler.ssl.util.ThreadLocalInsecureRandom")
// The default channel id uses the process id, it should not be cached in the native image. This way we
// also respect the run-time provided value of the io.netty.processId property, io.netty.machineId
// property is being hardcoded in setNettyMachineId method
.addRuntimeInitializedClass("io.netty.channel.DefaultChannelId")
// Disable leak detection by default, it can still be enabled via
// io.netty.util.ResourceLeakDetector.setLevel method
.addNativeImageSystemProperty("io.netty.leakDetection.level", "DISABLED");
if (QuarkusClassLoader.isClassPresentAtRuntime("io.netty.handler.codec.http.HttpObjectEncoder")) {
builder
// Runtime initialize due to transitive use of the io.netty.util.internal.PlatformDependent class
// when initializing CRLF_BUF and ZERO_CRLF_CRLF_BUF
.addRuntimeInitializedClass("io.netty.handler.codec.http.HttpObjectEncoder")
.addRuntimeInitializedClass("io.netty.handler.codec.http.websocketx.extensions.compression.DeflateDecoder")
.addRuntimeInitializedClass("io.netty.handler.codec.http.websocketx.WebSocket00FrameEncoder");
// Zstd is an optional dependency, runtime initialize to avoid IllegalStateException when zstd is not
// available. This will result in a runtime ClassNotFoundException if the user tries to use zstd.
if (!QuarkusClassLoader.isClassPresentAtRuntime("com.github.luben.zstd.Zstd")) {
builder.addRuntimeInitializedClass("io.netty.handler.codec.compression.ZstdOptions")
.addRuntimeInitializedClass("io.netty.handler.codec.compression.ZstdConstants");
}
// Brotli is an optional dependency, we should only runtime initialize BrotliOptions to avoid
// IllegalStateException when brotli (e.g. com.aayushatharva.brotli4j.Brotli4jLoader) is not available.
// This will result in a runtime ClassNotFoundException if the user tries to use Brotli.
// Due to https://github.com/quarkusio/quarkus/issues/43662 we cannot do this yet though so we always enable
// runtime initialization of BrotliOptions if the | NettyProcessor |
java | spring-projects__spring-security | core/src/test/java/org/springframework/security/authorization/method/SecuredAuthorizationManagerTests.java | {
"start": 9379,
"end": 9438
} | interface ____ {
}
@Secured("ROLE_ADMIN")
public | MySecured |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/AppendableFieldTest.java | {
"start": 1057,
"end": 1280
} | class ____ {
private Appendable value;
public Appendable getValue() {
return value;
}
public void setValue(Appendable value) {
this.value = value;
}
}
}
| V0 |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/GetClassOnAnnotation.java | {
"start": 1426,
"end": 2152
} | class ____ extends BugChecker
implements BugChecker.MethodInvocationTreeMatcher {
private static final Matcher<ExpressionTree> ANNOTATION_CLASS =
instanceMethod()
.onDescendantOf(Annotation.class.getName())
.named("getClass")
.withNoParameters();
@Override
public Description matchMethodInvocation(MethodInvocationTree tree, VisitorState state) {
if (ANNOTATION_CLASS.matches(tree, state)) {
return describeMatch(
tree,
SuggestedFix.replace(
state.getEndPosition(ASTHelpers.getReceiver(tree)),
state.getEndPosition(tree),
".annotationType()"));
}
return Description.NO_MATCH;
}
}
| GetClassOnAnnotation |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockNM.java | {
"start": 2807,
"end": 15732
} | class ____ {
private int responseId;
private NodeId nodeId;
private Resource capability;
private ResourceTrackerService resourceTracker;
private int httpPort = 2;
private MasterKey currentContainerTokenMasterKey;
private MasterKey currentNMTokenMasterKey;
private String version;
private Map<ContainerId, ContainerStatus> containerStats =
new HashMap<ContainerId, ContainerStatus>();
private Map<ApplicationId, AppCollectorData> registeringCollectors
= new ConcurrentHashMap<>();
private Set<NodeLabel> nodeLabels;
private long tokenSequenceNo;
public MockNM(String nodeIdStr, int memory, ResourceTrackerService resourceTracker) {
// scale vcores based on the requested memory
this(nodeIdStr, memory,
Math.max(1, (memory * YarnConfiguration.DEFAULT_NM_VCORES) /
YarnConfiguration.DEFAULT_NM_PMEM_MB),
resourceTracker);
}
public MockNM(String nodeIdStr, int memory, int vcores,
ResourceTrackerService resourceTracker) {
this(nodeIdStr, memory, vcores, resourceTracker,
YarnVersionInfo.getVersion());
}
public MockNM(String nodeIdStr, int memory, int vcores,
ResourceTrackerService resourceTracker, String version) {
this(nodeIdStr, Resource.newInstance(memory, vcores), resourceTracker,
version);
}
public MockNM(String nodeIdStr, Resource capability,
ResourceTrackerService resourceTracker) {
this(nodeIdStr, capability, resourceTracker,
YarnVersionInfo.getVersion());
}
public MockNM(String nodeIdStr, Resource capability,
ResourceTrackerService resourceTracker, String version) {
this.capability = capability;
this.resourceTracker = resourceTracker;
this.version = version;
String[] splits = nodeIdStr.split(":");
nodeId = BuilderUtils.newNodeId(splits[0], Integer.parseInt(splits[1]));
}
public MockNM(String nodeIdStr, Resource capability,
ResourceTrackerService resourceTracker, String version, Set<NodeLabel>
nodeLabels) {
this(nodeIdStr, capability, resourceTracker, version);
this.nodeLabels = nodeLabels;
}
public NodeId getNodeId() {
return nodeId;
}
public int getHttpPort() {
return httpPort;
}
public void setHttpPort(int port) {
httpPort = port;
}
public void setResourceTrackerService(ResourceTrackerService resourceTracker) {
this.resourceTracker = resourceTracker;
}
public void containerStatus(ContainerStatus containerStatus) throws Exception {
Map<ApplicationId, List<ContainerStatus>> conts =
new HashMap<ApplicationId, List<ContainerStatus>>();
conts.put(containerStatus.getContainerId().getApplicationAttemptId().getApplicationId(),
Arrays.asList(new ContainerStatus[] { containerStatus }));
nodeHeartbeat(conts, true);
}
public void containerIncreaseStatus(Container container) throws Exception {
ContainerStatus containerStatus = BuilderUtils.newContainerStatus(
container.getId(), ContainerState.RUNNING, "Success", 0,
container.getResource());
List<Container> increasedConts = Collections.singletonList(container);
nodeHeartbeat(Collections.singletonList(containerStatus), increasedConts,
true, responseId);
}
public void addRegisteringCollector(ApplicationId appId,
AppCollectorData data) {
this.registeringCollectors.put(appId, data);
}
public Map<ApplicationId, AppCollectorData> getRegisteringCollectors() {
return this.registeringCollectors;
}
public void unRegisterNode() throws Exception {
UnRegisterNodeManagerRequest request = Records
.newRecord(UnRegisterNodeManagerRequest.class);
request.setNodeId(nodeId);
resourceTracker.unRegisterNodeManager(request);
}
public RegisterNodeManagerResponse registerNode() throws Exception {
return registerNode(null, null);
}
public RegisterNodeManagerResponse registerNode(
List<ApplicationId> runningApplications) throws Exception {
return registerNode(null, runningApplications);
}
public RegisterNodeManagerResponse registerNode(
List<NMContainerStatus> containerReports,
List<ApplicationId> runningApplications) throws Exception {
RegisterNodeManagerRequest req = Records.newRecord(
RegisterNodeManagerRequest.class);
req.setNodeId(nodeId);
req.setHttpPort(httpPort);
req.setResource(capability);
req.setContainerStatuses(containerReports);
req.setNMVersion(version);
req.setRunningApplications(runningApplications);
if ( nodeLabels != null && nodeLabels.size() > 0) {
req.setNodeLabels(nodeLabels);
}
NodeStatus status = Records.newRecord(NodeStatus.class);
status.setResponseId(0);
status.setNodeId(nodeId);
status.setContainersStatuses(new ArrayList<>(containerStats.values()));
NodeHealthStatus healthStatus = Records.newRecord(NodeHealthStatus.class);
healthStatus.setHealthReport("");
healthStatus.setIsNodeHealthy(true);
healthStatus.setLastHealthReportTime(1);
status.setNodeHealthStatus(healthStatus);
req.setNodeStatus(status);
RegisterNodeManagerResponse registrationResponse =
resourceTracker.registerNodeManager(req);
this.currentContainerTokenMasterKey =
registrationResponse.getContainerTokenMasterKey();
this.currentNMTokenMasterKey = registrationResponse.getNMTokenMasterKey();
Resource newResource = registrationResponse.getResource();
if (newResource != null) {
capability = Resources.clone(newResource);
}
containerStats.clear();
if (containerReports != null) {
for (NMContainerStatus report : containerReports) {
if (report.getContainerState() != ContainerState.COMPLETE) {
containerStats.put(report.getContainerId(),
ContainerStatus.newInstance(report.getContainerId(),
report.getContainerState(), report.getDiagnostics(),
report.getContainerExitStatus()));
}
}
}
responseId = 0;
return registrationResponse;
}
public NodeHeartbeatResponse nodeHeartbeat(boolean isHealthy) throws Exception {
return nodeHeartbeat(Collections.<ContainerStatus>emptyList(),
Collections.<Container>emptyList(), isHealthy, responseId);
}
public NodeHeartbeatResponse nodeHeartbeat(ApplicationAttemptId attemptId,
long containerId, ContainerState containerState) throws Exception {
ContainerStatus containerStatus = BuilderUtils.newContainerStatus(
BuilderUtils.newContainerId(attemptId, containerId), containerState,
"Success", 0, capability);
ArrayList<ContainerStatus> containerStatusList =
new ArrayList<ContainerStatus>(1);
containerStatusList.add(containerStatus);
Log.getLog().info("ContainerStatus: " + containerStatus);
return nodeHeartbeat(containerStatusList,
Collections.<Container>emptyList(), true, responseId);
}
public NodeHeartbeatResponse nodeHeartbeat(Map<ApplicationId,
List<ContainerStatus>> conts, boolean isHealthy) throws Exception {
return nodeHeartbeat(conts, isHealthy, responseId);
}
/**
* Sends the heartbeat of the node.
* @param isHealthy whether node is healthy.
* @param resId response id.
* @return response of the heartbeat.
* @throws Exception
*/
public NodeHeartbeatResponse nodeHeartbeat(Map<ApplicationId,
List<ContainerStatus>> conts, boolean isHealthy, int resId) throws Exception {
ArrayList<ContainerStatus> updatedStats = new ArrayList<ContainerStatus>();
for (List<ContainerStatus> stats : conts.values()) {
updatedStats.addAll(stats);
}
return nodeHeartbeat(updatedStats, Collections.<Container>emptyList(),
isHealthy, resId);
}
/**
* Sends the heartbeat of the node.
* @param updatedStats containers with updated status.
* @param isHealthy whether node is healthy.
* @return response of the heartbeat.
* @throws Exception
*/
public NodeHeartbeatResponse nodeHeartbeat(
List<ContainerStatus> updatedStats, boolean isHealthy) throws Exception {
return nodeHeartbeat(updatedStats, Collections.<Container>emptyList(),
isHealthy, responseId);
}
/**
* Sends the heartbeat of the node.
* @param oppContainersStatus opportunistic containers status.
* @param isHealthy whether node is healthy.
* @return response of the heartbeat.
* @throws Exception
*/
public NodeHeartbeatResponse nodeHeartbeat(
OpportunisticContainersStatus oppContainersStatus, boolean isHealthy)
throws Exception {
return nodeHeartbeat(Collections.emptyList(),
Collections.emptyList(), isHealthy, responseId, oppContainersStatus);
}
/**
* Sends the heartbeat of the node.
* @param updatedStats containers with updated status.
* @param increasedConts containers whose resource has been increased.
* @param isHealthy whether node is healthy.
* @param resId response id.
* @return response of the heartbeat.
* @throws Exception
*/
public NodeHeartbeatResponse nodeHeartbeat(
List<ContainerStatus> updatedStats, List<Container> increasedConts,
boolean isHealthy, int resId) throws Exception {
return nodeHeartbeat(updatedStats, increasedConts,
isHealthy, resId, null);
}
/**
* Sends the heartbeat of the node.
* @param updatedStats containers with updated status.
* @param increasedConts containers whose resource has been increased.
* @param isHealthy whether node is healthy.
* @param resId response id.
* @param oppContainersStatus opportunistic containers status.
* @return response of the heartbeat.
* @throws Exception
*/
public NodeHeartbeatResponse nodeHeartbeat(List<ContainerStatus> updatedStats,
List<Container> increasedConts, boolean isHealthy, int resId,
OpportunisticContainersStatus oppContainersStatus) throws Exception {
NodeHeartbeatRequest req = Records.newRecord(NodeHeartbeatRequest.class);
NodeStatus status = Records.newRecord(NodeStatus.class);
status.setResponseId(resId);
status.setNodeId(nodeId);
ArrayList<ContainerId> completedContainers = new ArrayList<ContainerId>();
for (ContainerStatus stat : updatedStats) {
if (stat.getState() == ContainerState.COMPLETE) {
completedContainers.add(stat.getContainerId());
}
containerStats.put(stat.getContainerId(), stat);
}
status.setContainersStatuses(
new ArrayList<ContainerStatus>(containerStats.values()));
for (ContainerId cid : completedContainers) {
containerStats.remove(cid);
}
status.setIncreasedContainers(increasedConts);
status.setOpportunisticContainersStatus(oppContainersStatus);
NodeHealthStatus healthStatus = Records.newRecord(NodeHealthStatus.class);
healthStatus.setHealthReport("");
healthStatus.setIsNodeHealthy(isHealthy);
healthStatus.setLastHealthReportTime(1);
status.setNodeHealthStatus(healthStatus);
req.setNodeStatus(status);
req.setLastKnownContainerTokenMasterKey(this.currentContainerTokenMasterKey);
req.setLastKnownNMTokenMasterKey(this.currentNMTokenMasterKey);
req.setRegisteringCollectors(this.registeringCollectors);
req.setTokenSequenceNo(this.tokenSequenceNo);
NodeHeartbeatResponse heartbeatResponse =
resourceTracker.nodeHeartbeat(req);
responseId = heartbeatResponse.getResponseId();
MasterKey masterKeyFromRM = heartbeatResponse.getContainerTokenMasterKey();
if (masterKeyFromRM != null
&& masterKeyFromRM.getKeyId() != this.currentContainerTokenMasterKey
.getKeyId()) {
this.currentContainerTokenMasterKey = masterKeyFromRM;
}
masterKeyFromRM = heartbeatResponse.getNMTokenMasterKey();
if (masterKeyFromRM != null
&& masterKeyFromRM.getKeyId() != this.currentNMTokenMasterKey
.getKeyId()) {
this.currentNMTokenMasterKey = masterKeyFromRM;
}
Resource newResource = heartbeatResponse.getResource();
if (newResource != null) {
capability = Resources.clone(newResource);
}
this.tokenSequenceNo = heartbeatResponse.getTokenSequenceNo();
return heartbeatResponse;
}
public static NodeStatus createMockNodeStatus() {
NodeStatus mockNodeStatus = mock(NodeStatus.class);
NodeHealthStatus mockNodeHealthStatus = mock(NodeHealthStatus.class);
when(mockNodeStatus.getNodeHealthStatus()).thenReturn(mockNodeHealthStatus);
when(mockNodeHealthStatus.getIsNodeHealthy()).thenReturn(true);
return mockNodeStatus;
}
public long getMemory() {
return capability.getMemorySize();
}
public int getvCores() {
return capability.getVirtualCores();
}
public Resource getCapability() {
return capability;
}
public String getVersion() {
return version;
}
public void setResponseId(int id) {
this.responseId = id;
}
}
| MockNM |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/junit/jupiter/generics/GenericsAndNestedTests.java | {
"start": 1215,
"end": 1419
} | class ____ extends GenericComicCharactersTests<Cat> {
@Override
int getExpectedNumCharacters() {
return 2;
}
@Override
String getExpectedName() {
return "Catbert";
}
}
@Nested
| CatTests |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/dialect/oracle/ast/stmt/OracleRaiseStatement.java | {
"start": 792,
"end": 1240
} | class ____ extends OracleStatementImpl {
private SQLExpr exception;
public SQLExpr getException() {
return exception;
}
public void setException(SQLExpr exception) {
this.exception = exception;
}
@Override
public void accept0(OracleASTVisitor visitor) {
if (visitor.visit(this)) {
acceptChild(visitor, exception);
}
visitor.endVisit(this);
}
}
| OracleRaiseStatement |
java | mapstruct__mapstruct | processor/src/main/java/org/mapstruct/ap/spi/PrefixEnumTransformationStrategy.java | {
"start": 237,
"end": 527
} | class ____ implements EnumTransformationStrategy {
@Override
public String getStrategyName() {
return "prefix";
}
@Override
public String transform(String value, String configuration) {
return configuration + value;
}
}
| PrefixEnumTransformationStrategy |
java | quarkusio__quarkus | independent-projects/arc/processor/src/main/java/io/quarkus/arc/processor/ComponentsProviderGenerator.java | {
"start": 2146,
"end": 21237
} | class ____ extends AbstractGenerator {
static final String COMPONENTS_PROVIDER_SUFFIX = "_ComponentsProvider";
static final String SETUP_PACKAGE = Arc.class.getPackage().getName() + ".setup";
static final String ADD_OBSERVERS = "addObservers";
static final String ADD_REMOVED_BEANS = "addRemovedBeans";
static final String ADD_BEANS = "addBeans";
private static final Comparator<BeanInfo> BEAN_INFO_COMPARATOR = Comparator.comparing(BeanInfo::getIdentifier);
private static final Comparator<ObserverInfo> OBSERVER_INFO_COMPARATOR = Comparator.comparing(ObserverInfo::getIdentifier);
private final AnnotationLiteralProcessor annotationLiterals;
private final boolean detectUnusedFalsePositives;
public ComponentsProviderGenerator(AnnotationLiteralProcessor annotationLiterals, boolean generateSources,
boolean detectUnusedFalsePositives) {
super(generateSources);
this.annotationLiterals = annotationLiterals;
this.detectUnusedFalsePositives = detectUnusedFalsePositives;
}
/**
*
* @param name
* @param beanDeployment
* @param beanToGeneratedName
* @param observerToGeneratedName
* @param scopeToContextInstances
* @return a collection of resources
*/
Collection<Resource> generate(String name, BeanDeployment beanDeployment, Map<BeanInfo, String> beanToGeneratedName,
Map<ObserverInfo, String> observerToGeneratedName, Map<DotName, String> scopeToContextInstances) {
ResourceClassOutput classOutput = new ResourceClassOutput(true, generateSources);
Gizmo gizmo = gizmo(classOutput);
createComponentsProvider(gizmo, name, beanDeployment, beanToGeneratedName, observerToGeneratedName,
scopeToContextInstances);
List<Resource> resources = new ArrayList<>();
for (Resource resource : classOutput.getResources()) {
resources.add(resource);
if (resource.getName().endsWith(COMPONENTS_PROVIDER_SUFFIX)) {
// We need to filter out nested classes and functions
resources.add(ResourceImpl.serviceProvider(ComponentsProvider.class.getName(),
resource.getName().replace('/', '.').getBytes(StandardCharsets.UTF_8), null));
}
}
return resources;
}
private void createComponentsProvider(Gizmo gizmo, String name, BeanDeployment beanDeployment,
Map<BeanInfo, String> beanToGeneratedName, Map<ObserverInfo, String> observerToGeneratedName,
Map<DotName, String> scopeToContextInstances) {
CodeGenInfo info = preprocess(beanDeployment);
String generatedName = SETUP_PACKAGE + "." + name + COMPONENTS_PROVIDER_SUFFIX;
gizmo.class_(generatedName, cc -> {
cc.implements_(ComponentsProvider.class);
cc.defaultConstructor();
cc.method("getComponents", mc -> {
mc.returning(Components.class);
ParamVar currentContextFactory = mc.parameter("currentContextFactory", CurrentContextFactory.class);
mc.body(bc -> {
// Break bean processing into multiple addBeans() methods
// Map<String, InjectableBean<?>>
LocalVar beanIdToBean = bc.localVar("beanIdToBean", bc.new_(HashMap.class));
for (BeanGroup group : info.beanGroups()) {
ClassMethodDesc desc = ClassMethodDesc.of(cc.type(), ADD_BEANS + group.id(),
void.class, Map.class);
bc.invokeVirtual(desc, cc.this_(), beanIdToBean);
}
LocalVar beans = bc.localVar("beans", bc.withMap(beanIdToBean).values());
generateAddBeans(cc, info, beanToGeneratedName);
// Break observers processing into multiple addObservers() methods
// List<InjectableObserverMethod<?>
LocalVar observers = bc.localVar("observers", bc.new_(ArrayList.class));
for (ObserverGroup group : info.observerGroups()) {
ClassMethodDesc desc = ClassMethodDesc.of(cc.type(), ADD_OBSERVERS + group.id(),
void.class, Map.class, List.class);
bc.invokeVirtual(desc, cc.this_(), beanIdToBean, observers);
}
generateAddObservers(cc, info, observerToGeneratedName);
// Custom contexts
// List<InjectableContext>
ContextConfigurator.CreateGeneration createGeneration = new ContextConfigurator.CreateGeneration() {
@Override
public BlockCreator method() {
return bc;
}
@Override
public Var currentContextFactory() {
return currentContextFactory;
}
};
LocalVar contexts = bc.localVar("contexts", bc.new_(ArrayList.class));
for (var creators : beanDeployment.getCustomContexts().values()) {
for (Function<ContextConfigurator.CreateGeneration, Expr> creator : creators) {
bc.withList(contexts).add(creator.apply(createGeneration));
}
}
// All interceptor bindings
// Set<String>
LocalVar interceptorBindings = bc.localVar("interceptorBindings", bc.new_(HashSet.class));
for (ClassInfo binding : beanDeployment.getInterceptorBindings()) {
bc.withSet(interceptorBindings).add(Const.of(binding.name().toString()));
}
// Transitive interceptor bindings
// Map<Class, Set<Annotation>>
LocalVar transitiveBindings = bc.localVar("transitiveBindings", bc.new_(HashMap.class));
beanDeployment.getTransitiveInterceptorBindings().forEach((binding, transitives) -> {
LocalVar transitivesSet = bc.localVar("transitives", bc.new_(HashSet.class));
for (AnnotationInstance transitive : transitives) {
ClassInfo transitiveClass = beanDeployment.getInterceptorBinding(transitive.name());
bc.withSet(transitivesSet).add(annotationLiterals.create(bc, transitiveClass, transitive));
}
bc.withMap(transitiveBindings).put(Const.of(classDescOf(binding)), transitivesSet);
});
// removed beans
// Supplier<Collection<RemovedBean>>
LocalVar removedBeansSupplier;
if (detectUnusedFalsePositives) {
removedBeansSupplier = bc.localVar("removedBeansSupplier", bc.lambda(Supplier.class, lc -> {
lc.body(lbc -> {
LocalVar removedBeans = lbc.localVar("removedBeans", lbc.new_(ArrayList.class));
LocalVar typeCache = lbc.localVar("typeCache", lbc.new_(HashMap.class));
// Break removed beans processing into multiple addRemovedBeans() methods
for (RemovedBeanGroup group : info.removedBeans()) {
ClassMethodDesc desc = ClassMethodDesc.of(cc.type(), ADD_REMOVED_BEANS + group.id(),
void.class, List.class, Map.class);
lbc.invokeStatic(desc, removedBeans, typeCache);
}
lbc.return_(removedBeans);
});
}));
generateAddRemovedBeans(cc, info);
} else {
removedBeansSupplier = bc.localVar("removedBeansSupplier",
bc.new_(MethodDescs.FIXED_VALUE_SUPPLIER_CONSTRUCTOR, bc.setOf()));
}
// All qualifiers
// Set<String>
LocalVar qualifiers = bc.localVar("qualifiers", bc.new_(HashSet.class));
for (ClassInfo qualifier : beanDeployment.getQualifiers()) {
bc.withSet(qualifiers).add(Const.of(qualifier.name().toString()));
}
// Qualifier non-binding members
LocalVar qualifiersNonbindingMembers = bc.localVar("qualifiersNonbindingMembers", bc.new_(HashMap.class));
beanDeployment.getQualifierNonbindingMembers().forEach((qualifier, nonbindingMembers) -> {
LocalVar nonbindingMembersSet = bc.localVar("nonbindingMembers", bc.new_(HashSet.class));
for (String nonbindingMember : nonbindingMembers) {
bc.withSet(nonbindingMembersSet).add(Const.of(nonbindingMember));
}
bc.withMap(qualifiersNonbindingMembers).put(Const.of(qualifier.toString()), nonbindingMembersSet);
});
// context instances
LocalVar contextInstances;
if (scopeToContextInstances.isEmpty()) {
contextInstances = bc.localVar("contextInstances", bc.mapOf());
} else {
LocalVar contextInstancesFinal = bc.localVar("contextInstances", bc.new_(HashMap.class));
scopeToContextInstances.forEach((scopeClass, contextClass) -> {
Expr contextSupplier = bc.lambda(Supplier.class, lc -> {
lc.body(lbc -> {
lbc.return_(lbc.new_(ConstructorDesc.of(ClassDesc.of(contextClass))));
});
});
bc.withMap(contextInstancesFinal).put(Const.of(classDescOf(scopeClass)), contextSupplier);
});
contextInstances = contextInstancesFinal;
}
bc.return_(bc.new_(ConstructorDesc.of(Components.class, Collection.class, Collection.class,
Collection.class, Set.class, Map.class, Supplier.class, Map.class, Set.class, Map.class),
beans, observers, contexts, interceptorBindings, transitiveBindings,
removedBeansSupplier, qualifiersNonbindingMembers, qualifiers, contextInstances));
});
});
});
}
private void generateAddBeans(ClassCreator cc, CodeGenInfo info, Map<BeanInfo, String> beanToGeneratedName) {
Set<BeanInfo> processed = new HashSet<>();
for (BeanGroup group : info.beanGroups()) {
cc.method(ADD_BEANS + group.id(), mc -> {
mc.private_();
mc.returning(void.class);
ParamVar beanIdToBean = mc.parameter("beanIdToBean", Map.class);
mc.body(bc -> {
for (BeanInfo bean : group.beans()) {
ClassDesc beanType = beanToGeneratedName.containsKey(bean)
? ClassDesc.of(beanToGeneratedName.get(bean))
: null;
if (beanType == null) {
throw new IllegalStateException("No bean type found for: " + bean);
}
List<InjectionPointInfo> injectionPoints = bean.getInjections()
.stream()
.flatMap(i -> i.injectionPoints.stream())
.filter(ip -> !ip.isDelegate() && !BuiltinBean.resolvesTo(ip))
.toList();
List<ClassDesc> params = new ArrayList<>();
List<Expr> args = new ArrayList<>();
if (bean.isProducer()) {
params.add(ClassDesc.of(Supplier.class.getName()));
if (processed.contains(bean.getDeclaringBean())) {
args.add(bc.withMap(beanIdToBean).get(Const.of(bean.getDeclaringBean().getIdentifier())));
} else {
// Declaring bean was not processed yet - use MapValueSupplier
args.add(bc.new_(MethodDescs.MAP_VALUE_SUPPLIER_CONSTRUCTOR, beanIdToBean,
Const.of(bean.getDeclaringBean().getIdentifier())));
}
}
for (InjectionPointInfo injectionPoint : injectionPoints) {
params.add(ClassDesc.of(Supplier.class.getName()));
if (processed.contains(injectionPoint.getResolvedBean())) {
args.add(bc.withMap(beanIdToBean).get(
Const.of(injectionPoint.getResolvedBean().getIdentifier())));
} else {
// Dependency was not processed yet - use MapValueSupplier
args.add(bc.new_(MethodDescs.MAP_VALUE_SUPPLIER_CONSTRUCTOR, beanIdToBean,
Const.of(injectionPoint.getResolvedBean().getIdentifier())));
}
}
if (bean.getDisposer() != null) {
for (InjectionPointInfo injectionPoint : bean.getDisposer().getInjection().injectionPoints) {
if (BuiltinBean.resolvesTo(injectionPoint)) {
continue;
}
params.add(ClassDesc.of(Supplier.class.getName()));
args.add(bc.new_(MethodDescs.MAP_VALUE_SUPPLIER_CONSTRUCTOR, beanIdToBean,
Const.of(injectionPoint.getResolvedBean().getIdentifier())));
}
}
for (InterceptorInfo interceptor : bean.getBoundInterceptors()) {
params.add(ClassDesc.of(Supplier.class.getName()));
if (processed.contains(interceptor)) {
args.add(bc.withMap(beanIdToBean).get(Const.of(interceptor.getIdentifier())));
} else {
// Bound interceptor was not processed yet - use MapValueSupplier
args.add(bc.new_(MethodDescs.MAP_VALUE_SUPPLIER_CONSTRUCTOR, beanIdToBean,
Const.of(interceptor.getIdentifier())));
}
}
for (DecoratorInfo decorator : bean.getBoundDecorators()) {
params.add(ClassDesc.of(Supplier.class.getName()));
if (processed.contains(decorator)) {
args.add(bc.withMap(beanIdToBean).get(Const.of(decorator.getIdentifier())));
} else {
// Bound decorator was not processed yet - use MapValueSupplier
args.add(bc.new_(MethodDescs.MAP_VALUE_SUPPLIER_CONSTRUCTOR, beanIdToBean,
Const.of(decorator.getIdentifier())));
}
}
// Foo_Bean bean = new Foo_Bean(bean3)
Expr beanInstance = bc.new_(ConstructorDesc.of(beanType, params), args);
// beans.put(id, bean)
bc.withMap(beanIdToBean).put(Const.of(bean.getIdentifier()), beanInstance);
processed.add(bean);
}
bc.return_();
});
});
}
}
private void generateAddObservers(ClassCreator cc, CodeGenInfo info, Map<ObserverInfo, String> observerToGeneratedName) {
for (ObserverGroup group : info.observerGroups()) {
cc.method(ADD_OBSERVERS + group.id(), mc -> {
mc.private_();
mc.returning(void.class);
ParamVar beanIdToBean = mc.parameter("beanIdToBean", Map.class);
ParamVar observers = mc.parameter("observers", List.class);
mc.body(bc -> {
for (ObserverInfo observer : group.observers()) {
ClassDesc observerType = observerToGeneratedName.containsKey(observer)
? ClassDesc.of(observerToGeneratedName.get(observer))
: null;
if (observerType == null) {
throw new IllegalStateException("No observer type found for: " + observerType);
}
List<ClassDesc> params = new ArrayList<>();
List<Expr> args = new ArrayList<>();
if (!observer.isSynthetic()) {
List<InjectionPointInfo> injectionPoints = observer.getInjection().injectionPoints.stream()
.filter(ip -> !BuiltinBean.resolvesTo(ip))
.toList();
// declaring bean
params.add(ClassDesc.of(Supplier.class.getName()));
args.add(bc.withMap(beanIdToBean).get(Const.of(observer.getDeclaringBean().getIdentifier())));
// injections
for (InjectionPointInfo injectionPoint : injectionPoints) {
params.add(ClassDesc.of(Supplier.class.getName()));
args.add(bc.withMap(beanIdToBean).get(
Const.of(injectionPoint.getResolvedBean().getIdentifier())));
}
}
Expr observerInstance = bc.new_(ConstructorDesc.of(observerType, params), args);
bc.withList(observers).add(observerInstance);
}
bc.return_();
});
});
}
}
private void generateAddRemovedBeans(ClassCreator cc, CodeGenInfo info) {
for (RemovedBeanGroup group : info.removedBeans()) {
cc.staticMethod(ADD_REMOVED_BEANS + group.id(), mc -> {
mc.public_(); // to allow access from an anonymous | ComponentsProviderGenerator |
java | apache__kafka | metadata/src/main/java/org/apache/kafka/image/writer/ImageReWriter.java | {
"start": 1297,
"end": 2025
} | class ____ implements ImageWriter {
private final MetadataDelta delta;
private boolean closed = false;
private MetadataImage image = null;
public ImageReWriter(MetadataDelta delta) {
this.delta = delta;
}
@Override
public void write(ApiMessageAndVersion record) {
if (closed) throw new ImageWriterClosedException();
delta.replay(record.message());
}
@Override
public void close(boolean complete) {
if (closed) return;
closed = true;
if (complete) {
delta.finishSnapshot();
image = delta.apply(delta.image().provenance());
}
}
public MetadataImage image() {
return image;
}
}
| ImageReWriter |
java | hibernate__hibernate-orm | hibernate-testing/src/test/java/org/hibernate/testing/annotations/methods/MoreEntityManagerFactoryScopeTesting.java | {
"start": 974,
"end": 2648
} | class ____ {
@Jpa(
annotatedClasses = {
AnEntity.class
}
)
@Test
public void testBasicUsage(EntityManagerFactoryScope scope) {
assertThat( scope, notNullValue() );
assertThat( scope.getEntityManagerFactory(), notNullValue() );
// check we can use the EMF to create EMs
scope.inTransaction(
(session) -> session.createQuery( "select a from AnEntity a" ).getResultList()
);
}
@Jpa(
annotatedClasses = AnotherEntity.class,
integrationSettings = {@Setting(name = JpaComplianceSettings.JPA_QUERY_COMPLIANCE, value = "true")}
)
@Test
public void annotatedMethodTest(EntityManagerFactoryScope scope) {
assertThat( scope, notNullValue() );
assertThat( scope.getEntityManagerFactory(), notNullValue() );
Set<EntityType<?>> entities = scope.getEntityManagerFactory().getMetamodel().getEntities();
assertEquals( 1, entities.size() );
assertEquals( "AnotherEntity", entities.iterator().next().getName() );
assertEquals( "true", scope.getEntityManagerFactory().getProperties().get( "hibernate.jpa.compliance.query" ) );
scope.inTransaction(
entityManager -> {
AnotherEntity aoe = new AnotherEntity( 2, "AnotherEntity_1" );
entityManager.persist( aoe );
}
);
scope.inTransaction(
entityManager -> {
AnotherEntity aoe = entityManager.find( AnotherEntity.class, 2 );
assertNotNull( aoe );
assertEquals( 2, aoe.getId() );
assertEquals( "AnotherEntity_1", aoe.getName() );
}
);
Assertions.assertThrows(
IllegalArgumentException.class,
() -> scope.inTransaction(
entityManager -> entityManager.find( AnEntity.class, 1 )
)
);
}
}
| MoreEntityManagerFactoryScopeTesting |
java | square__retrofit | retrofit/src/main/java/retrofit2/package-info.java | {
"start": 113,
"end": 314
} | interface ____ {
* @GET("/users/{user}/repos")
* List<Repo> listRepos(@Path("user") String user);
* }
* </pre>
*/
@retrofit2.internal.EverythingIsNonNull
package retrofit2;
| GitHubService |
java | spring-projects__spring-boot | core/spring-boot/src/main/java/org/springframework/boot/context/properties/bind/BindHandler.java | {
"start": 830,
"end": 1005
} | interface ____ can be used to handle additional logic during element
* {@link Binder binding}.
*
* @author Phillip Webb
* @author Madhura Bhave
* @since 2.0.0
*/
public | that |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/engine/internal/EntityEntryContext.java | {
"start": 1854,
"end": 12083
} | class ____ {
private final transient PersistenceContext persistenceContext;
private transient InstanceIdentityStore<ImmutableManagedEntityHolder> immutableManagedEntityXref;
private transient int currentInstanceId = 1;
private transient ManagedEntity head;
private transient ManagedEntity tail;
private transient int count;
private transient IdentityHashMap<Object,ManagedEntity> nonEnhancedEntityXref;
@SuppressWarnings("unchecked")
private transient Map.Entry<Object,EntityEntry>[] reentrantSafeEntries = new Map.Entry[0];
private transient boolean dirty;
/**
* Constructs a EntityEntryContext
*/
EntityEntryContext(PersistenceContext persistenceContext) {
this.persistenceContext = persistenceContext;
}
/**
* Adds the entity and {@link EntityEntry} to this context, associating them.
*
* @param entity The entity
* @param entityEntry The entry
*/
void addEntityEntry(Object entity, EntityEntry entityEntry) {
// IMPORTANT!!!!!
// add is called more than once of some entities. In such cases the first
// call is simply setting up a "marker" to avoid infinite looping from reentrancy
// any addition (even the double one described above) should invalidate the cross-ref array
dirty = true;
// We only need to check a mutable EntityEntry is associated with the same PersistenceContext.
// Immutable EntityEntry can be associated with multiple PersistenceContexts, so no need to check.
// ImmutableEntityEntry#getPersistenceContext() throws an exception (HHH-10251).
assert !entityEntry.getPersister().isMutable()
|| ( (EntityEntryImpl) entityEntry ).getPersistenceContext() == persistenceContext;
// Determine the appropriate ManagedEntity instance to use based on whether the entity is enhanced or not.
// Throw an exception if entity is a mutable ManagedEntity that is associated with a different
// PersistenceContext.
var managedEntity = getAssociatedManagedEntity( entity );
int instanceId = nextManagedEntityInstanceId();
final boolean alreadyAssociated = managedEntity != null;
if ( !alreadyAssociated ) {
if ( isManagedEntity( entity ) ) {
final var managed = asManagedEntity( entity );
assert managed.$$_hibernate_getInstanceId() == 0;
if ( entityEntry.getPersister().isMutable() ) {
managedEntity = managed;
// We know that managedEntity is not associated with the same PersistenceContext.
// Check if managedEntity is associated with a different PersistenceContext.
checkNotAssociatedWithOtherPersistenceContextIfMutable( managedEntity );
}
else {
// Create a holder for PersistenceContext-related data.
managedEntity = new ImmutableManagedEntityHolder( managed );
if ( !isReferenceCachingEnabled( entityEntry.getPersister() ) ) {
putImmutableManagedEntity( managed, instanceId, (ImmutableManagedEntityHolder) managedEntity );
}
else {
// When reference caching is enabled we cannot set the instance-id on the entity instance
instanceId = 0;
putManagedEntity( entity, managedEntity );
}
}
}
else {
managedEntity = new ManagedEntityImpl( entity );
putManagedEntity( entity, managedEntity );
}
}
if ( alreadyAssociated ) {
// if the entity was already associated with the context, skip the linking step.
managedEntity.$$_hibernate_setEntityEntry( entityEntry );
return;
}
// TODO: can dirty be set to true here?
// finally, set up linking and count
final ManagedEntity previous;
if ( tail == null ) {
assert head == null;
// Protect against stale data in the ManagedEntity and nullify previous reference.
previous = null;
head = managedEntity;
tail = head;
count = 1;
}
else {
tail.$$_hibernate_setNextManagedEntity( managedEntity );
previous = tail;
tail = managedEntity;
count++;
}
// Protect against stale data left in the ManagedEntity nullify next reference.
managedEntity.$$_hibernate_setPersistenceInfo( entityEntry, previous, null, instanceId );
}
private static boolean isReferenceCachingEnabled(EntityPersister persister) {
// Immutable entities which can use reference caching are treated as non-enhanced entities, as setting
// the instance-id on them would be problematic in different sessions
return persister.canUseReferenceCacheEntries() && persister.canReadFromCache();
}
private ManagedEntity getAssociatedManagedEntity(Object entity) {
if ( isManagedEntity( entity ) ) {
final var managedEntity = asManagedEntity( entity );
if ( managedEntity.$$_hibernate_getEntityEntry() == null ) {
// it is not associated
return null;
}
final var entityEntry =
(EntityEntryImpl)
managedEntity.$$_hibernate_getEntityEntry();
if ( entityEntry.getPersister().isMutable() ) {
return entityEntry.getPersistenceContext() == persistenceContext
? managedEntity // it is associated
: null;
}
else if ( !isReferenceCachingEnabled( entityEntry.getPersister() ) ) {
// if managedEntity is associated with this EntityEntryContext, it may have
// an entry in immutableManagedEntityXref and its holder will be returned.
return immutableManagedEntityXref != null
? immutableManagedEntityXref.get( managedEntity.$$_hibernate_getInstanceId(), managedEntity )
: null;
}
}
return nonEnhancedEntityXref != null
? nonEnhancedEntityXref.get( entity )
: null;
}
private void putManagedEntity(Object entity, ManagedEntity managedEntity) {
if ( nonEnhancedEntityXref == null ) {
nonEnhancedEntityXref = new IdentityHashMap<>();
}
nonEnhancedEntityXref.put( entity, managedEntity );
}
private int nextManagedEntityInstanceId() {
return currentInstanceId++;
}
private void putImmutableManagedEntity(ManagedEntity managed, int instanceId, ImmutableManagedEntityHolder holder) {
if ( immutableManagedEntityXref == null ) {
immutableManagedEntityXref = new InstanceIdentityStore<>();
}
immutableManagedEntityXref.put( managed, instanceId, holder );
}
private void checkNotAssociatedWithOtherPersistenceContextIfMutable(ManagedEntity managedEntity) {
// we only have to check mutable managedEntity
final var entityEntry = (EntityEntryImpl) managedEntity.$$_hibernate_getEntityEntry();
if ( entityEntry != null && entityEntry.getPersister().isMutable() ) {
final var entryPersistenceContext = entityEntry.getPersistenceContext();
if ( entryPersistenceContext != null && entryPersistenceContext != persistenceContext ) {
if ( entryPersistenceContext.getSession().isOpen() ) {
// NOTE: otherPersistenceContext may be operating on the entityEntry in a different thread.
// it is not safe to associate entityEntry with this EntityEntryContext.
throw new HibernateException(
"Illegal attempt to associate a ManagedEntity with two open persistence contexts: " + entityEntry
);
}
else {
// otherPersistenceContext is associated with a closed PersistenceContext
CORE_LOGGER.stalePersistenceContextInEntityEntry( entityEntry.toString() );
}
}
}
}
/**
* Does this entity exist in this context, associated with an {@link EntityEntry}?
*
* @param entity The entity to check
*
* @return {@code true} if it is associated with this context
*/
boolean hasEntityEntry(Object entity) {
return getEntityEntry( entity ) != null;
}
/**
* Retrieve the associated {@link EntityEntry} for the given entity.
*
* @param entity The entity
*
* @return The associated {@link EntityEntry}
*/
EntityEntry getEntityEntry(Object entity) {
// locate a ManagedEntity for the entity, but only if it is associated with the same PersistenceContext.
final var managedEntity = getAssociatedManagedEntity( entity );
// and get/return the EntityEntry from the ManagedEntry
return managedEntity == null
? null
: managedEntity.$$_hibernate_getEntityEntry();
}
/**
* Remove an entity from the context, returning its {@link EntityEntry}.
*
* @param entity The entity to remove
*
* @return The removed {@link EntityEntry}
*/
EntityEntry removeEntityEntry(Object entity) {
// locate a ManagedEntity for the entity, but only if it is associated with the same PersistenceContext.
// no need to check if the entity is a ManagedEntity that is associated with a different PersistenceContext
final var managedEntity = getAssociatedManagedEntity( entity );
if ( managedEntity == null ) {
// not associated with this EntityEntryContext, so nothing to do.
return null;
}
dirty = true;
removeXref( entity, managedEntity );
// re-link
count--;
if ( count == 0 ) {
// handle as a special case...
head = null;
tail = null;
assert managedEntity.$$_hibernate_getPreviousManagedEntity() == null;
assert managedEntity.$$_hibernate_getNextManagedEntity() == null;
}
else {
// otherwise, previous or next (or both) should be non-null
final var previous = managedEntity.$$_hibernate_getPreviousManagedEntity();
final var next = managedEntity.$$_hibernate_getNextManagedEntity();
if ( previous == null ) {
// we are removing head
assert managedEntity == head;
head = next;
}
else {
previous.$$_hibernate_setNextManagedEntity( next );
}
if ( next == null ) {
// we are removing tail
assert managedEntity == tail;
tail = previous;
}
else {
next.$$_hibernate_setPreviousManagedEntity( previous );
}
}
// finally clean out the ManagedEntity and return the associated EntityEntry
return clearManagedEntity( managedEntity );
}
private void removeXref(Object entity, ManagedEntity managedEntity) {
if ( managedEntity instanceof ImmutableManagedEntityHolder holder ) {
assert entity == holder.managedEntity;
if ( !isReferenceCachingEnabled( holder.$$_hibernate_getEntityEntry().getPersister() ) ) {
immutableManagedEntityXref.remove( managedEntity.$$_hibernate_getInstanceId(), entity );
}
else {
nonEnhancedEntityXref.remove( entity );
}
}
else if ( !isManagedEntity( entity ) ) {
nonEnhancedEntityXref.remove( entity );
}
}
/**
* The main bugaboo with {@code IdentityMap} that warranted this | EntityEntryContext |
java | apache__maven | its/core-it-suite/src/test/resources/mng-8005/extension/src/main/java/org/apache/maven/its/extensions/IdeWorkspaceReader.java | {
"start": 1231,
"end": 1930
} | class ____ implements WorkspaceReader {
private static final Logger log = LoggerFactory.getLogger(IdeWorkspaceReader.class);
private final WorkspaceRepository repository = new WorkspaceRepository();
public IdeWorkspaceReader() {
log.info("created");
}
@Override
public WorkspaceRepository getRepository() {
return repository;
}
@Override
public File findArtifact(Artifact artifact) {
log.info("findArtifact({})", artifact);
return null;
}
@Override
public List<String> findVersions(Artifact artifact) {
log.info("findVersions({})", artifact);
return Collections.emptyList();
}
}
| IdeWorkspaceReader |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/util/XpathExpectationsHelper.java | {
"start": 1580,
"end": 10538
} | class ____ {
private final String expression;
private final XPathExpression xpathExpression;
private final boolean hasNamespaces;
/**
* XpathExpectationsHelper constructor.
* @param expression the XPath expression
* @param namespaces the XML namespaces referenced in the XPath expression, or {@code null}
* @param args arguments to parameterize the XPath expression with using the
* formatting specifiers defined in {@link String#format(String, Object...)}
* @throws XPathExpressionException if expression compilation failed
*/
public XpathExpectationsHelper(String expression, @Nullable Map<String, String> namespaces, Object... args)
throws XPathExpressionException {
this.expression = String.format(expression, args);
this.xpathExpression = compileXpathExpression(this.expression, namespaces);
this.hasNamespaces = !CollectionUtils.isEmpty(namespaces);
}
private static XPathExpression compileXpathExpression(String expression,
@Nullable Map<String, String> namespaces) throws XPathExpressionException {
SimpleNamespaceContext namespaceContext = new SimpleNamespaceContext();
namespaceContext.setBindings(namespaces != null ? namespaces : Collections.emptyMap());
XPath xpath = XPathFactory.newInstance().newXPath();
xpath.setNamespaceContext(namespaceContext);
return xpath.compile(expression);
}
/**
* Return the compiled XPath expression.
*/
protected XPathExpression getXpathExpression() {
return this.xpathExpression;
}
/**
* Parse the content, evaluate the XPath expression as a {@link Node},
* and assert it with the given {@code Matcher<Node>}.
*/
public void assertNode(byte[] content, @Nullable String encoding, Matcher<? super Node> matcher)
throws Exception {
Node node = evaluateXpath(content, encoding, Node.class);
MatcherAssert.assertThat("XPath " + this.expression, node, matcher);
}
/**
* Parse the content, evaluate the XPath expression as a {@link NodeList},
* and assert it with the given {@code Matcher<NodeList>}.
* @since 5.2.2
*/
public void assertNodeList(byte[] content, @Nullable String encoding, Matcher<? super NodeList> matcher)
throws Exception {
Document document = parseXmlByteArray(content, encoding);
NodeList nodeList = evaluateXpath(document, XPathConstants.NODESET, NodeList.class);
MatcherAssert.assertThat("XPath " + getXpathExpression(), nodeList, matcher);
}
/**
* Apply the XPath expression and assert the resulting content exists.
* @throws Exception if content parsing or expression evaluation fails
*/
public void exists(byte[] content, @Nullable String encoding) throws Exception {
Node node = evaluateXpath(content, encoding, Node.class);
AssertionErrors.assertNotNull("XPath " + this.expression + " does not exist", node);
}
/**
* Apply the XPath expression and assert the resulting content does not exist.
* @throws Exception if content parsing or expression evaluation fails
*/
public void doesNotExist(byte[] content, @Nullable String encoding) throws Exception {
Node node = evaluateXpath(content, encoding, Node.class);
AssertionErrors.assertNull("XPath " + this.expression + " exists", node);
}
/**
* Apply the XPath expression and assert the resulting content with the
* given Hamcrest matcher.
* @throws Exception if content parsing or expression evaluation fails
*/
public void assertNodeCount(byte[] content, @Nullable String encoding, Matcher<? super Integer> matcher)
throws Exception {
NodeList nodeList = evaluateXpath(content, encoding, NodeList.class);
String reason = "nodeCount for XPath " + this.expression;
MatcherAssert.assertThat(reason, nodeList != null ? nodeList.getLength() : 0, matcher);
}
/**
* Apply the XPath expression and assert the resulting content as an integer.
* @throws Exception if content parsing or expression evaluation fails
*/
public void assertNodeCount(byte[] content, @Nullable String encoding, int expectedCount) throws Exception {
NodeList nodeList = evaluateXpath(content, encoding, NodeList.class);
AssertionErrors.assertEquals("nodeCount for XPath " + this.expression, expectedCount,
(nodeList != null ? nodeList.getLength() : 0));
}
/**
* Apply the XPath expression and assert the resulting content with the
* given Hamcrest matcher.
* @throws Exception if content parsing or expression evaluation fails
*/
public void assertString(byte[] content, @Nullable String encoding, Matcher<? super String> matcher)
throws Exception {
String actual = evaluateXpath(content, encoding, String.class);
MatcherAssert.assertThat("XPath " + this.expression, actual, matcher);
}
/**
* Apply the XPath expression and assert the resulting content as a String.
* @throws Exception if content parsing or expression evaluation fails
*/
public void assertString(byte[] content, @Nullable String encoding, String expectedValue) throws Exception {
String actual = evaluateXpath(content, encoding, String.class);
AssertionErrors.assertEquals("XPath " + this.expression, expectedValue, actual);
}
/**
* Apply the XPath expression and assert the resulting content with the
* given Hamcrest matcher.
* @throws Exception if content parsing or expression evaluation fails
*/
public void assertNumber(byte[] content, @Nullable String encoding, Matcher<? super Double> matcher) throws Exception {
Double actual = evaluateXpath(content, encoding, Double.class);
MatcherAssert.assertThat("XPath " + this.expression, actual, matcher);
}
/**
* Apply the XPath expression and assert the resulting content as a Double.
* @throws Exception if content parsing or expression evaluation fails
*/
public void assertNumber(byte[] content, @Nullable String encoding, Double expectedValue) throws Exception {
Double actual = evaluateXpath(content, encoding, Double.class);
AssertionErrors.assertEquals("XPath " + this.expression, expectedValue, actual);
}
/**
* Apply the XPath expression and assert the resulting content as a Boolean.
* @throws Exception if content parsing or expression evaluation fails
*/
public void assertBoolean(byte[] content, @Nullable String encoding, boolean expectedValue) throws Exception {
String actual = evaluateXpath(content, encoding, String.class);
AssertionErrors.assertEquals("XPath " + this.expression, expectedValue, Boolean.parseBoolean(actual));
}
/**
* Evaluate the XPath and return the resulting value.
* @param content the content to evaluate against
* @param encoding the encoding to use (optionally)
* @param targetClass the target class, one of Number, String, Boolean,
* org.w3c.Node, or NodeList
* @throws Exception if content parsing or expression evaluation fails
* @since 5.1
*/
public <T> @Nullable T evaluateXpath(byte[] content, @Nullable String encoding, Class<T> targetClass) throws Exception {
Document document = parseXmlByteArray(content, encoding);
return evaluateXpath(document, toQName(targetClass), targetClass);
}
/**
* Parse the given XML content to a {@link Document}.
* @param xml the content to parse
* @param encoding optional content encoding, if provided as metadata (for example, in HTTP headers)
* @return the parsed document
*/
protected Document parseXmlByteArray(byte[] xml, @Nullable String encoding) throws Exception {
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
factory.setNamespaceAware(this.hasNamespaces);
DocumentBuilder documentBuilder = factory.newDocumentBuilder();
InputSource inputSource = new InputSource(new ByteArrayInputStream(xml));
if (StringUtils.hasText(encoding)) {
inputSource.setEncoding(encoding);
}
return documentBuilder.parse(inputSource);
}
/**
* Apply the XPath expression to given document.
* @throws XPathExpressionException if expression evaluation failed
*/
@SuppressWarnings("unchecked")
protected <T> @Nullable T evaluateXpath(Document document, QName evaluationType, Class<T> expectedClass)
throws XPathExpressionException {
return (T) getXpathExpression().evaluate(document, evaluationType);
}
private <T> QName toQName(Class<T> expectedClass) {
QName evaluationType;
if (Number.class.isAssignableFrom(expectedClass)) {
evaluationType = XPathConstants.NUMBER;
}
else if (CharSequence.class.isAssignableFrom(expectedClass)) {
evaluationType = XPathConstants.STRING;
}
else if (Boolean.class.isAssignableFrom(expectedClass)) {
evaluationType = XPathConstants.BOOLEAN;
}
else if (Node.class.isAssignableFrom(expectedClass)) {
evaluationType = XPathConstants.NODE;
}
else if (NodeList.class.isAssignableFrom(expectedClass)) {
evaluationType = XPathConstants.NODESET;
}
else {
throw new IllegalArgumentException("Unexpected target class " + expectedClass + ". " +
"Supported: numbers, strings, boolean, and org.w3c.Node and NodeList");
}
return evaluationType;
}
}
| XpathExpectationsHelper |
java | apache__camel | core/camel-core-catalog/src/main/java/org/apache/camel/catalog/impl/DefaultRuntimeCamelCatalog.java | {
"start": 1448,
"end": 4229
} | class ____ extends AbstractCachingCamelCatalog implements RuntimeCamelCatalog {
private CamelContext camelContext;
public DefaultRuntimeCamelCatalog() {
this(true);
}
public DefaultRuntimeCamelCatalog(boolean caching) {
super(caching);
}
@Override
public CamelContext getCamelContext() {
return camelContext;
}
@Override
public void setCamelContext(CamelContext camelContext) {
this.camelContext = camelContext;
this.setJSonSchemaResolver(new CamelContextJSonSchemaResolver(camelContext));
}
@Override
public void start() {
// noop
}
@Override
public void stop() {
super.clearCache();
}
@Override
public String modelJSonSchema(String name) {
return cache("eip-" + name, name, super::modelJSonSchema);
}
@Override
public EipModel eipModel(String name) {
return cache("eip-model-" + name, name, super::eipModel);
}
@Override
public String componentJSonSchema(String name) {
return cache("component-" + name, name, super::componentJSonSchema);
}
@Override
public ComponentModel componentModel(String name) {
return cache("component-model-" + name, name, super::componentModel);
}
@Override
public String dataFormatJSonSchema(String name) {
return cache("dataformat-" + name, name, super::dataFormatJSonSchema);
}
@Override
public DataFormatModel dataFormatModel(String name) {
return cache("dataformat-model-" + name, name, super::dataFormatModel);
}
@Override
public String languageJSonSchema(String name) {
return cache("language-" + name, name, super::languageJSonSchema);
}
@Override
public LanguageModel languageModel(String name) {
return cache("language-model-" + name, name, super::languageModel);
}
@Override
public String transformerJSonSchema(String name) {
return cache("transformer-" + name, name, super::transformerJSonSchema);
}
@Override
public TransformerModel transformerModel(String name) {
return cache("transformer-model-" + name, name, super::transformerModel);
}
@Override
public String otherJSonSchema(String name) {
return cache("other-" + name, name, super::otherJSonSchema);
}
@Override
public OtherModel otherModel(String name) {
return cache("other-model-" + name, name, super::otherModel);
}
@Override
public String mainJSonSchema() {
return cache("main", "main", k -> super.mainJSonSchema());
}
@Override
public MainModel mainModel() {
return cache("main-model", "main-model", k -> super.mainModel());
}
}
| DefaultRuntimeCamelCatalog |
java | apache__rocketmq | remoting/src/test/java/org/apache/rocketmq/remoting/protocol/admin/TopicStatsTableTest.java | {
"start": 1109,
"end": 3450
} | class ____ {
private volatile TopicStatsTable topicStatsTable;
private static final String TEST_TOPIC = "test_topic";
private static final String TEST_BROKER = "test_broker";
private static final int QUEUE_ID = 1;
private static final long CURRENT_TIME_MILLIS = System.currentTimeMillis();
private static final long MAX_OFFSET = CURRENT_TIME_MILLIS + 100;
private static final long MIN_OFFSET = CURRENT_TIME_MILLIS - 100;
@Before
public void buildTopicStatsTable() {
HashMap<MessageQueue, TopicOffset> offsetTableMap = new HashMap<>();
MessageQueue messageQueue = new MessageQueue(TEST_TOPIC, TEST_BROKER, QUEUE_ID);
TopicOffset topicOffset = new TopicOffset();
topicOffset.setLastUpdateTimestamp(CURRENT_TIME_MILLIS);
topicOffset.setMinOffset(MIN_OFFSET);
topicOffset.setMaxOffset(MAX_OFFSET);
offsetTableMap.put(messageQueue, topicOffset);
topicStatsTable = new TopicStatsTable();
topicStatsTable.setOffsetTable(offsetTableMap);
}
@Test
public void testGetOffsetTable() throws Exception {
validateTopicStatsTable(topicStatsTable);
}
@Test
public void testFromJson() throws Exception {
String json = RemotingSerializable.toJson(topicStatsTable, true);
TopicStatsTable fromJson = RemotingSerializable.fromJson(json, TopicStatsTable.class);
validateTopicStatsTable(fromJson);
}
private static void validateTopicStatsTable(TopicStatsTable topicStatsTable) throws Exception {
Map.Entry<MessageQueue, TopicOffset> savedTopicStatsTableMap = topicStatsTable.getOffsetTable().entrySet().iterator().next();
MessageQueue savedMessageQueue = savedTopicStatsTableMap.getKey();
TopicOffset savedTopicOffset = savedTopicStatsTableMap.getValue();
Assert.assertTrue(savedMessageQueue.getTopic().equals(TEST_TOPIC));
Assert.assertTrue(savedMessageQueue.getBrokerName().equals(TEST_BROKER));
Assert.assertTrue(savedMessageQueue.getQueueId() == QUEUE_ID);
Assert.assertTrue(savedTopicOffset.getLastUpdateTimestamp() == CURRENT_TIME_MILLIS);
Assert.assertTrue(savedTopicOffset.getMaxOffset() == MAX_OFFSET);
Assert.assertTrue(savedTopicOffset.getMinOffset() == MIN_OFFSET);
}
}
| TopicStatsTableTest |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/AbstractLifeCycle.java | {
"start": 1069,
"end": 4235
} | class ____ implements LifeCycle2 {
public static final int DEFAULT_STOP_TIMEOUT = 0;
public static final TimeUnit DEFAULT_STOP_TIMEUNIT = TimeUnit.MILLISECONDS;
/**
* Allow subclasses access to the status logger without creating another instance.
*/
protected static final org.apache.logging.log4j.Logger LOGGER = StatusLogger.getLogger();
/**
* Gets the status logger.
*
* @return the status logger.
*/
protected static org.apache.logging.log4j.Logger getStatusLogger() {
return LOGGER;
}
private volatile LifeCycle.State state = LifeCycle.State.INITIALIZED;
protected boolean equalsImpl(final Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final LifeCycle other = (LifeCycle) obj;
if (state != other.getState()) {
return false;
}
return true;
}
@Override
public LifeCycle.State getState() {
return this.state;
}
protected int hashCodeImpl() {
final int prime = 31;
int result = 1;
result = prime * result + ((state == null) ? 0 : state.hashCode());
return result;
}
public boolean isInitialized() {
return this.state == LifeCycle.State.INITIALIZED;
}
@Override
public boolean isStarted() {
return this.state == LifeCycle.State.STARTED;
}
public boolean isStarting() {
return this.state == LifeCycle.State.STARTING;
}
@Override
public boolean isStopped() {
return this.state == LifeCycle.State.STOPPED;
}
public boolean isStopping() {
return this.state == LifeCycle.State.STOPPING;
}
protected void setStarted() {
this.setState(LifeCycle.State.STARTED);
}
protected void setStarting() {
this.setState(LifeCycle.State.STARTING);
}
protected void setState(final LifeCycle.State newState) {
this.state = newState;
// Need a better string than this.toString() for the message
// LOGGER.trace("{} {}", this.state, this);
}
protected void setStopped() {
this.setState(LifeCycle.State.STOPPED);
}
protected void setStopping() {
this.setState(LifeCycle.State.STOPPING);
}
@Override
public void initialize() {
this.state = State.INITIALIZED;
}
@Override
public void start() {
this.setStarted();
}
@Override
public void stop() {
stop(DEFAULT_STOP_TIMEOUT, DEFAULT_STOP_TIMEUNIT);
}
protected boolean stop(final Future<?> future) {
boolean stopped = true;
if (future != null) {
if (future.isCancelled() || future.isDone()) {
return true;
}
stopped = future.cancel(true);
}
return stopped;
}
@Override
public boolean stop(final long timeout, final TimeUnit timeUnit) {
this.state = LifeCycle.State.STOPPED;
return true;
}
}
| AbstractLifeCycle |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageTextWriter.java | {
"start": 6250,
"end": 6375
} | class ____ implements MetadataMap {
/**
* Represent a directory in memory.
*/
private static | InMemoryMetadataDB |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/RobotFrameworkEndpointBuilderFactory.java | {
"start": 125830,
"end": 126187
} | class ____ extends AbstractEndpointBuilder implements RobotFrameworkEndpointBuilder, AdvancedRobotFrameworkEndpointBuilder {
public RobotFrameworkEndpointBuilderImpl(String path) {
super(componentName, path);
}
}
return new RobotFrameworkEndpointBuilderImpl(path);
}
} | RobotFrameworkEndpointBuilderImpl |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/arm-java/org/apache/hadoop/ipc/protobuf/TestProtosLegacy.java | {
"start": 259747,
"end": 260277
} | interface ____
extends com.google.protobuf.MessageOrBuilder {
// repeated int32 values = 1;
/**
* <code>repeated int32 values = 1;</code>
*/
java.util.List<java.lang.Integer> getValuesList();
/**
* <code>repeated int32 values = 1;</code>
*/
int getValuesCount();
/**
* <code>repeated int32 values = 1;</code>
*/
int getValues(int index);
}
/**
* Protobuf type {@code hadoop.common.ExchangeResponseProto}
*/
public static final | ExchangeResponseProtoOrBuilder |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/entitygraph/FindWithEntityGraphTest.java | {
"start": 1988,
"end": 3277
} | class ____ {
@Id
private Long id;
private String name;
@ManyToOne
private Person parent;
@OneToOne(mappedBy = "person", orphanRemoval = true, cascade = CascadeType.ALL)
private PersonContact personContact;
@OneToMany(mappedBy = "parent", orphanRemoval = true, cascade = CascadeType.ALL)
private Set<Person> children = new HashSet<>( 0 );
public Person() {
}
public Person(Long id, String name) {
this.id = id;
this.name = name;
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public PersonContact getPersonContact() {
return personContact;
}
public void setPersonContact(PersonContact personContact) {
this.personContact = personContact;
}
public Person getParent() {
return parent;
}
public void setParent(Person parent) {
this.parent = parent;
}
public void addParent(Person parent) {
this.parent = parent;
parent.getChildren().add( this );
}
public Set<Person> getChildren() {
return children;
}
public void setChildren(Set<Person> children) {
this.children = children;
}
}
@Entity(name = "PersonContact")
public static | Person |
java | apache__camel | components/camel-observation/src/test/java/org/apache/camel/observation/SpanProcessorsTest.java | {
"start": 1103,
"end": 4402
} | class ____ extends CamelMicrometerObservationTestSupport {
private static final SpanTestData[] TEST_DATA = {
new SpanTestData().setLabel("seda:b server").setUri("seda://b").setOperation("b")
.setKind(SpanKind.SERVER)
.setParentId(1)
.addTag("b-tag", "request-header-value"),
new SpanTestData().setLabel("seda:b server").setUri("seda://b").setOperation("b")
.setKind(SpanKind.CLIENT)
.setParentId(4),
new SpanTestData().setLabel("seda:c server").setUri("seda://c").setOperation("c")
.setKind(SpanKind.SERVER)
.setParentId(3),
new SpanTestData().setLabel("seda:c server").setUri("seda://c").setOperation("c")
.setKind(SpanKind.CLIENT)
.setParentId(4),
new SpanTestData().setLabel("seda:a server").setUri("seda://a").setOperation("a")
.setKind(SpanKind.SERVER)
.setParentId(5),
new SpanTestData().setLabel("seda:a server").setUri("seda://a").setOperation("a")
.setKind(SpanKind.CLIENT)
.setParentId(6),
new SpanTestData().setLabel("direct:start server").setUri("direct://start").setOperation("start")
.setKind(SpanKind.SERVER).setParentId(7),
new SpanTestData().setLabel("direct:start server").setUri("direct://start").setOperation("start")
.setKind(SpanKind.CLIENT)
};
SpanProcessorsTest() {
super(TEST_DATA);
}
@Test
void testRoute() {
Exchange result = template.request("direct:start",
exchange -> {
exchange.getIn().setBody("Hello");
exchange.getIn().setHeader("request-header",
context.resolveLanguage("simple").createExpression("request-header-value"));
});
verify();
assertEquals("request-header-value", result.getMessage().getHeader("baggage-header", String.class));
}
@Override
protected RoutesBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").to("seda:a").routeId("start");
from("seda:a").routeId("a")
.log("routing at ${routeId}")
.process(new SetCorrelationContextProcessor("a-baggage", simple("${header.request-header}")))
.to("seda:b")
.delay(2000)
.to("seda:c")
.log("End of routing");
from("seda:b").routeId("b")
.log("routing at ${routeId}")
.process(new AttributeProcessor("b-tag", simple("${header.request-header}")))
.delay(simple("${random(1000,2000)}"));
from("seda:c").routeId("c")
.to("log:test")
.process(new GetCorrelationContextProcessor("a-baggage", "baggage-header"))
.delay(simple("${random(0,100)}"));
}
};
}
}
| SpanProcessorsTest |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/metamodel/internal/AbstractEntityInstantiatorPojo.java | {
"start": 865,
"end": 2612
} | class ____ extends AbstractPojoInstantiator implements EntityInstantiator {
private final Class<?> proxyInterface;
private final boolean applyBytecodeInterception;
private final EntityRelatedState loadingInterceptorState;
public AbstractEntityInstantiatorPojo(
EntityPersister persister,
PersistentClass persistentClass,
JavaType<?> javaType) {
super( javaType.getJavaTypeClass() );
proxyInterface = persistentClass.getProxyInterface();
//TODO this PojoEntityInstantiator appears to not be reused ?!
applyBytecodeInterception =
isPersistentAttributeInterceptableType( persistentClass.getMappedClass() );
loadingInterceptorState =
applyBytecodeInterception
? new EntityRelatedState(
persister.getEntityName(),
persister.getBytecodeEnhancementMetadata()
.getLazyAttributesMetadata()
.getLazyAttributeNames()
)
: null;
}
protected Object applyInterception(Object entity) {
if ( applyBytecodeInterception ) {
asPersistentAttributeInterceptable( entity )
.$$_hibernate_setInterceptor( new LazyAttributeLoadingInterceptor(
loadingInterceptorState,
null,
null
) );
}
return entity;
}
@Override
public boolean isInstance(Object object) {
return super.isInstance( object )
// this one needed only for guessEntityMode()
|| proxyInterface!=null && proxyInterface.isInstance(object);
}
/*
* Used by Hibernate Reactive
*/
protected boolean isApplyBytecodeInterception() {
return applyBytecodeInterception;
}
/*
* Used by Hibernate Reactive
*/
protected LazyAttributeLoadingInterceptor.EntityRelatedState getLoadingInterceptorState() {
return loadingInterceptorState;
}
}
| AbstractEntityInstantiatorPojo |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/calcite/RexTableArgCall.java | {
"start": 1584,
"end": 4169
} | class ____ extends RexCall {
private final int inputIndex;
private final int[] partitionKeys;
private final int[] orderKeys;
public RexTableArgCall(RelDataType type, int inputIndex, int[] partitionKeys, int[] orderKeys) {
super(type, SqlTableArgOperator.INSTANCE, List.of());
this.inputIndex = inputIndex;
this.partitionKeys = partitionKeys;
this.orderKeys = orderKeys;
}
public int getInputIndex() {
return inputIndex;
}
public int[] getPartitionKeys() {
return partitionKeys;
}
public int[] getOrderKeys() {
return orderKeys;
}
@Override
protected String computeDigest(boolean withType) {
final StringBuilder sb = new StringBuilder(op.getName());
sb.append("(");
sb.append("#");
sb.append(inputIndex);
sb.append(")");
if (withType) {
sb.append(":");
sb.append(type.getFullTypeString());
}
formatKeys(sb, partitionKeys, " PARTITION BY");
formatKeys(sb, orderKeys, " ORDER BY");
return sb.toString();
}
private void formatKeys(StringBuilder sb, int[] keys, String prefix) {
if (keys.length == 0) {
return;
}
sb.append(
Arrays.stream(keys)
.mapToObj(key -> "$" + key)
.collect(Collectors.joining(", ", prefix + "(", ")")));
}
@Override
public RexCall clone(RelDataType type, List<RexNode> operands) {
return new RexTableArgCall(type, inputIndex, partitionKeys, orderKeys);
}
public RexTableArgCall copy(RelDataType type, int[] partitionKeys, int[] orderKeys) {
return new RexTableArgCall(type, inputIndex, partitionKeys, orderKeys);
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
if (!super.equals(o)) {
return false;
}
final RexTableArgCall that = (RexTableArgCall) o;
return inputIndex == that.inputIndex
&& Arrays.equals(partitionKeys, that.partitionKeys)
&& Arrays.equals(orderKeys, that.orderKeys);
}
@Override
public int hashCode() {
int result = Objects.hash(super.hashCode(), inputIndex);
result = 31 * result + Arrays.hashCode(partitionKeys);
result = 31 * result + Arrays.hashCode(orderKeys);
return result;
}
}
| RexTableArgCall |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/id/FlushIdGenTest.java | {
"start": 917,
"end": 1375
} | class ____ {
@Test
public void testPersistBeforeTransaction(SessionFactoryScope scope) {
scope.inSession(
session -> {
RootEntity ent1_0 = new RootEntity();
RootEntity ent1_1 = new RootEntity();
session.persist( ent1_0 );
session.persist( ent1_1 );
Transaction tx = session.beginTransaction();
try {
tx.commit(); // flush
}
catch (Exception e) {
tx.rollback();
}
}
);
}
}
| FlushIdGenTest |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/functions/Functions.java | {
"start": 16177,
"end": 16801
} | class ____<T> implements Function<List<T>, List<T>> {
final Comparator<? super T> comparator;
ListSorter(Comparator<? super T> comparator) {
this.comparator = comparator;
}
@Override
public List<T> apply(List<T> v) {
Collections.sort(v, comparator);
return v;
}
}
public static <T> Function<List<T>, List<T>> listSorter(final Comparator<? super T> comparator) {
return new ListSorter<>(comparator);
}
public static final Consumer<Subscription> REQUEST_MAX = new MaxRequestSubscription();
static final | ListSorter |
java | spring-projects__spring-framework | spring-websocket/src/test/java/org/springframework/web/socket/config/MessageBrokerBeanDefinitionParserTests.java | {
"start": 26459,
"end": 26739
} | class ____ implements HandlerMethodArgumentResolver {
@Override
public boolean supportsParameter(MethodParameter parameter) {
return false;
}
@Override
public Object resolveArgument(MethodParameter parameter, Message<?> message) {
return null;
}
}
| CustomArgumentResolver |
java | spring-projects__spring-boot | module/spring-boot-web-server/src/test/java/org/springframework/boot/web/server/servlet/context/WebServletHandlerTests.java | {
"start": 5458,
"end": 5556
} | class ____ extends HttpServlet {
}
@WebServlet(asyncSupported = true)
| DefaultConfigurationServlet |
java | elastic__elasticsearch | test/fixtures/aws-sts-fixture/src/main/java/fixture/aws/sts/AwsStsHttpHandler.java | {
"start": 1382,
"end": 5944
} | class ____ implements HttpHandler {
public static final String ROLE_ARN = "arn:aws:iam::123456789012:role/FederatedWebIdentityRole";
public static final String ROLE_NAME = "sts-fixture-test";
private final BiConsumer<String, String> newCredentialsConsumer;
private final String webIdentityToken;
public AwsStsHttpHandler(BiConsumer<String, String> newCredentialsConsumer, String webIdentityToken) {
this.newCredentialsConsumer = Objects.requireNonNull(newCredentialsConsumer);
this.webIdentityToken = Objects.requireNonNull(webIdentityToken);
}
@Override
public void handle(final HttpExchange exchange) throws IOException {
// https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRoleWithWebIdentity.html
try (exchange) {
final var requestMethod = exchange.getRequestMethod();
final var path = exchange.getRequestURI().getPath();
if ("POST".equals(requestMethod) && "/".equals(path)) {
String body = new String(exchange.getRequestBody().readAllBytes(), StandardCharsets.UTF_8);
Map<String, String> params = Arrays.stream(body.split("&"))
.map(e -> e.split("="))
.collect(Collectors.toMap(e -> e[0], e -> URLDecoder.decode(e[1], StandardCharsets.UTF_8)));
if ("AssumeRoleWithWebIdentity".equals(params.get("Action")) == false) {
exchange.sendResponseHeaders(RestStatus.BAD_REQUEST.getStatus(), 0);
exchange.close();
return;
}
if (ROLE_NAME.equals(params.get("RoleSessionName")) == false
|| webIdentityToken.equals(params.get("WebIdentityToken")) == false
|| ROLE_ARN.equals(params.get("RoleArn")) == false) {
exchange.sendResponseHeaders(RestStatus.UNAUTHORIZED.getStatus(), 0);
exchange.close();
return;
}
final var accessKey = "test_key_STS_" + randomIdentifier();
final var sessionToken = randomIdentifier();
newCredentialsConsumer.accept(accessKey, sessionToken);
final byte[] response = String.format(
Locale.ROOT,
"""
<AssumeRoleWithWebIdentityResponse xmlns="https://sts.amazonaws.com/doc/2011-06-15/">
<AssumeRoleWithWebIdentityResult>
<SubjectFromWebIdentityToken>amzn1.account.AF6RHO7KZU5XRVQJGXK6HB56KR2A</SubjectFromWebIdentityToken>
<Audience>client.5498841531868486423.1548@apps.example.com</Audience>
<AssumedRoleUser>
<Arn>%s</Arn>
<AssumedRoleId>AROACLKWSDQRAOEXAMPLE:%s</AssumedRoleId>
</AssumedRoleUser>
<Credentials>
<SessionToken>%s</SessionToken>
<SecretAccessKey>%s</SecretAccessKey>
<Expiration>%s</Expiration>
<AccessKeyId>%s</AccessKeyId>
</Credentials>
<SourceIdentity>SourceIdentityValue</SourceIdentity>
<Provider>www.amazon.com</Provider>
</AssumeRoleWithWebIdentityResult>
<ResponseMetadata>
<RequestId>ad4156e9-bce1-11e2-82e6-6b6efEXAMPLE</RequestId>
</ResponseMetadata>
</AssumeRoleWithWebIdentityResponse>""",
ROLE_ARN,
ROLE_NAME,
sessionToken,
randomSecretKey(),
ZonedDateTime.now(Clock.systemUTC()).plusDays(1L).format(DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ssZ")),
accessKey
).getBytes(StandardCharsets.UTF_8);
exchange.getResponseHeaders().add("Content-Type", "text/xml; charset=UTF-8");
exchange.sendResponseHeaders(RestStatus.OK.getStatus(), response.length);
exchange.getResponseBody().write(response);
exchange.close();
return;
}
ExceptionsHelper.maybeDieOnAnotherThread(new AssertionError("not supported: " + requestMethod + " " + path));
}
}
}
| AwsStsHttpHandler |
java | spring-projects__spring-framework | spring-expression/src/main/java/org/springframework/expression/spel/InternalParseException.java | {
"start": 942,
"end": 1203
} | class ____ extends RuntimeException {
public InternalParseException(@Nullable SpelParseException cause) {
super(cause);
}
@Override
public @Nullable SpelParseException getCause() {
return (SpelParseException) super.getCause();
}
}
| InternalParseException |
java | spring-projects__spring-framework | spring-beans/src/test/java/org/springframework/beans/BeanUtilsTests.java | {
"start": 24302,
"end": 24728
} | class ____ {
private String name;
private int specialProperty;
public void setName(String name) {
this.name = name;
}
public String getName() {
return this.name;
}
public void setSpecialProperty(int specialProperty) {
this.specialProperty = specialProperty;
}
public int getSpecialProperty() {
return specialProperty;
}
}
@SuppressWarnings("unused")
private static | NameAndSpecialProperty |
java | apache__maven | impl/maven-core/src/main/java/org/apache/maven/internal/transformation/impl/PomBuilder.java | {
"start": 1199,
"end": 1319
} | interface ____ not public and the purpose is to allow easy unit testing
* of {@link ConsumerPomArtifactTransformer}.
*/
| is |
java | spring-projects__spring-boot | module/spring-boot-jackson/src/main/java/org/springframework/boot/jackson/autoconfigure/JacksonAutoConfiguration.java | {
"start": 15014,
"end": 16010
} | class ____ extending DateFormat or a date
// pattern string value
String dateFormat = this.jacksonProperties.getDateFormat();
if (dateFormat != null) {
try {
Class<?> dateFormatClass = ClassUtils.forName(dateFormat, null);
builder.defaultDateFormat((DateFormat) BeanUtils.instantiateClass(dateFormatClass));
}
catch (ClassNotFoundException ex) {
SimpleDateFormat simpleDateFormat = new SimpleDateFormat(dateFormat);
// Since Jackson 2.6.3 we always need to set a TimeZone (see
// gh-4170). If none in our properties fallback to Jackson's
// default
TimeZone timeZone = this.jacksonProperties.getTimeZone();
if (timeZone == null) {
timeZone = new ObjectMapper().serializationConfig().getTimeZone();
}
simpleDateFormat.setTimeZone(timeZone);
builder.defaultDateFormat(simpleDateFormat);
}
}
}
private void configurePropertyNamingStrategy(MapperBuilder<?, ?> builder) {
// We support a fully qualified | name |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/web/multipart/support/StandardMultipartHttpServletRequest.java | {
"start": 6968,
"end": 8776
} | class ____ implements MultipartFile, Serializable {
private final Part part;
private final String filename;
public StandardMultipartFile(Part part, String filename) {
this.part = part;
this.filename = filename;
}
@Override
public String getName() {
return this.part.getName();
}
@Override
public String getOriginalFilename() {
return this.filename;
}
@Override
public String getContentType() {
return this.part.getContentType();
}
@Override
public boolean isEmpty() {
return (this.part.getSize() == 0);
}
@Override
public long getSize() {
return this.part.getSize();
}
@Override
public byte[] getBytes() throws IOException {
return FileCopyUtils.copyToByteArray(this.part.getInputStream());
}
@Override
public InputStream getInputStream() throws IOException {
return this.part.getInputStream();
}
@Override
public void transferTo(File dest) throws IOException, IllegalStateException {
this.part.write(dest.getPath());
if (dest.isAbsolute() && !dest.exists()) {
// Servlet Part.write is not guaranteed to support absolute file paths:
// may translate the given path to a relative location within a temp dir
// (for example, on Jetty whereas Tomcat detects absolute paths).
// At least we offloaded the file from memory storage; it'll get deleted
// from the temp dir eventually in any case. And for our user's purposes,
// we can manually copy it to the requested location as a fallback.
FileCopyUtils.copy(this.part.getInputStream(), Files.newOutputStream(dest.toPath()));
}
}
@Override
public void transferTo(Path dest) throws IOException, IllegalStateException {
FileCopyUtils.copy(this.part.getInputStream(), Files.newOutputStream(dest));
}
}
}
| StandardMultipartFile |
java | google__error-prone | check_api/src/main/java/com/google/errorprone/MaskedClassLoader.java | {
"start": 2014,
"end": 2434
} | class ____ extends JavacFileManager {
public MaskedFileManager(Context context) {
super(context, /* register= */ true, UTF_8);
}
public MaskedFileManager() {
this(new Context());
}
@Override
protected ClassLoader getClassLoader(URL[] urls) {
return new URLClassLoader(
urls, new MaskedClassLoader(JavacFileManager.class.getClassLoader()));
}
}
}
| MaskedFileManager |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.