language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/error/ShouldNotContainSequence_create_Test.java
|
{
"start": 1178,
"end": 2818
}
|
class ____ {
@Test
void should_create_error_message() {
// GIVEN
ErrorMessageFactory factory = shouldNotContainSequence(list("Yoda", "Luke"), list("Yoda", "Luke"), 1);
// WHEN
String message = factory.create(new TextDescription("Test"), new StandardRepresentation());
// THEN
then(message).isEqualTo(format("[Test] %n" +
"Expecting actual:%n" +
" [\"Yoda\", \"Luke\"]%n" +
"to not contain sequence:%n" +
" [\"Yoda\", \"Luke\"]%n" +
"but was found at index 1%n"));
}
@Test
void should_create_error_message_with_custom_comparison_strategy() {
// GIVEN
ErrorMessageFactory factory = shouldNotContainSequence(list("yoDA", "LUke"), list("Yoda", "Luke"), 1,
new ComparatorBasedComparisonStrategy(CaseInsensitiveStringComparator.INSTANCE));
// WHEN
String message = factory.create(new TextDescription("Test"), new StandardRepresentation());
// THEN
then(message).isEqualTo(format("[Test] %n" +
"Expecting actual:%n" +
" [\"yoDA\", \"LUke\"]%n" +
"to not contain sequence:%n" +
" [\"Yoda\", \"Luke\"]%n" +
"but was found at index 1%n" +
"when comparing values using CaseInsensitiveStringComparator"));
}
}
|
ShouldNotContainSequence_create_Test
|
java
|
google__auto
|
factory/src/test/resources/expected/ParameterAnnotationsFactory.java
|
{
"start": 871,
"end": 1983
}
|
class ____ {
private final Provider<@ParameterAnnotations.NullableType String> fooProvider;
@Inject
ParameterAnnotationsFactory(Provider<@ParameterAnnotations.NullableType String> fooProvider) {
this.fooProvider = checkNotNull(fooProvider, 1, 1);
}
ParameterAnnotations create(
@ParameterAnnotations.NullableParameter Integer bar,
@ParameterAnnotations.Nullable Long baz,
@ParameterAnnotations.NullableType Thread buh,
@ParameterAnnotations.NullableParameterAndType String quux) {
return new ParameterAnnotations(
checkNotNull(fooProvider.get(), 1, 5),
checkNotNull(bar, 2, 5),
baz,
checkNotNull(buh, 4, 5),
checkNotNull(quux, 5, 5));
}
private static <T> T checkNotNull(T reference, int argumentNumber, int argumentCount) {
if (reference == null) {
throw new NullPointerException(
"@AutoFactory method argument is null but is not marked @Nullable. Argument "
+ argumentNumber
+ " of "
+ argumentCount);
}
return reference;
}
}
|
ParameterAnnotationsFactory
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/collectionelement/OnDeleteCascadeToElementCollectionTest.java
|
{
"start": 6778,
"end": 7034
}
|
class ____ {
public String serial;
public LocalDate issuedOn;
public Ticket() {
}
public Ticket(String serial, LocalDate issuedOn) {
this.serial = serial;
this.issuedOn = issuedOn;
}
}
@Entity(name = "NonCascading")
public static
|
Ticket
|
java
|
square__retrofit
|
retrofit/src/main/java/retrofit2/DefaultMethodSupport.java
|
{
"start": 1108,
"end": 1897
}
|
class ____ {
private static @Nullable Constructor<Lookup> lookupConstructor;
@IgnoreJRERequirement // Only used on JVM or Android API 24+.
@Nullable
static Object invoke(
Method method, Class<?> declaringClass, Object proxy, @Nullable Object[] args)
throws Throwable {
Constructor<Lookup> constructor = lookupConstructor;
if (constructor == null) {
constructor = Lookup.class.getDeclaredConstructor(Class.class, int.class);
constructor.setAccessible(true);
lookupConstructor = constructor;
}
return constructor
.newInstance(declaringClass, -1 /* trusted */)
.unreflectSpecial(method, declaringClass)
.bindTo(proxy)
.invokeWithArguments(args);
}
private DefaultMethodSupport() {}
}
|
DefaultMethodSupport
|
java
|
elastic__elasticsearch
|
x-pack/plugin/otel-data/src/main/java/org/elasticsearch/xpack/oteldata/otlp/datapoint/DataPoint.java
|
{
"start": 1547,
"end": 12284
}
|
interface ____ {
/**
* Returns the timestamp of the data point in Unix nanoseconds.
*
* @return the timestamp in nanoseconds
*/
long getTimestampUnixNano();
/**
* Returns the start timestamp of the data point in Unix nanoseconds.
* This allows detecting when a sequence of observations is unbroken.
* This field indicates to consumers the start time for points with cumulative and delta temporality,
* and can support correct rate calculation.
*
* @return the start timestamp in nanoseconds
*/
long getStartTimestampUnixNano();
/**
* Returns the attributes associated with the data point.
*
* @return a list of key-value pairs representing the attributes
*/
List<KeyValue> getAttributes();
/**
* Returns the unit of measurement for the data point.
*
* @return the unit as a string
*/
String getUnit();
/**
* Returns the name of the metric associated with the data point.
*
* @return the metric name as a string
*/
String getMetricName();
/**
* Builds the metric value for the data point and writes it to the provided XContentBuilder.
*
* @param mappingHints hints for building the metric value
* @param builder the XContentBuilder to write the metric value to
* @throws IOException if an I/O error occurs while writing to the builder
*/
void buildMetricValue(MappingHints mappingHints, XContentBuilder builder) throws IOException;
/**
* Returns the dynamic template name for the data point based on its type and value.
* This is used to dynamically map the appropriate field type according to the data point's characteristics.
*
* @param mappingHints hints for building the dynamic template
* @return the dynamic template name as a string
*/
String getDynamicTemplate(MappingHints mappingHints);
/**
* Validates whether the data point can be indexed into Elasticsearch.
*
* @param errors a set to collect validation error messages
* @return true if the data point is valid, false otherwise
*/
boolean isValid(Set<String> errors);
/**
* Returns the {@code _doc_count} for the data point.
* This is used when {@link MappingHints#docCount()} is true.
*
* @return the {@code _doc_count}
*/
long getDocCount();
record Number(NumberDataPoint dataPoint, Metric metric) implements DataPoint {
@Override
public long getTimestampUnixNano() {
return dataPoint.getTimeUnixNano();
}
@Override
public List<KeyValue> getAttributes() {
return dataPoint.getAttributesList();
}
@Override
public long getStartTimestampUnixNano() {
return dataPoint.getStartTimeUnixNano();
}
@Override
public String getUnit() {
return metric.getUnit();
}
@Override
public String getMetricName() {
return metric.getName();
}
@Override
public void buildMetricValue(MappingHints mappingHints, XContentBuilder builder) throws IOException {
switch (dataPoint.getValueCase()) {
case AS_DOUBLE -> builder.value(dataPoint.getAsDouble());
case AS_INT -> builder.value(dataPoint.getAsInt());
}
}
@Override
public long getDocCount() {
return 1;
}
@Override
public String getDynamicTemplate(MappingHints mappingHints) {
String type;
if (metric.hasSum()
// TODO add support for delta counters - for now we represent them as gauges
&& metric.getSum().getAggregationTemporality() == AGGREGATION_TEMPORALITY_CUMULATIVE
// TODO add support for up/down counters - for now we represent them as gauges
&& metric.getSum().getIsMonotonic()) {
type = "counter_";
} else {
type = "gauge_";
}
if (dataPoint.getValueCase() == NumberDataPoint.ValueCase.AS_INT) {
return type + "long";
} else if (dataPoint.getValueCase() == NumberDataPoint.ValueCase.AS_DOUBLE) {
return type + "double";
} else {
return null;
}
}
@Override
public boolean isValid(Set<String> errors) {
return true;
}
}
record ExponentialHistogram(ExponentialHistogramDataPoint dataPoint, Metric metric) implements DataPoint {
@Override
public long getTimestampUnixNano() {
return dataPoint.getTimeUnixNano();
}
@Override
public List<KeyValue> getAttributes() {
return dataPoint.getAttributesList();
}
@Override
public long getStartTimestampUnixNano() {
return dataPoint.getStartTimeUnixNano();
}
@Override
public String getUnit() {
return metric.getUnit();
}
@Override
public String getMetricName() {
return metric.getName();
}
@Override
public void buildMetricValue(MappingHints mappingHints, XContentBuilder builder) throws IOException {
if (mappingHints.aggregateMetricDouble()) {
buildAggregateMetricDouble(builder, dataPoint.getSum(), dataPoint.getCount());
} else {
builder.startObject();
builder.startArray("counts");
HistogramConverter.counts(dataPoint, builder::value);
builder.endArray();
builder.startArray("values");
HistogramConverter.centroidValues(dataPoint, builder::value);
builder.endArray();
builder.endObject();
}
}
@Override
public long getDocCount() {
return dataPoint.getCount();
}
@Override
public String getDynamicTemplate(MappingHints mappingHints) {
if (mappingHints.aggregateMetricDouble()) {
return "summary";
} else {
return "histogram";
}
}
@Override
public boolean isValid(Set<String> errors) {
if (metric.getExponentialHistogram().getAggregationTemporality() != AggregationTemporality.AGGREGATION_TEMPORALITY_DELTA) {
errors.add("cumulative exponential histogram metrics are not supported, ignoring " + metric.getName());
return false;
}
return true;
}
}
record Histogram(HistogramDataPoint dataPoint, Metric metric) implements DataPoint {
@Override
public long getTimestampUnixNano() {
return dataPoint.getTimeUnixNano();
}
@Override
public List<KeyValue> getAttributes() {
return dataPoint.getAttributesList();
}
@Override
public long getStartTimestampUnixNano() {
return dataPoint.getStartTimeUnixNano();
}
@Override
public String getUnit() {
return metric.getUnit();
}
@Override
public String getMetricName() {
return metric.getName();
}
@Override
public void buildMetricValue(MappingHints mappingHints, XContentBuilder builder) throws IOException {
if (mappingHints.aggregateMetricDouble()) {
buildAggregateMetricDouble(builder, dataPoint.getSum(), dataPoint.getCount());
} else {
builder.startObject();
builder.startArray("counts");
HistogramConverter.counts(dataPoint, builder::value);
builder.endArray();
builder.startArray("values");
HistogramConverter.centroidValues(dataPoint, builder::value);
builder.endArray();
builder.endObject();
}
}
@Override
public long getDocCount() {
return dataPoint.getCount();
}
@Override
public String getDynamicTemplate(MappingHints mappingHints) {
if (mappingHints.aggregateMetricDouble()) {
return "summary";
} else {
return "histogram";
}
}
@Override
public boolean isValid(Set<String> errors) {
if (metric.getHistogram().getAggregationTemporality() != AggregationTemporality.AGGREGATION_TEMPORALITY_DELTA) {
errors.add("cumulative histogram metrics are not supported, ignoring " + metric.getName());
return false;
}
if (dataPoint.getBucketCountsCount() == 1 && dataPoint.getExplicitBoundsCount() == 0) {
errors.add("histogram with a single bucket and no explicit bounds is not supported, ignoring " + metric.getName());
return false;
}
return true;
}
}
record Summary(SummaryDataPoint dataPoint, Metric metric) implements DataPoint {
@Override
public long getTimestampUnixNano() {
return dataPoint.getTimeUnixNano();
}
@Override
public List<KeyValue> getAttributes() {
return dataPoint.getAttributesList();
}
@Override
public long getStartTimestampUnixNano() {
return dataPoint.getStartTimeUnixNano();
}
@Override
public String getUnit() {
return metric.getUnit();
}
@Override
public String getMetricName() {
return metric.getName();
}
@Override
public void buildMetricValue(MappingHints mappingHints, XContentBuilder builder) throws IOException {
// TODO: Add support for quantiles
buildAggregateMetricDouble(builder, dataPoint.getSum(), dataPoint.getCount());
}
@Override
public long getDocCount() {
return dataPoint.getCount();
}
@Override
public String getDynamicTemplate(MappingHints mappingHints) {
return "summary";
}
@Override
public boolean isValid(Set<String> errors) {
return true;
}
}
private static void buildAggregateMetricDouble(XContentBuilder builder, double sum, long valueCount) throws IOException {
builder.startObject();
builder.field("sum", sum);
builder.field("value_count", valueCount);
builder.endObject();
}
}
|
DataPoint
|
java
|
spring-cloud__spring-cloud-gateway
|
spring-cloud-gateway-server-webmvc/src/test/java/org/springframework/cloud/gateway/server/mvc/predicate/PredicateBeanFactoryDiscovererTests.java
|
{
"start": 1314,
"end": 1631
}
|
class ____ {
@Autowired
PredicateBeanFactoryDiscoverer discoverer;
@Test
void contextLoads() {
MultiValueMap<String, OperationMethod> operations = discoverer.getOperations();
assertThat(operations).isNotEmpty();
}
@SpringBootConfiguration
@EnableAutoConfiguration
static
|
PredicateBeanFactoryDiscovererTests
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/bootstrap/registry/classloading/ClassLoaderLeaksUtilityTest.java
|
{
"start": 300,
"end": 927
}
|
class ____ {
@Test
public void testClassLoaderLeaksDetected() {
//N.B. since we expect to timeout in this case, reduce the timeouts to not require
//a significant amount of time during each ORM test run.
Assert.assertFalse( ClassLoaderLeakDetector.verifyActionNotLeakingClassloader( "org.hibernate.orm.test.bootstrap.registry.classloading.LeakingTestAction", 2 , 2 ) );
}
@Test
public void testClassLoaderLeaksNegated() {
Assert.assertTrue( ClassLoaderLeakDetector.verifyActionNotLeakingClassloader( "org.hibernate.orm.test.bootstrap.registry.classloading.NotLeakingTestAction" ) );
}
}
|
ClassLoaderLeaksUtilityTest
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/demo/sql/OracleCompatibleTest.java
|
{
"start": 185,
"end": 930
}
|
class ____ extends TestCase {
public void test_compatibleTest() throws Exception {
String sql = "select * from t where rownum < 10"; //oracle ppas
OracleLexer lexer = new OracleLexer(sql);
for (; ; ) {
lexer.nextToken();
Token token = lexer.token();
if (token == Token.EOF) {
break;
}
if (token == Token.IDENTIFIER) {
System.out.println(lexer.stringVal());
} else if (token == Token.LITERAL_CHARS
|| token == Token.LITERAL_INT
|| token == Token.LITERAL_ALIAS) {
// skip
}
System.out.println(token);
}
}
}
|
OracleCompatibleTest
|
java
|
quarkusio__quarkus
|
extensions/resteasy-classic/rest-client-config/runtime/src/main/java/io/quarkus/restclient/config/RestClientKeysProvider.java
|
{
"start": 134,
"end": 340
}
|
class ____ implements Supplier<Iterable<String>> {
public static List<String> KEYS = new ArrayList<>();
@Override
public Iterable<String> get() {
return KEYS;
}
}
|
RestClientKeysProvider
|
java
|
apache__flink
|
flink-test-utils-parent/flink-test-utils/src/test/java/org/apache/flink/state/benchmark/StateBackendBenchmarkUtils.java
|
{
"start": 15396,
"end": 15534
}
|
enum ____ {
HEAP,
ROCKSDB,
HEAP_CHANGELOG,
ROCKSDB_CHANGELOG,
BATCH_EXECUTION
}
}
|
StateBackendType
|
java
|
spring-projects__spring-boot
|
module/spring-boot-jdbc/src/main/java/org/springframework/boot/jdbc/autoconfigure/DataSourceTransactionManagerAutoConfiguration.java
|
{
"start": 2637,
"end": 3495
}
|
class ____ {
@Bean
@ConditionalOnMissingBean(TransactionManager.class)
DataSourceTransactionManager transactionManager(Environment environment, DataSource dataSource,
ObjectProvider<TransactionManagerCustomizers> transactionManagerCustomizers) {
DataSourceTransactionManager transactionManager = createTransactionManager(environment, dataSource);
transactionManagerCustomizers.ifAvailable((customizers) -> customizers.customize(transactionManager));
return transactionManager;
}
private DataSourceTransactionManager createTransactionManager(Environment environment, DataSource dataSource) {
return environment.getProperty("spring.dao.exceptiontranslation.enabled", Boolean.class, Boolean.TRUE)
? new JdbcTransactionManager(dataSource) : new DataSourceTransactionManager(dataSource);
}
}
}
|
JdbcTransactionManagerConfiguration
|
java
|
apache__flink
|
flink-table/flink-table-runtime/src/test/java/org/apache/flink/table/runtime/operators/join/stream/multijoin/StreamingFourWayMixedOuterJoinOperatorTest.java
|
{
"start": 2834,
"end": 5443
}
|
class ____ extends StreamingMultiJoinOperatorTestBase {
private static final List<GeneratedJoinCondition> customJoinConditions;
static {
// Condition for Input 3 (Shipments) LEFT JOIN Input 0 (Users)
// ON u.user_id_0 = s.user_id_3 AND u.details_0 > s.details_3
GeneratedJoinCondition shipmentsJoinCondition =
createAndCondition(
createJoinCondition(
3, 0), // equi-join on user_id (field 0 of input 3 with field 0 of
// input 0)
createFieldLongGreaterThanCondition(
2, 2) // non-equi: users.details_0 (field 2 from left side) >
// shipments.details_3 (field 2 from right side)
// (field 2)
);
customJoinConditions =
Arrays.asList(
null, // Users (Input 0)
createJoinCondition(
1, 0), // Orders (Input 1) JOIN Users (Input 0) on user_id
createJoinCondition(
2, 0), // Payments (Input 2) JOIN Users (Input 0) on user_id
shipmentsJoinCondition // Shipments (Input 3) JOIN Users (Input 0) with
// combined condition
);
}
private static final Map<Integer, List<ConditionAttributeRef>> customAttributeMap =
new HashMap<>();
static {
customAttributeMap.put(1, Collections.singletonList(new ConditionAttributeRef(0, 0, 1, 0)));
customAttributeMap.put(2, Collections.singletonList(new ConditionAttributeRef(0, 0, 2, 0)));
customAttributeMap.put(3, Collections.singletonList(new ConditionAttributeRef(0, 0, 3, 0)));
}
public StreamingFourWayMixedOuterJoinOperatorTest(StateBackendMode stateBackendMode) {
super(
stateBackendMode,
4, // numInputs
List.of(
FlinkJoinType.INNER, // Placeholder for Users (Input 0)
FlinkJoinType.LEFT, // Orders (Input 1)
FlinkJoinType.INNER, // Payments (Input 2)
FlinkJoinType.LEFT // Shipments (Input 3)
),
customJoinConditions,
customAttributeMap,
false // isFullOuterJoin
);
// Input 0 (Users): Defaults to withUniqueKeyContainedByJoinKey (Base
|
StreamingFourWayMixedOuterJoinOperatorTest
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/operators/testutils/types/IntPairListPairComparator.java
|
{
"start": 941,
"end": 1401
}
|
class ____ extends TypePairComparator<IntPair, IntList> {
private int key;
@Override
public void setReference(IntPair reference) {
this.key = reference.getKey();
}
@Override
public boolean equalToReference(IntList candidate) {
return this.key == candidate.getKey();
}
@Override
public int compareToReference(IntList candidate) {
return this.key - candidate.getKey();
}
}
|
IntPairListPairComparator
|
java
|
quarkusio__quarkus
|
extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/mpmetrics/CounterAdapter.java
|
{
"start": 260,
"end": 1287
}
|
class ____ implements org.eclipse.microprofile.metrics.Counter, MeterHolder {
Counter counter;
public CounterAdapter register(MpMetadata metadata, MetricDescriptor descriptor, MeterRegistry registry) {
if (counter == null || metadata.cleanDirtyMetadata()) {
counter = io.micrometer.core.instrument.Counter.builder(descriptor.name())
.description(metadata.getDescription())
.baseUnit(metadata.getUnit())
.tags(descriptor.tags())
.register(registry);
}
return this;
}
@Override
public void inc() {
counter.increment();
}
@Override
public void inc(long l) {
counter.increment((double) l);
}
@Override
public long getCount() {
return (long) counter.count();
}
@Override
public Meter getMeter() {
return counter;
}
@Override
public MetricType getType() {
return MetricType.COUNTER;
}
}
|
CounterAdapter
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/issue_3000/Issue3057.java
|
{
"start": 359,
"end": 414
}
|
class ____ {
public java.util.Deque q;
}
}
|
Bean
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/web/servlet/assertj/AbstractHttpServletResponseAssertTests.java
|
{
"start": 2218,
"end": 3454
}
|
class ____ {
@Test
void contentType() {
MockHttpServletResponse response = createResponse("text/plain");
assertThat(response).hasContentType(MediaType.TEXT_PLAIN);
}
@Test
void contentTypeAndRepresentation() {
MockHttpServletResponse response = createResponse("text/plain");
assertThat(response).hasContentType("text/plain");
}
@Test
void contentTypeCompatibleWith() {
MockHttpServletResponse response = createResponse("application/json;charset=UTF-8");
assertThat(response).hasContentTypeCompatibleWith(MediaType.APPLICATION_JSON);
}
@Test
void contentTypeCompatibleWithAndStringRepresentation() {
MockHttpServletResponse response = createResponse("text/plain");
assertThat(response).hasContentTypeCompatibleWith("text/*");
}
@Test
void contentTypeCanBeAsserted() {
MockHttpServletResponse response = createResponse("text/plain");
assertThat(response).contentType().isInstanceOf(MediaType.class).isCompatibleWith("text/*").isNotNull();
}
private MockHttpServletResponse createResponse(String contentType) {
MockHttpServletResponse response = new MockHttpServletResponse();
response.setContentType(contentType);
return response;
}
}
@Nested
|
ContentTypeTests
|
java
|
eclipse-vertx__vert.x
|
vertx-core/src/main/java/io/vertx/core/json/jackson/JsonObjectDeserializer.java
|
{
"start": 761,
"end": 1104
}
|
class ____ extends JsonDeserializer<JsonObject> {
private static final TypeReference<Map<String, Object>> TYPE_REF = new TypeReference<>() {
};
public JsonObject deserialize(JsonParser p, DeserializationContext ctxt) throws IOException {
return new JsonObject(p.<Map<String, Object>>readValueAs(TYPE_REF));
}
}
|
JsonObjectDeserializer
|
java
|
quarkusio__quarkus
|
extensions/hibernate-validator/deployment/src/test/java/io/quarkus/hibernate/validator/test/UserCountryNotSetValidatorLocaleTest.java
|
{
"start": 1704,
"end": 1916
}
|
class ____ {
public Bean(String name) {
super();
this.name = name;
}
@Pattern(regexp = "A.*", message = "{pattern.message}")
private String name;
}
}
|
Bean
|
java
|
quarkusio__quarkus
|
core/devmode-spi/src/main/java/io/quarkus/dev/spi/DeploymentFailedStartHandler.java
|
{
"start": 36,
"end": 321
}
|
interface ____ {
/**
* This method is called if the app fails to start the first time. This allows for hot deployment
* providers to still start, and provide a way for the user to recover their app
*/
void handleFailedInitialStart();
}
|
DeploymentFailedStartHandler
|
java
|
spring-projects__spring-framework
|
spring-beans/src/main/java/org/springframework/beans/factory/wiring/BeanWiringInfoResolver.java
|
{
"start": 736,
"end": 922
}
|
interface ____ be implemented by objects than can resolve bean name
* information, given a newly instantiated bean object. Invocations to the
* {@link #resolveWiringInfo} method on this
|
to
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/sql/exec/onetoone/bidirectional/EntityWithTwoBidirectionalAssociationsOneWithJoinTableTest.java
|
{
"start": 5510,
"end": 6619
}
|
class ____ {
private Integer id;
private String description;
private Child child;
private Child2 child2;
Parent() {
}
public Parent(Integer id, String description) {
this.id = id;
this.description = description;
}
@Id
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
@OneToOne
@JoinTable(name = "PARENT_CHILD", inverseJoinColumns = @JoinColumn(name = "child_id"), joinColumns = @JoinColumn(name = "parent_id"))
public Child getChild() {
return child;
}
public void setChild(Child other) {
this.child = other;
}
@OneToOne
@JoinTable(name = "PARENT_CHILD_2", inverseJoinColumns = @JoinColumn(name = "child_id"), joinColumns = @JoinColumn(name = "parent_id"))
public Child2 getChild2() {
return child2;
}
public void setChild2(Child2 child2) {
this.child2 = child2;
}
}
@Entity(name = "Child")
@Table(name = "CHILD")
public static
|
Parent
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/ClassCanBeStaticTest.java
|
{
"start": 5445,
"end": 5561
}
|
class ____ {
static int outerVar;
// Nested non-static inner
|
ClassCanBeStaticPositiveCase3
|
java
|
grpc__grpc-java
|
core/src/main/java/io/grpc/internal/TransportTracer.java
|
{
"start": 1026,
"end": 4423
}
|
class ____ {
private static final Factory DEFAULT_FACTORY = new Factory(SYSTEM_TIME_PROVIDER);
private final TimeProvider timeProvider;
private long streamsStarted;
private long lastLocalStreamCreatedTimeNanos;
private long lastRemoteStreamCreatedTimeNanos;
private long streamsSucceeded;
private long streamsFailed;
private long keepAlivesSent;
private FlowControlReader flowControlWindowReader;
private long messagesSent;
private long lastMessageSentTimeNanos;
// deframing happens on the application thread, and there's no easy way to avoid synchronization
private final LongCounter messagesReceived = LongCounterFactory.create();
private volatile long lastMessageReceivedTimeNanos;
public TransportTracer() {
this.timeProvider = SYSTEM_TIME_PROVIDER;
}
private TransportTracer(TimeProvider timeProvider) {
this.timeProvider = timeProvider;
}
/**
* Returns a read only set of current stats.
*/
public TransportStats getStats() {
long localFlowControlWindow =
flowControlWindowReader == null ? -1 : flowControlWindowReader.read().localBytes;
long remoteFlowControlWindow =
flowControlWindowReader == null ? -1 : flowControlWindowReader.read().remoteBytes;
return new TransportStats(
streamsStarted,
lastLocalStreamCreatedTimeNanos,
lastRemoteStreamCreatedTimeNanos,
streamsSucceeded,
streamsFailed,
messagesSent,
messagesReceived.value(),
keepAlivesSent,
lastMessageSentTimeNanos,
lastMessageReceivedTimeNanos,
localFlowControlWindow,
remoteFlowControlWindow);
}
/**
* Called by the client to report a stream has started.
*/
public void reportLocalStreamStarted() {
streamsStarted++;
lastLocalStreamCreatedTimeNanos = timeProvider.currentTimeNanos();
}
/**
* Called by the server to report a stream has started.
*/
public void reportRemoteStreamStarted() {
streamsStarted++;
lastRemoteStreamCreatedTimeNanos = timeProvider.currentTimeNanos();
}
/**
* Reports that a stream closed with the specified Status.
*/
public void reportStreamClosed(boolean success) {
if (success) {
streamsSucceeded++;
} else {
streamsFailed++;
}
}
/**
* Reports that some messages were successfully sent. {@code numMessages} must be at least 0.
*/
public void reportMessageSent(int numMessages) {
if (numMessages == 0) {
return;
}
messagesSent += numMessages;
lastMessageSentTimeNanos = timeProvider.currentTimeNanos();
}
/**
* Reports that a message was successfully received. This method is thread safe.
*/
public void reportMessageReceived() {
messagesReceived.add(1);
lastMessageReceivedTimeNanos = timeProvider.currentTimeNanos();
}
/**
* Reports that a keep alive message was sent.
*/
public void reportKeepAliveSent() {
keepAlivesSent++;
}
/**
* Registers a {@link FlowControlReader} that can be used to read the local and remote flow
* control window sizes.
*/
public void setFlowControlWindowReader(FlowControlReader flowControlWindowReader) {
this.flowControlWindowReader = Preconditions.checkNotNull(flowControlWindowReader);
}
/**
* A container that holds the local and remote flow control window sizes.
*/
public static final
|
TransportTracer
|
java
|
spring-projects__spring-framework
|
spring-core/src/main/java/org/springframework/aot/hint/support/SpringPropertiesRuntimeHints.java
|
{
"start": 981,
"end": 1214
}
|
class ____ implements RuntimeHintsRegistrar {
@Override
public void registerHints(RuntimeHints hints, @Nullable ClassLoader classLoader) {
hints.resources().registerPattern("spring.properties");
}
}
|
SpringPropertiesRuntimeHints
|
java
|
google__guice
|
core/src/com/google/inject/name/Names.java
|
{
"start": 889,
"end": 2211
}
|
class ____ {
private Names() {}
/** Creates a {@link Named} annotation with {@code name} as the value. */
public static Named named(String name) {
return new NamedImpl(name);
}
/** Creates a constant binding to {@code @Named(key)} for each entry in {@code properties}. */
public static void bindProperties(Binder binder, Map<String, String> properties) {
binder = binder.skipSources(Names.class);
for (Map.Entry<String, String> entry : properties.entrySet()) {
String key = entry.getKey();
String value = entry.getValue();
binder.bind(Key.get(String.class, new NamedImpl(key))).toInstance(value);
}
}
/**
* Creates a constant binding to {@code @Named(key)} for each property. This method binds all
* properties including those inherited from {@link Properties#defaults defaults}.
*/
public static void bindProperties(Binder binder, Properties properties) {
binder = binder.skipSources(Names.class);
// use enumeration to include the default properties
for (Enumeration<?> e = properties.propertyNames(); e.hasMoreElements(); ) {
String propertyName = (String) e.nextElement();
String value = properties.getProperty(propertyName);
binder.bind(Key.get(String.class, new NamedImpl(propertyName))).toInstance(value);
}
}
}
|
Names
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/catalog/JavaCatalogTableTest.java
|
{
"start": 9949,
"end": 10470
}
|
class ____ extends GenericInMemoryCatalog {
public CustomCatalog(String name) {
super(name);
}
@Override
public CatalogBaseTable getTable(ObjectPath tablePath) throws TableNotExistException {
CatalogBaseTable table = super.getTable(tablePath);
if (table.getTableKind() == CatalogBaseTable.TableKind.VIEW) {
return new CustomView((CatalogView) table);
}
return table;
}
}
private static
|
CustomCatalog
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/PathInformation.java
|
{
"start": 901,
"end": 2663
}
|
class ____ {
private Boolean pathExists;
private Boolean isDirectory;
private Boolean isImplicit;
private String eTag;
/**
* Constructor.
* @param pathExists The path exists.
* @param isDirectory Is the path a directory?
* @param eTag The ETag of the path.
* @param isImplicit Is the path implicit?
*/
public PathInformation(Boolean pathExists,
Boolean isDirectory,
String eTag,
Boolean isImplicit) {
this.pathExists = pathExists;
this.isDirectory = isDirectory;
this.eTag = eTag;
this.isImplicit = isImplicit;
}
public PathInformation() {
}
/**
* Copy the path information.
* @param pathInformation The path information to copy.
*/
public void copy(PathInformation pathInformation) {
this.pathExists = pathInformation.getPathExists();
this.isDirectory = pathInformation.getIsDirectory();
this.eTag = pathInformation.getETag();
this.isImplicit = pathInformation.getIsImplicit();
}
/**
* Get the ETag of the path.
*
* @return the etag value*/
public String getETag() {
return eTag;
}
/**
* Get value of pathExists.
*
* @return true if path exists, false otherwise.
*/
public Boolean getPathExists() {
return pathExists;
}
/**
* Get value of isDirectory.
*
* @return true if path is a directory, false otherwise.
*/
public Boolean getIsDirectory() {
return isDirectory;
}
/**
* Get value of isImplicit.
*
* @return true if path is implicit, false otherwise.
*/
public Boolean getIsImplicit() {
return isImplicit;
}
/**
* Set the eTag value.
*
* @param eTag The eTag value to set.
*/
void setETag(String eTag) {
this.eTag = eTag;
}
}
|
PathInformation
|
java
|
quarkusio__quarkus
|
integration-tests/smallrye-metrics/src/main/java/io/quarkus/it/metrics/inheritance/MetricsInheritanceResource.java
|
{
"start": 370,
"end": 843
}
|
class ____ {
@Inject
MetricRegistry metricRegistry;
@Path("/registration")
@GET
@Produces("application/json")
public List<String> getAllMetricNames() {
return metricRegistry
.getCounters((metricID, metric) -> metricID.getName().contains("Inheritance"))
.keySet()
.stream()
.map(MetricID::getName)
.collect(Collectors.toList());
}
}
|
MetricsInheritanceResource
|
java
|
apache__rocketmq
|
proxy/src/main/java/org/apache/rocketmq/proxy/grpc/v2/common/ResponseBuilder.java
|
{
"start": 1819,
"end": 5185
}
|
class ____ {
private static final Logger log = LoggerFactory.getLogger(LoggerName.PROXY_LOGGER_NAME);
protected static final Map<Integer, Code> RESPONSE_CODE_MAPPING = new ConcurrentHashMap<>();
protected static final Object INSTANCE_CREATE_LOCK = new Object();
protected static volatile ResponseBuilder instance;
static {
RESPONSE_CODE_MAPPING.put(ResponseCode.SUCCESS, Code.OK);
RESPONSE_CODE_MAPPING.put(ResponseCode.SYSTEM_BUSY, Code.TOO_MANY_REQUESTS);
RESPONSE_CODE_MAPPING.put(ResponseCode.REQUEST_CODE_NOT_SUPPORTED, Code.NOT_IMPLEMENTED);
RESPONSE_CODE_MAPPING.put(ResponseCode.SUBSCRIPTION_GROUP_NOT_EXIST, Code.CONSUMER_GROUP_NOT_FOUND);
RESPONSE_CODE_MAPPING.put(ClientErrorCode.ACCESS_BROKER_TIMEOUT, Code.PROXY_TIMEOUT);
}
public static ResponseBuilder getInstance() {
if (instance == null) {
synchronized (INSTANCE_CREATE_LOCK) {
if (instance == null) {
instance = new ResponseBuilder();
}
}
}
return instance;
}
public Status buildStatus(Throwable t) {
t = ExceptionUtils.getRealException(t);
if (t instanceof ProxyException) {
t = new GrpcProxyException((ProxyException) t);
}
if (t instanceof GrpcProxyException) {
GrpcProxyException grpcProxyException = (GrpcProxyException) t;
return buildStatus(grpcProxyException.getCode(), grpcProxyException.getMessage());
}
if (TopicRouteHelper.isTopicNotExistError(t)) {
return buildStatus(Code.TOPIC_NOT_FOUND, t.getMessage());
}
if (t instanceof MQBrokerException) {
MQBrokerException mqBrokerException = (MQBrokerException) t;
return buildStatus(buildCode(mqBrokerException.getResponseCode()), mqBrokerException.getErrorMessage());
}
if (t instanceof MQClientException) {
MQClientException mqClientException = (MQClientException) t;
return buildStatus(buildCode(mqClientException.getResponseCode()), mqClientException.getErrorMessage());
}
if (t instanceof RemotingTimeoutException) {
return buildStatus(Code.PROXY_TIMEOUT, t.getMessage());
}
if (t instanceof AuthenticationException || t instanceof AuthorizationException) {
return buildStatus(Code.UNAUTHORIZED, t.getMessage());
}
log.error("internal server error", t);
return buildStatus(Code.INTERNAL_SERVER_ERROR, ExceptionUtils.getErrorDetailMessage(t));
}
public Status buildStatus(Code code, String message) {
return Status.newBuilder()
.setCode(code)
.setMessage(message != null ? message : code.name())
.build();
}
public Status buildStatus(int remotingResponseCode, String remark) {
String message = remark;
if (message == null) {
message = String.valueOf(remotingResponseCode);
}
return Status.newBuilder()
.setCode(buildCode(remotingResponseCode))
.setMessage(message)
.build();
}
public Code buildCode(int remotingResponseCode) {
return RESPONSE_CODE_MAPPING.getOrDefault(remotingResponseCode, Code.INTERNAL_SERVER_ERROR);
}
}
|
ResponseBuilder
|
java
|
elastic__elasticsearch
|
distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/ProcrunCommand.java
|
{
"start": 1248,
"end": 6295
}
|
class ____ extends Command {
private static final Logger logger = LogManager.getLogger(ProcrunCommand.class);
private final String cmd;
/**
* Constructs CLI subcommand that will internally call procrun.
* @param desc A help description for this subcommand
* @param cmd The procrun command to run
*/
protected ProcrunCommand(String desc, String cmd) {
super(desc);
this.cmd = cmd;
}
/**
* Returns the name of the exe within the Elasticsearch bin dir to run.
*
* <p> Procrun comes with two executables, {@code prunsrv.exe} and {@code prunmgr.exe}. These are renamed by
* Elasticsearch to {@code elasticsearch-service-x64.exe} and {@code elasticsearch-service-mgr.exe}, respectively.
*/
protected String getExecutable() {
return "elasticsearch-service-x64.exe";
}
@Override
protected void execute(Terminal terminal, OptionSet options, ProcessInfo processInfo) throws Exception {
Path procrun = processInfo.workingDir().resolve("bin").resolve(getExecutable()).toAbsolutePath();
if (Files.exists(procrun) == false) {
throw new IllegalStateException("Missing procrun exe: " + procrun);
}
String serviceId = getServiceId(options, processInfo.envVars());
preExecute(terminal, processInfo, serviceId);
List<String> procrunCmd = new ArrayList<>();
procrunCmd.add(quote(procrun.toString()));
procrunCmd.add("//%s/%s".formatted(cmd, serviceId));
if (includeLogArgs()) {
procrunCmd.add(getLogArgs(serviceId, processInfo.workingDir(), processInfo.envVars()));
}
procrunCmd.add(getAdditionalArgs(serviceId, processInfo));
ProcessBuilder processBuilder = new ProcessBuilder("cmd.exe", "/C", String.join(" ", procrunCmd).trim());
logger.debug((Supplier<?>) () -> "Running procrun: " + String.join(" ", processBuilder.command()));
processBuilder.inheritIO();
Process process = startProcess(processBuilder);
int ret = process.waitFor();
if (ret != ExitCodes.OK) {
throw new UserException(ret, getFailureMessage(serviceId));
} else {
terminal.println(getSuccessMessage(serviceId));
}
}
/** Quotes the given String. */
static String quote(String s) {
return '"' + s + '"';
}
/** Determines the service id for the Elasticsearch service that should be used */
private static String getServiceId(OptionSet options, Map<String, String> env) throws UserException {
List<?> args = options.nonOptionArguments();
if (args.size() > 1) {
throw new UserException(ExitCodes.USAGE, "too many arguments, expected one service id");
}
final String serviceId;
if (args.size() > 0) {
serviceId = args.get(0).toString();
} else {
serviceId = env.getOrDefault("SERVICE_ID", "elasticsearch-service-x64");
}
return serviceId;
}
/** Determines the logging arguments that should be passed to the procrun command */
private static String getLogArgs(String serviceId, Path esHome, Map<String, String> env) {
String logArgs = env.get("LOG_OPTS");
if (logArgs != null && logArgs.isBlank() == false) {
return logArgs;
}
String logsDir = env.get("SERVICE_LOG_DIR");
if (logsDir == null || logsDir.isBlank()) {
logsDir = esHome.resolve("logs").toString();
}
String logArgsFormat = "--LogPath \"%s\" --LogPrefix \"%s\" --StdError auto --StdOutput auto --LogLevel Debug";
return String.format(Locale.ROOT, logArgsFormat, logsDir, serviceId);
}
/**
* Gets arguments that should be passed to the procrun command.
*
* @param serviceId The service id of the Elasticsearch service
* @param processInfo The current process info
* @return The additional arguments, space delimited
*/
protected String getAdditionalArgs(String serviceId, ProcessInfo processInfo) {
return "";
}
/** Return whether logging args should be added to the procrun command */
protected boolean includeLogArgs() {
return true;
}
/**
* A hook to add logging and validation before executing the procrun command.
* @throws UserException if there is a problem with the command invocation
*/
protected void preExecute(Terminal terminal, ProcessInfo pinfo, String serviceId) throws UserException {}
/** Returns a message that should be output on success of the procrun command */
protected abstract String getSuccessMessage(String serviceId);
/** Returns a message that should be output on failure of the procrun command */
protected abstract String getFailureMessage(String serviceId);
// package private to allow tests to override
Process startProcess(ProcessBuilder processBuilder) throws IOException {
return processBuilder.start();
}
}
|
ProcrunCommand
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/resourcemanager/TestingResourceManagerService.java
|
{
"start": 5506,
"end": 8130
}
|
class ____ {
private RpcService rpcService = null;
private boolean needStopRpcService = true;
private TestingLeaderElection rmLeaderElection = null;
private Function<JobID, LeaderRetrievalService> jmLeaderRetrieverFunction = null;
public Builder setRpcService(RpcService rpcService) {
this.rpcService = checkNotNull(rpcService);
this.needStopRpcService = false;
return this;
}
public Builder setRmLeaderElection(TestingLeaderElection rmLeaderElection) {
this.rmLeaderElection = checkNotNull(rmLeaderElection);
return this;
}
public Builder setJmLeaderRetrieverFunction(
Function<JobID, LeaderRetrievalService> jmLeaderRetrieverFunction) {
this.jmLeaderRetrieverFunction = checkNotNull(jmLeaderRetrieverFunction);
return this;
}
public TestingResourceManagerService build() throws Exception {
rpcService = rpcService != null ? rpcService : new TestingRpcService();
rmLeaderElection =
rmLeaderElection != null ? rmLeaderElection : new TestingLeaderElection();
final TestingHighAvailabilityServices haServices =
new TestingHighAvailabilityServices();
haServices.setResourceManagerLeaderElection(rmLeaderElection);
if (jmLeaderRetrieverFunction != null) {
haServices.setJobMasterLeaderRetrieverFunction(jmLeaderRetrieverFunction);
}
final TestingFatalErrorHandler fatalErrorHandler = new TestingFatalErrorHandler();
return new TestingResourceManagerService(
ResourceManagerServiceImpl.create(
StandaloneResourceManagerFactory.getInstance(),
new Configuration(),
ResourceID.generate(),
rpcService,
haServices,
new TestingHeartbeatServices(),
new NoOpDelegationTokenManager(),
fatalErrorHandler,
new ClusterInformation("localhost", 1234),
null,
TestingMetricRegistry.builder().build(),
"localhost",
ForkJoinPool.commonPool()),
rmLeaderElection,
fatalErrorHandler,
rpcService,
needStopRpcService);
}
}
}
|
Builder
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/wall/WallContext.java
|
{
"start": 762,
"end": 5561
}
|
class ____ {
private static final ThreadLocal<WallContext> contextLocal = new ThreadLocal<WallContext>();
private WallSqlStat sqlStat;
private Map<String, WallSqlTableStat> tableStats;
private Map<String, WallSqlFunctionStat> functionStats;
private final DbType dbType;
private int commentCount;
private int warnings;
private int unionWarnings;
private int updateNoneConditionWarnings;
private int deleteNoneConditionWarnings;
private int likeNumberWarnings;
private List<WallUpdateCheckItem> wallUpdateCheckItems;
public WallContext(String dbType) {
this(DbType.of(dbType));
}
public WallContext(DbType dbType) {
this.dbType = dbType;
}
public void incrementFunctionInvoke(String tableName) {
if (functionStats == null) {
functionStats = new HashMap<String, WallSqlFunctionStat>();
}
String lowerCaseName = tableName.toLowerCase();
WallSqlFunctionStat stat = functionStats.get(lowerCaseName);
if (stat == null) {
if (functionStats.size() > 100) {
return;
}
stat = new WallSqlFunctionStat();
functionStats.put(tableName, stat);
}
stat.incrementInvokeCount();
}
public WallSqlTableStat getTableStat(String tableName) {
if (tableStats == null) {
tableStats = new HashMap<String, WallSqlTableStat>(2);
}
String lowerCaseName = tableName.toLowerCase();
WallSqlTableStat stat = tableStats.get(lowerCaseName);
if (stat == null) {
if (tableStats.size() > 100) {
return null;
}
stat = new WallSqlTableStat();
tableStats.put(tableName, stat);
}
return stat;
}
public static WallContext createIfNotExists(DbType dbType) {
WallContext context = contextLocal.get();
if (context == null) {
context = new WallContext(dbType);
contextLocal.set(context);
}
return context;
}
public static WallContext create(String dbType) {
return create(DbType.of(dbType));
}
public static WallContext create(DbType dbType) {
WallContext context = new WallContext(dbType);
contextLocal.set(context);
return context;
}
public static WallContext current() {
return contextLocal.get();
}
public static void clearContext() {
contextLocal.remove();
}
public static void setContext(WallContext context) {
contextLocal.set(context);
}
public WallSqlStat getSqlStat() {
return sqlStat;
}
public void setSqlStat(WallSqlStat sqlStat) {
this.sqlStat = sqlStat;
}
public Map<String, WallSqlTableStat> getTableStats() {
return tableStats;
}
public Map<String, WallSqlFunctionStat> getFunctionStats() {
return functionStats;
}
public DbType getDbType() {
return dbType;
}
public int getCommentCount() {
return commentCount;
}
public void incrementCommentCount() {
if (this.commentCount == 0) {
this.warnings++;
}
this.commentCount++;
}
public int getWarnings() {
return warnings;
}
public void incrementWarnings() {
this.warnings++;
}
public int getLikeNumberWarnings() {
return likeNumberWarnings;
}
public void incrementLikeNumberWarnings() {
if (likeNumberWarnings == 0) {
this.warnings++;
}
likeNumberWarnings++;
}
public int getUnionWarnings() {
return unionWarnings;
}
public void incrementUnionWarnings() {
if (this.unionWarnings == 0) {
this.incrementWarnings();
}
this.unionWarnings++;
}
public int getUpdateNoneConditionWarnings() {
return updateNoneConditionWarnings;
}
public void incrementUpdateNoneConditionWarnings() {
// if (this.updateNoneConditionWarnings == 0) {
// this.incrementWarnings();
// }
this.updateNoneConditionWarnings++;
}
public int getDeleteNoneConditionWarnings() {
return deleteNoneConditionWarnings;
}
public void incrementDeleteNoneConditionWarnings() {
// if (this.deleteNoneConditionWarnings == 0) {
// this.incrementWarnings();
// }
this.deleteNoneConditionWarnings++;
}
public List<WallUpdateCheckItem> getWallUpdateCheckItems() {
return wallUpdateCheckItems;
}
public void setWallUpdateCheckItems(List<WallUpdateCheckItem> wallUpdateCheckItems) {
this.wallUpdateCheckItems = wallUpdateCheckItems;
}
}
|
WallContext
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/join/MultiFilterRecordReader.java
|
{
"start": 1352,
"end": 1527
}
|
class ____ Composite join returning values derived from multiple
* sources, but generally not tuples.
*/
@InterfaceAudience.Public
@InterfaceStability.Stable
public abstract
|
for
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/filter/logging/Log4jFilter.java
|
{
"start": 779,
"end": 4864
}
|
class ____ extends LogFilter implements Log4jFilterMBean {
private Logger dataSourceLogger = Logger.getLogger(dataSourceLoggerName);
private Logger connectionLogger = Logger.getLogger(connectionLoggerName);
private Logger statementLogger = Logger.getLogger(statementLoggerName);
private Logger resultSetLogger = Logger.getLogger(resultSetLoggerName);
@Override
public String getDataSourceLoggerName() {
return dataSourceLoggerName;
}
@Override
public void setDataSourceLoggerName(String dataSourceLoggerName) {
this.dataSourceLoggerName = dataSourceLoggerName;
dataSourceLogger = Logger.getLogger(dataSourceLoggerName);
}
public void setDataSourceLogger(Logger dataSourceLogger) {
this.dataSourceLogger = dataSourceLogger;
this.dataSourceLoggerName = dataSourceLogger.getName();
}
@Override
public String getConnectionLoggerName() {
return connectionLoggerName;
}
@Override
public void setConnectionLoggerName(String connectionLoggerName) {
this.connectionLoggerName = connectionLoggerName;
connectionLogger = Logger.getLogger(connectionLoggerName);
}
public void setConnectionLogger(Logger connectionLogger) {
this.connectionLogger = connectionLogger;
this.connectionLoggerName = connectionLogger.getName();
}
@Override
public String getStatementLoggerName() {
return statementLoggerName;
}
@Override
public void setStatementLoggerName(String statementLoggerName) {
this.statementLoggerName = statementLoggerName;
statementLogger = Logger.getLogger(statementLoggerName);
}
public void setStatementLogger(Logger statementLogger) {
this.statementLogger = statementLogger;
this.statementLoggerName = statementLogger.getName();
}
@Override
public String getResultSetLoggerName() {
return resultSetLoggerName;
}
@Override
public void setResultSetLoggerName(String resultSetLoggerName) {
this.resultSetLoggerName = resultSetLoggerName;
resultSetLogger = Logger.getLogger(resultSetLoggerName);
}
public void setResultSetLogger(Logger resultSetLogger) {
this.resultSetLogger = resultSetLogger;
this.resultSetLoggerName = resultSetLogger.getName();
}
@Override
public boolean isConnectionLogErrorEnabled() {
return connectionLogger.isEnabledFor(Level.ERROR) && super.isConnectionLogErrorEnabled();
}
@Override
public boolean isDataSourceLogEnabled() {
return dataSourceLogger.isDebugEnabled() && super.isDataSourceLogEnabled();
}
@Override
public boolean isConnectionLogEnabled() {
return connectionLogger.isDebugEnabled() && super.isConnectionLogEnabled();
}
@Override
public boolean isStatementLogEnabled() {
return statementLogger.isDebugEnabled() && super.isStatementLogEnabled();
}
@Override
public boolean isResultSetLogEnabled() {
return resultSetLogger.isDebugEnabled() && super.isResultSetLogEnabled();
}
@Override
public boolean isResultSetLogErrorEnabled() {
return resultSetLogger.isEnabledFor(Level.ERROR) && super.isResultSetLogErrorEnabled();
}
@Override
public boolean isStatementLogErrorEnabled() {
return statementLogger.isEnabledFor(Level.ERROR) && super.isStatementLogErrorEnabled();
}
@Override
protected void connectionLog(String message) {
connectionLogger.debug(message);
}
@Override
protected void statementLog(String message) {
statementLogger.debug(message);
}
@Override
protected void resultSetLog(String message) {
resultSetLogger.debug(message);
}
@Override
protected void resultSetLogError(String message, Throwable error) {
resultSetLogger.error(message, error);
}
@Override
protected void statementLogError(String message, Throwable error) {
statementLogger.error(message, error);
}
}
|
Log4jFilter
|
java
|
micronaut-projects__micronaut-core
|
core/src/main/java/io/micronaut/core/util/EnvironmentProperties.java
|
{
"start": 3469,
"end": 5421
}
|
class ____ of Lambda to reduce the Lambda invocation overhead during the startup
return cache.computeIfAbsent(env, new Function<String, List<String>>() {
@Override
public List<String> apply(String env1) {
return computePropertiesFor(env1);
}
});
}
private static List<String> computePropertiesFor(String env) {
env = env.toLowerCase(Locale.ENGLISH);
char[] charArray = env.toCharArray();
int[] separatorIndexes = new int[charArray.length];
int separatorCount = 0;
for (int i = 0; i < charArray.length; i++) {
if (charArray[i] == '_') {
separatorIndexes[separatorCount++] = i;
}
}
if (separatorCount == 0) {
return List.of(env);
}
//halves is used to determine when to flip the separator
int[] halves = new int[separatorCount];
//stores the separator per half
byte[] separator = new byte[separatorCount];
//the total number of permutations. 2 to the power of the number of separators
int permutations = (int) Math.pow(2, separatorCount);
//initialize the halves
//ex 4, 2, 1 for A_B_C_D
for (int i = 0; i < halves.length; i++) {
int start = (i == 0) ? permutations : halves[i - 1];
halves[i] = start / 2;
}
String[] properties = new String[permutations];
for (int i = 0; i < permutations; i++) {
int round = i + 1;
for (int s = 0; s < separatorCount; s++) {
//mutate the array with the separator
charArray[separatorIndexes[s]] = DOT_DASH[separator[s]];
if (round % halves[s] == 0) {
separator[s] ^= 1;
}
}
properties[i] = new String(charArray);
}
return List.of(properties);
}
}
|
instead
|
java
|
apache__camel
|
components/camel-cometd/src/main/java/org/apache/camel/component/cometd/CometdConsumer.java
|
{
"start": 2493,
"end": 4322
}
|
class ____ extends AbstractService {
private final CometdEndpoint endpoint;
private final CometdConsumer consumer;
private final CometdBinding binding;
private final String channelName;
public ConsumerService(String channel, BayeuxServerImpl bayeux, CometdConsumer consumer) {
super(bayeux, channel);
this.endpoint = consumer.getEndpoint();
this.binding = createBinding(bayeux);
this.consumer = consumer;
this.channelName = channel;
addService(channel, "push");
}
private CometdBinding createBinding(BayeuxServerImpl bayeux) {
boolean enableSessionHeaders = endpoint != null && endpoint.isSessionHeadersEnabled();
return new CometdBinding(bayeux, enableSessionHeaders);
}
public void push(ServerSession remote, ServerMessage cometdMessage) throws Exception {
Object data = null;
Message message = binding.createCamelMessage(endpoint.getCamelContext(), remote, cometdMessage, data);
Exchange exchange = consumer.createExchange(false);
try {
exchange.setIn(message);
consumer.getProcessor().process(exchange);
if (ExchangeHelper.isOutCapable(exchange)) {
ServerChannel channel = getBayeux().getChannel(channelName);
ServerSession serverSession = getServerSession();
ServerMessage.Mutable outMessage = binding.createCometdMessage(channel, serverSession, exchange.getOut());
remote.deliver(serverSession, outMessage, Promise.noop());
}
} finally {
consumer.releaseExchange(exchange, false);
}
}
}
}
|
ConsumerService
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/support/PropertyBindingSupportListTest.java
|
{
"start": 1402,
"end": 7673
}
|
class ____ extends ContextTestSupport {
@Override
protected CamelContext createCamelContext() throws Exception {
CamelContext context = super.createCamelContext();
Company work1 = new Company();
work1.setId(123);
work1.setName("Acme");
context.getRegistry().bind("company1", work1);
Company work2 = new Company();
work2.setId(456);
work2.setName("Acme 2");
context.getRegistry().bind("company2", work2);
Properties placeholders = new Properties();
placeholders.put("companyName", "Acme");
placeholders.put("committer", "rider");
context.getPropertiesComponent().setInitialProperties(placeholders);
return context;
}
@Test
public void testPropertiesList() {
Foo foo = new Foo();
Map<String, Object> prop = new LinkedHashMap<>();
prop.put("name", "James");
prop.put("bar.age", "33");
prop.put("bar.{{committer}}", "true");
prop.put("bar.gold-customer", "true");
prop.put("bar.works[0]", "#bean:company1");
prop.put("bar.works[1]", "#bean:company2");
PropertyBindingSupport.build().bind(context, foo, prop);
assertEquals("James", foo.getName());
assertEquals(33, foo.getBar().getAge());
assertTrue(foo.getBar().isRider());
assertTrue(foo.getBar().isGoldCustomer());
assertEquals(2, foo.getBar().getWorks().size());
assertEquals(123, foo.getBar().getWorks().get(0).getId());
assertEquals("Acme", foo.getBar().getWorks().get(0).getName());
assertEquals(456, foo.getBar().getWorks().get(1).getId());
assertEquals("Acme 2", foo.getBar().getWorks().get(1).getName());
}
@Test
public void testPropertiesListWithGaps() {
Foo foo = new Foo();
Map<String, Object> prop = new LinkedHashMap<>();
prop.put("name", "James");
prop.put("bar.age", "33");
prop.put("bar.{{committer}}", "true");
prop.put("bar.gold-customer", "true");
prop.put("bar.works[5]", "#bean:company1");
prop.put("bar.works[9]", "#bean:company2");
PropertyBindingSupport.build().bind(context, foo, prop);
assertEquals("James", foo.getName());
assertEquals(33, foo.getBar().getAge());
assertTrue(foo.getBar().isRider());
assertTrue(foo.getBar().isGoldCustomer());
assertEquals(10, foo.getBar().getWorks().size());
assertEquals(123, foo.getBar().getWorks().get(5).getId());
assertEquals("Acme", foo.getBar().getWorks().get(5).getName());
assertEquals(456, foo.getBar().getWorks().get(9).getId());
assertEquals("Acme 2", foo.getBar().getWorks().get(9).getName());
}
@Test
public void testPropertiesListNested() {
Foo foo = new Foo();
Map<String, Object> prop = new LinkedHashMap<>();
prop.put("name", "James");
prop.put("bar.age", "33");
prop.put("bar.{{committer}}", "true");
prop.put("bar.gold-customer", "true");
prop.put("bar.works[0]", "#bean:company1");
prop.put("bar.works[0].id", "666");
prop.put("bar.works[1]", "#bean:company2");
prop.put("bar.works[1].name", "I changed this");
PropertyBindingSupport.build().bind(context, foo, prop);
assertEquals("James", foo.getName());
assertEquals(33, foo.getBar().getAge());
assertTrue(foo.getBar().isRider());
assertTrue(foo.getBar().isGoldCustomer());
assertEquals(2, foo.getBar().getWorks().size());
assertEquals(666, foo.getBar().getWorks().get(0).getId());
assertEquals("Acme", foo.getBar().getWorks().get(0).getName());
assertEquals(456, foo.getBar().getWorks().get(1).getId());
assertEquals("I changed this", foo.getBar().getWorks().get(1).getName());
}
@Test
public void testPropertiesListNestedWithType() {
Foo foo = new Foo();
// use CollectionHelper::mapOf to avoid insertion ordered iteration
PropertyBindingSupport.build().bind(context, foo, mapOf(
"bar.works[0]", "#class:" + Company.class.getName(),
"bar.works[0].name", "first",
"bar.works[1]", "#class:" + Company.class.getName(),
"bar.works[1].name", "second"));
assertEquals(2, foo.getBar().getWorks().size());
assertEquals(0, foo.getBar().getWorks().get(0).getId());
assertEquals("first", foo.getBar().getWorks().get(0).getName());
assertEquals(0, foo.getBar().getWorks().get(1).getId());
assertEquals("second", foo.getBar().getWorks().get(1).getName());
}
@Test
public void testPropertiesListFirst() {
Bar bar = new Bar();
Map<String, Object> prop = new LinkedHashMap<>();
prop.put("works[0]", "#bean:company1");
prop.put("works[0].id", "666");
prop.put("works[1]", "#bean:company2");
prop.put("works[1].name", "I changed this");
PropertyBindingSupport.build().bind(context, bar, prop);
assertEquals(2, bar.getWorks().size());
assertEquals(666, bar.getWorks().get(0).getId());
assertEquals("Acme", bar.getWorks().get(0).getName());
assertEquals(456, bar.getWorks().get(1).getId());
assertEquals("I changed this", bar.getWorks().get(1).getName());
}
@Test
public void testPropertiesNotList() {
Foo foo = new Foo();
Map<String, Object> prop = new LinkedHashMap<>();
prop.put("name", "James");
prop.put("bar.age", "33");
prop.put("bar.gold-customer[]", "true");
try {
PropertyBindingSupport.build().bind(context, foo, prop);
fail("Should have thrown exception");
} catch (PropertyBindingException e) {
assertEquals("gold-customer[]", e.getPropertyName());
IllegalArgumentException iae = assertIsInstanceOf(IllegalArgumentException.class, e.getCause());
assertTrue(iae.getMessage().startsWith(
"Cannot set property: gold-customer[] as either a Map/List/array because target bean is not a Map, List or array type"));
}
}
public static
|
PropertyBindingSupportListTest
|
java
|
lettuce-io__lettuce-core
|
src/test/java/io/lettuce/core/output/IntegerListOutputUnitTests.java
|
{
"start": 3018,
"end": 3698
}
|
class ____ {
final CommandOutput<Object, Object, List<Object>> commandOutput;
final StreamingOutput<?> streamingOutput;
final byte[] valueBytes;
final Object value;
Fixture(CommandOutput<?, ?, ?> commandOutput, StreamingOutput<?> streamingOutput, byte[] valueBytes, Object value) {
this.commandOutput = (CommandOutput) commandOutput;
this.streamingOutput = streamingOutput;
this.valueBytes = valueBytes;
this.value = value;
}
@Override
public String toString() {
return commandOutput.getClass().getSimpleName() + "/" + value;
}
}
}
|
Fixture
|
java
|
alibaba__nacos
|
api/src/main/java/com/alibaba/nacos/api/lock/remote/LockOperationEnum.java
|
{
"start": 753,
"end": 1018
}
|
enum ____ implements Serializable {
/**
* Acquire.
*/
ACQUIRE,
/**
* Release.
*/
RELEASE,
/**
* Expire.
*/
EXPIRE;
private static final long serialVersionUID = -241044344531890549L;
}
|
LockOperationEnum
|
java
|
bumptech__glide
|
library/src/main/java/com/bumptech/glide/load/model/ResourceLoader.java
|
{
"start": 1190,
"end": 2551
}
|
class ____<Data> implements ModelLoader<Integer, Data> {
private static final String TAG = "ResourceLoader";
private final ModelLoader<Uri, Data> uriLoader;
private final Resources resources;
// Public API.
@SuppressWarnings("WeakerAccess")
public ResourceLoader(Resources resources, ModelLoader<Uri, Data> uriLoader) {
this.resources = resources;
this.uriLoader = uriLoader;
}
@Override
public LoadData<Data> buildLoadData(
@NonNull Integer model, int width, int height, @NonNull Options options) {
Uri uri = getResourceUri(model);
return uri == null ? null : uriLoader.buildLoadData(uri, width, height, options);
}
@Nullable
private Uri getResourceUri(Integer model) {
try {
return Uri.parse(
ContentResolver.SCHEME_ANDROID_RESOURCE
+ "://"
+ resources.getResourcePackageName(model)
+ '/'
+ model);
} catch (Resources.NotFoundException e) {
if (Log.isLoggable(TAG, Log.WARN)) {
Log.w(TAG, "Received invalid resource id: " + model, e);
}
return null;
}
}
@Override
public boolean handles(@NonNull Integer model) {
// TODO: check that this is in fact a resource id.
return true;
}
/** Factory for loading {@link InputStream}s from Android resource ids. */
public static
|
ResourceLoader
|
java
|
google__guava
|
android/guava/src/com/google/common/graph/Graphs.java
|
{
"start": 16683,
"end": 24668
}
|
class ____<N, E> extends ForwardingNetwork<N, E> {
private final Network<N, E> network;
TransposedNetwork(Network<N, E> network) {
this.network = network;
}
@Override
Network<N, E> delegate() {
return network;
}
@Override
public Set<N> predecessors(N node) {
return delegate().successors(node); // transpose
}
@Override
public Set<N> successors(N node) {
return delegate().predecessors(node); // transpose
}
@Override
public int inDegree(N node) {
return delegate().outDegree(node); // transpose
}
@Override
public int outDegree(N node) {
return delegate().inDegree(node); // transpose
}
@Override
public Set<E> inEdges(N node) {
return delegate().outEdges(node); // transpose
}
@Override
public Set<E> outEdges(N node) {
return delegate().inEdges(node); // transpose
}
@Override
public EndpointPair<N> incidentNodes(E edge) {
EndpointPair<N> endpointPair = delegate().incidentNodes(edge);
return EndpointPair.of(network, endpointPair.nodeV(), endpointPair.nodeU()); // transpose
}
@Override
public Set<E> edgesConnecting(N nodeU, N nodeV) {
return delegate().edgesConnecting(nodeV, nodeU); // transpose
}
@Override
public Set<E> edgesConnecting(EndpointPair<N> endpoints) {
return delegate().edgesConnecting(transpose(endpoints));
}
@Override
public @Nullable E edgeConnectingOrNull(N nodeU, N nodeV) {
return delegate().edgeConnectingOrNull(nodeV, nodeU); // transpose
}
@Override
public @Nullable E edgeConnectingOrNull(EndpointPair<N> endpoints) {
return delegate().edgeConnectingOrNull(transpose(endpoints));
}
@Override
public boolean hasEdgeConnecting(N nodeU, N nodeV) {
return delegate().hasEdgeConnecting(nodeV, nodeU); // transpose
}
@Override
public boolean hasEdgeConnecting(EndpointPair<N> endpoints) {
return delegate().hasEdgeConnecting(transpose(endpoints));
}
}
// Graph copy methods
/**
* Returns the subgraph of {@code graph} induced by {@code nodes}. This subgraph is a new graph
* that contains all of the nodes in {@code nodes}, and all of the {@link Graph#edges() edges}
* from {@code graph} for which both nodes are contained by {@code nodes}.
*
* @throws IllegalArgumentException if any element in {@code nodes} is not a node in the graph
*/
public static <N> MutableGraph<N> inducedSubgraph(Graph<N> graph, Iterable<? extends N> nodes) {
MutableGraph<N> subgraph =
(nodes instanceof Collection)
? GraphBuilder.from(graph).expectedNodeCount(((Collection) nodes).size()).build()
: GraphBuilder.from(graph).build();
for (N node : nodes) {
subgraph.addNode(node);
}
for (N node : subgraph.nodes()) {
for (N successorNode : graph.successors(node)) {
if (subgraph.nodes().contains(successorNode)) {
subgraph.putEdge(node, successorNode);
}
}
}
return subgraph;
}
/**
* Returns the subgraph of {@code graph} induced by {@code nodes}. This subgraph is a new graph
* that contains all of the nodes in {@code nodes}, and all of the {@link Graph#edges() edges}
* (and associated edge values) from {@code graph} for which both nodes are contained by {@code
* nodes}.
*
* @throws IllegalArgumentException if any element in {@code nodes} is not a node in the graph
*/
public static <N, V> MutableValueGraph<N, V> inducedSubgraph(
ValueGraph<N, V> graph, Iterable<? extends N> nodes) {
MutableValueGraph<N, V> subgraph =
(nodes instanceof Collection)
? ValueGraphBuilder.from(graph).expectedNodeCount(((Collection) nodes).size()).build()
: ValueGraphBuilder.from(graph).build();
for (N node : nodes) {
subgraph.addNode(node);
}
for (N node : subgraph.nodes()) {
for (N successorNode : graph.successors(node)) {
if (subgraph.nodes().contains(successorNode)) {
// requireNonNull is safe because the endpoint pair comes from the graph.
subgraph.putEdgeValue(
node,
successorNode,
requireNonNull(graph.edgeValueOrDefault(node, successorNode, null)));
}
}
}
return subgraph;
}
/**
* Returns the subgraph of {@code network} induced by {@code nodes}. This subgraph is a new graph
* that contains all of the nodes in {@code nodes}, and all of the {@link Network#edges() edges}
* from {@code network} for which the {@link Network#incidentNodes(Object) incident nodes} are
* both contained by {@code nodes}.
*
* @throws IllegalArgumentException if any element in {@code nodes} is not a node in the graph
*/
public static <N, E> MutableNetwork<N, E> inducedSubgraph(
Network<N, E> network, Iterable<? extends N> nodes) {
MutableNetwork<N, E> subgraph =
(nodes instanceof Collection)
? NetworkBuilder.from(network).expectedNodeCount(((Collection) nodes).size()).build()
: NetworkBuilder.from(network).build();
for (N node : nodes) {
subgraph.addNode(node);
}
for (N node : subgraph.nodes()) {
for (E edge : network.outEdges(node)) {
N successorNode = network.incidentNodes(edge).adjacentNode(node);
if (subgraph.nodes().contains(successorNode)) {
subgraph.addEdge(node, successorNode, edge);
}
}
}
return subgraph;
}
/** Creates a mutable copy of {@code graph} with the same nodes and edges. */
public static <N> MutableGraph<N> copyOf(Graph<N> graph) {
MutableGraph<N> copy = GraphBuilder.from(graph).expectedNodeCount(graph.nodes().size()).build();
for (N node : graph.nodes()) {
copy.addNode(node);
}
for (EndpointPair<N> edge : graph.edges()) {
copy.putEdge(edge.nodeU(), edge.nodeV());
}
return copy;
}
  /** Creates a mutable copy of {@code graph} with the same nodes, edges, and edge values. */
  public static <N, V> MutableValueGraph<N, V> copyOf(ValueGraph<N, V> graph) {
    // Presized to the source's node count to avoid resizing while copying.
    MutableValueGraph<N, V> copy =
        ValueGraphBuilder.from(graph).expectedNodeCount(graph.nodes().size()).build();
    // Add all nodes first so isolated (edgeless) nodes are preserved.
    for (N node : graph.nodes()) {
      copy.addNode(node);
    }
    for (EndpointPair<N> edge : graph.edges()) {
      // requireNonNull is safe because the endpoint pair comes from the graph,
      // so edgeValueOrDefault never falls back to the null default here.
      copy.putEdgeValue(
          edge.nodeU(),
          edge.nodeV(),
          requireNonNull(graph.edgeValueOrDefault(edge.nodeU(), edge.nodeV(), null)));
    }
    return copy;
  }
/** Creates a mutable copy of {@code network} with the same nodes and edges. */
public static <N, E> MutableNetwork<N, E> copyOf(Network<N, E> network) {
MutableNetwork<N, E> copy =
NetworkBuilder.from(network)
.expectedNodeCount(network.nodes().size())
.expectedEdgeCount(network.edges().size())
.build();
for (N node : network.nodes()) {
copy.addNode(node);
}
for (E edge : network.edges()) {
EndpointPair<N> endpointPair = network.incidentNodes(edge);
copy.addEdge(endpointPair.nodeU(), endpointPair.nodeV(), edge);
}
return copy;
}
  /**
   * Checks that {@code value} is non-negative.
   *
   * @return {@code value}, unchanged, so the check can be used inline
   * @throws IllegalArgumentException if {@code value} is negative
   */
  @CanIgnoreReturnValue
  static int checkNonNegative(int value) {
    checkArgument(value >= 0, "Not true that %s is non-negative.", value);
    return value;
  }
  /**
   * Checks that {@code value} is non-negative.
   *
   * @return {@code value}, unchanged, so the check can be used inline
   * @throws IllegalArgumentException if {@code value} is negative
   */
  @CanIgnoreReturnValue
  static long checkNonNegative(long value) {
    checkArgument(value >= 0, "Not true that %s is non-negative.", value);
    return value;
  }
  /**
   * Checks that {@code value} is strictly positive.
   *
   * @return {@code value}, unchanged, so the check can be used inline
   * @throws IllegalArgumentException if {@code value} is zero or negative
   */
  @CanIgnoreReturnValue
  static int checkPositive(int value) {
    checkArgument(value > 0, "Not true that %s is positive.", value);
    return value;
  }
  /**
   * Checks that {@code value} is strictly positive.
   *
   * @return {@code value}, unchanged, so the check can be used inline
   * @throws IllegalArgumentException if {@code value} is zero or negative
   */
  @CanIgnoreReturnValue
  static long checkPositive(long value) {
    checkArgument(value > 0, "Not true that %s is positive.", value);
    return value;
  }
/**
* An
|
TransposedNetwork
|
java
|
apache__camel
|
tooling/maven/camel-api-component-maven-plugin/src/main/java/org/apache/camel/maven/ApiComponentGeneratorMojo.java
|
{
"start": 5939,
"end": 6780
}
|
class ____
final AbstractApiMethodGeneratorMojo apiMethodGenerator = getApiMethodGenerator(api);
if (apiMethodGenerator != null) {
// configure API method properties and generate Proxy classes
configureMethodGenerator(apiMethodGenerator, api);
try {
apiMethodGenerator.setProjectClassLoader(getProjectClassLoader()); // supply pre-constructed ClassLoader
apiMethodGenerator.executeInternal(); // Call internal execute method
} catch (Exception e) {
final String msg = "Error generating source for " + api.getProxyClass() + ": " + e.getMessage();
throw new MojoExecutionException(msg, e);
}
} else {
// make sure the proxy
|
references
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/dynamic/ReactiveCommandSegmentCommandFactory.java
|
{
"start": 490,
"end": 2257
}
|
class ____ extends CommandSegmentCommandFactory {
private boolean streamingExecution;
ReactiveCommandSegmentCommandFactory(CommandSegments commandSegments, CommandMethod commandMethod,
RedisCodec<?, ?> redisCodec, CommandOutputFactoryResolver outputResolver) {
super(commandSegments, commandMethod, redisCodec, outputResolver);
if (commandMethod.getParameters() instanceof ExecutionSpecificParameters) {
ExecutionSpecificParameters executionAwareParameters = (ExecutionSpecificParameters) commandMethod.getParameters();
if (executionAwareParameters.hasTimeoutIndex()) {
throw new CommandCreationException(commandMethod, "Reactive command methods do not support Timeout parameters");
}
}
}
@Override
protected CommandOutputFactory resolveCommandOutputFactory(OutputSelector outputSelector) {
streamingExecution = ReactiveTypes.isMultiValueType(outputSelector.getOutputType().getRawClass());
OutputSelector componentType = new OutputSelector(outputSelector.getOutputType().getGeneric(0),
outputSelector.getRedisCodec());
if (streamingExecution) {
CommandOutputFactory streamingFactory = getOutputResolver().resolveStreamingCommandOutput(componentType);
if (streamingExecution && streamingFactory != null) {
return streamingFactory;
}
}
return super.resolveCommandOutputFactory(componentType);
}
/**
* @return {@code true} if the resolved {@link io.lettuce.core.output.CommandOutput} should use streaming.
*/
boolean isStreamingExecution() {
return streamingExecution;
}
}
|
ReactiveCommandSegmentCommandFactory
|
java
|
apache__flink
|
flink-yarn-tests/src/test/java/org/apache/flink/yarn/YarnTestBase.java
|
{
"start": 57461,
"end": 58335
}
|
/**
 * A {@link Scanner} decorator that remembers the last {@code numLinesBuffered}
 * lines returned by {@link #nextLine()}, so callers can retrieve recent context
 * via {@link #getPreviousLines()}. (Original class name was mangled to "____";
 * restored to BufferingScanner.)
 */
class BufferingScanner {

    private final Scanner scanner;
    // Maximum number of recently read lines retained in bufferedLines.
    private final int numLinesBuffered;
    private final List<String> bufferedLines;

    BufferingScanner(Scanner scanner, int numLinesBuffered) {
        this.scanner = scanner;
        this.numLinesBuffered = numLinesBuffered;
        this.bufferedLines = new ArrayList<>(numLinesBuffered);
    }

    /** @return whether the underlying scanner has another line. */
    public boolean hasNextLine() {
        return scanner.hasNextLine();
    }

    /** Reads the next line, evicting the oldest buffered line when the buffer is full. */
    public String nextLine() {
        if (bufferedLines.size() == numLinesBuffered) {
            bufferedLines.remove(0);
        }
        String line = scanner.nextLine();
        bufferedLines.add(line);
        return line;
    }

    /** @return a defensive copy of the most recently read lines, oldest first. */
    public List<String> getPreviousLines() {
        return new ArrayList<>(bufferedLines);
    }
}
}
|
BufferingScanner
|
java
|
spring-projects__spring-boot
|
module/spring-boot-http-client/src/main/java/org/springframework/boot/http/client/HttpRedirects.java
|
{
"start": 779,
"end": 1089
}
|
/**
 * Strategies for how an HTTP client should handle redirect responses.
 * (Original enum name was mangled to "____"; restored to HttpRedirects.)
 */
enum HttpRedirects {

    /**
     * Follow redirects (if the underlying library has support).
     */
    FOLLOW_WHEN_POSSIBLE,

    /**
     * Follow redirects (fail if the underlying library has no support).
     */
    FOLLOW,

    /**
     * Don't follow redirects (fail if the underlying library has no support).
     */
    DONT_FOLLOW
}
|
HttpRedirects
|
java
|
apache__logging-log4j2
|
log4j-core/src/main/java/org/apache/logging/log4j/core/util/datetime/FastDatePrinter.java
|
{
"start": 27114,
"end": 27169
}
|
class ____ a numeric rule.</p>
*/
private
|
defining
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/annotation/configuration/ImportTests.java
|
{
"start": 10698,
"end": 10842
}
|
class ____ {
@Bean
ITestBean right() {
return new TestBean();
}
}
@Configuration
@Import(NonConfigAnnotated.class)
static
|
RightConfig
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/shortarray/ShortArrayAssert_doesNotContain_at_Index_with_Integer_Argument_Test.java
|
{
"start": 1114,
"end": 1550
}
|
/**
 * Verifies that {@code ShortArrayAssert#doesNotContain(int, Index)} delegates to the
 * internal arrays implementation with the {@code int} argument narrowed to {@code short}.
 * (Original class name was mangled to "____"; restored from the test file name.)
 */
class ShortArrayAssert_doesNotContain_at_Index_with_Integer_Argument_Test extends ShortArrayAssertBaseTest {

    // Arbitrary index shared between the invocation and the delegation check.
    private final Index index = someIndex();

    @Override
    protected ShortArrayAssert invoke_api_method() {
        return assertions.doesNotContain(8, index);
    }

    @Override
    protected void verify_internal_effects() {
        // The int 8 must reach the internals as (short) 8.
        verify(arrays).assertDoesNotContain(getInfo(assertions), getActual(assertions), (short) 8, index);
    }
}
|
ShortArrayAssert_doesNotContain_at_Index_with_Integer_Argument_Test
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/plugins/ExtensionLoaderTests.java
|
{
"start": 1232,
"end": 4116
}
|
/**
 * Minimal service-provider interface used by the surrounding extension-loading tests:
 * providers implement it and return a distinguishing value.
 * (Original name was mangled to "____"; restored to TestService, the name used by the
 * META-INF/services entry below.)
 */
interface TestService {
    int getValue();
}
private URLClassLoader buildProviderJar(Map<String, CharSequence> sources) throws Exception {
var classToBytes = InMemoryJavaCompiler.compile(sources);
Map<String, byte[]> jarEntries = new HashMap<>();
for (var entry : sources.entrySet()) {
var classname = entry.getKey();
var filename = classname.replace(".", "/") + ".class";
jarEntries.put(filename, classToBytes.get(classname));
}
String serviceFile = String.join("\n", sources.keySet());
jarEntries.put(
"META-INF/services/org.elasticsearch.plugins.ExtensionLoaderTests$TestService",
serviceFile.getBytes(StandardCharsets.UTF_8)
);
Path topLevelDir = createTempDir(getTestName());
Path jar = topLevelDir.resolve("provider.jar");
JarUtils.createJarWithEntries(jar, jarEntries);
URL[] urls = new URL[] { jar.toUri().toURL() };
return URLClassLoader.newInstance(urls, this.getClass().getClassLoader());
}
private String defineProvider(String name, int value) {
return String.format(Locale.ROOT, """
package p;
import org.elasticsearch.plugins.ExtensionLoaderTests.TestService;
public class %s implements TestService {
@Override
public int getValue() {
return %d;
}
}
""", name, value);
}
public void testNoProvider() {
Optional<TestService> service = ExtensionLoader.loadSingleton(ServiceLoader.load(TestService.class));
assertThat(service, isEmpty());
}
public void testOneProvider() throws Exception {
Map<String, CharSequence> sources = Map.of("p.FooService", defineProvider("FooService", 1));
try (var loader = buildProviderJar(sources)) {
TestService service = ExtensionLoader.loadSingleton(ServiceLoader.load(TestService.class, loader))
.orElseThrow(AssertionError::new);
assertThat(service, not(nullValue()));
assertThat(service.getValue(), equalTo(1));
}
}
public void testManyProviders() throws Exception {
Map<String, CharSequence> sources = Map.of(
"p.FooService",
defineProvider("FooService", 1),
"p.BarService",
defineProvider("BarService", 2)
);
try (var loader = buildProviderJar(sources)) {
var e = expectThrows(
IllegalStateException.class,
() -> ExtensionLoader.loadSingleton(ServiceLoader.load(TestService.class, loader))
);
assertThat(e.getMessage(), containsString("More than one extension found"));
assertThat(e.getMessage(), containsString("TestService"));
}
}
}
|
TestService
|
java
|
quarkusio__quarkus
|
independent-projects/arc/processor/src/main/java/io/quarkus/arc/processor/ScopeInfo.java
|
{
"start": 382,
"end": 1798
}
|
class ____ {
private final DotName dotName;
private final boolean isNormal;
private boolean declaresInherited;
public ScopeInfo(Class<? extends Annotation> clazz, boolean isNormal) {
this.dotName = DotName.createSimple(clazz.getName());
this.isNormal = isNormal;
declaresInherited = clazz.getAnnotation(Inherited.class) != null;
}
public ScopeInfo(DotName clazz, boolean isNormal) {
this.dotName = clazz;
this.isNormal = isNormal;
declaresInherited = true;
}
public ScopeInfo(DotName clazz, boolean isNormal, boolean declaresInherited) {
this.dotName = clazz;
this.isNormal = isNormal;
this.declaresInherited = declaresInherited;
}
public DotName getDotName() {
return dotName;
}
public boolean isNormal() {
return isNormal;
}
public boolean declaresInherited() {
return declaresInherited;
}
@Override
public int hashCode() {
return Objects.hash(dotName);
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
ScopeInfo other = (ScopeInfo) obj;
return Objects.equals(dotName, other.dotName);
}
}
|
ScopeInfo
|
java
|
apache__commons-lang
|
src/main/java/org/apache/commons/lang3/concurrent/locks/LockingVisitors.java
|
{
"start": 1848,
"end": 2781
}
|
class ____ a {@link ReentrantLock}:
* </p>
* <ol>
* <li>In single threaded mode, call {@link #reentrantLockVisitor(Object)}, passing the object to protect. This creates a
* {@link LockingVisitors.ReentrantLockVisitor}
* </li>
* <li>To access the protected object, create a {@link FailableConsumer} lambda. The consumer will receive the object as a parameter while the visitor holds the
* lock. Then call
* {@link LockingVisitors.LockVisitor#acceptReadLocked(FailableConsumer)}, or
* {@link LockingVisitors.LockVisitor#acceptWriteLocked(FailableConsumer)}, passing the consumer.
* </li>
* <li>Alternatively, to receive a result object, use a {@link FailableFunction} lambda. To have the function executed, call
* {@link LockingVisitors.LockVisitor#applyReadLocked(FailableFunction)}, or
* {@link LockingVisitors.LockVisitor#applyWriteLocked(FailableFunction)}.
* </li>
* </ol>
* <p>
* Example 1: A thread safe logger
|
with
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/PercentileIntAggregator.java
|
{
"start": 748,
"end": 2113
}
|
/**
 * Static aggregator functions for computing a percentile over {@code int} values,
 * in both single-state and grouping modes. Intermediate state is exchanged as a
 * serialized sketch carried in a {@link BytesRef}.
 * (Original class name was mangled to "____"; restored from the file name.)
 */
class PercentileIntAggregator {

    public static QuantileStates.SingleState initSingle(DriverContext driverContext, double percentile) {
        return new QuantileStates.SingleState(driverContext.breaker(), percentile);
    }

    /** Adds a raw input value to the single (non-grouping) state. */
    public static void combine(QuantileStates.SingleState current, int v) {
        current.add(v);
    }

    /** Merges a serialized intermediate sketch into the single state. */
    public static void combineIntermediate(QuantileStates.SingleState state, BytesRef inValue) {
        state.add(inValue);
    }

    public static Block evaluateFinal(QuantileStates.SingleState state, DriverContext driverContext) {
        return state.evaluatePercentile(driverContext);
    }

    public static QuantileStates.GroupingState initGrouping(DriverContext driverContext, double percentile) {
        return new QuantileStates.GroupingState(driverContext.breaker(), driverContext.bigArrays(), percentile);
    }

    /** Adds a raw input value to the state of group {@code groupId}. */
    public static void combine(QuantileStates.GroupingState state, int groupId, int v) {
        state.add(groupId, v);
    }

    /** Merges a serialized intermediate sketch into the state of group {@code groupId}. */
    public static void combineIntermediate(QuantileStates.GroupingState state, int groupId, BytesRef inValue) {
        state.add(groupId, inValue);
    }

    public static Block evaluateFinal(QuantileStates.GroupingState state, IntVector selected, GroupingAggregatorEvaluationContext ctx) {
        return state.evaluatePercentile(selected, ctx.driverContext());
    }
}
|
PercentileIntAggregator
|
java
|
spring-projects__spring-framework
|
spring-web/src/main/java/org/springframework/http/HttpRequest.java
|
{
"start": 887,
"end": 1440
}
|
/**
 * Represents an HTTP request: an {@link HttpMessage} that additionally exposes
 * the request method, the target URI and a mutable attribute map.
 * (Original interface name was mangled to "____"; restored to HttpRequest.)
 */
public interface HttpRequest extends HttpMessage {

    /**
     * Return the HTTP method of the request.
     * @return the HTTP method as an HttpMethod value
     * @see HttpMethod#valueOf(String)
     */
    HttpMethod getMethod();

    /**
     * Return the URI of the request (including a query string if any,
     * but only if it is well-formed for a URI representation).
     * @return the URI of the request (never {@code null})
     */
    URI getURI();

    /**
     * Return a mutable map of request attributes for this request.
     * @since 6.2
     */
    Map<String, Object> getAttributes();
}
|
HttpRequest
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/processor/converter/ConvertBodyAllowNullTest.java
|
{
"start": 1170,
"end": 7191
}
|
class ____ extends ContextTestSupport {
@Test
public void testConvertMyBean() throws Exception {
MyBean custom = context.getTypeConverter().convertTo(MyBean.class, "1:2");
Assertions.assertNotNull(custom);
custom = context.getTypeConverter().convertTo(MyBean.class, "");
Assertions.assertNull(custom);
}
@Test
public void testCustomConvertToAllowNullOptional() throws Exception {
MockEndpoint result = getMockEndpoint("mock:result");
result.expectedMessageCount(1);
result.message(0).body().isInstanceOf(MyBean.class);
template.sendBody("direct:custom-optional", "1:2");
assertMockEndpointsSatisfied();
}
@Test
public void testCustomConvertToAllowNull() throws Exception {
MockEndpoint result = getMockEndpoint("mock:result");
result.expectedMessageCount(1);
result.message(0).body().isNull();
template.sendBody("direct:custom-mandatory", "");
assertMockEndpointsSatisfied();
}
@Test
public void testConvertAllowNull() throws Exception {
Object val = context.getTypeConverter().convertTo(Integer.class, Float.NaN);
Assertions.assertNull(val);
val = context.getTypeConverter().mandatoryConvertTo(Integer.class, Float.NaN);
Assertions.assertNull(val);
val = context.getTypeConverter().tryConvertTo(Integer.class, Float.NaN);
Assertions.assertNull(val);
val = context.getTypeConverter().convertTo(Integer.class, 123);
Assertions.assertEquals(123, val);
val = context.getTypeConverter().mandatoryConvertTo(Integer.class, 123);
Assertions.assertEquals(123, val);
val = context.getTypeConverter().tryConvertTo(Integer.class, 123);
Assertions.assertEquals(123, val);
}
@Test
public void testConvertAllowNullWithExchange() throws Exception {
Exchange exchange = context.getEndpoint("mock:result").createExchange();
Object val = context.getTypeConverter().convertTo(Integer.class, exchange, Float.NaN);
Assertions.assertNull(val);
val = context.getTypeConverter().mandatoryConvertTo(Integer.class, exchange, Float.NaN);
Assertions.assertNull(val);
val = context.getTypeConverter().tryConvertTo(Integer.class, exchange, Float.NaN);
Assertions.assertNull(val);
val = context.getTypeConverter().convertTo(Integer.class, exchange, 123);
Assertions.assertEquals(123, val);
val = context.getTypeConverter().mandatoryConvertTo(Integer.class, exchange, 123);
Assertions.assertEquals(123, val);
val = context.getTypeConverter().tryConvertTo(Integer.class, exchange, 123);
Assertions.assertEquals(123, val);
}
@Test
public void testConvertToAllowNullOptional() throws Exception {
MockEndpoint result = getMockEndpoint("mock:result");
result.expectedMessageCount(1);
result.message(0).body().isNull();
template.sendBody("direct:optional", Float.NaN);
assertMockEndpointsSatisfied();
}
@Test
public void testConvertToAllowNull() throws Exception {
MockEndpoint result = getMockEndpoint("mock:result");
result.expectedMessageCount(1);
result.message(0).body().isNull();
template.sendBody("direct:mandatory", Float.NaN);
assertMockEndpointsSatisfied();
}
@Test
public void testHeaderConvertToAllowNullOptional() throws Exception {
MockEndpoint result = getMockEndpoint("mock:result");
result.expectedMessageCount(1);
result.message(0).header("foo").isNull();
template.sendBodyAndHeader("direct:header-optional", "Hello World", "foo", Float.NaN);
assertMockEndpointsSatisfied();
}
@Test
public void testHeaderConvertToAllowNull() throws Exception {
MockEndpoint result = getMockEndpoint("mock:result");
result.expectedMessageCount(1);
result.message(0).header("foo").isNull();
template.sendBodyAndHeader("direct:header-mandatory", "Hello World", "foo", Float.NaN);
assertMockEndpointsSatisfied();
}
@Test
public void testVarConvertToAllowNullOptional() throws Exception {
MockEndpoint result = getMockEndpoint("mock:result");
result.expectedMessageCount(1);
result.message(0).variable("foo").isNull();
fluentTemplate.withVariable("foo", Float.NaN).withBody("Hello World").to("direct:var-optional").send();
assertMockEndpointsSatisfied();
}
@Test
public void testVarConvertToAllowNull() throws Exception {
MockEndpoint result = getMockEndpoint("mock:result");
result.expectedMessageCount(1);
result.message(0).variable("foo").isNull();
fluentTemplate.withVariable("foo", Float.NaN).withBody("Hello World").to("direct:var-mandatory").send();
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
context.setStreamCaching(false);
from("direct:optional").convertBodyTo(Integer.class, false).to("mock:result");
from("direct:mandatory").convertBodyTo(Integer.class).to("mock:result");
from("direct:header-optional").convertHeaderTo("foo", Integer.class, false).to("mock:result");
from("direct:header-mandatory").convertHeaderTo("foo", Integer.class).to("mock:result");
from("direct:var-optional").convertVariableTo("foo", Integer.class, false).to("mock:result");
from("direct:var-mandatory").convertVariableTo("foo", Integer.class).to("mock:result");
from("direct:custom-optional").convertBodyTo(MyBean.class, false).to("mock:result");
from("direct:custom-mandatory").convertBodyTo(MyBean.class).to("mock:result");
}
};
}
}
|
ConvertBodyAllowNullTest
|
java
|
quarkusio__quarkus
|
extensions/jdbc/jdbc-postgresql/runtime/src/main/java/io/quarkus/jdbc/postgresql/runtime/PostgreSQLServiceBindingConverter.java
|
{
"start": 629,
"end": 1236
}
|
class ____ implements ServiceBindingConverter {
public static final String BINDING_TYPE = "postgresql";
public static final String SSL_MODE = "sslmode";
public static final String SSL_ROOT_CERT = "sslrootcert";
public static final String OPTIONS = "options";
@Override
public Optional<ServiceBindingConfigSource> convert(List<ServiceBinding> serviceBindings) {
return ServiceBinding.singleMatchingByType(BINDING_TYPE, serviceBindings)
.map(new PostgreSQLDatasourceServiceBindingConfigSourceFactory());
}
private static
|
PostgreSQLServiceBindingConverter
|
java
|
elastic__elasticsearch
|
modules/aggregations/src/test/java/org/elasticsearch/aggregations/pipeline/MovFnAggregatorTests.java
|
{
"start": 2388,
"end": 8153
}
|
class ____ extends AggregatorTestCase {
private static final String DATE_FIELD = "date";
private static final String INSTANT_FIELD = "instant";
private static final String VALUE_FIELD = "value_field";
private static final List<String> datasetTimes = Arrays.asList(
"2017-01-01T01:07:45",
"2017-01-02T03:43:34",
"2017-01-03T04:11:00",
"2017-01-04T05:11:31",
"2017-01-05T08:24:05",
"2017-01-06T13:09:32",
"2017-01-07T13:47:43",
"2017-01-08T16:14:34",
"2017-01-09T17:09:50",
"2017-01-10T22:55:46"
);
private static final List<Integer> datasetValues = Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
@Override
protected ScriptService getMockScriptService() {
ScriptEngine scriptEngine = new ScriptEngine() {
@Override
public String getType() {
return "test";
}
@Override
public <FactoryType> FactoryType compile(
String name,
String code,
ScriptContext<FactoryType> context,
Map<String, String> params
) {
if (getSupportedContexts().contains(context) == false) {
return null;
}
MovingFunctionScript.Factory factory = () -> new MovingFunctionScript() {
@Override
public double execute(Map<String, Object> params, double[] values) {
return MovingFunctions.max(values);
}
};
return context.factoryClazz.cast(factory);
}
@Override
public Set<ScriptContext<?>> getSupportedContexts() {
return Set.of(MovingFunctionScript.CONTEXT);
}
};
Map<String, ScriptEngine> engines = Collections.singletonMap(scriptEngine.getType(), scriptEngine);
return new ScriptService(
Settings.EMPTY,
engines,
Map.of(MovingFunctionScript.CONTEXT.name, MovingFunctionScript.CONTEXT),
() -> 1L,
TestProjectResolvers.singleProject(randomProjectIdOrDefault())
);
}
public void testMatchAllDocs() throws IOException {
check(0, 3, List.of(Double.NaN, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0));
}
public void testShift() throws IOException {
check(1, 3, List.of(1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0));
check(5, 3, List.of(5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 10.0, 10.0, Double.NaN, Double.NaN));
check(-5, 3, List.of(Double.NaN, Double.NaN, Double.NaN, Double.NaN, Double.NaN, Double.NaN, 1.0, 2.0, 3.0, 4.0));
}
public void testWideWindow() throws IOException {
check(50, 100, List.of(10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0));
}
private void check(int shift, int window, List<Double> expected) throws IOException {
Script script = new Script(ScriptType.INLINE, "test", "test", Collections.emptyMap());
MovFnPipelineAggregationBuilder builder = new MovFnPipelineAggregationBuilder("mov_fn", "avg", script, window);
builder.setShift(shift);
Query query = new MatchAllDocsQuery();
DateHistogramAggregationBuilder aggBuilder = new DateHistogramAggregationBuilder("histo");
aggBuilder.calendarInterval(DateHistogramInterval.DAY).field(DATE_FIELD);
aggBuilder.subAggregation(new AvgAggregationBuilder("avg").field(VALUE_FIELD));
aggBuilder.subAggregation(builder);
executeTestCase(query, aggBuilder, histogram -> {
List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
List<Double> actual = buckets.stream()
.map(bucket -> ((InternalSimpleValue) (bucket.getAggregations().get("mov_fn"))).value())
.toList();
assertThat(actual, equalTo(expected));
});
}
private void executeTestCase(Query query, DateHistogramAggregationBuilder aggBuilder, Consumer<Histogram> verify) throws IOException {
try (Directory directory = newDirectory()) {
try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
Document document = new Document();
int counter = 0;
for (String date : datasetTimes) {
long instant = asLong(date);
document.add(new SortedNumericDocValuesField(DATE_FIELD, instant));
document.add(new LongPoint(INSTANT_FIELD, instant));
document.add(new NumericDocValuesField(VALUE_FIELD, datasetValues.get(counter)));
indexWriter.addDocument(document);
document.clear();
counter += 1;
}
}
try (DirectoryReader indexReader = DirectoryReader.open(directory)) {
DateFieldMapper.DateFieldType fieldType = new DateFieldMapper.DateFieldType(aggBuilder.field());
MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType("value_field", NumberFieldMapper.NumberType.LONG);
InternalDateHistogram histogram;
histogram = searchAndReduce(
indexReader,
new AggTestConfig(aggBuilder, fieldType, valueFieldType).withMaxBuckets(1000).withQuery(query)
);
verify.accept(histogram);
}
}
}
private static long asLong(String dateTime) {
return DateFormatters.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(dateTime)).toInstant().toEpochMilli();
}
}
|
MovFnAggregatorTests
|
java
|
spring-projects__spring-framework
|
spring-core/src/test/java/org/springframework/util/ClassUtilsTests.java
|
{
"start": 45130,
"end": 45335
}
|
class ____ extends MethodsInterfaceImplementation {
@Override
protected void protectedPrint() {
}
@Override
public void packageAccessiblePrint() {
}
}
private
|
SubMethodsInterfaceImplementation
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/parser/deser/asm/TestASM_Byte_0.java
|
{
"start": 431,
"end": 613
}
|
/**
 * Simple bean with a single {@link Byte} property, used to exercise generated
 * (ASM-based) deserialization. (Original name was mangled to "____"; restored to V0.)
 */
class V0 {

    // Boxed byte with a default value of 12.
    private Byte i = 12;

    public Byte getI() {
        return i;
    }

    public void setI(Byte i) {
        this.i = i;
    }
}
}
|
V0
|
java
|
elastic__elasticsearch
|
x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java
|
{
"start": 45678,
"end": 45985
}
|
/**
 * A rule applied bottom-up over a physical plan: {@link #rule} is invoked on
 * every node of the declared {@code SubPlan} type. Declared abstract because it
 * leaves {@code rule} unimplemented (the mangled original "____" dropped the
 * modifier; restored name: FoldingRule).
 */
abstract class FoldingRule<SubPlan extends PhysicalPlan> extends Rule<SubPlan, PhysicalPlan> {

    @Override
    public final PhysicalPlan apply(PhysicalPlan plan) {
        return plan.transformUp(typeToken(), this::rule);
    }

    /** Transformation applied to each node matching {@code SubPlan}. */
    protected abstract PhysicalPlan rule(SubPlan plan);
}
}
|
FoldingRule
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/simple/TestUtil.java
|
{
"start": 2161,
"end": 2851
}
|
class ____ only classes in @resources).
*
* @param resources classes used in deployment as resources
*/
public static Archive<?> finishContainerPrepare(WebArchive war, Map<String, String> contextParams,
final Class<?>... resources) {
return finishContainerPrepare(war, contextParams, null, resources);
}
/**
* Finish preparing war deployment and deploy it.
*
* Add classes in @resources to deployment. Also, all subclasses of classes in @resources are added to deployment.
* But only classes in @resources (not subclasses of classes in @resources) can be used as resources
* (getClasses function of TestApplication
|
return
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/onetomany/Parent.java
|
{
"start": 467,
"end": 1237
}
|
/**
 * Entity-style class with a composite id ({@code ParentPk}) and an ordered set of
 * {@code Child} instances. hashCode/equals deliberately rely on {@code id} being
 * set before use in hash-based collections (an NPE before that point is accepted).
 * (Original class name was mangled to "____"; restored from {@code instanceof Parent}.)
 */
public class Parent implements Serializable {

    @Id
    public ParentPk id;

    public int age;

    @OneToMany(cascade = CascadeType.ALL, mappedBy = "parent")
    @BatchSize(size = 5)
    @jakarta.persistence.OrderBy("favoriteSuperhero asc, favoriteSinger desc")
    public Set<Child> children;

    @Override
    public int hashCode() {
        // An NPE can occur, but hashCode is not expected to be used before the pk is set.
        return id.hashCode();
    }

    @Override
    public boolean equals(Object obj) {
        // An NPE can occur, but equals is not expected to be used before the pk is set.
        if ( obj instanceof Parent p ) {
            return id.equals( p.id );
        }
        else {
            return false;
        }
    }

    /** Adds {@code child} to this parent, wiring the bidirectional association. */
    public void addChild(Child child) {
        if ( children == null ) {
            children = new HashSet<>();
        }
        child.parent = this;
        children.add( child );
    }
}
|
Parent
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/tools/MarkerTool.java
|
{
"start": 11876,
"end": 20656
}
|
class ____ {
/**
* Exit code to return if an exception was not raised.
*/
private int exitCode;
/**
* Text to include if raising an exception.
*/
private String exitText = "";
/**
* Count of all markers found.
*/
private int totalMarkerCount;
/**
* Count of all markers found after excluding
* any from a [-nonauth] qualification.
*/
private int filteredMarkerCount;
/**
* The tracker.
*/
private DirMarkerTracker tracker;
/**
* Scan summary.
*/
private MarkerPurgeSummary purgeSummary;
private ScanResult() {
}
@Override
public String toString() {
return "ScanResult{" +
"exitCode=" + exitCode +
", exitText=" + exitText +
", totalMarkerCount=" + totalMarkerCount +
", filteredMarkerCount=" + filteredMarkerCount +
", tracker=" + tracker +
", purgeSummary=" + purgeSummary +
'}';
}
/**
* @return Exit code to report.
*/
public int getExitCode() {
return exitCode;
}
/**
* @return Tracker which did the scan.
*/
public DirMarkerTracker getTracker() {
return tracker;
}
/**
* @return Summary of purge. Null if none took place.
*/
public MarkerPurgeSummary getPurgeSummary() {
return purgeSummary;
}
public int getTotalMarkerCount() {
return totalMarkerCount;
}
public int getFilteredMarkerCount() {
return filteredMarkerCount;
}
/**
* Throw an exception if the exit code is non-zero.
* @return 0 if everything is good.
* @throws ExitUtil.ExitException if code != 0
*/
public int finish() throws ExitUtil.ExitException {
if (exitCode != 0) {
throw new ExitUtil.ExitException(exitCode, exitText);
}
return 0;
}
}
/**
* Do the scan/purge.
* @param path path to scan.
* @param doPurge purge rather than just scan/audit?
* @param minMarkerCount min marker count (ignored on purge)
* @param maxMarkerCount max marker count (ignored on purge)
* @param limit limit of files to scan; 0 for 'unlimited'
* @return result.
* @throws IOException IO failure
* @throws ExitUtil.ExitException explicitly raised failure
*/
@Retries.RetryTranslated
private ScanResult scan(
final Path path,
final boolean doPurge,
final int minMarkerCount,
final int maxMarkerCount,
final int limit)
throws IOException, ExitUtil.ExitException {
// safety check: min and max are correctly ordered at this point.
Preconditions.checkArgument(minMarkerCount <= maxMarkerCount,
"The min marker count of %d is greater than the max value of %d",
minMarkerCount, maxMarkerCount);
ScanResult result = new ScanResult();
// Mission Accomplished
result.exitCode = EXIT_SUCCESS;
// Now do the work.
DirMarkerTracker tracker = new DirMarkerTracker(path, true);
result.tracker = tracker;
boolean completed;
try (DurationInfo ignored =
new DurationInfo(LOG, "marker scan %s", path)) {
completed = scanDirectoryTree(path, tracker, limit);
}
int objectsFound = tracker.getObjectsFound();
println(out, "Listed %d object%s under %s%n",
objectsFound,
suffix(objectsFound),
path);
// scan done. what have we got?
Map<Path, DirMarkerTracker.Marker> surplusMarkers
= tracker.getSurplusMarkers();
Map<Path, DirMarkerTracker.Marker> leafMarkers
= tracker.getLeafMarkers();
// determine marker count
int markerCount = surplusMarkers.size();
result.totalMarkerCount = markerCount;
result.filteredMarkerCount = markerCount;
if (markerCount == 0) {
println(out, "No surplus directory markers were found under %s", path);
} else {
println(out, "Found %d surplus directory marker%s under %s",
markerCount,
suffix(markerCount),
path);
for (Path markers : surplusMarkers.keySet()) {
println(out, " %s/", markers);
}
}
if (!leafMarkers.isEmpty()) {
println(out, "Found %d empty directory 'leaf' marker%s under %s",
leafMarkers.size(),
suffix(leafMarkers.size()),
path);
for (Path markers : leafMarkers.keySet()) {
println(out, " %s/", markers);
}
println(out, "These are required to indicate empty directories");
}
if (doPurge) {
// clean: remove the markers, do not worry about their
// presence when reporting success/failure
int deletePageSize = storeContext.getConfiguration()
.getInt(BULK_DELETE_PAGE_SIZE,
BULK_DELETE_PAGE_SIZE_DEFAULT);
result.purgeSummary = purgeMarkers(tracker, deletePageSize);
} else {
// this is an audit, so validate the marker count
if (markerCount < minMarkerCount || markerCount > maxMarkerCount) {
// failure
return failScan(result, EXIT_NOT_ACCEPTABLE,
"Marker count %d out of range "
+ "[%d - %d]",
markerCount, minMarkerCount, maxMarkerCount);
}
}
// now one little check for whether a limit was reached.
if (!completed) {
failScan(result, EXIT_INTERRUPTED,
"Listing limit (%d) reached before completing the scan", limit);
}
return result;
}
/**
* Fail the scan; print the formatted error and update the result.
* @param result result to update
* @param code Exit code
* @param message Error message
* @param args arguments for the error message
* @return scan result
*/
private ScanResult failScan(
ScanResult result,
int code,
String message,
Object...args) {
String text = String.format(message, args);
result.exitCode = code;
result.exitText = text;
return result;
}
/**
* Suffix for plurals.
* @param size size to generate a suffix for
* @return "" or "s", depending on size
*/
private String suffix(final int size) {
return size == 1 ? "" : "s";
}
  /**
   * Scan a directory tree.
   * Lists every object under {@code path}, feeding each entry to the
   * tracker as either a directory marker or a file; prints progress
   * every 1000 objects and stops early if {@code limit} is reached.
   * @param path path to scan
   * @param tracker tracker to update
   * @param limit limit of files to scan; -1 for 'unlimited'
   * @return true if the scan completely scanned the entire tree
   * @throws IOException IO failure
   */
  @Retries.RetryTranslated
  private boolean scanDirectoryTree(
      final Path path,
      final DirMarkerTracker tracker,
      final int limit) throws IOException {
    // number of objects listed so far.
    int count = 0;
    // stays true unless the listing limit is hit before completion.
    boolean result = true;
    // the path/key stuff loses any trailing / passed in.
    // but this may actually be needed.
    RemoteIterator<S3AFileStatus> listing = null;
    String listkey = storeContext.pathToKey(path);
    if (listkey.isEmpty()) {
      // root. always give it a path to keep ranger happy.
      listkey = "/";
    }
    try {
      listing = operations.listObjects(path, listkey);
    } catch (AWSBadRequestException e) {
      // endpoint was unhappy. this is generally unrecoverable, but some
      // third party stores do insist on a / here.
      LOG.debug("Failed to list \"{}\"", listkey, e);
      // now retry with a trailing / in case that works
      if (listkey.endsWith("/")) {
        // already has a trailing /, so fail
        throw e;
      }
      // try again.
      listing = operations.listObjects(path, listkey + "/");
    }
    while (listing.hasNext()) {
      count++;
      S3AFileStatus status = listing.next();
      Path statusPath = status.getPath();
      S3ALocatedFileStatus locatedStatus = new S3ALocatedFileStatus(
          status, null);
      String key = storeContext.pathToKey(statusPath);
      if (status.isDirectory()) {
        // directory entry: record it as a (potentially surplus) marker.
        if (verbose) {
          println(out, "  Directory Marker %s/", key);
        }
        LOG.debug("{}", key);
        tracker.markerFound(statusPath,
            key + "/",
            locatedStatus);
      } else {
        // plain object: a file under the tree.
        tracker.fileFound(statusPath,
            key,
            locatedStatus);
      }
      if ((count % 1000) == 0) {
        // periodic progress report for large trees.
        println(out, "Scanned %,d objects", count);
      }
      if (limit > 0 && count >= limit) {
        // limit reached: report and abandon the rest of the scan.
        println(out, "Limit of scan reached - %,d object%s",
            limit, suffix(limit));
        result = false;
        break;
      }
    }
    LOG.debug("Listing summary {}", listing);
    if (verbose) {
      // IOStatistics of the listing, if the iterator exposes them.
      println(out, "%nListing statistics:%n  %s%n",
          ioStatisticsSourceToString(listing));
    }
    return result;
  }
/**
* Result of a call of {@link #purgeMarkers(DirMarkerTracker, int)};
* included in {@link ScanResult} so must share visibility.
*/
public static final
|
ScanResult
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/hql/ComponentContainer.java
|
{
"start": 1375,
"end": 1783
}
|
class ____ {
private int code;
private int plus4;
public Zip() {
}
public Zip(int code, int plus4) {
this.code = code;
this.plus4 = plus4;
}
public int getCode() {
return code;
}
public void setCode(int code) {
this.code = code;
}
public int getPlus4() {
return plus4;
}
public void setPlus4(int plus4) {
this.plus4 = plus4;
}
}
}
}
|
Zip
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/module/SimpleModuleTest.java
|
{
"start": 1559,
"end": 2143
}
|
class ____ extends ValueDeserializer<CustomBean>
{
@Override
public CustomBean deserialize(JsonParser p, DeserializationContext ctxt)
{
String text = p.getString();
int ix = text.indexOf('|');
if (ix < 0) {
throw new StreamReadException(p, "Failed to parse String value of \""+text+"\"");
}
String str = text.substring(0, ix);
int num = Integer.parseInt(text.substring(ix+1));
return new CustomBean(str, num);
}
}
static
|
CustomBeanDeserializer
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineExistsFilter.java
|
{
"start": 1029,
"end": 1137
}
|
class ____ represents filter to be applied based on existence of a
* value.
*/
@Private
@Unstable
public
|
which
|
java
|
quarkusio__quarkus
|
extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/runtime/graal/PropertiesFactorySubstitution.java
|
{
"start": 346,
"end": 523
}
|
class ____ {
@Substitute
public static Properties getDefaultProperties() {
return NarayanaJtaRecorder.getDefaultProperties();
}
}
|
PropertiesFactorySubstitution
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/mapping/Contributable.java
|
{
"start": 471,
"end": 590
}
|
interface ____ {
/**
* The name of the contributor which contributed this
*/
String getContributor();
}
|
Contributable
|
java
|
google__auto
|
factory/src/it/functional/src/main/java/com/google/auto/factory/DependencyImpl.java
|
{
"start": 660,
"end": 739
}
|
class ____ implements Dependency {
@Inject
DependencyImpl() {}
}
|
DependencyImpl
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/recursive/comparison/fields/RecursiveComparisonAssert_isEqualTo_ignoringArrayOrder_Test.java
|
{
"start": 12120,
"end": 14117
}
|
class ____ {
public int val;
public Inner(int val) {
this.val = val;
}
public String toString() {
return "I" + val;
}
}
@Test
public void should_fix_3598() {
// GIVEN
Inner i1 = new Inner(1);
Inner i2 = new Inner(2);
Inner i3 = new Inner(3);
Outer o1A = new Outer(i1);
Outer o2A = new Outer(i2);
Outer o3A = new Outer(i3);
Outer o1B = new Outer(i1);
Outer o2B = new Outer(i2);
Outer o3B = new Outer(i3);
Outer[] arrayA = array(o1A, o2A, o3A);
Outer[] arrayB = array(o2B, o1B, o3B);
Outer[] arrayACopy = array(o1A, o2A, o3A);
Outer[] arrayBCopy = array(o2B, o1B, o3B);
WithObject actual = new WithObject(array(arrayA, arrayACopy));
WithObject expected = new WithObject(array(arrayB, arrayBCopy));
// WHEN/THEN
then(actual).usingRecursiveComparison(recursiveComparisonConfiguration)
.ignoringArrayOrder()
.isEqualTo(expected);
}
record Item(String name, int quantity) {
}
@Test
void should_honor_representation_in_unmatched_elements_when_comparing_iterables_ignoring_order() {
// GIVEN
WithObject actual = new WithObject(array(new Item("Pants", 3), new Item("Loafers", 1)));
WithObject expected = new WithObject(array(new Item("Shoes", 2), new Item("Pants", 3)));
registerFormatterForType(Item.class, item -> "Item(%s, %d)".formatted(item.name(), item.quantity()));
// WHEN
var assertionError = expectAssertionError(() -> assertThat(actual).usingRecursiveComparison(recursiveComparisonConfiguration)
.ignoringArrayOrder()
.isEqualTo(expected));
// THEN
then(assertionError).hasMessageContaining(format("The following expected elements were not matched in the actual ArrayList:%n"
+ " [Item(Shoes, 2)]"));
}
}
|
Inner
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForFloat.java
|
{
"start": 653,
"end": 2238
}
|
class ____ implements KeyExtractor {
static KeyExtractorForFloat extractorFor(TopNEncoder encoder, boolean ascending, byte nul, byte nonNul, FloatBlock block) {
FloatVector v = block.asVector();
if (v != null) {
return new KeyExtractorForFloat.FromVector(encoder, nul, nonNul, v);
}
if (ascending) {
return block.mvSortedAscending()
? new KeyExtractorForFloat.MinFromAscendingBlock(encoder, nul, nonNul, block)
: new KeyExtractorForFloat.MinFromUnorderedBlock(encoder, nul, nonNul, block);
}
return block.mvSortedAscending()
? new KeyExtractorForFloat.MaxFromAscendingBlock(encoder, nul, nonNul, block)
: new KeyExtractorForFloat.MaxFromUnorderedBlock(encoder, nul, nonNul, block);
}
private final byte nul;
private final byte nonNul;
KeyExtractorForFloat(TopNEncoder encoder, byte nul, byte nonNul) {
assert encoder == TopNEncoder.DEFAULT_SORTABLE;
this.nul = nul;
this.nonNul = nonNul;
}
protected final int nonNul(BreakingBytesRefBuilder key, float value) {
key.append(nonNul);
TopNEncoder.DEFAULT_SORTABLE.encodeFloat(value, key);
return Float.BYTES + 1;
}
protected final int nul(BreakingBytesRefBuilder key) {
key.append(nul);
return 1;
}
@Override
public final String toString() {
return String.format(Locale.ROOT, "KeyExtractorForFloat%s(%s, %s)", getClass().getSimpleName(), nul, nonNul);
}
static
|
KeyExtractorForFloat
|
java
|
apache__camel
|
core/camel-api/src/main/java/org/apache/camel/TimeoutMap.java
|
{
"start": 2816,
"end": 2852
}
|
interface ____<K, V> {
|
Listener
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/joda/JodaTest_6_Duration.java
|
{
"start": 583,
"end": 639
}
|
class ____ {
public Duration duration;
}
}
|
Model
|
java
|
apache__logging-log4j2
|
log4j-layout-template-json/src/main/java/org/apache/logging/log4j/layout/template/json/resolver/SourceResolver.java
|
{
"start": 1774,
"end": 5385
}
|
class ____ implements EventResolver {
private static final EventResolver NULL_RESOLVER =
(final LogEvent value, final JsonWriter jsonWriter) -> jsonWriter.writeNull();
private static final EventResolver CLASS_NAME_RESOLVER = (final LogEvent logEvent, final JsonWriter jsonWriter) -> {
final StackTraceElement logEventSource = logEvent.getSource();
if (logEventSource == null) {
jsonWriter.writeNull();
} else {
final String sourceClassName = logEventSource.getClassName();
jsonWriter.writeString(sourceClassName);
}
};
private static final EventResolver FILE_NAME_RESOLVER = (final LogEvent logEvent, final JsonWriter jsonWriter) -> {
final StackTraceElement logEventSource = logEvent.getSource();
if (logEventSource == null) {
jsonWriter.writeNull();
} else {
final String sourceFileName = logEventSource.getFileName();
jsonWriter.writeString(sourceFileName);
}
};
private static final EventResolver LINE_NUMBER_RESOLVER =
(final LogEvent logEvent, final JsonWriter jsonWriter) -> {
final StackTraceElement logEventSource = logEvent.getSource();
if (logEventSource == null) {
jsonWriter.writeNull();
} else {
final int sourceLineNumber = logEventSource.getLineNumber();
jsonWriter.writeNumber(sourceLineNumber);
}
};
private static final EventResolver METHOD_NAME_RESOLVER =
(final LogEvent logEvent, final JsonWriter jsonWriter) -> {
final StackTraceElement logEventSource = logEvent.getSource();
if (logEventSource == null) {
jsonWriter.writeNull();
} else {
final String sourceMethodName = logEventSource.getMethodName();
jsonWriter.writeString(sourceMethodName);
}
};
private final boolean locationInfoEnabled;
private final EventResolver internalResolver;
SourceResolver(final EventResolverContext context, final TemplateResolverConfig config) {
this.locationInfoEnabled = context.isLocationInfoEnabled();
this.internalResolver = createInternalResolver(context, config);
}
private static EventResolver createInternalResolver(
final EventResolverContext context, final TemplateResolverConfig config) {
if (!context.isLocationInfoEnabled()) {
return NULL_RESOLVER;
}
final String fieldName = config.getString("field");
if ("className".equals(fieldName)) {
return CLASS_NAME_RESOLVER;
} else if ("fileName".equals(fieldName)) {
return FILE_NAME_RESOLVER;
} else if ("lineNumber".equals(fieldName)) {
return LINE_NUMBER_RESOLVER;
} else if ("methodName".equals(fieldName)) {
return METHOD_NAME_RESOLVER;
}
throw new IllegalArgumentException("unknown field: " + config);
}
static String getName() {
return "source";
}
@Override
public boolean isResolvable() {
return locationInfoEnabled;
}
@Override
public boolean isResolvable(final LogEvent logEvent) {
return locationInfoEnabled && logEvent.getSource() != null;
}
@Override
public void resolve(final LogEvent logEvent, final JsonWriter jsonWriter) {
internalResolver.resolve(logEvent, jsonWriter);
}
}
|
SourceResolver
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/attributebinder/typebinder/ResultCheckBinderTest.java
|
{
"start": 487,
"end": 876
}
|
class ____ {
@Test void test(SessionFactoryScope scope) {
Entity entity = new Entity();
scope.inStatelessTransaction(s -> s.insert(entity) );
scope.inStatelessTransaction(s -> s.delete(entity) );
scope.inStatelessTransaction( s -> s.update(entity) );
scope.inStatelessTransaction(s -> s.delete(entity) );
}
@NoResultCheck
@jakarta.persistence.Entity
static
|
ResultCheckBinderTest
|
java
|
apache__spark
|
common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalBlockHandler.java
|
{
"start": 12765,
"end": 16084
}
|
class ____ implements MetricSet {
private final Map<String, Metric> allMetrics;
// Time latency for open block request in ms
private final Timer openBlockRequestLatencyMillis =
new TimerWithCustomTimeUnit(TimeUnit.MILLISECONDS);
// Time latency for executor registration latency in ms
private final Timer registerExecutorRequestLatencyMillis =
new TimerWithCustomTimeUnit(TimeUnit.MILLISECONDS);
// Time latency for processing fetch merged blocks meta request latency in ms
private final Timer fetchMergedBlocksMetaLatencyMillis =
new TimerWithCustomTimeUnit(TimeUnit.MILLISECONDS);
// Time latency for processing finalize shuffle merge request latency in ms
private final Timer finalizeShuffleMergeLatencyMillis =
new TimerWithCustomTimeUnit(TimeUnit.MILLISECONDS);
// Block transfer rate in blocks per second
private final Meter blockTransferRate = new Meter();
// Block fetch message rate per second. When using non-batch fetches
// (`OpenBlocks` or `FetchShuffleBlocks` with `batchFetchEnabled` as false), this will be the
// same as the `blockTransferRate`. When batch fetches are enabled, this will represent the
// number of batch fetches, and `blockTransferRate` will represent the number of blocks
// returned by the fetches.
private final Meter blockTransferMessageRate = new Meter();
// Block transfer rate in byte per second
private final Meter blockTransferRateBytes = new Meter();
// Number of active connections to the shuffle service
private Counter activeConnections = new Counter();
// Number of exceptions caught in connections to the shuffle service
private Counter caughtExceptions = new Counter();
public ShuffleMetrics() {
allMetrics = new HashMap<>();
allMetrics.put("openBlockRequestLatencyMillis", openBlockRequestLatencyMillis);
allMetrics.put("registerExecutorRequestLatencyMillis", registerExecutorRequestLatencyMillis);
allMetrics.put("fetchMergedBlocksMetaLatencyMillis", fetchMergedBlocksMetaLatencyMillis);
allMetrics.put("finalizeShuffleMergeLatencyMillis", finalizeShuffleMergeLatencyMillis);
allMetrics.put("blockTransferRate", blockTransferRate);
allMetrics.put("blockTransferMessageRate", blockTransferMessageRate);
allMetrics.put("blockTransferRateBytes", blockTransferRateBytes);
allMetrics.put("blockTransferAvgSize_1min", new RatioGauge() {
@Override
protected Ratio getRatio() {
return Ratio.of(
blockTransferRateBytes.getOneMinuteRate(),
// use blockTransferMessageRate here instead of blockTransferRate to represent the
// average size of the disk read / network message which has more operational impact
// than the actual size of the block
blockTransferMessageRate.getOneMinuteRate());
}
});
allMetrics.put("registeredExecutorsSize",
(Gauge<Integer>) () -> blockManager.getRegisteredExecutorsSize());
allMetrics.put("numActiveConnections", activeConnections);
allMetrics.put("numCaughtExceptions", caughtExceptions);
}
@Override
public Map<String, Metric> getMetrics() {
return allMetrics;
}
}
private
|
ShuffleMetrics
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/asyncprocessing/operators/windowing/triggers/AsyncProcessingTimeTrigger.java
|
{
"start": 1430,
"end": 3497
}
|
class ____ extends AsyncTrigger<Object, TimeWindow> {
private static final long serialVersionUID = 1L;
private AsyncProcessingTimeTrigger() {}
@Override
public StateFuture<TriggerResult> onElement(
Object element, long timestamp, TimeWindow window, TriggerContext ctx) {
ctx.registerProcessingTimeTimer(window.maxTimestamp());
return StateFutureUtils.completedFuture(TriggerResult.CONTINUE);
}
@Override
public StateFuture<TriggerResult> onEventTime(long time, TimeWindow window, TriggerContext ctx)
throws Exception {
return StateFutureUtils.completedFuture(TriggerResult.CONTINUE);
}
@Override
public StateFuture<TriggerResult> onProcessingTime(
long time, TimeWindow window, TriggerContext ctx) {
return StateFutureUtils.completedFuture(TriggerResult.FIRE);
}
@Override
public StateFuture<Void> clear(TimeWindow window, TriggerContext ctx) throws Exception {
ctx.deleteProcessingTimeTimer(window.maxTimestamp());
return StateFutureUtils.completedVoidFuture();
}
@Override
public boolean canMerge() {
return true;
}
@Override
public void onMerge(TimeWindow window, OnMergeContext ctx) {
// only register a timer if the time is not yet past the end of the merged window
// this is in line with the logic in onElement(). If the time is past the end of
// the window onElement() will fire and setting a timer here would fire the window twice.
long windowMaxTimestamp = window.maxTimestamp();
if (windowMaxTimestamp > ctx.getCurrentProcessingTime()) {
ctx.registerProcessingTimeTimer(windowMaxTimestamp);
}
}
@Override
public String toString() {
return "ProcessingTimeTrigger()";
}
/** Creates a new trigger that fires once system time passes the end of the window. */
public static AsyncProcessingTimeTrigger create() {
return new AsyncProcessingTimeTrigger();
}
}
|
AsyncProcessingTimeTrigger
|
java
|
quarkusio__quarkus
|
extensions/panache/hibernate-orm-panache/deployment/src/test/java/io/quarkus/hibernate/orm/panache/deployment/test/MyOtherTestResource.java
|
{
"start": 348,
"end": 680
}
|
class ____ {
@GET
@Path("{id}")
@Produces(MediaType.APPLICATION_JSON)
public MyOtherEntity get(@PathParam long id) {
MyOtherEntity ret = MyOtherEntity.findById(id);
if (ret == null)
throw new WebApplicationException(Response.Status.NOT_FOUND);
return ret;
}
}
|
MyOtherTestResource
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/sql/dialect/athena/Athena.java
|
{
"start": 129,
"end": 221
}
|
class ____ {
public static final SQLDialect DIALECT = SQLDialect.of(DbType.athena);
}
|
Athena
|
java
|
elastic__elasticsearch
|
libs/entitlement/tools/common/src/main/java/org/elasticsearch/entitlement/tools/ExternalAccess.java
|
{
"start": 610,
"end": 2220
}
|
enum ____ {
PUBLIC_CLASS,
PUBLIC_METHOD,
PROTECTED_METHOD;
private static final String DELIMITER = ":";
public static String toString(EnumSet<ExternalAccess> externalAccesses) {
return externalAccesses.stream().map(Enum::toString).collect(Collectors.joining(DELIMITER));
}
public static EnumSet<ExternalAccess> fromPermissions(boolean publicAccessible, boolean publicMethod, boolean protectedMethod) {
if (publicMethod && protectedMethod) {
throw new IllegalArgumentException();
}
EnumSet<ExternalAccess> externalAccesses = EnumSet.noneOf(ExternalAccess.class);
if (publicMethod) {
externalAccesses.add(ExternalAccess.PUBLIC_METHOD);
} else if (protectedMethod) {
externalAccesses.add(ExternalAccess.PROTECTED_METHOD);
}
if (publicAccessible) {
externalAccesses.add(ExternalAccess.PUBLIC_CLASS);
}
return externalAccesses;
}
public static boolean isExternallyAccessible(EnumSet<ExternalAccess> access) {
return access.contains(ExternalAccess.PUBLIC_CLASS)
&& (access.contains(ExternalAccess.PUBLIC_METHOD) || access.contains(ExternalAccess.PROTECTED_METHOD));
}
public static EnumSet<ExternalAccess> fromString(String accessAsString) {
if ("PUBLIC".equals(accessAsString)) {
return EnumSet.of(ExternalAccess.PUBLIC_CLASS, ExternalAccess.PUBLIC_METHOD);
}
// used by JDK public API extractor (only), describing protected method access
// in this case public
|
ExternalAccess
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/condition/NestableCondition_description_Test.java
|
{
"start": 1392,
"end": 2474
}
|
class ____ {
@Test
void should_return_description_for_nested_conditions() {
// GIVEN
Condition<Customer> condition = customer(name(first("John")),
address(firstLine("11, Downing Street"),
postcode("SW1A 2AA")));
// THEN
then(condition.description().value()).isEqualTo(format("customer:[%n" +
" name:[%n" +
" first: John%n" +
" ],%n" +
" address:[%n" +
" first line: 11, Downing Street,%n" +
" postcode: SW1A 2AA%n" +
" ]%n" +
"]"));
}
}
|
NestableCondition_description_Test
|
java
|
apache__flink
|
flink-table/flink-sql-gateway/src/test/java/org/apache/flink/table/gateway/service/result/ResultFetcherTest.java
|
{
"start": 3053,
"end": 21169
}
|
class ____ {
private static ResolvedSchema schema;
private static List<RowData> data;
@RegisterExtension
private static final TestExecutorExtension<ExecutorService> EXECUTOR_EXTENSION =
new TestExecutorExtension<>(
() ->
Executors.newCachedThreadPool(
new ExecutorThreadFactory(
"Result Fetcher Test Pool",
IgnoreExceptionHandler.INSTANCE)));
@BeforeAll
static void setUp() {
schema =
ResolvedSchema.of(
Column.physical("boolean", DataTypes.BOOLEAN()),
Column.physical("int", DataTypes.INT()),
Column.physical("bigint", DataTypes.BIGINT()),
Column.physical("varchar", DataTypes.STRING()),
Column.physical("decimal(10, 5)", DataTypes.DECIMAL(10, 5)),
Column.physical(
"timestamp", DataTypes.TIMESTAMP(6).bridgedTo(Timestamp.class)),
Column.physical("binary", DataTypes.BYTES()));
data =
Arrays.asList(
GenericRowData.ofKind(
RowKind.INSERT,
null,
1,
2L,
"abc",
BigDecimal.valueOf(1.23),
Timestamp.valueOf("2020-03-01 18:39:14"),
new byte[] {50, 51, 52, -123, 54, 93, 115, 126}),
GenericRowData.ofKind(
RowKind.UPDATE_BEFORE,
false,
null,
0L,
"",
BigDecimal.valueOf(1),
Timestamp.valueOf("2020-03-01 18:39:14.1"),
new byte[] {100, -98, 32, 121, -125}),
GenericRowData.ofKind(
RowKind.UPDATE_AFTER,
true,
Integer.MAX_VALUE,
null,
"abcdefg",
BigDecimal.valueOf(12345),
Timestamp.valueOf("2020-03-01 18:39:14.12"),
new byte[] {-110, -23, 1, 2}),
GenericRowData.ofKind(
RowKind.DELETE,
false,
Integer.MIN_VALUE,
Long.MAX_VALUE,
null,
BigDecimal.valueOf(12345.06789),
Timestamp.valueOf("2020-03-01 18:39:14.123"),
new byte[] {50, 51, 52, -123, 54, 93, 115, 126}),
GenericRowData.ofKind(
RowKind.INSERT,
true,
100,
Long.MIN_VALUE,
"abcdefg111",
null,
Timestamp.valueOf("2020-03-01 18:39:14.123456"),
new byte[] {110, 23, -1, -2}),
GenericRowData.ofKind(
RowKind.DELETE,
null,
-1,
-1L,
"abcdefghijklmnopqrstuvwxyz",
BigDecimal.valueOf(-12345.06789),
null,
null),
GenericRowData.ofKind(
RowKind.INSERT,
null,
-1,
-1L,
"这是一段中文",
BigDecimal.valueOf(-12345.06789),
Timestamp.valueOf("2020-03-04 18:39:14"),
new byte[] {-3, -2, -1, 0, 1, 2, 3}),
GenericRowData.ofKind(
RowKind.DELETE,
null,
-1,
-1L,
"これは日本語をテストするための文です",
BigDecimal.valueOf(-12345.06789),
Timestamp.valueOf("2020-03-04 18:39:14"),
new byte[] {-3, -2, -1, 0, 1, 2, 3}));
}
@Test
void testFetchResultsMultipleTimesWithLimitedBufferSize() {
int bufferSize = data.size() / 2;
ResultFetcher fetcher =
buildResultFetcher(Collections.singletonList(data.iterator()), bufferSize);
int fetchSize = data.size();
runFetchMultipleTimes(
bufferSize, fetchSize, token -> fetcher.fetchResults(token, fetchSize));
}
@Test
void testFetchResultsMultipleTimesWithLimitedFetchSize() {
int bufferSize = data.size();
ResultFetcher fetcher =
buildResultFetcher(Collections.singletonList(data.iterator()), bufferSize);
int fetchSize = data.size() / 2;
runFetchMultipleTimes(
bufferSize, fetchSize, token -> fetcher.fetchResults(token, fetchSize));
}
@Test
void testFetchResultsInWithLimitedBufferSizeInOrientation() {
int bufferSize = data.size() / 2;
ResultFetcher fetcher =
buildResultFetcher(Collections.singletonList(data.iterator()), bufferSize);
int fetchSize = data.size();
runFetchMultipleTimes(
bufferSize,
fetchSize,
token -> fetcher.fetchResults(FetchOrientation.FETCH_NEXT, fetchSize));
}
@Test
void testFetchResultsMultipleTimesWithLimitedFetchSizeInOrientation() {
int bufferSize = data.size();
ResultFetcher fetcher =
buildResultFetcher(Collections.singletonList(data.iterator()), bufferSize);
int fetchSize = data.size() / 2;
runFetchMultipleTimes(
bufferSize,
fetchSize,
token -> fetcher.fetchResults(FetchOrientation.FETCH_NEXT, fetchSize));
}
@Test
void testFetchResultInParallel() throws Exception {
ResultFetcher fetcher =
buildResultFetcher(Collections.singletonList(data.iterator()), data.size() / 2);
CommonTestUtils.waitUtil(
() -> fetcher.getResultStore().getBufferedRecordSize() > 0,
Duration.ofSeconds(10),
"Failed to wait the buffer has data.");
checkFetchResultInParallel(fetcher);
}
@Test
void testFetchResultInOrientationInParallel() throws Exception {
List<Iterator<RowData>> dataSuppliers =
data.stream()
.map(
row ->
new TestIterator(
() -> {
try {
Thread.sleep(1);
return row;
} catch (Exception e) {
throw new SqlExecutionException(
"Failed to return the row.", e);
}
}))
.collect(Collectors.toList());
int fetchThreadNum = 100;
CountDownLatch latch = new CountDownLatch(fetchThreadNum);
ResultFetcher fetcher = buildResultFetcher(dataSuppliers, 1);
Map<Long, List<RowData>> rows = new ConcurrentHashMap<>();
AtomicReference<Boolean> payloadHasData = new AtomicReference<>(true);
for (int i = 0; i < fetchThreadNum; i++) {
EXECUTOR_EXTENSION
.getExecutor()
.submit(
() -> {
ResultSet resultSet =
fetcher.fetchResults(FetchOrientation.FETCH_NEXT, 1);
if (resultSet.getResultType().equals(ResultSet.ResultType.PAYLOAD)
&& resultSet.getData().isEmpty()) {
payloadHasData.set(false);
}
rows.compute(
Thread.currentThread().getId(),
(k, v) -> {
if (v == null) {
return resultSet.getData();
} else {
v.addAll(resultSet.getData());
return v;
}
});
latch.countDown();
});
}
latch.await();
assertEquals(true, payloadHasData.get());
assertEquals(
new HashSet<>(data),
rows.values().stream().flatMap(List::stream).collect(Collectors.toSet()));
}
@Test
void testFetchResultFromDummyStoreInParallel() throws Exception {
checkFetchResultInParallel(
ResultFetcher.fromResults(OperationHandle.create(), schema, data));
}
@Test
void testFetchResultAfterClose() throws Exception {
ResultFetcher fetcher =
buildResultFetcher(Collections.singletonList(data.iterator()), data.size() + 1);
List<RowData> actual = Collections.emptyList();
long token = 0L;
while (actual.size() < 1) {
// fill the fetcher buffer
ResultSet resultSet = fetcher.fetchResults(token, 1);
token = checkNotNull(resultSet.getNextToken());
actual = resultSet.getData();
}
assertEquals(data.subList(0, 1), actual);
fetcher.close();
long testToken = token;
AtomicReference<Boolean> meetEnd = new AtomicReference<>(false);
EXECUTOR_EXTENSION
.getExecutor()
.submit(
() -> {
// Should meet EOS in the end.
long nextToken = testToken;
while (true) {
ResultSet resultSet =
fetcher.fetchResults(nextToken, Integer.MAX_VALUE);
if (resultSet.getResultType() == ResultSet.ResultType.EOS) {
break;
}
nextToken = checkNotNull(resultSet.getNextToken());
}
meetEnd.set(true);
});
CommonTestUtils.waitUtil(
meetEnd::get,
Duration.ofSeconds(10),
"Should get EOS when fetch results from the closed fetcher.");
}
@Test
void testFetchResultWithToken() {
ResultFetcher fetcher =
buildResultFetcher(Collections.singletonList(data.iterator()), data.size());
Long nextToken = 0L;
List<RowData> actual = new ArrayList<>();
ResultSet resultSetBefore = null;
while (nextToken != null) {
if (resultSetBefore != null) {
assertEquals(resultSetBefore, fetcher.fetchResults(nextToken - 1, data.size()));
}
ResultSet resultSet = fetcher.fetchResults(nextToken, data.size());
ResultSet resultSetWithSameToken = fetcher.fetchResults(nextToken, data.size());
assertEquals(resultSet, resultSetWithSameToken);
if (resultSet.getResultType() == ResultSet.ResultType.EOS) {
break;
}
resultSetBefore = resultSet;
actual.addAll(checkNotNull(resultSet.getData()));
nextToken = resultSet.getNextToken();
}
assertEquals(data, actual);
}
// --------------------------------------------------------------------------------------------
// Negative cases
// --------------------------------------------------------------------------------------------
@Test
void testFetchFailedResult() {
String message = "Artificial Exception";
ResultFetcher fetcher =
buildResultFetcher(
Arrays.asList(TestIterator.createErrorIterator(message), data.iterator()),
data.size());
assertThatThrownBy(
() -> {
Long token = 0L;
while (token != null) {
// Use loop to fetch results from the ErrorIterator
token =
fetcher.fetchResults(token, Integer.MAX_VALUE)
.getNextToken();
}
})
.satisfies(FlinkAssertions.anyCauseMatches(message));
}
@Test
void testFetchIllegalToken() {
ResultFetcher fetcher =
buildResultFetcher(Collections.singletonList(data.iterator()), data.size());
assertThatThrownBy(() -> fetcher.fetchResults(2, Integer.MAX_VALUE))
.satisfies(FlinkAssertions.anyCauseMatches("Expecting token to be 0, but found 2"));
}
@Test
void testFetchBeforeWithDifferentSize() throws Exception {
ResultFetcher fetcher =
buildResultFetcher(Collections.singletonList(data.iterator()), data.size() / 2);
CommonTestUtils.waitUtil(
() -> fetcher.getResultStore().getBufferedRecordSize() > 1,
Duration.ofSeconds(10),
"Failed to make cached records num larger than 1.");
ResultSet firstFetch = fetcher.fetchResults(0, Integer.MAX_VALUE);
int firstFetchSize = firstFetch.getData().size();
assertThatThrownBy(() -> fetcher.fetchResults(0, firstFetchSize - 1))
.satisfies(
FlinkAssertions.anyCauseMatches(
String.format(
"As the same token is provided, fetch size must be not less than the previous returned buffer size."
+ " Previous returned result size is %s, current max_fetch_size to be %s.",
firstFetch.getData().size(), firstFetchSize - 1)));
}
// --------------------------------------------------------------------------------------------
@SuppressWarnings("unchecked")
private ResultFetcher buildResultFetcher(List<Iterator<RowData>> rows, int bufferSize) {
OperationHandle operationHandle = OperationHandle.create();
return new ResultFetcher(
operationHandle,
schema,
CloseableIterator.adapterForIterator(new IteratorChain(rows)),
StaticResultProvider.SIMPLE_ROW_DATA_TO_STRING_CONVERTER,
false,
null,
ResultKind.SUCCESS_WITH_CONTENT,
bufferSize,
TableResultUtils.buildPrintStyle(
schema, StaticResultProvider.SIMPLE_ROW_DATA_TO_STRING_CONVERTER));
}
private void runFetchMultipleTimes(
int bufferSize, int fetchSize, Function<Long, ResultSet> fetchResults) {
List<RowData> fetchedRows = new ArrayList<>();
ResultSet currentResult;
Long token = 0L;
do {
currentResult = fetchResults.apply(token);
assertTrue(
checkNotNull(currentResult.getData()).size()
<= Math.min(bufferSize, fetchSize));
token = currentResult.getNextToken();
fetchedRows.addAll(currentResult.getData());
} while (currentResult.getResultType() != ResultSet.ResultType.EOS);
assertEquals(ResultSet.ResultType.EOS, checkNotNull(currentResult).getResultType());
assertEquals(data, fetchedRows);
}
private void checkFetchResultInParallel(ResultFetcher fetcher) throws Exception {
AtomicReference<Boolean> isEqual = new AtomicReference<>(true);
int fetchThreadNum = 100;
CountDownLatch latch = new CountDownLatch(fetchThreadNum);
List<RowData> firstFetch = fetcher.fetchResults(0, Integer.MAX_VALUE).getData();
for (int i = 0; i < fetchThreadNum; i++) {
EXECUTOR_EXTENSION
.getExecutor()
.submit(
() -> {
ResultSet resultSet = fetcher.fetchResults(0, Integer.MAX_VALUE);
if (!firstFetch.equals(resultSet.getData())) {
isEqual.set(false);
}
latch.countDown();
});
}
latch.await();
assertEquals(true, isEqual.get());
}
// --------------------------------------------------------------------------------------------
private static
|
ResultFetcherTest
|
java
|
reactor__reactor-core
|
reactor-core/src/jcstress/java/reactor/core/scheduler/BasicSchedulersStressTest.java
|
{
"start": 5166,
"end": 6715
}
|
class ____ {
    // Released once per actor so the arbiter can wait for both graceful disposals.
    private final CountDownLatch latch = new CountDownLatch(2);
    private final ParallelScheduler scheduler =
            new ParallelScheduler(2, Thread::new);
    {
        scheduler.init();
    }
    // Actor 1: trigger graceful disposal concurrently with actor 2 and record the
    // identity (hashCode) of the resource it observed.
    @Actor
    public void disposeGracefully1(IIZ_Result r) {
        scheduler.disposeGracefully().doFinally(sig -> latch.countDown()).subscribe();
        r.r1 = scheduler.state.initialResource.hashCode();
    }
    // Actor 2: same as actor 1; races against it on the shared scheduler.
    @Actor
    public void disposeGracefully2(IIZ_Result r) {
        scheduler.disposeGracefully().doFinally(sig -> latch.countDown()).subscribe();
        r.r2 = scheduler.state.initialResource.hashCode();
    }
    // Arbiter: runs after both actors; verifies both observed the same resource and
    // that the scheduler ended up disposed.
    @Arbiter
    public void arbiter(IIZ_Result r) {
        try {
            latch.await(5, TimeUnit.SECONDS);
        }
        catch (InterruptedException e) {
            throw new RuntimeException(e);
        }
        // Validate both disposals left the Scheduler in consistent state,
        // assuming the await process coordinates on the resources as identified
        // by r.r1 and r.r2, which should be equal.
        boolean consistentState = r.r1 == r.r2;
        r.r3 = consistentState && scheduler.isDisposed();
        if (consistentState) {
            //when that condition is true, we erase the r1/r2 state. that should greatly limit
            //the output of "interesting acceptable state" in the dump should and error occur
            r.r1 = r.r2 = 0;
        }
    }
}
@JCStressTest
@Outcome(id = {".*, true"}, expect = Expect.ACCEPTABLE,
desc = "Scheduler in consistent state upon concurrent dispose and " +
"disposeGracefully, eventually disposed.")
@State
public static
|
ParallelSchedulerDisposeGracefullyStressTest
|
java
|
google__guava
|
guava/src/com/google/common/io/MoreFiles.java
|
{
"start": 2528,
"end": 2789
}
|
class ____ a sibling method from {@code Files} appears to be missing from this class.
*
* @since 21.0 (but only since 33.4.0 in the Android flavor)
* @author Colin Decker
*/
@J2ktIncompatible
@GwtIncompatible
@J2ObjCIncompatible // java.nio.file
public final
|
if
|
java
|
apache__camel
|
components/camel-ehcache/src/test/java/org/apache/camel/component/ehcache/EhcacheSpringConfigurationTest.java
|
{
"start": 1507,
"end": 3511
}
|
class ____ extends CamelSpringTestSupport {
    // Cache configured programmatically via the Spring bean "myProgrammaticConfiguration".
    @EndpointInject("ehcache://myProgrammaticCacheConf?configuration=#myProgrammaticConfiguration")
    private EhcacheEndpoint ehcacheConf;
    // Cache configured from an ehcache XML file on the classpath.
    @EndpointInject("ehcache://myFileCacheConf?keyType=java.lang.String&valueType=java.lang.String&configurationUri=classpath:ehcache/ehcache-file-config.xml")
    private EhcacheEndpoint ehcacheFileConf;
    @Override
    protected AbstractApplicationContext createApplicationContext() {
        return new ClassPathXmlApplicationContext("org/apache/camel/component/ehcache/EhcacheSpringConfigurationTest.xml");
    }
    // *****************************
    // Test
    // *****************************
    // Verifies the bean-driven configuration: 100 on-heap entries plus a 1 MB off-heap pool.
    @Test
    void testProgrammaticConfiguration() throws Exception {
        Cache<String, String> cache = getCache(ehcacheConf, "myProgrammaticCacheConf");
        ResourcePools pools = cache.getRuntimeConfiguration().getResourcePools();
        SizedResourcePool h = pools.getPoolForResource(ResourceType.Core.HEAP);
        assertNotNull(h);
        assertEquals(100, h.getSize());
        assertEquals(EntryUnit.ENTRIES, h.getUnit());
        SizedResourcePool o = pools.getPoolForResource(ResourceType.Core.OFFHEAP);
        assertNotNull(o);
        assertEquals(1, o.getSize());
        assertEquals(MemoryUnit.MB, o.getUnit());
    }
    // Verifies the XML-file-driven configuration: 150 on-heap entries.
    @Test
    void testFileConfiguration() throws Exception {
        Cache<String, String> cache = getCache(ehcacheFileConf, "myFileCacheConf");
        ResourcePools pools = cache.getRuntimeConfiguration().getResourcePools();
        SizedResourcePool h = pools.getPoolForResource(ResourceType.Core.HEAP);
        assertNotNull(h);
        assertEquals(150, h.getSize());
        assertEquals(EntryUnit.ENTRIES, h.getUnit());
    }
    // Resolves a String/String cache by name from the endpoint's cache manager.
    protected Cache<String, String> getCache(EhcacheEndpoint endpoint, String cacheName) throws Exception {
        return endpoint.getManager().getCache(cacheName, String.class, String.class);
    }
}
|
EhcacheSpringConfigurationTest
|
java
|
google__guava
|
android/guava-tests/test/com/google/common/graph/StandardImmutableUndirectedGraphTest.java
|
{
"start": 1000,
"end": 1903
}
|
class ____
        extends AbstractStandardUndirectedGraphTest {
    // Runs the whole suite twice: with and without self-loops allowed.
    @Parameters(name = "allowsSelfLoops={0}")
    public static Collection<Object[]> parameters() {
        return Arrays.asList(new Object[][] {{false}, {true}});
    }
    private final boolean allowsSelfLoops;
    private ImmutableGraph.Builder<Integer> graphBuilder;
    public StandardImmutableUndirectedGraphTest(boolean allowsSelfLoops) {
        this.allowsSelfLoops = allowsSelfLoops;
    }
    @Override
    public Graph<Integer> createGraph() {
        graphBuilder = GraphBuilder.undirected().allowsSelfLoops(allowsSelfLoops).immutable();
        return graphBuilder.build();
    }
    // The graph under test is immutable, so every mutation goes through the builder and
    // rebuilds the `graph` field from scratch.
    @Override
    final void addNode(Integer n) {
        graphBuilder.addNode(n);
        graph = graphBuilder.build();
    }
    @Override
    final void putEdge(Integer n1, Integer n2) {
        graphBuilder.putEdge(n1, n2);
        graph = graphBuilder.build();
    }
}
|
StandardImmutableUndirectedGraphTest
|
java
|
netty__netty
|
microbench/src/main/java/io/netty/microbench/util/AbstractSharedExecutorMicrobenchmark.java
|
{
"start": 1307,
"end": 2608
}
|
class ____ extends AbstractMicrobenchmarkBase {
protected static final int DEFAULT_FORKS = 1;
protected static final String[] JVM_ARGS;
static {
final String[] customArgs = {
"-Xms2g", "-Xmx2g", "-XX:MaxDirectMemorySize=2g", "-Djmh.executor=CUSTOM",
"-Djmh.executor.class=io.netty.microbench.util.AbstractSharedExecutorMicrobenchmark$DelegateHarnessExecutor" };
JVM_ARGS = new String[BASE_JVM_ARGS.length + customArgs.length];
System.arraycopy(BASE_JVM_ARGS, 0, JVM_ARGS, 0, BASE_JVM_ARGS.length);
System.arraycopy(customArgs, 0, JVM_ARGS, BASE_JVM_ARGS.length, customArgs.length);
}
/**
* Set the executor (in the form of an {@link EventLoop}) which JMH will use.
* <p>
* This must be called before JMH requires an executor to execute objects.
* @param eventLoop Used as an executor by JMH to run benchmarks.
*/
public static void executor(EventLoop eventLoop) {
DelegateHarnessExecutor.executor(eventLoop);
}
/**
* This executor allows Netty and JMH to share a common executor.
* This is achieved by using {@link DelegateHarnessExecutor#executor(EventLoop)}
* with the {@link EventLoop} used by Netty.
*/
public static final
|
AbstractSharedExecutorMicrobenchmark
|
java
|
apache__spark
|
core/src/main/java/org/apache/spark/unsafe/map/BytesToBytesMap.java
|
{
"start": 9048,
"end": 16210
}
|
class ____ implements Iterator<Location> {
    // Remaining records to return across all in-memory pages and spill files.
    private int numRecords;
    // Shared Location instance returned by every next() call (reused for efficiency).
    private final Location loc;
    private MemoryBlock currentPage = null;
    private int recordsInPage = 0;
    private Object pageBaseObject;
    private long offsetInPage;
    // If this iterator destructive or not. When it is true, it frees each page as it moves onto
    // next one.
    private boolean destructive = false;
    // Reader over spilled records; only non-null once in-memory pages are exhausted.
    private UnsafeSorterSpillReader reader = null;
    private MapIterator(int numRecords, Location loc, boolean destructive) {
        this.numRecords = numRecords;
        this.loc = loc;
        this.destructive = destructive;
        if (destructive) {
            destructiveIterator = this;
            // longArray will not be used anymore if destructive is true, release it now.
            if (longArray != null) {
                freeArray(longArray);
                longArray = null;
            }
        }
    }
    // Moves to the next in-memory page, or falls back to the first spill file when all
    // pages are consumed. In destructive mode the page just finished is freed.
    private void advanceToNextPage() {
        // SPARK-26265: We will first lock this `MapIterator` and then `TaskMemoryManager` when going
        // to free a memory page by calling `freePage`. At the same time, it is possibly that another
        // memory consumer first locks `TaskMemoryManager` and then this `MapIterator` when it
        // acquires memory and causes spilling on this `MapIterator`. To avoid deadlock here, we keep
        // reference to the page to free and free it after releasing the lock of `MapIterator`.
        MemoryBlock pageToFree = null;
        try {
            synchronized (this) {
                int nextIdx = dataPages.indexOf(currentPage) + 1;
                if (destructive && currentPage != null) {
                    dataPages.remove(currentPage);
                    pageToFree = currentPage;
                    nextIdx--;
                }
                if (dataPages.size() > nextIdx) {
                    // Position on the next in-memory page; the page header stores the
                    // number of records it contains.
                    currentPage = dataPages.get(nextIdx);
                    pageBaseObject = currentPage.getBaseObject();
                    offsetInPage = currentPage.getBaseOffset();
                    recordsInPage = UnsafeAlignedOffset.getSize(pageBaseObject, offsetInPage);
                    offsetInPage += UnsafeAlignedOffset.getUaoSize();
                } else {
                    // No more in-memory pages: switch to reading spilled records.
                    currentPage = null;
                    if (reader != null) {
                        handleFailedDelete();
                    }
                    try {
                        Closeables.close(reader, /* swallowIOException = */ false);
                        reader = spillWriters.getFirst().getReader(serializerManager);
                        recordsInPage = -1;
                    } catch (IOException e) {
                        // Scala iterator does not handle exception
                        Platform.throwException(e);
                    }
                }
            }
        } finally {
            if (pageToFree != null) {
                freePage(pageToFree);
            }
        }
    }
    @Override
    public boolean hasNext() {
        if (numRecords == 0) {
            // Iteration finished: clean up the spill file backing the current reader.
            if (reader != null) {
                handleFailedDelete();
            }
        }
        return numRecords > 0;
    }
    @Override
    public Location next() {
        if (recordsInPage == 0) {
            advanceToNextPage();
        }
        numRecords--;
        if (currentPage != null) {
            int totalLength = UnsafeAlignedOffset.getSize(pageBaseObject, offsetInPage);
            loc.with(currentPage, offsetInPage);
            // [total size] [key size] [key] [value] [pointer to next]
            offsetInPage += UnsafeAlignedOffset.getUaoSize() + totalLength + 8;
            recordsInPage --;
            return loc;
        } else {
            // Reading from a spill file instead of an in-memory page.
            assert(reader != null);
            if (!reader.hasNext()) {
                advanceToNextPage();
            }
            try {
                reader.loadNext();
            } catch (IOException e) {
                try {
                    reader.close();
                } catch(IOException e2) {
                    logger.error("Error while closing spill reader", e2);
                }
                // Scala iterator does not handle exception
                Platform.throwException(e);
            }
            loc.with(reader.getBaseObject(), reader.getBaseOffset(), reader.getRecordLength());
            return loc;
        }
    }
    // Releases memory by spilling not-yet-visited pages to disk, newest first, until at
    // least `numBytes` have been freed. Only valid for destructive iterators; always
    // keeps the page currently being iterated.
    public synchronized long spill(long numBytes) throws IOException {
        if (!destructive || dataPages.size() == 1) {
            return 0L;
        }
        updatePeakMemoryUsed();
        // TODO: use existing ShuffleWriteMetrics
        ShuffleWriteMetrics writeMetrics = new ShuffleWriteMetrics();
        long released = 0L;
        while (dataPages.size() > 0) {
            MemoryBlock block = dataPages.getLast();
            // The currentPage is used, cannot be released
            if (block == currentPage) {
                break;
            }
            Object base = block.getBaseObject();
            long offset = block.getBaseOffset();
            // Page header holds the record count; then records follow back to back.
            int numRecords = UnsafeAlignedOffset.getSize(base, offset);
            int uaoSize = UnsafeAlignedOffset.getUaoSize();
            offset += uaoSize;
            final UnsafeSorterSpillWriter writer =
                    new UnsafeSorterSpillWriter(blockManager, 32 * 1024, writeMetrics, numRecords);
            while (numRecords > 0) {
                int length = UnsafeAlignedOffset.getSize(base, offset);
                writer.write(base, offset + uaoSize, length, 0);
                // Record layout: [size prefix][payload][8-byte next pointer].
                offset += uaoSize + length + 8;
                numRecords--;
            }
            writer.close();
            spillWriters.add(writer);
            dataPages.removeLast();
            released += block.size();
            freePage(block);
            if (released >= numBytes) {
                break;
            }
        }
        return released;
    }
    // Removes the oldest spill file from disk, logging (not throwing) if deletion fails.
    private void handleFailedDelete() {
        if (spillWriters.size() > 0) {
            // remove the spill file from disk
            File file = spillWriters.removeFirst().getFile();
            if (file != null && file.exists() && !file.delete()) {
                logger.error("Was unable to delete spill file {}",
                    MDC.of(LogKeys.PATH, file.getAbsolutePath()));
            }
        }
    }
}
/**
* Returns an iterator for iterating over the entries of this map.
*
* For efficiency, all calls to `next()` will return the same {@link Location} object.
*
* The returned iterator is thread-safe. However if the map is modified while iterating over it,
* the behavior of the returned iterator is undefined.
*/
public MapIterator iterator() {
    // Non-destructive: visited pages are retained, so the map stays usable afterwards.
    return new MapIterator(numValues, new Location(), false);
}
/**
* Returns a destructive iterator for iterating over the entries of this map. It frees each page
* as it moves onto next one. Notice: it is illegal to call any method on the map after
* `destructiveIterator()` has been called.
*
* For efficiency, all calls to `next()` will return the same {@link Location} object.
*
* The returned iterator is thread-safe. However if the map is modified while iterating over it,
* the behavior of the returned iterator is undefined.
*/
public MapIterator destructiveIterator() {
    // Capture the peak memory figure before pages start being freed during iteration.
    updatePeakMemoryUsed();
    return new MapIterator(numValues, new Location(), true);
}
/**
* Iterator for the entries of this map. This is to first iterate over key indices in
* `longArray` then accessing values in `dataPages`. NOTE: this is different from `MapIterator`
* in the sense that key index is preserved here
* (See `UnsafeHashedRelation` for example of usage).
*/
public final
|
MapIterator
|
java
|
apache__flink
|
flink-tests/src/test/java/org/apache/flink/test/state/ChangelogCompatibilityITCase.java
|
{
"start": 12270,
"end": 14074
}
|
// NOTE(review): names suggest this enumerates the kind of snapshot a job is restored
// from in the compatibility test — confirm against the call sites.
enum ____ {
    CANONICAL_SAVEPOINT,
    NATIVE_SAVEPOINT,
    CHECKPOINT
}
// Submits the job to the cluster and blocks until all of its tasks are running.
private void submit(JobGraph jobGraph, ClusterClient<?> client) throws Exception {
    client.submitJob(jobGraph).get();
    waitForAllTaskRunning(miniClusterResource.getMiniCluster(), jobGraph.getJobID(), true);
}
// Converts a local file to a URI string (e.g. "file:/tmp/x") as expected by the
// checkpoint/savepoint directory configuration options.
private static String pathToString(File path) {
    return path.toURI().toString();
}
@ClassRule public static final TemporaryFolder TEMPORARY_FOLDER = new TemporaryFolder();
private File checkpointDir;
private File savepointDir;
private MiniClusterWithClientResource miniClusterResource;
public ChangelogCompatibilityITCase(TestCase testCase) {
this.testCase = testCase;
}
// Provisions fresh checkpoint/savepoint directories, configures changelog storage, and
// starts a mini cluster (11 TMs x 1 slot) before each test.
@Before
public void before() throws Exception {
    checkpointDir = TEMPORARY_FOLDER.newFolder();
    savepointDir = TEMPORARY_FOLDER.newFolder();
    Configuration config = new Configuration();
    config.set(CHECKPOINTS_DIRECTORY, pathToString(checkpointDir));
    config.set(SAVEPOINT_DIRECTORY, pathToString(savepointDir));
    FsStateChangelogStorageFactory.configure(
            config, TEMPORARY_FOLDER.newFolder(), Duration.ofMinutes(1), 10);
    miniClusterResource =
            new MiniClusterWithClientResource(
                    new MiniClusterResourceConfiguration.Builder()
                            .setConfiguration(config)
                            .setNumberTaskManagers(11)
                            .setNumberSlotsPerTaskManager(1)
                            .build());
    miniClusterResource.before();
}
// Tears down the mini cluster if it was started (guards against a failed before()).
@After
public void after() {
    if (miniClusterResource != null) {
        miniClusterResource.after();
    }
}
}
|
RestoreSource
|
java
|
apache__flink
|
flink-python/src/test/java/org/apache/flink/table/runtime/operators/python/aggregate/AbstractPythonStreamAggregateOperatorTest.java
|
{
"start": 1996,
"end": 2183
}
|
class ____ {@link PythonStreamGroupAggregateOperatorTest}, {@link
* PythonStreamGroupTableAggregateOperatorTest} and {@link
* PythonStreamGroupWindowAggregateOperatorTest}.
*/
abstract
|
for
|
java
|
spring-projects__spring-boot
|
module/spring-boot-actuator/src/test/java/org/springframework/boot/actuate/endpoint/web/ServletEndpointRegistrarTests.java
|
{
"start": 2060,
"end": 7985
}
|
class ____ {
@Mock
@SuppressWarnings("NullAway.Init")
private ServletContext servletContext;
@Mock
@SuppressWarnings("NullAway.Init")
private ServletRegistration.Dynamic servletDynamic;
@Mock
@SuppressWarnings("NullAway.Init")
private FilterRegistration.Dynamic filterDynamic;
@Test
@SuppressWarnings("NullAway") // Test null check
void createWhenServletEndpointsIsNullShouldThrowException() {
assertThatIllegalArgumentException().isThrownBy(() -> new ServletEndpointRegistrar(null, null))
.withMessageContaining("'servletEndpoints' must not be null");
}
@Test
void onStartupShouldRegisterServlets() throws ServletException {
assertBasePath(null, "/test/*");
}
@Test
void onStartupWhenHasBasePathShouldIncludeBasePath() throws ServletException {
assertBasePath("/actuator", "/actuator/test/*");
}
@Test
void onStartupWhenHasEmptyBasePathShouldPrefixWithSlash() throws ServletException {
assertBasePath("", "/test/*");
}
@Test
void onStartupWhenHasRootBasePathShouldNotAddDuplicateSlash() throws ServletException {
assertBasePath("/", "/test/*");
}
// Registers a single servlet endpoint with the given base path and asserts that it is
// added to the servlet context under the expected URL mapping, with no extra interactions.
private void assertBasePath(@Nullable String basePath, String expectedMapping) throws ServletException {
    given(this.servletContext.addServlet(any(String.class), any(Servlet.class))).willReturn(this.servletDynamic);
    ExposableServletEndpoint endpoint = mockEndpoint(new EndpointServlet(TestServlet.class));
    ServletEndpointRegistrar registrar = new ServletEndpointRegistrar(basePath, Collections.singleton(endpoint),
            (endpointId, defaultAccess) -> Access.UNRESTRICTED);
    registrar.onStartup(this.servletContext);
    then(this.servletContext).should()
        .addServlet(eq("test-actuator-endpoint"),
                (Servlet) assertArg((servlet) -> assertThat(servlet).isInstanceOf(TestServlet.class)));
    then(this.servletDynamic).should().addMapping(expectedMapping);
    then(this.servletContext).shouldHaveNoMoreInteractions();
}
@Test
void onStartupWhenHasInitParametersShouldRegisterInitParameters() throws Exception {
given(this.servletContext.addServlet(any(String.class), any(Servlet.class))).willReturn(this.servletDynamic);
ExposableServletEndpoint endpoint = mockEndpoint(
new EndpointServlet(TestServlet.class).withInitParameter("a", "b"));
ServletEndpointRegistrar registrar = new ServletEndpointRegistrar("/actuator", Collections.singleton(endpoint),
(endpointId, defaultAccess) -> Access.UNRESTRICTED);
registrar.onStartup(this.servletContext);
then(this.servletDynamic).should().setInitParameters(Collections.singletonMap("a", "b"));
}
@Test
void onStartupWhenHasLoadOnStartupShouldRegisterLoadOnStartup() throws Exception {
given(this.servletContext.addServlet(any(String.class), any(Servlet.class))).willReturn(this.servletDynamic);
ExposableServletEndpoint endpoint = mockEndpoint(new EndpointServlet(TestServlet.class).withLoadOnStartup(7));
ServletEndpointRegistrar registrar = new ServletEndpointRegistrar("/actuator", Collections.singleton(endpoint),
(endpointId, defaultAccess) -> Access.UNRESTRICTED);
registrar.onStartup(this.servletContext);
then(this.servletDynamic).should().setLoadOnStartup(7);
}
@Test
void onStartupWhenHasNotLoadOnStartupShouldRegisterDefaultValue() throws Exception {
given(this.servletContext.addServlet(any(String.class), any(Servlet.class))).willReturn(this.servletDynamic);
ExposableServletEndpoint endpoint = mockEndpoint(new EndpointServlet(TestServlet.class));
ServletEndpointRegistrar registrar = new ServletEndpointRegistrar("/actuator", Collections.singleton(endpoint),
(endpointId, defaultAccess) -> Access.UNRESTRICTED);
registrar.onStartup(this.servletContext);
then(this.servletDynamic).should().setLoadOnStartup(-1);
}
@Test
void onStartupWhenAccessIsDisabledShouldNotRegister() throws Exception {
ExposableServletEndpoint endpoint = mock(ExposableServletEndpoint.class);
given(endpoint.getEndpointId()).willReturn(EndpointId.of("test"));
ServletEndpointRegistrar registrar = new ServletEndpointRegistrar("/actuator", Collections.singleton(endpoint));
registrar.onStartup(this.servletContext);
then(this.servletContext).shouldHaveNoInteractions();
}
@Test
void onStartupWhenAccessIsReadOnlyShouldRegisterServletWithFilter() throws Exception {
ExposableServletEndpoint endpoint = mockEndpoint(new EndpointServlet(TestServlet.class));
given(endpoint.getEndpointId()).willReturn(EndpointId.of("test"));
given(this.servletContext.addServlet(any(String.class), any(Servlet.class))).willReturn(this.servletDynamic);
given(this.servletContext.addFilter(any(String.class), any(Filter.class))).willReturn(this.filterDynamic);
ServletEndpointRegistrar registrar = new ServletEndpointRegistrar("/actuator", Collections.singleton(endpoint),
(endpointId, defaultAccess) -> Access.READ_ONLY);
registrar.onStartup(this.servletContext);
then(this.servletContext).should()
.addServlet(eq("test-actuator-endpoint"),
(Servlet) assertArg((servlet) -> assertThat(servlet).isInstanceOf(TestServlet.class)));
then(this.servletDynamic).should().addMapping("/actuator/test/*");
then(this.servletContext).should()
.addFilter(eq("test-actuator-endpoint-access-filter"), (Filter) assertArg((filter) -> assertThat(filter)
.isInstanceOf(
org.springframework.boot.actuate.endpoint.web.ServletEndpointRegistrar.ReadOnlyAccessFilter.class)));
then(this.filterDynamic).should()
.addMappingForServletNames(EnumSet.allOf(DispatcherType.class), false, "test-actuator-endpoint");
}
// Creates a mock endpoint named "test" rooted at "test" that serves the given servlet.
private ExposableServletEndpoint mockEndpoint(EndpointServlet endpointServlet) {
    ExposableServletEndpoint endpoint = mock(ExposableServletEndpoint.class);
    given(endpoint.getEndpointId()).willReturn(EndpointId.of("test"));
    given(endpoint.getEndpointServlet()).willReturn(endpointServlet);
    given(endpoint.getRootPath()).willReturn("test");
    return endpoint;
}
static
|
ServletEndpointRegistrarTests
|
java
|
quarkusio__quarkus
|
integration-tests/kubernetes/quarkus-standard-way/src/test/java/io/quarkus/it/kubernetes/OpenshiftWithKubernetesConfigDeploymentConfigTest.java
|
{
"start": 683,
"end": 3089
}
|
class ____ {
    private static final String NAME = "openshift-with-kubernetes-config";
    // Prod-mode build with the OpenShift and kubernetes-config extensions forced in,
    // producing DeploymentConfig resources instead of Deployments.
    @RegisterExtension
    static final QuarkusProdModeTest config = new QuarkusProdModeTest()
            .withApplicationRoot((jar) -> jar.addClasses(GreetingResource.class))
            .setApplicationName(NAME)
            .setApplicationVersion("0.1-SNAPSHOT")
            .overrideConfigKey("quarkus.openshift.deployment-kind", "deployment-config")
            .setForcedDependencies(List.of(Dependency.of("io.quarkus", "quarkus-openshift", Version.getVersion()),
                    Dependency.of("io.quarkus", "quarkus-kubernetes-config", Version.getVersion())));
    @ProdBuildResults
    private ProdModeTestResults prodModeTestResults;
    // Asserts that the build emitted openshift.json/yml containing exactly one
    // DeploymentConfig with the expected name, runtime label, and JAVA_APP_JAR env var.
    @Test
    public void assertGeneratedResources() throws IOException {
        Path kubernetesDir = prodModeTestResults.getBuildDir().resolve("kubernetes");
        assertThat(kubernetesDir)
                .isDirectoryContaining(p -> p.getFileName().endsWith("openshift.json"))
                .isDirectoryContaining(p -> p.getFileName().endsWith("openshift.yml"));
        List<HasMetadata> openshiftList = DeserializationUtil.deserializeAsList(
                kubernetesDir.resolve("openshift.yml"));
        assertThat(openshiftList).filteredOn(h -> "DeploymentConfig".equals(h.getKind())).singleElement().satisfies(h -> {
            assertThat(h.getMetadata()).satisfies(m -> {
                assertThat(m.getName()).isEqualTo(NAME);
                assertThat(m.getLabels().get("app.openshift.io/runtime")).isEqualTo("quarkus");
            });
            AbstractObjectAssert<?, ?> specAssert = assertThat(h).extracting("spec");
            specAssert.extracting("template").extracting("spec").isInstanceOfSatisfying(PodSpec.class,
                    podSpec -> {
                        assertThat(podSpec.getContainers()).singleElement().satisfies(container -> {
                            List<EnvVar> envVars = container.getEnv();
                            assertThat(envVars).anySatisfy(envVar -> {
                                assertThat(envVar.getName()).isEqualTo("JAVA_APP_JAR");
                                assertThat(envVar.getValue()).isEqualTo("/deployments/quarkus-run.jar");
                            });
                        });
                    });
        });
    }
}
|
OpenshiftWithKubernetesConfigDeploymentConfigTest
|
java
|
apache__thrift
|
lib/javame/src/org/apache/thrift/transport/TTransportFactory.java
|
{
"start": 858,
"end": 1115
}
|
class ____ to create wrapped instance of Transports.
* This is used primarily in servers, which get Transports from
* a ServerTransport and then may want to mutate them (i.e. create
* a BufferedTransport from the underlying base transport)
*
*/
public
|
used
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/customwordembedding/ContinuousFeatureValue.java
|
{
"start": 410,
"end": 784
}
|
// An immutable continuous feature: a row index paired with a real-valued weight.
class ____ extends FeatureValue {
    // Row index of this feature in the embedding matrix.
    private final int featureId;
    // Continuous weight associated with the feature.
    private final double featureWeight;
    public ContinuousFeatureValue(int id, double weight) {
        this.featureId = id;
        this.featureWeight = weight;
    }
    /** Returns the feature's row index. */
    @Override
    public int getRow() {
        return featureId;
    }
    /** Returns the feature's weight. */
    @Override
    public double getWeight() {
        return featureWeight;
    }
}
|
ContinuousFeatureValue
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/QuasiMonteCarlo.java
|
{
"start": 3213,
"end": 3727
}
|
class ____ extends Configured implements Tool {
static final String DESCRIPTION
= "A map/reduce program that estimates Pi using a quasi-Monte Carlo method.";
/** tmp directory for input/output */
static private final String TMP_DIR_PREFIX = QuasiMonteCarlo.class.getSimpleName();
/** 2-dimensional Halton sequence {H(i)},
* where H(i) is a 2-dimensional point and i >= 1 is the index.
* Halton sequence is used to generate sample points for Pi estimation.
*/
private static
|
QuasiMonteCarlo
|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableConcatMapEager.java
|
{
"start": 1179,
"end": 2051
}
|
class ____<T, R> extends AbstractFlowableWithUpstream<T, R> {
final Function<? super T, ? extends Publisher<? extends R>> mapper;
final int maxConcurrency;
final int prefetch;
final ErrorMode errorMode;
public FlowableConcatMapEager(Flowable<T> source,
Function<? super T, ? extends Publisher<? extends R>> mapper,
int maxConcurrency,
int prefetch,
ErrorMode errorMode) {
super(source);
this.mapper = mapper;
this.maxConcurrency = maxConcurrency;
this.prefetch = prefetch;
this.errorMode = errorMode;
}
@Override
protected void subscribeActual(Subscriber<? super R> s) {
source.subscribe(new ConcatMapEagerDelayErrorSubscriber<>(
s, mapper, maxConcurrency, prefetch, errorMode));
}
static final
|
FlowableConcatMapEager
|
java
|
dropwizard__dropwizard
|
dropwizard-util/src/main/java/io/dropwizard/util/DataSize.java
|
{
"start": 591,
"end": 13051
}
|
class ____ implements Comparable<DataSize>, Serializable {
private static final long serialVersionUID = 8517642678733072800L;
private static final Pattern SIZE_PATTERN = Pattern.compile("(\\d+)\\s*(\\S*)");
private static final SortedMap<String, DataSizeUnit> SUFFIXES;
static {
final SortedMap<String, DataSizeUnit> suffixes = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
suffixes.put("B", DataSizeUnit.BYTES);
suffixes.put("byte", DataSizeUnit.BYTES);
suffixes.put("bytes", DataSizeUnit.BYTES);
suffixes.put("K", DataSizeUnit.KILOBYTES);
suffixes.put("KB", DataSizeUnit.KILOBYTES);
suffixes.put("KiB", DataSizeUnit.KIBIBYTES);
suffixes.put("kilobyte", DataSizeUnit.KILOBYTES);
suffixes.put("kibibyte", DataSizeUnit.KIBIBYTES);
suffixes.put("kilobytes", DataSizeUnit.KILOBYTES);
suffixes.put("kibibytes", DataSizeUnit.KIBIBYTES);
suffixes.put("M", DataSizeUnit.MEGABYTES);
suffixes.put("MB", DataSizeUnit.MEGABYTES);
suffixes.put("MiB", DataSizeUnit.MEBIBYTES);
suffixes.put("megabyte", DataSizeUnit.MEGABYTES);
suffixes.put("mebibyte", DataSizeUnit.MEBIBYTES);
suffixes.put("megabytes", DataSizeUnit.MEGABYTES);
suffixes.put("mebibytes", DataSizeUnit.MEBIBYTES);
suffixes.put("G", DataSizeUnit.GIGABYTES);
suffixes.put("GB", DataSizeUnit.GIGABYTES);
suffixes.put("GiB", DataSizeUnit.GIBIBYTES);
suffixes.put("gigabyte", DataSizeUnit.GIGABYTES);
suffixes.put("gibibyte", DataSizeUnit.GIBIBYTES);
suffixes.put("gigabytes", DataSizeUnit.GIGABYTES);
suffixes.put("gibibytes", DataSizeUnit.GIBIBYTES);
suffixes.put("T", DataSizeUnit.TERABYTES);
suffixes.put("TB", DataSizeUnit.TERABYTES);
suffixes.put("TiB", DataSizeUnit.TEBIBYTES);
suffixes.put("terabyte", DataSizeUnit.TERABYTES);
suffixes.put("tebibyte", DataSizeUnit.TEBIBYTES);
suffixes.put("terabytes", DataSizeUnit.TERABYTES);
suffixes.put("tebibytes", DataSizeUnit.TEBIBYTES);
suffixes.put("P", DataSizeUnit.PETABYTES);
suffixes.put("PB", DataSizeUnit.PETABYTES);
suffixes.put("PiB", DataSizeUnit.PEBIBYTES);
suffixes.put("petabyte", DataSizeUnit.PETABYTES);
suffixes.put("pebibyte", DataSizeUnit.PEBIBYTES);
suffixes.put("petabytes", DataSizeUnit.PETABYTES);
suffixes.put("pebibytes", DataSizeUnit.PEBIBYTES);
SUFFIXES = Collections.unmodifiableSortedMap(suffixes);
}
/**
* Constructs a new {@link DataSize} object representing the specified amount of bytes.
*
* @param count the amount of bytes
* @return the newly created {@link DataSize} object
*/
public static DataSize bytes(long count) {
return new DataSize(count, DataSizeUnit.BYTES);
}
/**
* Constructs a new {@link DataSize} object representing the specified amount of kilobytes.
*
* @param count the amount of kilobytes
* @return the newly created {@link DataSize} object
*/
public static DataSize kilobytes(long count) {
return new DataSize(count, DataSizeUnit.KILOBYTES);
}
/**
* Constructs a new {@link DataSize} object representing the specified amount of megabytes.
*
* @param count the amount of megabytes
* @return the newly created {@link DataSize} object
*/
public static DataSize megabytes(long count) {
return new DataSize(count, DataSizeUnit.MEGABYTES);
}
/**
* Constructs a new {@link DataSize} object representing the specified amount of gigabytes.
*
* @param count the amount of gigabytes
* @return the newly created {@link DataSize} object
*/
public static DataSize gigabytes(long count) {
return new DataSize(count, DataSizeUnit.GIGABYTES);
}
/**
* Constructs a new {@link DataSize} object representing the specified amount of terabytes.
*
* @param count the amount of terabytes
* @return the newly created {@link DataSize} object
*/
public static DataSize terabytes(long count) {
return new DataSize(count, DataSizeUnit.TERABYTES);
}
/**
* Constructs a new {@link DataSize} object representing the specified amount of petabytes.
*
* @param count the amount of petabytes
* @return the newly created {@link DataSize} object
*/
public static DataSize petabytes(long count) {
return new DataSize(count, DataSizeUnit.PETABYTES);
}
/**
* Constructs a new {@link DataSize} object representing the specified amount of kibibytes.
*
* @param count the amount of kibibytes
* @return the newly created {@link DataSize} object
*/
public static DataSize kibibytes(long count) {
return new DataSize(count, DataSizeUnit.KIBIBYTES);
}
/**
* Constructs a new {@link DataSize} object representing the specified amount of mebibytes.
*
* @param count the amount of mebibytes
* @return the newly created {@link DataSize} object
*/
public static DataSize mebibytes(long count) {
return new DataSize(count, DataSizeUnit.MEBIBYTES);
}
/**
* Constructs a new {@link DataSize} object representing the specified amount of gibibytes.
*
* @param count the amount of gibibytes
* @return the newly created {@link DataSize} object
*/
public static DataSize gibibytes(long count) {
return new DataSize(count, DataSizeUnit.GIBIBYTES);
}
/**
* Constructs a new {@link DataSize} object representing the specified amount of tebibytes.
*
* @param count the amount of tebibytes
* @return the newly created {@link DataSize} object
*/
public static DataSize tebibytes(long count) {
return new DataSize(count, DataSizeUnit.TEBIBYTES);
}
/**
* Constructs a new {@link DataSize} object representing the specified amount of pebibytes.
*
* @param count the amount of pebibytes
* @return the newly created {@link DataSize} object
*/
public static DataSize pebibytes(long count) {
return new DataSize(count, DataSizeUnit.PEBIBYTES);
}
/**
* Parses a given {@link CharSequence} to a {@link DataSize} object.
* If no unit is provided by the input sequence, a default unit of {@link DataSizeUnit#BYTES} is used.
*
* @param size the string representation of the {@link DataSize} to parse
* @return a valid new {@link DataSize} object representing the parsed string
*/
@JsonCreator
public static DataSize parse(CharSequence size) {
return parse(size, DataSizeUnit.BYTES);
}
/**
* Parses a given {@link CharSequence} to a {@link DataSize} object.
* If no unit is provided by the input sequence, the default unit parameter is used.
*
* @param size the string representation of the {@link DataSize} to parse
* @param defaultUnit the fallback default unit to use for the newly created {@link DataSize}
* @return a valid new {@link DataSize} object representing the parsed string
* @throws IllegalArgumentException if the input sequence cannot be parsed correctly
*/
public static DataSize parse(CharSequence size, DataSizeUnit defaultUnit) {
    // SIZE_PATTERN captures a decimal count and an optional (possibly empty) unit suffix.
    final Matcher matcher = SIZE_PATTERN.matcher(size);
    if (!matcher.matches()) {
        throw new IllegalArgumentException("Invalid size: " + size);
    }
    final long count = Long.parseLong(matcher.group(1));
    final String unit = matcher.group(2);
    // Missing suffix falls back to the caller-supplied default; unknown suffixes are
    // rejected (SUFFIXES lookup is case-insensitive).
    final DataSizeUnit dataSizeUnit = unit == null || unit.isEmpty() ? defaultUnit : SUFFIXES.get(unit);
    if (dataSizeUnit == null) {
        throw new IllegalArgumentException("Invalid size: " + size + ". Wrong size unit");
    }
    return new DataSize(count, dataSizeUnit);
}
/**
* The quantity of the current data size
*/
private final long count;
/**
* The unit of the current data size
*/
private final DataSizeUnit unit;
private DataSize(long count, DataSizeUnit unit) {
this.count = count;
this.unit = requireNonNull(unit);
}
/**
 * Returns the raw magnitude of this size, expressed in units of
 * {@link #getUnit()}.
 *
 * @return the quantity component of this data size
 */
public long getQuantity() {
    return this.count;
}
/**
 * Returns the {@link DataSizeUnit} qualifying {@link #getQuantity()}.
 *
 * @return the unit component of this data size
 */
public DataSizeUnit getUnit() {
    return this.unit;
}
/**
 * Expresses this data size in bytes.
 *
 * @return this quantity converted to bytes
 */
public long toBytes() {
    final DataSizeUnit target = DataSizeUnit.BYTES;
    return target.convert(count, unit);
}
/**
 * Expresses this data size in kilobytes.
 *
 * @return this quantity converted to kilobytes
 */
public long toKilobytes() {
    final DataSizeUnit target = DataSizeUnit.KILOBYTES;
    return target.convert(count, unit);
}
/**
 * Expresses this data size in megabytes.
 *
 * @return this quantity converted to megabytes
 */
public long toMegabytes() {
    final DataSizeUnit target = DataSizeUnit.MEGABYTES;
    return target.convert(count, unit);
}
/**
 * Expresses this data size in gigabytes.
 *
 * @return this quantity converted to gigabytes
 */
public long toGigabytes() {
    final DataSizeUnit target = DataSizeUnit.GIGABYTES;
    return target.convert(count, unit);
}
/**
 * Expresses this data size in terabytes.
 *
 * @return this quantity converted to terabytes
 */
public long toTerabytes() {
    final DataSizeUnit target = DataSizeUnit.TERABYTES;
    return target.convert(count, unit);
}
/**
 * Expresses this data size in petabytes.
 *
 * @return this quantity converted to petabytes
 */
public long toPetabytes() {
    final DataSizeUnit target = DataSizeUnit.PETABYTES;
    return target.convert(count, unit);
}
/**
 * Expresses this data size in kibibytes.
 *
 * @return this quantity converted to kibibytes
 */
public long toKibibytes() {
    final DataSizeUnit target = DataSizeUnit.KIBIBYTES;
    return target.convert(count, unit);
}
/**
 * Expresses this data size in mebibytes.
 *
 * @return this quantity converted to mebibytes
 */
public long toMebibytes() {
    final DataSizeUnit target = DataSizeUnit.MEBIBYTES;
    return target.convert(count, unit);
}
/**
 * Expresses this data size in gibibytes.
 *
 * @return this quantity converted to gibibytes
 */
public long toGibibytes() {
    final DataSizeUnit target = DataSizeUnit.GIBIBYTES;
    return target.convert(count, unit);
}
/**
 * Returns the quantity of the current {@link DataSize} object in tebibytes.
 * (Previous Javadoc said "gebibytes", which is not a unit; the conversion
 * target below is {@link DataSizeUnit#TEBIBYTES}.)
 *
 * @return the converted quantity
 */
public long toTebibytes() {
    return DataSizeUnit.TEBIBYTES.convert(count, unit);
}
/**
 * Expresses this data size in pebibytes.
 *
 * @return this quantity converted to pebibytes
 */
public long toPebibytes() {
    final DataSizeUnit target = DataSizeUnit.PEBIBYTES;
    return target.convert(count, unit);
}
/**
 * Two {@code DataSize} instances are equal only when both their quantity and
 * their unit match exactly; no unit conversion is performed here.
 *
 * {@inheritDoc}
 */
@Override
public boolean equals(Object obj) {
    if (obj == this) {
        return true;
    }
    // Strict class check (not instanceof) mirrors the original contract.
    if (obj == null || obj.getClass() != getClass()) {
        return false;
    }
    final DataSize other = (DataSize) obj;
    return count == other.count && unit == other.unit;
}
/**
 * {@inheritDoc}
 *
 * <p>Consistent with {@link #equals(Object)}: combines the quantity's
 * 64-bit hash with the unit's hash.
 */
@Override
public int hashCode() {
    // Long.hashCode(count) is bit-identical to (int) (count ^ (count >>> 32)).
    return 31 * Long.hashCode(count) + unit.hashCode();
}
/**
 * {@inheritDoc}
 *
 * <p>Renders as {@code "<count> <units>"}; the unit name is lower-cased and
 * singularized (trailing {@code 's'} dropped) when the count is exactly one.
 */
@Override
@JsonValue
public String toString() {
    final String pluralUnits = unit.toString().toLowerCase(Locale.ENGLISH);
    final String renderedUnits = (count == 1L)
            ? pluralUnits.substring(0, pluralUnits.length() - 1)
            : pluralUnits;
    return count + " " + renderedUnits;
}
/**
 * {@inheritDoc}
 *
 * <p>Sizes with the same unit compare by raw quantity; otherwise both are
 * normalized to bytes first.
 */
@Override
public int compareTo(DataSize other) {
    return (unit == other.unit)
            ? Long.compare(count, other.count)
            : Long.compare(toBytes(), other.toBytes());
}
}
|
DataSize
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/async/utils/AsyncForEachRun.java
|
{
"start": 1158,
"end": 1433
}
|
class ____ part of the asynchronous operation utilities
* within the Hadoop Distributed File System (HDFS) Federation router.
* It provides the functionality to perform asynchronous operations on each
* element of an Iterator, applying a given async function.
*
* <p>This
|
is
|
java
|
spring-projects__spring-security
|
web/src/main/java/org/springframework/security/web/authentication/logout/CookieClearingLogoutHandler.java
|
{
"start": 1335,
"end": 2835
}
|
class ____ implements LogoutHandler {

    /** Factories producing, per request, the expired cookies to send back. */
    private final List<Function<HttpServletRequest, Cookie>> cookiesToClear;

    /**
     * Builds a handler that expires the named cookies on logout. Each cookie
     * is scoped to the request's context path (or {@code "/"} when empty).
     *
     * @param cookiesToClear names of cookies to clear; must not be null
     */
    public CookieClearingLogoutHandler(String... cookiesToClear) {
        Assert.notNull(cookiesToClear, "List of cookies cannot be null");
        final List<Function<HttpServletRequest, Cookie>> factories = new ArrayList<>();
        for (String cookieName : cookiesToClear) {
            factories.add((request) -> {
                final Cookie expired = new Cookie(cookieName, null);
                final String contextPath = request.getContextPath();
                // Fall back to the root path when the context path is blank.
                expired.setPath(StringUtils.hasText(contextPath) ? contextPath : "/");
                expired.setMaxAge(0);
                expired.setSecure(request.isSecure());
                return expired;
            });
        }
        this.cookiesToClear = factories;
    }

    /**
     * @param cookiesToClear - One or more Cookie objects that must have maxAge of 0
     * @since 5.2
     */
    public CookieClearingLogoutHandler(Cookie... cookiesToClear) {
        Assert.notNull(cookiesToClear, "List of cookies cannot be null");
        final List<Function<HttpServletRequest, Cookie>> factories = new ArrayList<>();
        for (Cookie cookie : cookiesToClear) {
            // Only already-expired cookies are accepted in this overload.
            Assert.isTrue(cookie.getMaxAge() == 0, "Cookie maxAge must be 0");
            factories.add((request) -> cookie);
        }
        this.cookiesToClear = factories;
    }

    /**
     * Adds each configured expired cookie to the response, clearing it in the
     * client's browser.
     */
    @Override
    public void logout(HttpServletRequest request, HttpServletResponse response,
            @Nullable Authentication authentication) {
        for (Function<HttpServletRequest, Cookie> factory : this.cookiesToClear) {
            response.addCookie(factory.apply(request));
        }
    }

}
|
CookieClearingLogoutHandler
|
java
|
mybatis__mybatis-3
|
src/test/java/org/apache/ibatis/type/TypeHandlerRegistryTest.java
|
{
"start": 5573,
"end": 5621
}
|
// Deliberately empty enum: exercises type-handler registration for an enum
// that implements an interface while declaring no constants.
enum ____ implements SomeInterface {
}
|
SomeEnum
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFairSchedulerUtilities.java
|
{
"start": 1267,
"end": 2505
}
|
// Unit tests for FairSchedulerUtilities.trimQueueName: verifies that a wide
// range of Unicode whitespace characters (ideographic space U+3000, NBSP
// U+00A0, NEL U+0085, en/em/thin spaces U+2000..U+200A, line/paragraph
// separators U+2028/U+2029, etc.) is stripped from both ends of a queue name.
class ____ {
@Test
public void testTrimQueueNameEquals() throws Exception {
// Every entry below must trim down to exactly "a".
final String[] equalsStrings = {
// no spaces
"a",
// leading spaces
" a",
" \u3000a",
"\u2002\u3000\r\u0085\u200A\u2005\u2000\u3000a",
"\u2029\u000B\u3000\u2008\u2003\u205F\u3000\u1680a",
"\u0009\u0020\u2006\u2001\u202F\u00A0\u000C\u2009a",
"\u3000\u2004\u3000\u3000\u2028\n\u2007\u3000a",
// trailing spaces
"a\u200A",
"a \u0085 ",
// spaces on both sides
" a ",
" a\u00A0",
"\u0009\u0020\u2006\u2001\u202F\u00A0\u000C\u2009a" +
"\u3000\u2004\u3000\u3000\u2028\n\u2007\u3000",
};
for (String s : equalsStrings) {
assertEquals("a", trimQueueName(s));
}
}
@Test
public void testTrimQueueNamesEmpty() throws Exception {
// null passes through as null; an all-whitespace name trims to empty.
assertNull(trimQueueName(null));
final String spaces = "\u2002\u3000\r\u0085\u200A\u2005\u2000\u3000"
+ "\u2029\u000B\u3000\u2008\u2003\u205F\u3000\u1680"
+ "\u0009\u0020\u2006\u2001\u202F\u00A0\u000C\u2009"
+ "\u3000\u2004\u3000\u3000\u2028\n\u2007\u3000";
assertTrue(trimQueueName(spaces).isEmpty());
}
}
|
TestFairSchedulerUtilities
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.