language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/atomic/long_/AtomicLongAssert_overridingErrorMessage_Test.java
|
{
"start": 953,
"end": 2658
}
|
class ____ {
@Test
void should_honor_custom_error_message_set_with_withFailMessage() {
// GIVEN
String error = "ssss";
// WHEN
ThrowingCallable code = () -> assertThat(new AtomicLong(0)).withFailMessage(error)
.hasValueLessThan(-1);
// THEN
assertThatAssertionErrorIsThrownBy(code).withMessageContaining(error);
}
@Test
void should_honor_custom_error_message_set_with_overridingErrorMessage() {
// GIVEN
String error = "ssss";
// WHEN
ThrowingCallable code = () -> assertThat(new AtomicLong(0)).overridingErrorMessage(error)
.hasValueLessThan(-1);
// THEN
assertThatAssertionErrorIsThrownBy(code).withMessageContaining(error);
}
@Test
void should_honor_custom_error_message_set_with_withFailMessage_using_supplier() {
// GIVEN
String error = "ssss";
// WHEN
ThrowingCallable code = () -> assertThat(new AtomicLong(0)).withFailMessage(() -> error)
.hasValueLessThan(-1);
// THEN
assertThatAssertionErrorIsThrownBy(code).withMessageContaining(error);
}
@Test
void should_honor_custom_error_message_set_with_overridingErrorMessage_using_supplier() {
// GIVEN
String error = "ssss";
// WHEN
ThrowingCallable code = () -> assertThat(new AtomicLong(0)).overridingErrorMessage(() -> error)
.hasValueLessThan(-1);
// THEN
assertThatAssertionErrorIsThrownBy(code).withMessageContaining(error);
}
}
|
AtomicLongAssert_overridingErrorMessage_Test
|
java
|
quarkusio__quarkus
|
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/defaultbean/DefaultProducerMethodTest.java
|
{
"start": 1646,
"end": 1794
}
|
class ____ {
@Produces
GreetingBean greetingBean() {
return new GreetingBean("ciao");
}
}
static
|
Producer3
|
java
|
quarkusio__quarkus
|
extensions/amazon-lambda/event-server/src/main/java/io/quarkus/amazon/lambda/runtime/MockBodyHandler.java
|
{
"start": 753,
"end": 3789
}
|
class ____ implements BodyHandler {
private static final Logger LOG = LoggerFactory.getLogger(io.vertx.ext.web.handler.impl.BodyHandlerImpl.class);
private long bodyLimit = DEFAULT_BODY_LIMIT;
private String uploadsDir;
private boolean mergeFormAttributes = DEFAULT_MERGE_FORM_ATTRIBUTES;
private boolean isPreallocateBodyBuffer = DEFAULT_PREALLOCATE_BODY_BUFFER;
private static final int DEFAULT_INITIAL_BODY_BUFFER_SIZE = 1024; //bytes
public MockBodyHandler() {
}
@Override
public void handle(RoutingContext context) {
HttpServerRequest request = context.request();
if (request.headers().contains(HttpHeaders.UPGRADE, HttpHeaders.WEBSOCKET, true)) {
context.next();
return;
}
// we need to keep state since we can be called again on reroute
if (!((RoutingContextInternal) context).seenHandler(RoutingContextInternal.BODY_HANDLER)) {
long contentLength = isPreallocateBodyBuffer ? parseContentLengthHeader(request) : -1;
BHandler handler = new BHandler(context, contentLength);
request.handler(handler);
request.endHandler(v -> handler.end());
((RoutingContextInternal) context).visitHandler(RoutingContextInternal.BODY_HANDLER);
} else {
// on reroute we need to re-merge the form params if that was desired
if (mergeFormAttributes && request.isExpectMultipart()) {
request.params().addAll(request.formAttributes());
}
context.next();
}
}
@Override
public BodyHandler setHandleFileUploads(boolean handleFileUploads) {
throw new IllegalStateException("Not Allowed");
}
@Override
public BodyHandler setBodyLimit(long bodyLimit) {
this.bodyLimit = bodyLimit;
return this;
}
@Override
public BodyHandler setUploadsDirectory(String uploadsDirectory) {
this.uploadsDir = uploadsDirectory;
return this;
}
@Override
public BodyHandler setMergeFormAttributes(boolean mergeFormAttributes) {
this.mergeFormAttributes = mergeFormAttributes;
return this;
}
@Override
public BodyHandler setDeleteUploadedFilesOnEnd(boolean deleteUploadedFilesOnEnd) {
return this;
}
@Override
public BodyHandler setPreallocateBodyBuffer(boolean isPreallocateBodyBuffer) {
this.isPreallocateBodyBuffer = isPreallocateBodyBuffer;
return this;
}
private long parseContentLengthHeader(HttpServerRequest request) {
String contentLength = request.getHeader(HttpHeaders.CONTENT_LENGTH);
if (contentLength == null || contentLength.isEmpty()) {
return -1;
}
try {
long parsedContentLength = Long.parseLong(contentLength);
return parsedContentLength < 0 ? -1 : parsedContentLength;
} catch (NumberFormatException ex) {
return -1;
}
}
private
|
MockBodyHandler
|
java
|
assertj__assertj-core
|
assertj-core/src/main/java/org/assertj/core/error/ShouldContainOneOrMoreWhitespaces.java
|
{
"start": 801,
"end": 1449
}
|
class ____ extends BasicErrorMessageFactory {
/**
* Creates a new <code>{@link ShouldContainOneOrMoreWhitespaces}</code>.
* @param actual the actual value in the failed assertion.
* @return the created {@code ErrorMessageFactory}.
*/
public static ErrorMessageFactory shouldContainOneOrMoreWhitespaces(CharSequence actual) {
return new ShouldContainOneOrMoreWhitespaces(actual);
}
private ShouldContainOneOrMoreWhitespaces(Object actual) {
super("%n" +
"Expecting string to contain one or more whitespaces but did not, string was:%n" +
" %s",
actual);
}
}
|
ShouldContainOneOrMoreWhitespaces
|
java
|
apache__flink
|
flink-python/src/main/java/org/apache/flink/formats/csv/CsvRowDeserializationSchema.java
|
{
"start": 2511,
"end": 3893
}
|
class ____ implements DeserializationSchema<Row> {
private static final long serialVersionUID = 2135553495874539201L;
/** Type information describing the result type. */
private final TypeInformation<Row> typeInfo;
/** Runtime instance that performs the actual work. */
private final RuntimeConverter runtimeConverter;
/** Schema describing the input CSV data. */
private final CsvSchema csvSchema;
/** Object reader used to read rows. It is configured by {@link CsvSchema}. */
private transient ObjectReader objectReader;
/** Flag indicating whether to ignore invalid fields/rows (default: throw an exception). */
private final boolean ignoreParseErrors;
private CsvRowDeserializationSchema(
RowTypeInfo typeInfo, CsvSchema csvSchema, boolean ignoreParseErrors) {
this.typeInfo = typeInfo;
this.runtimeConverter = createRowRuntimeConverter(typeInfo, ignoreParseErrors, true);
this.csvSchema = csvSchema;
this.ignoreParseErrors = ignoreParseErrors;
}
@Override
public void open(InitializationContext context) throws Exception {
objectReader =
JacksonMapperFactory.createCsvMapper().readerFor(JsonNode.class).with(csvSchema);
}
/** A builder for creating a {@link CsvRowDeserializationSchema}. */
public static
|
CsvRowDeserializationSchema
|
java
|
apache__kafka
|
streams/src/main/java/org/apache/kafka/streams/kstream/internals/RepartitionedInternal.java
|
{
"start": 1105,
"end": 1780
}
|
class ____<K, V> extends Repartitioned<K, V> {
public RepartitionedInternal(final Repartitioned<K, V> repartitioned) {
super(repartitioned);
}
InternalTopicProperties toInternalTopicProperties() {
return new InternalTopicProperties(numberOfPartitions());
}
public String name() {
return name;
}
public Serde<K> keySerde() {
return keySerde;
}
public Serde<V> valueSerde() {
return valueSerde;
}
public StreamPartitioner<K, V> streamPartitioner() {
return partitioner;
}
public Integer numberOfPartitions() {
return numberOfPartitions;
}
}
|
RepartitionedInternal
|
java
|
apache__flink
|
flink-libraries/flink-cep/src/main/java/org/apache/flink/cep/pattern/GroupPattern.java
|
{
"start": 993,
"end": 1190
}
|
class ____ a group pattern definition.
*
* @param <T> Base type of the elements appearing in the pattern
* @param <F> Subtype of T to which the current pattern operator is constrained
*/
public
|
for
|
java
|
playframework__playframework
|
core/play/src/test/java/play/utils/Parent.java
|
{
"start": 369,
"end": 1824
}
|
class ____ {
private final Long createdAt;
private final Child child;
private final Long updatedAt;
private final String updatedBy;
@JsonCreator
public Parent(
@JsonProperty("createdAt") Long createdAt,
@JsonProperty("child") Child child,
@JsonProperty("updatedAt") Long updatedAt,
@JsonProperty("updatedBy") String updatedBy) {
this.createdAt = createdAt;
this.child = child;
this.updatedAt = updatedAt;
this.updatedBy = updatedBy;
}
public Long getCreatedAt() {
return createdAt;
}
public Child getChild() {
return child;
}
public Long getUpdatedAt() {
return updatedAt;
}
public String getUpdatedBy() {
return updatedBy;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Parent parent = (Parent) o;
return createdAt.equals(parent.createdAt)
&& child.equals(parent.child)
&& updatedAt.equals(parent.updatedAt)
&& updatedBy.equals(parent.updatedBy);
}
@Override
public int hashCode() {
return Objects.hash(createdAt, child, updatedAt, updatedBy);
}
@Override
public String toString() {
return "Parent{"
+ "createdAt="
+ createdAt
+ ", child="
+ child
+ ", updatedAt="
+ updatedAt
+ ", updatedBy='"
+ updatedBy
+ '\''
+ '}';
}
}
|
Parent
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/nullness/FieldMissingNullableTest.java
|
{
"start": 14180,
"end": 14700
}
|
class ____ {
private String message = "hello";
public void setMessage(String message) {
this.message = message;
}
}
""")
.doTest();
}
@Test
public void negativeCases_this() {
createCompilationTestHelper()
.addSourceLines(
"com/google/errorprone/bugpatterns/nullness/ThisTest.java",
"""
package com.google.errorprone.bugpatterns.nullness;
public
|
NonNullParameterTest
|
java
|
assertj__assertj-core
|
assertj-core/src/main/java/org/assertj/core/util/introspection/IntrospectionError.java
|
{
"start": 905,
"end": 2367
}
|
class ____ extends RuntimeException {
@Serial
private static final long serialVersionUID = 1L;
/**
* This (nullable) field holds the original Exception thrown by the tested code
* during the invocation of a getter/accessor method. This allows us to reference
* or rethrow it if alternative means of resolving the field are unsuccessful.
*/
private final Throwable getterInvocationException;
/**
* Creates a new <code>{@link IntrospectionError}</code>.
* @param message the detail message.
*/
public IntrospectionError(String message) {
super(message);
this.getterInvocationException = null;
}
/**
* Creates a new <code>{@link IntrospectionError}</code>.
* @param message the detail message.
* @param cause the original cause.
*/
public IntrospectionError(String message, Throwable cause) {
this(message, cause, null);
}
/**
* Creates a new <code>{@link IntrospectionError}</code>.
* @param message the detail message.
* @param cause the original cause.
* @param getterInvocationException the original exception thrown by the tested code.
*/
public IntrospectionError(String message, Throwable cause, Throwable getterInvocationException) {
super(message, cause);
this.getterInvocationException = getterInvocationException;
}
public Optional<Throwable> getterInvocationException() {
return Optional.ofNullable(getterInvocationException);
}
}
|
IntrospectionError
|
java
|
mybatis__mybatis-3
|
src/test/java/org/apache/ibatis/jdbc/SelectBuilderTest.java
|
{
"start": 1310,
"end": 4325
}
|
class ____ {
@Test
void shouldProduceExpectedSimpleSelectStatement() {
String expected = """
SELECT P.ID, P.USERNAME, P.PASSWORD, P.FIRST_NAME, P.LAST_NAME
FROM PERSON P
WHERE (P.ID like #id# AND P.FIRST_NAME like #firstName# AND P.LAST_NAME like #lastName#)
ORDER BY P.LAST_NAME""";
assertEquals(expected, example2("a", "b", "c"));
}
@Test
void shouldProduceExpectedSimpleSelectStatementMissingFirstParam() {
String expected = """
SELECT P.ID, P.USERNAME, P.PASSWORD, P.FIRST_NAME, P.LAST_NAME
FROM PERSON P
WHERE (P.FIRST_NAME like #firstName# AND P.LAST_NAME like #lastName#)
ORDER BY P.LAST_NAME""";
assertEquals(expected, example2(null, "b", "c"));
}
@Test
void shouldProduceExpectedSimpleSelectStatementMissingFirstTwoParams() {
String expected = """
SELECT P.ID, P.USERNAME, P.PASSWORD, P.FIRST_NAME, P.LAST_NAME
FROM PERSON P
WHERE (P.LAST_NAME like #lastName#)
ORDER BY P.LAST_NAME""";
assertEquals(expected, example2(null, null, "c"));
}
@Test
void shouldProduceExpectedSimpleSelectStatementMissingAllParams() {
String expected = """
SELECT P.ID, P.USERNAME, P.PASSWORD, P.FIRST_NAME, P.LAST_NAME
FROM PERSON P
ORDER BY P.LAST_NAME""";
assertEquals(expected, example2(null, null, null));
}
@Test
void shouldProduceExpectedComplexSelectStatement() {
String expected = """
SELECT P.ID, P.USERNAME, P.PASSWORD, P.FULL_NAME, P.LAST_NAME, P.CREATED_ON, P.UPDATED_ON
FROM PERSON P, ACCOUNT A
INNER JOIN DEPARTMENT D on D.ID = P.DEPARTMENT_ID
INNER JOIN COMPANY C on D.COMPANY_ID = C.ID
WHERE (P.ID = A.ID AND P.FIRST_NAME like ?)\s
OR (P.LAST_NAME like ?)
GROUP BY P.ID
HAVING (P.LAST_NAME like ?)\s
OR (P.FIRST_NAME like ?)
ORDER BY P.ID, P.FULL_NAME""";
assertEquals(expected, example1());
}
private static String example1() {
SELECT("P.ID, P.USERNAME, P.PASSWORD, P.FULL_NAME");
SELECT("P.LAST_NAME, P.CREATED_ON, P.UPDATED_ON");
FROM("PERSON P");
FROM("ACCOUNT A");
INNER_JOIN("DEPARTMENT D on D.ID = P.DEPARTMENT_ID");
INNER_JOIN("COMPANY C on D.COMPANY_ID = C.ID");
WHERE("P.ID = A.ID");
WHERE("P.FIRST_NAME like ?");
OR();
WHERE("P.LAST_NAME like ?");
GROUP_BY("P.ID");
HAVING("P.LAST_NAME like ?");
OR();
HAVING("P.FIRST_NAME like ?");
ORDER_BY("P.ID");
ORDER_BY("P.FULL_NAME");
return SQL();
}
private static String example2(String id, String firstName, String lastName) {
SELECT("P.ID, P.USERNAME, P.PASSWORD, P.FIRST_NAME, P.LAST_NAME");
FROM("PERSON P");
if (id != null) {
WHERE("P.ID like #id#");
}
if (firstName != null) {
WHERE("P.FIRST_NAME like #firstName#");
}
if (lastName != null) {
WHERE("P.LAST_NAME like #lastName#");
}
ORDER_BY("P.LAST_NAME");
return SQL();
}
}
|
SelectBuilderTest
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/resourceplugin/deviceframework/AssignedDevice.java
|
{
"start": 1161,
"end": 2313
}
|
class ____ implements Serializable, Comparable {
private static final long serialVersionUID = -544285507952217366L;
private final Device device;
private final String containerId;
public AssignedDevice(ContainerId cId, Device dev) {
this.device = dev;
this.containerId = cId.toString();
}
public Device getDevice() {
return device;
}
public String getContainerId() {
return containerId;
}
@Override
public int compareTo(Object o) {
if (!(o instanceof AssignedDevice)) {
return -1;
}
AssignedDevice other = (AssignedDevice) o;
int result = getDevice().compareTo(other.getDevice());
if (0 != result) {
return result;
}
return getContainerId().compareTo(other.getContainerId());
}
@Override
public boolean equals(Object o) {
if (!(o instanceof AssignedDevice)) {
return false;
}
AssignedDevice other = (AssignedDevice) o;
return getDevice().equals(other.getDevice())
&& getContainerId().equals(other.getContainerId());
}
@Override
public int hashCode() {
return Objects.hash(getDevice(), getContainerId());
}
}
|
AssignedDevice
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/utils/CRC64.java
|
{
"start": 917,
"end": 1806
}
|
class ____ {
private static final long POLY = 0x9a6c9329ac4bc9b5L;
private static final int TABLE_LENGTH = 256;
private static final long[] TABLE = new long[TABLE_LENGTH];
private long value = -1;
/**
* @param input byte arrays.
* @return long value of the CRC-64 checksum of the data.
* */
public long compute(byte[] input) {
init();
for (int i = 0; i < input.length; i++) {
value = TABLE[(input[i] ^ (int) value) & 0xFF] ^ (value >>> 8);
}
return ~value;
}
/*
* Initialize a table constructed from POLY (0x9a6c9329ac4bc9b5L).
* */
private void init() {
value = -1;
for (int n = 0; n < TABLE_LENGTH; ++n) {
long crc = n;
for (int i = 0; i < 8; ++i) {
if ((crc & 1) == 1) {
crc = (crc >>> 1) ^ POLY;
} else {
crc >>>= 1;
}
}
TABLE[n] = crc;
}
}
}
|
CRC64
|
java
|
spring-projects__spring-boot
|
module/spring-boot-web-server/src/testFixtures/java/org/springframework/boot/web/server/reactive/AbstractReactiveWebServerFactoryTests.java
|
{
"start": 28576,
"end": 29377
}
|
class ____ implements HttpHandler {
private static final DefaultDataBufferFactory factory = new DefaultDataBufferFactory();
private final DataBuffer bytes;
private final String mediaType;
CharsHandler(int contentSize, String mediaType) {
char[] chars = new char[contentSize];
Arrays.fill(chars, 'F');
this.bytes = factory.wrap(new String(chars).getBytes(StandardCharsets.UTF_8));
this.mediaType = mediaType;
}
@Override
public Mono<Void> handle(ServerHttpRequest request, ServerHttpResponse response) {
response.setStatusCode(HttpStatus.OK);
response.getHeaders().set(HttpHeaders.CONTENT_TYPE, this.mediaType);
response.getHeaders().setContentLength(this.bytes.readableByteCount());
return response.writeWith(Mono.just(this.bytes));
}
}
static
|
CharsHandler
|
java
|
spring-projects__spring-framework
|
spring-aop/src/main/java/org/springframework/aop/interceptor/CustomizableTraceInterceptor.java
|
{
"start": 2110,
"end": 2953
}
|
class ____ of the method arguments</li>
* <li>{@code $[arguments]} - replaced with a comma-separated list of the
* {@code String} representation of the method arguments</li>
* <li>{@code $[exception]} - replaced with the {@code String} representation
* of any {@code Throwable} raised during the invocation</li>
* <li>{@code $[invocationTime]} - replaced with the time, in milliseconds,
* taken by the method invocation</li>
* </ul>
*
* <p>There are restrictions on which placeholders can be used in which messages:
* see the individual message properties for details on the valid placeholders.
*
* @author Rob Harrop
* @author Juergen Hoeller
* @author Sam Brannen
* @since 1.2
* @see #setEnterMessage
* @see #setExitMessage
* @see #setExceptionMessage
* @see SimpleTraceInterceptor
*/
@SuppressWarnings("serial")
public
|
names
|
java
|
apache__commons-lang
|
src/main/java/org/apache/commons/lang3/concurrent/package-info.java
|
{
"start": 19583,
"end": 20016
}
|
class ____ with the creation of threads. When using the <em>Executor</em> framework new in JDK 1.5
* the developer usually does not have to care about creating threads; the executors create the threads they need on
* demand. However, sometimes it is desired to set some properties of the newly created worker threads. This is possible
* through the {@link java.util.concurrent.ThreadFactory} interface; an implementation of this
|
deals
|
java
|
alibaba__nacos
|
common/src/main/java/com/alibaba/nacos/common/utils/ClassUtils.java
|
{
"start": 6021,
"end": 6099
}
|
class ____ not found");
}
}
/**
* Determines if the
|
name
|
java
|
apache__dubbo
|
dubbo-common/src/test/java/org/apache/dubbo/common/beanutil/JavaBeanSerializeUtilTest.java
|
{
"start": 15261,
"end": 16182
}
|
class ____ {
public String gender;
public String email;
String name;
int age;
Child child;
private String securityEmail;
public static Parent getNewParent() {
return new Parent();
}
public String getEmail() {
return this.securityEmail;
}
public void setEmail(String email) {
this.securityEmail = email;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public int getAge() {
return age;
}
public void setAge(int age) {
this.age = age;
}
public Child getChild() {
return child;
}
public void setChild(Child child) {
this.child = child;
}
}
public static
|
Parent
|
java
|
apache__camel
|
components/camel-telegram/src/main/java/org/apache/camel/component/telegram/service/TelegramBodyPublisher.java
|
{
"start": 2092,
"end": 5499
}
|
class ____<T> implements TelegramBodyPart {
private final String contentType;
private final Map<String, Object> headers = new LinkedHashMap<>();
private final T body;
private final String charset;
public MultilineBodyPart(String name, T body, String contentType) {
this(name, body, contentType, StandardCharsets.UTF_8.name());
}
public MultilineBodyPart(String name, T body, String contentType, String charset) {
this.body = body;
this.contentType = contentType;
this.charset = charset;
addHeader("name", name);
}
public void addHeader(String key, String value) {
headers.put(key, value);
}
@Override
public void serialize(ByteBuffer buffer, String boundary) {
String partHeader = "--" + boundary + "\r\n";
buffer.put(partHeader.getBytes());
String contentDisposition = "Content-Disposition: form-data; ";
// this creates the key-pair part of the content disposition (i.e.: name="myName"; file="myFile.doc")
contentDisposition += headers.entrySet()
.stream()
.map(e -> e.getKey().toLowerCase() + "=\"" + e.getValue().toString() + "\"")
.collect(Collectors.joining("; ")) + "\r\n";
buffer.put(contentDisposition.getBytes());
String contentTypePart = "Content-Type: " + contentType;
if (charset != null) {
contentTypePart += "; charset=" + charset;
}
contentTypePart += "\r\n\r\n";
buffer.put(contentTypePart.getBytes());
if (body instanceof String) {
buffer.put(((String) body).getBytes());
} else {
if (body instanceof byte[]) {
buffer.put((byte[]) body);
}
}
buffer.put("\r\n".getBytes());
}
public static void serializeEnd(ByteBuffer buffer, String separator) {
String partHeader = "--" + separator + "--\r\n";
buffer.put(partHeader.getBytes());
}
}
public void addBodyPart(TelegramBodyPart bodyPart) {
bodyParts.add(bodyPart);
}
public HttpRequest.BodyPublisher newPublisher() {
ByteBuffer buffer = ByteBuffer.allocate(bufferSize);
LOG.debug("Allocating {} bytes", bufferSize);
serialize(bodyParts, buffer, boundary);
int written = buffer.capacity() - buffer.remaining();
return HttpRequest.BodyPublishers.ofByteArray(buffer.array(), 0, written);
}
static void serialize(Set<TelegramBodyPart> bodyParts, ByteBuffer buffer, String separator) {
try {
boolean isMultiBody = false;
for (TelegramBodyPart bodyPart : bodyParts) {
bodyPart.serialize(buffer, separator);
if (bodyPart instanceof MultilineBodyPart) {
isMultiBody = true;
}
}
if (isMultiBody) {
MultilineBodyPart.serializeEnd(buffer, separator);
}
} finally {
bodyParts.clear();
}
}
Set<TelegramBodyPart> getBodyParts() {
return bodyParts;
}
String getBoundary() {
return boundary;
}
}
|
MultilineBodyPart
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
|
{
"start": 1338,
"end": 1454
}
|
class ____ not be used directly (use CommonConfigurationKeys
* instead)
*
*/
@InterfaceAudience.Public
public
|
should
|
java
|
apache__flink
|
flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/operations/utils/ValuesOperationTreeBuilderTest.java
|
{
"start": 2674,
"end": 26600
}
|
class ____ {
static Stream<TestSpec> parameters() {
return Stream.of(
TestSpec.test("Flattening row constructor")
.values(row(1, "ABC"), row(2, "EFG"))
.equalTo(
new ValuesQueryOperation(
asList(
asList(valueLiteral(1), valueLiteral("ABC")),
asList(valueLiteral(2), valueLiteral("EFG"))),
ResolvedSchema.of(
Column.physical("f0", DataTypes.INT().notNull()),
Column.physical(
"f1", DataTypes.CHAR(3).notNull())))),
TestSpec.test("Finding common type")
.values(row(1L, "ABC"), row(3.1f, "DEFG"))
.equalTo(
new ValuesQueryOperation(
asList(
asList(
cast(
valueLiteral(1L),
DataTypes.FLOAT().notNull()),
valueLiteral(
"ABC",
DataTypes.VARCHAR(4).notNull())),
asList(
valueLiteral(3.1f),
valueLiteral(
"DEFG",
DataTypes.VARCHAR(4).notNull()))),
ResolvedSchema.of(
Column.physical("f0", DataTypes.FLOAT().notNull()),
Column.physical(
"f1", DataTypes.VARCHAR(4).notNull())))),
TestSpec.test("Explicit common type")
.values(
DataTypes.ROW(
DataTypes.FIELD("id", DataTypes.DECIMAL(10, 2)),
DataTypes.FIELD("name", DataTypes.STRING())),
row(1L, "ABC"),
row(3.1f, "DEFG"))
.equalTo(
new ValuesQueryOperation(
asList(
asList(
cast(
valueLiteral(1L),
DataTypes.DECIMAL(10, 2)),
cast(
valueLiteral(
"ABC",
DataTypes.STRING()
.notNull()),
DataTypes.STRING())),
asList(
cast(
valueLiteral(3.1f),
DataTypes.DECIMAL(10, 2)),
cast(
valueLiteral(
"DEFG",
DataTypes.STRING()
.notNull()),
DataTypes.STRING()))),
ResolvedSchema.of(
Column.physical("id", DataTypes.DECIMAL(10, 2)),
Column.physical("name", DataTypes.STRING())))),
TestSpec.test("Explicit common type for nested rows")
.values(
DataTypes.ROW(
DataTypes.FIELD("id", DataTypes.DECIMAL(10, 2)),
DataTypes.FIELD(
"details",
DataTypes.ROW(
DataTypes.FIELD("name", DataTypes.STRING()),
DataTypes.FIELD(
"amount",
DataTypes.DECIMAL(10, 2))))),
row(1L, row("ABC", 3)),
row(3.1f, row("DEFG", new BigDecimal("12345"))))
.equalTo(
new ValuesQueryOperation(
asList(
asList(
cast(
valueLiteral(1L),
DataTypes.DECIMAL(10, 2)),
rowCtor(
DataTypes.ROW(
DataTypes.FIELD(
"name",
DataTypes.STRING()),
DataTypes.FIELD(
"amount",
DataTypes.DECIMAL(
10, 2))),
cast(
valueLiteral(
"ABC",
DataTypes.STRING()
.notNull()),
DataTypes.STRING()),
cast(
valueLiteral(3),
DataTypes.DECIMAL(10, 2)))),
asList(
cast(
valueLiteral(3.1f),
DataTypes.DECIMAL(10, 2)),
rowCtor(
DataTypes.ROW(
DataTypes.FIELD(
"name",
DataTypes.STRING()),
DataTypes.FIELD(
"amount",
DataTypes.DECIMAL(
10, 2))),
cast(
valueLiteral(
"DEFG",
DataTypes.STRING()
.notNull()),
DataTypes.STRING()),
cast(
valueLiteral(
new BigDecimal(
"12345"),
DataTypes.DECIMAL(
10,
2)
.notNull()),
DataTypes.DECIMAL(
10, 2))))),
ResolvedSchema.of(
Column.physical("id", DataTypes.DECIMAL(10, 2)),
Column.physical(
"details",
DataTypes.ROW(
DataTypes.FIELD(
"name", DataTypes.STRING()),
DataTypes.FIELD(
"amount",
DataTypes.DECIMAL(
10, 2))))))),
TestSpec.test("Finding a common type for nested rows")
.values(row(1L, row(1L, "ABC")), row(3.1f, row(3.1f, "DEFG")))
.equalTo(
new ValuesQueryOperation(
asList(
asList(
cast(
valueLiteral(1L),
DataTypes.FLOAT().notNull()),
rowCtor(
DataTypes.ROW(
DataTypes.FIELD(
"f0",
DataTypes
.FLOAT()
.notNull()),
DataTypes.FIELD(
"f1",
DataTypes
.VARCHAR(
4)
.notNull()))
.notNull(),
cast(
valueLiteral(1L),
DataTypes.FLOAT()
.notNull()),
valueLiteral(
"ABC",
DataTypes.VARCHAR(4)
.notNull()))),
asList(
valueLiteral(3.1f),
rowCtor(
DataTypes.ROW(
DataTypes.FIELD(
"f0",
DataTypes
.FLOAT()
.notNull()),
DataTypes.FIELD(
"f1",
DataTypes
.VARCHAR(
4)
.notNull()))
.notNull(),
valueLiteral(
3.1f,
DataTypes.FLOAT()
.notNull()),
valueLiteral(
"DEFG",
DataTypes.VARCHAR(4)
.notNull())))),
ResolvedSchema.of(
Column.physical("f0", DataTypes.FLOAT().notNull()),
Column.physical(
"f1",
DataTypes.ROW(
DataTypes.FIELD(
"f0",
DataTypes.FLOAT()
.notNull()),
DataTypes.FIELD(
"f1",
DataTypes.VARCHAR(4)
.notNull()))
.notNull())))),
TestSpec.test("Finding common type. Insert cast for calls")
.values(call(new IntScalarFunction()), row(3.1f))
.equalTo(
new ValuesQueryOperation(
asList(
singletonList(
cast(
CallExpression.anonymous(
new IntScalarFunction(),
Collections.emptyList(),
DataTypes.INT()),
DataTypes.FLOAT())),
singletonList(
cast(
valueLiteral(3.1f),
DataTypes.FLOAT()))),
ResolvedSchema.of(
Column.physical("f0", DataTypes.FLOAT())))),
TestSpec.test("Row in a function result is not flattened")
.values(call(new RowScalarFunction()))
.equalTo(
new ValuesQueryOperation(
singletonList(
singletonList(
CallExpression.anonymous(
new RowScalarFunction(),
Collections.emptyList(),
DataTypes.ROW(
DataTypes.FIELD(
"f0",
DataTypes.INT()),
DataTypes.FIELD(
"f1",
DataTypes
.STRING()))))),
ResolvedSchema.of(
Column.physical(
"f0",
DataTypes.ROW(
DataTypes.FIELD(
"f0", DataTypes.INT()),
DataTypes.FIELD(
"f1",
DataTypes.STRING())))))),
TestSpec.test("Cannot find a common super type")
.values(
valueLiteral(LocalTime.of(1, 1)),
valueLiteral(LocalDate.of(1, 1, 1)))
.expectValidationException(
"Types in fromValues(...) must have a common super type. Could not find a common type"
+ " for all rows at column 0.\n"
+ "Could not find a common super type for types: [TIME(0) NOT NULL, DATE NOT NULL]"),
TestSpec.test("Cannot find a common super type in a nested row")
.values(
row(1, row(3, valueLiteral(LocalTime.of(1, 1)))),
row(1, row(4, valueLiteral(LocalTime.of(2, 1)))),
row(2D, row(2.0, valueLiteral(LocalDate.of(1, 1, 1)))))
.expectValidationException(
"Types in fromValues(...) must have a common super type. Could not find a common type"
+ " for all rows at column 1.\n"
+ "Could not find a common super type for types: "
+ "[ROW<`f0` INT NOT NULL, `f1` TIME(0) NOT NULL> NOT NULL,"
+ " ROW<`f0` DOUBLE NOT NULL, `f1` DATE NOT NULL> NOT NULL]"),
TestSpec.test("Cannot cast to the requested type")
.values(
DataTypes.ROW(
DataTypes.FIELD("a", DataTypes.BIGINT()),
DataTypes.FIELD("b", DataTypes.BINARY(3))),
row(valueLiteral(1), valueLiteral(LocalTime.of(1, 1))),
row(valueLiteral((short) 2), valueLiteral(LocalDate.of(1, 1, 1))))
.expectValidationException(
"Could not cast the value of the 1 column: [ 01:01 ] of a row: [ 1, 01:01 ]"
+ " to the requested type: BINARY(3)"),
TestSpec.test("Cannot cast to the requested type in a nested row")
.values(
DataTypes.ROW(
DataTypes.FIELD("a", DataTypes.BIGINT()),
DataTypes.FIELD(
"b",
DataTypes.ROW(
DataTypes.FIELD(
"c", DataTypes.BINARY(3))))),
row(valueLiteral(1), row(valueLiteral(LocalTime.of(1, 1)))),
row(
valueLiteral((short) 2),
row(valueLiteral(LocalDate.of(1, 1, 1)))))
.expectValidationException(
"Could not cast the value of the 1 column: [ row(01:01) ] of a row: [ 1, row(01:01) ]"
+ " to the requested type: ROW<`c` BINARY(3)>"));
}
/** A simple function that returns a ROW. */
@FunctionHint(output = @DataTypeHint("ROW<f0 INT, f1 STRING>"))
public static
|
ValuesOperationTreeBuilderTest
|
java
|
apache__flink
|
flink-core/src/test/java/org/apache/flink/api/java/typeutils/TypeInfoFactoryTest.java
|
{
"start": 12059,
"end": 12208
}
|
class ____<C> extends MyTupleMapperL1<C, Boolean> {
// empty
}
@TypeInfo(MyTupleTypeInfoFactory.class)
public static
|
MyTupleMapperL2
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequest.java
|
{
"start": 2292,
"end": 6595
}
|
class ____ extends AbstractBulkByScrollRequest<DeleteByQueryRequest>
    implements
        IndicesRequest.Replaceable,
        ToXContentObject {

    public DeleteByQueryRequest() {
        this(new SearchRequest());
    }

    public DeleteByQueryRequest(String... indices) {
        this(new SearchRequest(indices));
    }

    DeleteByQueryRequest(SearchRequest search) {
        this(search, true);
    }

    public DeleteByQueryRequest(StreamInput in) throws IOException {
        super(in);
    }

    DeleteByQueryRequest(SearchRequest search, boolean setDefaults) {
        super(search, setDefaults);
        // Delete-By-Query does not require the source
        if (setDefaults) {
            search.source().fetchSource(false);
        }
    }

    /**
     * Set the query for selective delete. A {@code null} query is silently ignored
     * (the previously configured query, if any, is kept).
     */
    public DeleteByQueryRequest setQuery(QueryBuilder query) {
        if (query != null) {
            getSearchRequest().source().query(query);
        }
        return this;
    }

    /**
     * Set routing limiting the process to the shards that match that routing value.
     * A {@code null} routing is silently ignored.
     */
    public DeleteByQueryRequest setRouting(String routing) {
        if (routing != null) {
            getSearchRequest().routing(routing);
        }
        return this;
    }

    /**
     * The scroll size to control number of documents processed per batch
     */
    public DeleteByQueryRequest setBatchSize(int size) {
        getSearchRequest().source().size(size);
        return this;
    }

    /**
     * Set the IndicesOptions for controlling unavailable indices
     */
    public DeleteByQueryRequest setIndicesOptions(IndicesOptions indicesOptions) {
        getSearchRequest().indicesOptions(indicesOptions);
        return this;
    }

    /**
     * Gets the batch size for this request
     */
    public int getBatchSize() {
        return getSearchRequest().source().size();
    }

    /**
     * Gets the routing value used for this request
     */
    public String getRouting() {
        return getSearchRequest().routing();
    }

    @Override
    protected DeleteByQueryRequest self() {
        return this;
    }

    @Override
    public boolean includeDataStreams() {
        return true;
    }

    @Override
    public ActionRequestValidationException validate() {
        ActionRequestValidationException e = super.validate();
        final SearchRequest search = getSearchRequest();
        // Fix: check the search request for null BEFORE dereferencing it. The previous
        // ordering called search.indices() first, which would throw an NPE instead of
        // reporting a validation error when no search request was set.
        if (search == null) {
            e = addValidationError("source is missing", e);
            return e;
        }
        if (search.indices() == null || search.indices().length == 0) {
            e = addValidationError("use _all if you really want to delete from all existing indexes", e);
        }
        if (search.source() == null) {
            e = addValidationError("source is missing", e);
        } else if (search.source().query() == null) {
            e = addValidationError("query is missing", e);
        }
        return e;
    }

    @Override
    public DeleteByQueryRequest forSlice(TaskId slicingTask, SearchRequest slice, int totalSlices) {
        // Sliced sub-requests must not re-apply defaults (setDefaults=false) so the
        // parent's configuration carries over untouched.
        return doForSlice(new DeleteByQueryRequest(slice, false), slicingTask, totalSlices);
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder();
        b.append("delete-by-query ");
        searchToString(b);
        return b.toString();
    }

    // delete by query deletes all documents that match a query. The indices and indices options that affect how
    // indices are resolved depend entirely on the inner search request. That's why the following methods delegate to it.
    @Override
    public DeleteByQueryRequest indices(String... indices) {
        assert getSearchRequest() != null;
        getSearchRequest().indices(indices);
        return this;
    }

    @Override
    public String[] indices() {
        assert getSearchRequest() != null;
        return getSearchRequest().indices();
    }

    @Override
    public IndicesOptions indicesOptions() {
        assert getSearchRequest() != null;
        return getSearchRequest().indicesOptions();
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        getSearchRequest().source().innerToXContent(builder, params);
        builder.endObject();
        return builder;
    }
}
|
DeleteByQueryRequest
|
java
|
apache__camel
|
components/camel-barcode/src/test/java/org/apache/camel/dataformat/barcode/BarcodeDataFormatTest.java
|
{
"start": 1397,
"end": 3676
}
|
class ____ {
/**
* Test default constructor.
*/
@Test
final void testDefaultConstructor() throws IOException {
try (BarcodeDataFormat barcodeDataFormat = new BarcodeDataFormat()) {
barcodeDataFormat.start();
this.checkParams(BarcodeParameters.IMAGE_TYPE, BarcodeParameters.WIDTH, BarcodeParameters.HEIGHT,
BarcodeParameters.FORMAT, barcodeDataFormat.getParams());
}
}
/**
* Test constructor with barcode format.
*/
@Test
final void testConstructorWithBarcodeFormat() throws IOException {
try (BarcodeDataFormat barcodeDataFormat = new BarcodeDataFormat(BarcodeFormat.AZTEC)) {
barcodeDataFormat.start();
this.checkParams(BarcodeParameters.IMAGE_TYPE, BarcodeParameters.WIDTH, BarcodeParameters.HEIGHT,
BarcodeFormat.AZTEC, barcodeDataFormat.getParams());
}
}
/**
* Test constructor with size.
*/
@Test
final void testConstructorWithSize() throws IOException {
try (BarcodeDataFormat barcodeDataFormat = new BarcodeDataFormat(200, 250)) {
barcodeDataFormat.start();
this.checkParams(BarcodeParameters.IMAGE_TYPE, 200, 250, BarcodeParameters.FORMAT, barcodeDataFormat.getParams());
}
}
/**
* Test constructor with image type.
*/
@Test
final void testConstructorWithImageType() throws IOException {
try (BarcodeDataFormat barcodeDataFormat = new BarcodeDataFormat(BarcodeImageType.JPG)) {
barcodeDataFormat.start();
this.checkParams(BarcodeImageType.JPG, BarcodeParameters.WIDTH, BarcodeParameters.HEIGHT, BarcodeParameters.FORMAT,
barcodeDataFormat.getParams());
}
}
/**
* Test constructor with all.
*/
@Test
final void testConstructorWithAll() throws IOException {
try (BarcodeDataFormat barcodeDataFormat = new BarcodeDataFormat(200, 250, BarcodeImageType.JPG, BarcodeFormat.AZTEC)) {
barcodeDataFormat.start();
this.checkParams(BarcodeImageType.JPG, 200, 250, BarcodeFormat.AZTEC, barcodeDataFormat.getParams());
}
}
/**
* Test of optimizeHints method, of
|
BarcodeDataFormatTest
|
java
|
spring-projects__spring-framework
|
spring-orm/src/main/java/org/springframework/orm/jpa/JpaDialect.java
|
{
"start": 2001,
"end": 9392
}
|
interface ____ extends PersistenceExceptionTranslator {

	/**
	 * Begin the given JPA transaction, applying the semantics specified by the
	 * given Spring transaction definition (in particular, an isolation level
	 * and a timeout). Called by JpaTransactionManager on transaction begin.
	 * <p>An implementation can configure the JPA Transaction object and then
	 * invoke {@code begin}, or invoke a special begin method that takes,
	 * for example, an isolation level. It can apply the read-only flag as flush
	 * mode (returning a transaction data object that holds the previous flush
	 * mode for reset in {@code cleanupTransaction}), and may apply the read-only
	 * flag and isolation level to the underlying JDBC Connection before beginning
	 * the transaction.
	 * <p>Implementations can also use the Spring transaction name, as exposed by
	 * the passed-in TransactionDefinition, to optimize for specific data access
	 * use cases (effectively using the current transaction name as use case
	 * identifier).
	 * <p>This method also allows for exposing savepoint capabilities if supported
	 * by the persistence provider, through returning an Object that implements
	 * Spring's {@link org.springframework.transaction.SavepointManager}
	 * interface. {@link JpaTransactionManager} will use this capability if needed.
	 * @param entityManager the EntityManager to begin a JPA transaction on
	 * @param definition the Spring transaction definition that defines semantics
	 * @return an arbitrary object that holds transaction data, if any
	 * (to be passed into {@link #cleanupTransaction}); may be {@code null} if no
	 * cleanup state is needed. May implement the
	 * {@link org.springframework.transaction.SavepointManager} interface.
	 * @throws jakarta.persistence.PersistenceException if thrown by JPA methods
	 * @throws java.sql.SQLException if thrown by JDBC methods
	 * @throws org.springframework.transaction.TransactionException in case of invalid arguments
	 * @see #cleanupTransaction
	 * @see jakarta.persistence.EntityTransaction#begin
	 * @see org.springframework.jdbc.datasource.DataSourceUtils#prepareConnectionForTransaction
	 */
	@Nullable Object beginTransaction(EntityManager entityManager, TransactionDefinition definition)
			throws PersistenceException, SQLException, TransactionException;

	/**
	 * Prepare a JPA transaction, applying the specified semantics. Called by
	 * EntityManagerFactoryUtils when enlisting an EntityManager in a JTA
	 * transaction or a locally joined transaction (for example, after upgrading
	 * an unsynchronized EntityManager to a synchronized one).
	 * <p>An implementation can apply the read-only flag as flush mode. In that
	 * case, a transaction data object can be returned that holds the previous
	 * flush mode (and possibly other data), to be reset in
	 * {@code cleanupTransaction}. Implementations can also use the Spring
	 * transaction name to optimize for specific data access use cases.
	 * @param entityManager the EntityManager to begin a JPA transaction on
	 * @param readOnly whether the transaction is supposed to be read-only
	 * @param name the name of the transaction (if any)
	 * @return an arbitrary object that holds transaction data, if any
	 * (to be passed into cleanupTransaction); may be {@code null}
	 * @throws jakarta.persistence.PersistenceException if thrown by JPA methods
	 * @see #cleanupTransaction
	 */
	@Nullable Object prepareTransaction(EntityManager entityManager, boolean readOnly, @Nullable String name)
			throws PersistenceException;

	/**
	 * Clean up the transaction via the given transaction data. Called by
	 * JpaTransactionManager and EntityManagerFactoryUtils on transaction cleanup.
	 * <p>An implementation can, for example, reset the read-only flag and
	 * isolation level of the underlying JDBC Connection. Furthermore, an exposed
	 * data access use case can be reset here.
	 * @param transactionData arbitrary object that holds transaction data, if any
	 * (as returned by beginTransaction or prepareTransaction); may be {@code null}
	 * @see #beginTransaction
	 * @see org.springframework.jdbc.datasource.DataSourceUtils#resetConnectionAfterTransaction
	 */
	void cleanupTransaction(@Nullable Object transactionData);

	/**
	 * Retrieve the JDBC Connection that the given JPA EntityManager uses
	 * underneath, if accessing a relational database. This method will just get
	 * invoked if actually needing access to the underlying JDBC Connection,
	 * usually within an active JPA transaction (for example, by
	 * JpaTransactionManager). The returned handle will be passed into the
	 * {@code releaseJdbcConnection} method when not needed anymore.
	 * <p>This strategy is necessary as JPA does not provide a standard way to
	 * retrieve the underlying JDBC Connection (due to the fact that a JPA
	 * implementation might not work with a relational database at all).
	 * <p>Implementations are encouraged to return an unwrapped Connection object,
	 * i.e. the Connection as they got it from the connection pool. This makes it
	 * easier for application code to get at the underlying native JDBC
	 * Connection, like an OracleConnection, which is sometimes necessary for LOB
	 * handling etc. We assume that calling code knows how to properly handle the
	 * returned Connection object.
	 * <p>In a simple case where the returned Connection will be auto-closed with
	 * the EntityManager or can be released via the Connection object itself, an
	 * implementation can return a SimpleConnectionHandle that just contains the
	 * Connection. If some other object is needed in {@code releaseJdbcConnection},
	 * an implementation should use a special handle that references that other object.
	 * @param entityManager the current JPA EntityManager
	 * @param readOnly whether the Connection is only needed for read-only purposes
	 * @return a handle for the Connection, to be passed into {@code releaseJdbcConnection},
	 * or {@code null} if no JDBC Connection can be retrieved
	 * @throws jakarta.persistence.PersistenceException if thrown by JPA methods
	 * @throws java.sql.SQLException if thrown by JDBC methods
	 * @see #releaseJdbcConnection
	 * @see org.springframework.jdbc.datasource.ConnectionHandle#getConnection
	 * @see org.springframework.jdbc.datasource.SimpleConnectionHandle
	 * @see JpaTransactionManager#setDataSource
	 */
	@Nullable ConnectionHandle getJdbcConnection(EntityManager entityManager, boolean readOnly)
			throws PersistenceException, SQLException;

	/**
	 * Release the given JDBC Connection, which has originally been retrieved
	 * via {@code getJdbcConnection}. This should be invoked in any case,
	 * to allow for proper release of the retrieved Connection handle.
	 * <p>An implementation might simply do nothing, if the Connection returned
	 * by {@code getJdbcConnection} will be implicitly closed when the JPA
	 * transaction completes or when the EntityManager is closed.
	 * @param conHandle the JDBC Connection handle to release
	 * @param entityManager the current JPA EntityManager
	 * @throws jakarta.persistence.PersistenceException if thrown by JPA methods
	 * @throws java.sql.SQLException if thrown by JDBC methods
	 * @see #getJdbcConnection
	 */
	void releaseJdbcConnection(ConnectionHandle conHandle, EntityManager entityManager)
			throws PersistenceException, SQLException;
}
|
JpaDialect
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/GetNodesToLabelsRequest.java
|
{
"start": 906,
"end": 1060
}
|
class ____ {
  /**
   * Create a new, empty request instance via the YARN records factory
   * ({@link Records#newRecord}); the request carries no parameters.
   */
  public static GetNodesToLabelsRequest newInstance() {
    return Records.newRecord(GetNodesToLabelsRequest.class);
  }
}
|
GetNodesToLabelsRequest
|
java
|
apache__camel
|
components/camel-sjms/src/test/java/org/apache/camel/component/sjms/tx/TransactedConsumersMultipleRouteTest.java
|
{
"start": 1241,
"end": 2205
}
|
class ____ extends TransactedConsumerSupport {

    @RegisterExtension
    protected static ArtemisService service = ArtemisServiceFactory.createVMService();

    /**
     * Consuming from a single destination via multiple routes must be thread
     * safe: the expected redelivered/non-redelivered message counts are
     * verified by {@code runTest} in the support base class.
     */
    @Test
    public void testRoute() throws Exception {
        final String endpointUri = "sjms:queue:one.consumer.one.route.test";
        final int routes = 2;
        final int consumersPerRoute = 1;
        final int messages = 20;
        final int maxAttempts = 10;
        final int expectedNotRedelivered = 10;
        final int expectedRedelivered = 1;
        runTest(endpointUri, routes, messages, expectedNotRedelivered, expectedRedelivered,
                consumersPerRoute, maxAttempts);
    }

    @Override
    public String getBrokerUri() {
        // Point the routes at the embedded Artemis broker started above.
        return service.serviceAddress();
    }
}
|
TransactedConsumersMultipleRouteTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/boot/model/internal/ColumnsBuilder.java
|
{
"start": 1987,
"end": 9554
}
|
// Collects the column/join-column mappings declared (or defaulted) for a single
// persistent attribute: reads @Column/@Columns/@Formula and the join-column
// family of annotations, falling back to sensible defaults per association type.
class ____ {
	private final PropertyHolder propertyHolder;
	private final Nullability nullability;
	private final MemberDetails property;
	private final PropertyData inferredData;
	private final EntityBinder entityBinder;
	private final MetadataBuildingContext buildingContext;
	// Results populated by extractMetadata():
	private AnnotatedColumns columns;
	private AnnotatedJoinColumns joinColumns;
	public ColumnsBuilder(
			PropertyHolder propertyHolder,
			Nullability nullability,
			MemberDetails property,
			PropertyData inferredData,
			EntityBinder entityBinder,
			MetadataBuildingContext buildingContext) {
		this.propertyHolder = propertyHolder;
		this.nullability = nullability;
		this.property = property;
		this.inferredData = inferredData;
		this.entityBinder = entityBinder;
		this.buildingContext = buildingContext;
	}
	public AnnotatedColumns getColumns() {
		return columns;
	}
	public AnnotatedJoinColumns getJoinColumns() {
		return joinColumns;
	}
	// Main entry point: inspects the member's annotations and fills in
	// 'columns' and 'joinColumns'. Branch order matters: explicit @Column /
	// @Formula / @Columns first, then per-association defaults, then a
	// no-annotation fallback. Returns this for chaining.
	public ColumnsBuilder extractMetadata() {
		columns = null;
		joinColumns = buildExplicitJoinColumns( property, inferredData );
		if ( property.hasDirectAnnotationUsage( Column.class ) ) {
			columns = buildColumnFromAnnotation(
					property.getDirectAnnotationUsage( Column.class ),
					property.getDirectAnnotationUsage( FractionalSeconds.class ),
					nullability,
					propertyHolder,
					inferredData,
					entityBinder.getSecondaryTables(),
					buildingContext
			);
		}
		else if ( property.hasDirectAnnotationUsage( Formula.class) ) {
			columns = buildFormulaFromAnnotation(
					getOverridableAnnotation( property, Formula.class, buildingContext ),
					nullability,
					propertyHolder,
					inferredData,
					entityBinder.getSecondaryTables(),
					buildingContext
			);
		}
		else if ( property.hasDirectAnnotationUsage( Columns.class ) ) {
			columns = buildColumnsFromAnnotations(
					property.getDirectAnnotationUsage( Columns.class ).columns(),
					null,
					nullability,
					propertyHolder,
					inferredData,
					entityBinder.getSecondaryTables(),
					buildingContext
			);
		}
		//set default values if needed
		if ( joinColumns == null
				&& ( property.hasDirectAnnotationUsage( ManyToOne.class )
					|| property.hasDirectAnnotationUsage( OneToOne.class ) ) ) {
			// to-one association without explicit join columns: default them
			joinColumns = buildDefaultJoinColumnsForToOne( property, inferredData );
		}
		else if ( joinColumns == null
				&& ( property.hasDirectAnnotationUsage( OneToMany.class )
					|| property.hasDirectAnnotationUsage( ElementCollection.class ) ) ) {
			// collection side: default join columns, honoring mappedBy for @OneToMany
			final var oneToMany = property.getDirectAnnotationUsage( OneToMany.class );
			joinColumns = AnnotatedJoinColumns.buildJoinColumns(
					null,
					oneToMany == null ? null : nullIfEmpty( oneToMany.mappedBy() ),
					entityBinder.getSecondaryTables(),
					propertyHolder,
					inferredData,
					buildingContext
			);
		}
		else if ( joinColumns == null
				&& property.hasDirectAnnotationUsage( Any.class ) ) {
			// @Any cannot be defaulted: it must declare its join columns explicitly
			throw new AnnotationException( "Property '" + getPath( propertyHolder, inferredData )
					+ "' is annotated '@Any' and must declare at least one '@JoinColumn'" );
		}
		if ( columns == null && !property.hasDirectAnnotationUsage( ManyToMany.class ) ) {
			//useful for collection of embedded elements
			columns = buildColumnFromNoAnnotation(
					property.getDirectAnnotationUsage( FractionalSeconds.class ),
					nullability,
					propertyHolder,
					inferredData,
					entityBinder.getSecondaryTables(),
					buildingContext
			);
		}
		if ( nullability == Nullability.FORCED_NOT_NULL ) {
			//force columns to not null
			for ( AnnotatedColumn column : columns.getColumns() ) {
				column.forceNotNull();
			}
		}
		return this;
	}
	// Default join columns for a @ManyToOne/@OneToOne attribute: use the
	// association's join table if one is configured, otherwise default
	// columns on the owning table (honoring @OneToOne(mappedBy)).
	private AnnotatedJoinColumns buildDefaultJoinColumnsForToOne(
			MemberDetails property,
			PropertyData inferredData) {
		final var joinTable = propertyHolder.getJoinTable( property );
		if ( joinTable != null ) {
			return AnnotatedJoinColumns.buildJoinColumns(
					joinTable.inverseJoinColumns(),
					null,
					entityBinder.getSecondaryTables(),
					propertyHolder,
					inferredData,
					buildingContext
			);
		}
		else {
			final var oneToOne = property.getDirectAnnotationUsage( OneToOne.class );
			return AnnotatedJoinColumns.buildJoinColumns(
					null,
					oneToOne == null ? null : nullIfEmpty( oneToOne.mappedBy() ),
					entityBinder.getSecondaryTables(),
					propertyHolder,
					inferredData,
					buildingContext
			);
		}
	}
	// Explicit join-column mappings from @JoinColumn(s), @JoinColumnOrFormula,
	// or @JoinFormula; returns null when none are present.
	private AnnotatedJoinColumns buildExplicitJoinColumns(MemberDetails property, PropertyData inferredData) {
		// process @JoinColumns before @Columns to handle collection of entities properly
		final var joinColumns = getJoinColumnAnnotations( property );
		if ( joinColumns != null ) {
			return AnnotatedJoinColumns.buildJoinColumns(
					joinColumns,
					null,
					entityBinder.getSecondaryTables(),
					propertyHolder,
					inferredData,
					buildingContext
			);
		}
		final var joinColumnOrFormulas = joinColumnOrFormulaAnnotations( property );
		if ( joinColumnOrFormulas != null ) {
			return AnnotatedJoinColumns.buildJoinColumnsOrFormulas(
					joinColumnOrFormulas,
					null,
					entityBinder.getSecondaryTables(),
					propertyHolder,
					inferredData,
					buildingContext
			);
		}
		if ( property.hasDirectAnnotationUsage( JoinFormula.class ) ) {
			return AnnotatedJoinColumns.buildJoinColumnsWithFormula(
					getOverridableAnnotation( property, JoinFormula.class, buildingContext ),
					entityBinder.getSecondaryTables(),
					propertyHolder,
					inferredData,
					buildingContext
			);
		}
		return null;
	}
	// Repeated @JoinColumnOrFormula usages, or null when absent.
	private JoinColumnOrFormula[] joinColumnOrFormulaAnnotations(MemberDetails property) {
		final var annotations = property.getRepeatedAnnotationUsages(
				HibernateAnnotations.JOIN_COLUMN_OR_FORMULA,
				buildingContext.getBootstrapContext().getModelsContext()
		);
		return isNotEmpty( annotations ) ? annotations : null;
	}
	// Repeated @JoinColumn usages; for a @MapsId attribute, @PrimaryKeyJoinColumn
	// annotations are adapted to JoinColumns (see inline note). Null when absent.
	private JoinColumn[] getJoinColumnAnnotations(MemberDetails property) {
		final var modelsContext = buildingContext.getBootstrapContext().getModelsContext();
		final var joinColumns = property.getRepeatedAnnotationUsages( JpaAnnotations.JOIN_COLUMN, modelsContext );
		if ( isNotEmpty( joinColumns ) ) {
			return joinColumns;
		}
		else if ( property.hasDirectAnnotationUsage( MapsId.class ) ) {
			// inelegant solution to HHH-16463, let the PrimaryKeyJoinColumn
			// masquerade as a regular JoinColumn (when a @OneToOne maps to
			// the primary key of the child table, it's more elegant and more
			// spec-compliant to map the association with @PrimaryKeyJoinColumn)
			final var primaryKeyJoinColumns =
					property.getRepeatedAnnotationUsages( JpaAnnotations.PRIMARY_KEY_JOIN_COLUMN, modelsContext );
			if ( isNotEmpty( primaryKeyJoinColumns ) ) {
				final var adapters = new JoinColumn[primaryKeyJoinColumns.length];
				for ( int i = 0; i < primaryKeyJoinColumns.length; i++ ) {
					adapters[i] = JoinColumnJpaAnnotation.toJoinColumn( primaryKeyJoinColumns[i], modelsContext );
				}
				return adapters;
			}
			else {
				return null;
			}
		}
		else {
			return null;
		}
	}
	/**
	 * Useful to override a column either by {@code @MapsId} or by {@code @IdClass}
	 */
	AnnotatedColumns overrideColumnFromMapperOrMapsIdProperty(PropertyData override) {
		if ( override != null ) {
			final var memberDetails = override.getAttributeMember();
			final var joinColumns = buildExplicitJoinColumns( memberDetails, override );
			// fall back to default to-one join columns when nothing explicit exists
			return joinColumns == null
					? buildDefaultJoinColumnsForToOne( memberDetails, override )
					: joinColumns;
		}
		else {
			// no override in effect: keep the columns computed by extractMetadata()
			return columns;
		}
	}
}
|
ColumnsBuilder
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/PartitionResourcesInfo.java
|
{
"start": 1034,
"end": 1169
}
|
class ____ queue/user resource usage info for a given partition
*/
@XmlRootElement
@XmlAccessorType(XmlAccessType.FIELD)
public
|
represents
|
java
|
hibernate__hibernate-orm
|
hibernate-testing/src/main/java/org/hibernate/testing/boot/ServiceRegistryTestingImpl.java
|
{
"start": 1001,
"end": 2994
}
|
// Test-only StandardServiceRegistry that wires in a testing DialectFactory and
// a ConnectionProvider built by ConnectionProviderBuilder, so unit tests get a
// working registry without external configuration.
class ____
		extends StandardServiceRegistryImpl
		implements ServiceRegistryImplementor {
	// Private: instances are obtained via the static factory methods below,
	// which also perform the required initialize()/applyServiceRegistrations().
	private ServiceRegistryTestingImpl(
			boolean autoCloseRegistry,
			BootstrapServiceRegistry bootstrapServiceRegistry,
			Map<String, Object> configurationValues) {
		super( autoCloseRegistry, bootstrapServiceRegistry, configurationValues );
	}
	// Registry configured from Environment.getProperties() (i.e. hibernate.properties / system props).
	public static ServiceRegistryTestingImpl forUnitTesting() {
		return ServiceRegistryTestingImpl.create(
				true,
				new BootstrapServiceRegistryBuilder().build(),
				StandardServiceInitiators.LIST,
				Arrays.asList(
						dialectFactoryService(),
						connectionProviderService()
				),
				PropertiesHelper.map( Environment.getProperties() )
		);
	}
	// Registry configured from the caller-supplied settings map.
	public static ServiceRegistryTestingImpl forUnitTesting(Map<String,Object> settings) {
		return ServiceRegistryTestingImpl.create(
				true,
				new BootstrapServiceRegistryBuilder().build(),
				StandardServiceInitiators.LIST,
				Arrays.asList(
						dialectFactoryService(),
						connectionProviderService()
				),
				settings
		);
	}
	// Pre-built DialectFactory for tests.
	private static ProvidedService<DialectFactory> dialectFactoryService() {
		return new ProvidedService<>( DialectFactory.class, new DialectFactoryTestingImpl() );
	}
	// ConnectionProvider from ConnectionProviderBuilder (true => allow aggressive release).
	private static ProvidedService<ConnectionProvider> connectionProviderService() {
		return new ProvidedService<>(
				ConnectionProvider.class,
				ConnectionProviderBuilder.buildConnectionProvider( true )
		);
	}
	// Full-control factory: builds, initializes, and applies the given
	// initiators and provided services before returning the registry.
	public static ServiceRegistryTestingImpl create(
			boolean autoCloseRegistry,
			BootstrapServiceRegistry bootstrapServiceRegistry,
			List<StandardServiceInitiator<?>> serviceInitiators,
			List<ProvidedService<?>> providedServices,
			Map<String,Object> configurationValues) {
		ServiceRegistryTestingImpl instance = new ServiceRegistryTestingImpl( autoCloseRegistry, bootstrapServiceRegistry, configurationValues );
		instance.initialize();
		instance.applyServiceRegistrations( serviceInitiators, providedServices );
		return instance;
	}
}
|
ServiceRegistryTestingImpl
|
java
|
apache__logging-log4j2
|
log4j-api-test/src/test/java/org/apache/logging/log4j/message/ObjectMessageTest.java
|
{
"start": 3272,
"end": 4065
}
|
class ____ {
@Override
public boolean equals(final Object other) {
return other instanceof NonSerializable; // a very lenient equals()
}
}
return Stream.of(
"World",
new NonSerializable(),
new BigDecimal("123.456"),
// LOG4J2-3680
new RuntimeException(),
null);
}
    // Round-trips an ObjectMessage through Java serialization and checks the
    // deserialized copy is still an ObjectMessage equal to the original.
    // Args come from the companion @MethodSource (including null and a
    // non-serializable payload — see LOG4J2-3680 for the Throwable case).
    @ParameterizedTest
    @MethodSource
    void testSerializable(final Object arg) {
        final Message expected = new ObjectMessage(arg);
        final Message actual = SerialUtil.deserialize(SerialUtil.serialize(expected));
        assertThat(actual).isInstanceOf(ObjectMessage.class);
        assertThat(actual).isEqualTo(expected);
    }
}
|
NonSerializable
|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/internal/operators/observable/ObservableFlatMapSingle.java
|
{
"start": 1920,
"end": 7876
}
|
class ____<T, R>
extends AtomicInteger
implements Observer<T>, Disposable {
private static final long serialVersionUID = 8600231336733376951L;
final Observer<? super R> downstream;
final boolean delayErrors;
final CompositeDisposable set;
final AtomicInteger active;
final AtomicThrowable errors;
final Function<? super T, ? extends SingleSource<? extends R>> mapper;
final AtomicReference<SpscLinkedArrayQueue<R>> queue;
Disposable upstream;
volatile boolean cancelled;
FlatMapSingleObserver(Observer<? super R> actual,
Function<? super T, ? extends SingleSource<? extends R>> mapper, boolean delayErrors) {
this.downstream = actual;
this.mapper = mapper;
this.delayErrors = delayErrors;
this.set = new CompositeDisposable();
this.errors = new AtomicThrowable();
this.active = new AtomicInteger(1);
this.queue = new AtomicReference<>();
}
@Override
public void onSubscribe(Disposable d) {
if (DisposableHelper.validate(this.upstream, d)) {
this.upstream = d;
downstream.onSubscribe(this);
}
}
@Override
public void onNext(T t) {
SingleSource<? extends R> ms;
try {
ms = Objects.requireNonNull(mapper.apply(t), "The mapper returned a null SingleSource");
} catch (Throwable ex) {
Exceptions.throwIfFatal(ex);
upstream.dispose();
onError(ex);
return;
}
active.getAndIncrement();
InnerObserver inner = new InnerObserver();
if (!cancelled && set.add(inner)) {
ms.subscribe(inner);
}
}
@Override
public void onError(Throwable t) {
active.decrementAndGet();
if (errors.tryAddThrowableOrReport(t)) {
if (!delayErrors) {
set.dispose();
}
drain();
}
}
@Override
public void onComplete() {
active.decrementAndGet();
drain();
}
@Override
public void dispose() {
cancelled = true;
upstream.dispose();
set.dispose();
errors.tryTerminateAndReport();
}
@Override
public boolean isDisposed() {
return cancelled;
}
    // Called when an inner SingleSource succeeds with a value.
    // Fast path: if no drain is in progress (the inherited AtomicInteger is the
    // work-in-progress counter), claim it via CAS(0,1) and emit directly,
    // terminating immediately when this was the last active source and the
    // queue is empty. Slow path: enqueue the value (queue offer guarded by
    // synchronized — multiple inner observers may race here) and enter the
    // drain loop only if this call incremented the counter from 0.
    void innerSuccess(InnerObserver inner, R value) {
        set.delete(inner);
        if (get() == 0 && compareAndSet(0, 1)) {
            downstream.onNext(value);
            boolean d = active.decrementAndGet() == 0;
            SpscLinkedArrayQueue<R> q = queue.get();
            if (d && (q == null || q.isEmpty())) {
                // last source done and nothing queued: terminate downstream
                errors.tryTerminateConsumer(downstream);
                return;
            }
            if (decrementAndGet() == 0) {
                // no other work was signalled while we held the emission right
                return;
            }
        } else {
            SpscLinkedArrayQueue<R> q = getOrCreateQueue();
            synchronized (q) {
                q.offer(value);
            }
            active.decrementAndGet();
            if (getAndIncrement() != 0) {
                // another thread is already draining; it will pick this value up
                return;
            }
        }
        drainLoop();
    }
SpscLinkedArrayQueue<R> getOrCreateQueue() {
SpscLinkedArrayQueue<R> current = queue.get();
if (current != null) {
return current;
}
current = new SpscLinkedArrayQueue<>(Observable.bufferSize());
if (queue.compareAndSet(null, current)) {
return current;
}
return queue.get();
}
void innerError(InnerObserver inner, Throwable e) {
set.delete(inner);
if (errors.tryAddThrowableOrReport(e)) {
if (!delayErrors) {
upstream.dispose();
set.dispose();
}
active.decrementAndGet();
drain();
}
}
void drain() {
if (getAndIncrement() == 0) {
drainLoop();
}
}
void clear() {
SpscLinkedArrayQueue<R> q = queue.get();
if (q != null) {
q.clear();
}
}
    // Serialized emission loop: drains queued inner results to the downstream
    // observer. Only one thread runs this at a time (guarded by the inherited
    // work-in-progress counter); the outer loop re-checks via addAndGet(-missed)
    // whether more work was signalled while draining.
    void drainLoop() {
        int missed = 1;
        Observer<? super R> a = downstream;
        AtomicInteger n = active;
        AtomicReference<SpscLinkedArrayQueue<R>> qr = queue;
        for (;;) {
            for (;;) {
                if (cancelled) {
                    clear();
                    return;
                }
                if (!delayErrors) {
                    // eager error mode: terminate as soon as any error is recorded
                    Throwable ex = errors.get();
                    if (ex != null) {
                        clear();
                        errors.tryTerminateConsumer(a);
                        return;
                    }
                }
                // d == true means all sources (upstream + inners) have finished
                boolean d = n.get() == 0;
                SpscLinkedArrayQueue<R> q = qr.get();
                R v = q != null ? q.poll() : null;
                boolean empty = v == null;
                if (d && empty) {
                    // done and drained: emit terminal event (error or complete)
                    errors.tryTerminateConsumer(downstream);
                    return;
                }
                if (empty) {
                    break;
                }
                a.onNext(v);
            }
            missed = addAndGet(-missed);
            if (missed == 0) {
                break;
            }
        }
    }
final
|
FlatMapSingleObserver
|
java
|
google__auto
|
value/src/test/java/com/google/auto/value/processor/AutoAnnotationCompilationTest.java
|
{
"start": 13918,
"end": 14944
}
|
class ____ {",
" @AutoAnnotation",
" public static MyAnnotation newMyAnnotation(",
" List<Integer> value, Set<MyEnum> enums) {",
" return new AutoAnnotation_AnnotationFactory_newMyAnnotation(value, enums);",
" }",
"}");
JavaFileObject expectedOutput =
JavaFileObjects.forSourceLines(
"com.example.factories.AutoAnnotation_AnnotationFactory_newMyAnnotation",
"package com.example.factories;",
"",
"import com.example.annotations.MyAnnotation;",
"import com.example.enums.MyEnum;",
"import java.io.Serializable;",
"import java.util.Arrays;",
"import java.util.Collection;",
"import java.util.List;",
"import java.util.Set;",
GeneratedImport.importGeneratedAnnotationType(),
"",
"@Generated(\"" + AutoAnnotationProcessor.class.getName() + "\")",
"final
|
AnnotationFactory
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/index/fielddata/IndexNumericFieldData.java
|
{
"start": 2019,
"end": 2152
}
|
class ____ implements IndexFieldData<LeafNumericFieldData> {
/**
* The type of number.
*/
public
|
IndexNumericFieldData
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/issue_1300/Issue1330_float.java
|
{
"start": 1332,
"end": 1382
}
|
// Minimal binding target for the test: fastjson maps the JSON "value"
// number directly onto this public float field (no getter/setter required).
class ____ {
    public float value;
}
}
|
Model
|
java
|
ReactiveX__RxJava
|
src/test/java/io/reactivex/rxjava3/subscribers/ResourceSubscriberTest.java
|
{
"start": 1167,
"end": 5432
}
|
// Recording ResourceSubscriber used by the tests below: captures received
// values and errors, counts onStart/onComplete invocations, and disposes
// itself on any terminal event. Fields are read directly by the test methods.
class ____<T> extends ResourceSubscriber<T> {
        final List<T> values = new ArrayList<>();
        final List<Throwable> errors = new ArrayList<>();
        int complete;   // number of onComplete calls observed
        int start;      // number of onStart calls observed
        @Override
        protected void onStart() {
            // keep default behavior (requests Long.MAX_VALUE) but count the call
            super.onStart();
            start++;
        }
        @Override
        public void onNext(T value) {
            values.add(value);
        }
        @Override
        public void onError(Throwable e) {
            errors.add(e);
            dispose();
        }
        @Override
        public void onComplete() {
            complete++;
            dispose();
        }
        // Exposes the protected request(n) for tests exercising manual backpressure.
        void requestMore(long n) {
            request(n);
        }
    }
@Test(expected = NullPointerException.class)
public void nullResource() {
TestResourceSubscriber<Integer> ro = new TestResourceSubscriber<>();
ro.add(null);
}
@Test
public void addResources() {
TestResourceSubscriber<Integer> ro = new TestResourceSubscriber<>();
assertFalse(ro.isDisposed());
Disposable d = Disposable.empty();
ro.add(d);
assertFalse(d.isDisposed());
ro.dispose();
assertTrue(ro.isDisposed());
assertTrue(d.isDisposed());
ro.dispose();
assertTrue(ro.isDisposed());
assertTrue(d.isDisposed());
}
@Test
public void onCompleteCleansUp() {
TestResourceSubscriber<Integer> ro = new TestResourceSubscriber<>();
assertFalse(ro.isDisposed());
Disposable d = Disposable.empty();
ro.add(d);
assertFalse(d.isDisposed());
ro.onComplete();
assertTrue(ro.isDisposed());
assertTrue(d.isDisposed());
}
@Test
public void onErrorCleansUp() {
TestResourceSubscriber<Integer> ro = new TestResourceSubscriber<>();
assertFalse(ro.isDisposed());
Disposable d = Disposable.empty();
ro.add(d);
assertFalse(d.isDisposed());
ro.onError(new TestException());
assertTrue(ro.isDisposed());
assertTrue(d.isDisposed());
}
@Test
public void normal() {
TestResourceSubscriber<Integer> tc = new TestResourceSubscriber<>();
assertFalse(tc.isDisposed());
assertEquals(0, tc.start);
assertTrue(tc.values.isEmpty());
assertTrue(tc.errors.isEmpty());
Flowable.just(1).subscribe(tc);
assertTrue(tc.isDisposed());
assertEquals(1, tc.start);
assertEquals(1, tc.values.get(0).intValue());
assertTrue(tc.errors.isEmpty());
}
@Test
public void startOnce() {
List<Throwable> error = TestHelper.trackPluginErrors();
try {
TestResourceSubscriber<Integer> tc = new TestResourceSubscriber<>();
tc.onSubscribe(new BooleanSubscription());
BooleanSubscription bs = new BooleanSubscription();
tc.onSubscribe(bs);
assertTrue(bs.isCancelled());
assertEquals(1, tc.start);
TestHelper.assertError(error, 0, IllegalStateException.class, EndConsumerHelper.composeMessage(tc.getClass().getName()));
} finally {
RxJavaPlugins.reset();
}
}
@Test
public void dispose() {
TestResourceSubscriber<Integer> tc = new TestResourceSubscriber<>();
tc.dispose();
BooleanSubscription bs = new BooleanSubscription();
tc.onSubscribe(bs);
assertTrue(bs.isCancelled());
assertEquals(0, tc.start);
}
@Test
public void request() {
TestResourceSubscriber<Integer> tc = new TestResourceSubscriber<Integer>() {
@Override
protected void onStart() {
start++;
}
};
Flowable.just(1).subscribe(tc);
assertEquals(1, tc.start);
assertEquals(Collections.emptyList(), tc.values);
assertTrue(tc.errors.isEmpty());
assertEquals(0, tc.complete);
tc.requestMore(1);
assertEquals(1, tc.start);
assertEquals(1, tc.values.get(0).intValue());
assertTrue(tc.errors.isEmpty());
assertEquals(1, tc.complete);
}
static final
|
TestResourceSubscriber
|
java
|
spring-projects__spring-boot
|
core/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/ssl/SslPropertiesBundleRegistrar.java
|
{
"start": 1321,
"end": 4839
}
|
class ____ implements SslBundleRegistrar {
private final SslProperties.Bundles properties;
private final FileWatcher fileWatcher;
private final ResourceLoader resourceLoader;
SslPropertiesBundleRegistrar(SslProperties properties, FileWatcher fileWatcher, ResourceLoader resourceLoader) {
this.properties = properties.getBundle();
this.fileWatcher = fileWatcher;
this.resourceLoader = resourceLoader;
}
@Override
public void registerBundles(SslBundleRegistry registry) {
registerBundles(registry, this.properties.getPem(), PropertiesSslBundle::get, this::watchedPemPaths);
registerBundles(registry, this.properties.getJks(), PropertiesSslBundle::get, this::watchedJksPaths);
}
private <P extends SslBundleProperties> void registerBundles(SslBundleRegistry registry, Map<String, P> properties,
BiFunction<P, ResourceLoader, SslBundle> bundleFactory, Function<Bundle<P>, Set<Path>> watchedPaths) {
properties.forEach((bundleName, bundleProperties) -> {
Supplier<SslBundle> bundleSupplier = () -> bundleFactory.apply(bundleProperties, this.resourceLoader);
try {
registry.registerBundle(bundleName, bundleSupplier.get());
if (bundleProperties.isReloadOnUpdate()) {
Supplier<Set<Path>> pathsSupplier = () -> watchedPaths
.apply(new Bundle<>(bundleName, bundleProperties));
watchForUpdates(registry, bundleName, pathsSupplier, bundleSupplier);
}
}
catch (IllegalStateException ex) {
throw new IllegalStateException("Unable to register SSL bundle '%s'".formatted(bundleName), ex);
}
});
}
private void watchForUpdates(SslBundleRegistry registry, String bundleName, Supplier<Set<Path>> pathsSupplier,
Supplier<SslBundle> bundleSupplier) {
try {
this.fileWatcher.watch(pathsSupplier.get(), () -> registry.updateBundle(bundleName, bundleSupplier.get()));
}
catch (RuntimeException ex) {
throw new IllegalStateException("Unable to watch for reload on update", ex);
}
}
private Set<Path> watchedJksPaths(Bundle<JksSslBundleProperties> bundle) {
List<BundleContentProperty> watched = new ArrayList<>();
watched.add(new BundleContentProperty("keystore.location", bundle.properties().getKeystore().getLocation()));
watched
.add(new BundleContentProperty("truststore.location", bundle.properties().getTruststore().getLocation()));
return watchedPaths(bundle.name(), watched);
}
private Set<Path> watchedPemPaths(Bundle<PemSslBundleProperties> bundle) {
List<BundleContentProperty> watched = new ArrayList<>();
watched
.add(new BundleContentProperty("keystore.private-key", bundle.properties().getKeystore().getPrivateKey()));
watched
.add(new BundleContentProperty("keystore.certificate", bundle.properties().getKeystore().getCertificate()));
watched.add(new BundleContentProperty("truststore.private-key",
bundle.properties().getTruststore().getPrivateKey()));
watched.add(new BundleContentProperty("truststore.certificate",
bundle.properties().getTruststore().getCertificate()));
return watchedPaths(bundle.name(), watched);
}
private Set<Path> watchedPaths(String bundleName, List<BundleContentProperty> properties) {
try {
return properties.stream()
.filter(BundleContentProperty::hasValue)
.map((content) -> content.toWatchPath(this.resourceLoader))
.collect(Collectors.toSet());
}
catch (BundleContentNotWatchableException ex) {
throw ex.withBundleName(bundleName);
}
}
private record Bundle<P>(String name, P properties) {
}
}
|
SslPropertiesBundleRegistrar
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/io/network/buffer/LocalBufferPoolTest.java
|
{
"start": 37379,
"end": 38172
}
|
class ____ implements Callable<Boolean> {
private final BufferProvider bufferProvider;
private final int numBuffersToRequest;
private BufferRequesterTask(BufferProvider bufferProvider, int numBuffersToRequest) {
this.bufferProvider = bufferProvider;
this.numBuffersToRequest = numBuffersToRequest;
}
@Override
public Boolean call() {
try {
for (int i = 0; i < numBuffersToRequest; i++) {
Buffer buffer = checkNotNull(bufferProvider.requestBuffer());
buffer.recycleBuffer();
}
} catch (Throwable t) {
return false;
}
return true;
}
}
private static
|
BufferRequesterTask
|
java
|
apache__kafka
|
group-coordinator/src/main/java/org/apache/kafka/coordinator/group/classic/ClassicGroupState.java
|
{
"start": 1059,
"end": 5961
}
|
enum ____ {
/**
* Group has no more members, but lingers until all offsets have expired. This state
* also represents groups which use Kafka only for offset commits and have no members.
*
* action: respond normally to join group from new members
* respond to sync group with UNKNOWN_MEMBER_ID
* respond to heartbeat with UNKNOWN_MEMBER_ID
* respond to leave group with UNKNOWN_MEMBER_ID
* respond to offset commit with UNKNOWN_MEMBER_ID
* allow offset fetch requests
* transition: last offsets removed in periodic expiration task => DEAD
* join group from a new member => PREPARING_REBALANCE
* group is removed by partition emigration => DEAD
* group is removed by expiration => DEAD
*/
EMPTY("Empty"),
/**
* Group is preparing to rebalance.
*
* action: respond to heartbeats with REBALANCE_IN_PROGRESS
* respond to sync group with REBALANCE_IN_PROGRESS
* remove member on leave group request
* park join group requests from new or existing members until all expected members have joined
* allow offset commits from previous generation
* allow offset fetch requests
* transition: some members have joined by the timeout => COMPLETING_REBALANCE
* all members have left the group => EMPTY
* group is removed by partition emigration => DEAD
*/
PREPARING_REBALANCE("PreparingRebalance"),
/**
* Group is awaiting state assignment from the leader.
*
* action: respond to heartbeats with REBALANCE_IN_PROGRESS
* respond to offset commits with REBALANCE_IN_PROGRESS
* park sync group requests from followers until transition to STABLE
* allow offset fetch requests
* transition: sync group with state assignment received from leader => STABLE
* join group from new member or existing member with updated metadata => PREPARING_REBALANCE
* leave group from existing member => PREPARING_REBALANCE
* member failure detected => PREPARING_REBALANCE
* group is removed by partition emigration => DEAD
*/
COMPLETING_REBALANCE("CompletingRebalance"),
/**
* Group is stable.
*
* action: respond to member heartbeats normally
* respond to sync group from any member with current assignment
* respond to join group from followers with matching metadata with current group metadata
* allow offset commits from member of current generation
* allow offset fetch requests
* transition: member failure detected via heartbeat => PREPARING_REBALANCE
* leave group from existing member => PREPARING_REBALANCE
* leader join-group received => PREPARING_REBALANCE
* follower join-group with new metadata => PREPARING_REBALANCE
* group is removed by partition emigration => DEAD
*/
STABLE("Stable"),
/**
* Group has no more members and its metadata is being removed.
*
* action: respond to join group with UNKNOWN_MEMBER_ID
* respond to sync group with UNKNOWN_MEMBER_ID
* respond to heartbeat with UNKNOWN_MEMBER_ID
* respond to leave group with UNKNOWN_MEMBER_ID
* respond to offset commit with UNKNOWN_MEMBER_ID
* allow offset fetch requests
* transition: DEAD is a final state before group metadata is cleaned up, so there are no transitions
*/
DEAD("Dead");
private final String name;
private final String lowerCaseName;
private Set<ClassicGroupState> validPreviousStates;
static {
EMPTY.addValidPreviousStates(PREPARING_REBALANCE);
PREPARING_REBALANCE.addValidPreviousStates(STABLE, COMPLETING_REBALANCE, EMPTY);
COMPLETING_REBALANCE.addValidPreviousStates(PREPARING_REBALANCE);
STABLE.addValidPreviousStates(COMPLETING_REBALANCE);
DEAD.addValidPreviousStates(STABLE, PREPARING_REBALANCE, COMPLETING_REBALANCE, EMPTY, DEAD);
}
ClassicGroupState(String name) {
this.name = name;
this.lowerCaseName = name.toLowerCase(Locale.ROOT);
}
@Override
public String toString() {
return name;
}
public String toLowerCaseString() {
return lowerCaseName;
}
private void addValidPreviousStates(ClassicGroupState... validPreviousStates) {
this.validPreviousStates = Set.of(validPreviousStates);
}
/**
* @return valid previous states a group must be in to transition to this state.
*/
public Set<ClassicGroupState> validPreviousStates() {
return this.validPreviousStates;
}
}
|
ClassicGroupState
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/GridmixJob.java
|
{
"start": 2344,
"end": 17008
}
|
class ____ implements Callable<Job>, Delayed {
// Gridmix job name format is GRIDMIX<6 digit sequence number>
public static final String JOB_NAME_PREFIX = "GRIDMIX";
public static final Logger LOG = LoggerFactory.getLogger(GridmixJob.class);
private static final ThreadLocal<Formatter> nameFormat =
new ThreadLocal<Formatter>() {
@Override
protected Formatter initialValue() {
final StringBuilder sb =
new StringBuilder(JOB_NAME_PREFIX.length() + 6);
sb.append(JOB_NAME_PREFIX);
return new Formatter(sb);
}
};
private boolean submitted;
protected final int seq;
protected final Path outdir;
protected final Job job;
protected final JobStory jobdesc;
protected final UserGroupInformation ugi;
protected final long submissionTimeNanos;
private static final ConcurrentHashMap<Integer,List<InputSplit>> descCache =
new ConcurrentHashMap<Integer,List<InputSplit>>();
protected static final String GRIDMIX_JOB_SEQ = "gridmix.job.seq";
protected static final String GRIDMIX_USE_QUEUE_IN_TRACE =
"gridmix.job-submission.use-queue-in-trace";
protected static final String GRIDMIX_DEFAULT_QUEUE =
"gridmix.job-submission.default-queue";
// configuration key to enable/disable High-Ram feature emulation
static final String GRIDMIX_HIGHRAM_EMULATION_ENABLE =
"gridmix.highram-emulation.enable";
// configuration key to enable/disable task jvm options
static final String GRIDMIX_TASK_JVM_OPTIONS_ENABLE =
"gridmix.task.jvm-options.enable";
private static void setJobQueue(Job job, String queue) {
if (queue != null) {
job.getConfiguration().set(MRJobConfig.QUEUE_NAME, queue);
}
}
public GridmixJob(final Configuration conf, long submissionMillis,
final JobStory jobdesc, Path outRoot, UserGroupInformation ugi,
final int seq) throws IOException {
this.ugi = ugi;
this.jobdesc = jobdesc;
this.seq = seq;
((StringBuilder)nameFormat.get().out()).setLength(JOB_NAME_PREFIX.length());
try {
job = this.ugi.doAs(new PrivilegedExceptionAction<Job>() {
public Job run() throws IOException {
String jobId = null == jobdesc.getJobID()
? "<unknown>"
: jobdesc.getJobID().toString();
Job ret = Job.getInstance(conf, nameFormat.get().format("%06d", seq)
.toString());
ret.getConfiguration().setInt(GRIDMIX_JOB_SEQ, seq);
ret.getConfiguration().set(Gridmix.ORIGINAL_JOB_ID, jobId);
ret.getConfiguration().set(Gridmix.ORIGINAL_JOB_NAME,
jobdesc.getName());
if (conf.getBoolean(GRIDMIX_USE_QUEUE_IN_TRACE, false)) {
setJobQueue(ret, jobdesc.getQueueName());
} else {
setJobQueue(ret, conf.get(GRIDMIX_DEFAULT_QUEUE));
}
// check if the job can emulate compression
if (canEmulateCompression()) {
// set the compression related configs if compression emulation is
// enabled
if (CompressionEmulationUtil.isCompressionEmulationEnabled(conf)) {
CompressionEmulationUtil.configureCompressionEmulation(
jobdesc.getJobConf(), ret.getConfiguration());
}
}
// configure high ram properties if enabled
if (conf.getBoolean(GRIDMIX_HIGHRAM_EMULATION_ENABLE, true)) {
configureHighRamProperties(jobdesc.getJobConf(),
ret.getConfiguration());
}
// configure task jvm options if enabled
// this knob can be turned off if there is a mismatch between the
// target (simulation) cluster and the original cluster. Such a
// mismatch can result in job failures (due to memory issues) on the
// target (simulated) cluster.
//
// TODO If configured, scale the original task's JVM (heap related)
// options to suit the target (simulation) cluster
if (conf.getBoolean(GRIDMIX_TASK_JVM_OPTIONS_ENABLE, true)) {
configureTaskJVMOptions(jobdesc.getJobConf(),
ret.getConfiguration());
}
return ret;
}
});
} catch (InterruptedException e) {
throw new IOException(e);
}
submissionTimeNanos = TimeUnit.NANOSECONDS.convert(
submissionMillis, TimeUnit.MILLISECONDS);
outdir = new Path(outRoot, "" + seq);
}
@SuppressWarnings("deprecation")
protected static void configureTaskJVMOptions(Configuration originalJobConf,
Configuration simulatedJobConf){
// Get the heap related java opts used for the original job and set the
// same for the simulated job.
// set task task heap options
configureTaskJVMMaxHeapOptions(originalJobConf, simulatedJobConf,
JobConf.MAPRED_TASK_JAVA_OPTS);
// set map task heap options
configureTaskJVMMaxHeapOptions(originalJobConf, simulatedJobConf,
MRJobConfig.MAP_JAVA_OPTS);
// set reduce task heap options
configureTaskJVMMaxHeapOptions(originalJobConf, simulatedJobConf,
MRJobConfig.REDUCE_JAVA_OPTS);
}
// Configures the task's max heap options using the specified key
private static void configureTaskJVMMaxHeapOptions(Configuration srcConf,
Configuration destConf,
String key) {
String srcHeapOpts = srcConf.get(key);
if (srcHeapOpts != null) {
List<String> srcMaxOptsList = new ArrayList<String>();
// extract the max heap options and ignore the rest
extractMaxHeapOpts(srcHeapOpts, srcMaxOptsList,
new ArrayList<String>());
if (srcMaxOptsList.size() > 0) {
List<String> destOtherOptsList = new ArrayList<String>();
// extract the other heap options and ignore the max options in the
// destination configuration
String destHeapOpts = destConf.get(key);
if (destHeapOpts != null) {
extractMaxHeapOpts(destHeapOpts, new ArrayList<String>(),
destOtherOptsList);
}
// the source configuration might have some task level max heap opts set
// remove these opts from the destination configuration and replace
// with the options set in the original configuration
StringBuilder newHeapOpts = new StringBuilder();
for (String otherOpt : destOtherOptsList) {
newHeapOpts.append(otherOpt).append(" ");
}
for (String opts : srcMaxOptsList) {
newHeapOpts.append(opts).append(" ");
}
// set the final heap opts
destConf.set(key, newHeapOpts.toString().trim());
}
}
}
// Scales the desired job-level configuration parameter. This API makes sure
// that the ratio of the job level configuration parameter to the cluster
// level configuration parameter is maintained in the simulated run. Hence
// the values are scaled from the original cluster's configuration to the
// simulated cluster's configuration for higher emulation accuracy.
// This kind of scaling is useful for memory parameters.
private static void scaleConfigParameter(Configuration sourceConf,
Configuration destConf, String clusterValueKey,
String jobValueKey, long defaultValue) {
long simulatedClusterDefaultValue =
destConf.getLong(clusterValueKey, defaultValue);
long originalClusterDefaultValue =
sourceConf.getLong(clusterValueKey, defaultValue);
long originalJobValue =
sourceConf.getLong(jobValueKey, defaultValue);
double scaleFactor = (double)originalJobValue/originalClusterDefaultValue;
long simulatedJobValue = (long)(scaleFactor * simulatedClusterDefaultValue);
if (LOG.isDebugEnabled()) {
LOG.debug("For the job configuration parameter '" + jobValueKey
+ "' and the cluster configuration parameter '"
+ clusterValueKey + "', the original job's configuration value"
+ " is scaled from '" + originalJobValue + "' to '"
+ simulatedJobValue + "' using the default (unit) value of "
+ "'" + originalClusterDefaultValue + "' for the original "
+ " cluster and '" + simulatedClusterDefaultValue + "' for the"
+ " simulated cluster.");
}
destConf.setLong(jobValueKey, simulatedJobValue);
}
// Checks if the scaling of original job's memory parameter value is
// valid
@SuppressWarnings("deprecation")
private static boolean checkMemoryUpperLimits(String jobKey, String limitKey,
Configuration conf,
boolean convertLimitToMB) {
if (conf.get(limitKey) != null) {
long limit = conf.getLong(limitKey, JobConf.DISABLED_MEMORY_LIMIT);
// scale only if the max memory limit is set.
if (limit >= 0) {
if (convertLimitToMB) {
limit /= (1024 * 1024); //Converting to MB
}
long scaledConfigValue =
conf.getLong(jobKey, JobConf.DISABLED_MEMORY_LIMIT);
// check now
if (scaledConfigValue > limit) {
throw new RuntimeException("Simulated job's configuration"
+ " parameter '" + jobKey + "' got scaled to a value '"
+ scaledConfigValue + "' which exceeds the upper limit of '"
+ limit + "' defined for the simulated cluster by the key '"
+ limitKey + "'. To disable High-Ram feature emulation, set '"
+ GRIDMIX_HIGHRAM_EMULATION_ENABLE + "' to 'false'.");
}
return true;
}
}
return false;
}
// Check if the parameter scaling does not exceed the cluster limits.
@SuppressWarnings("deprecation")
private static void validateTaskMemoryLimits(Configuration conf,
String jobKey, String clusterMaxKey) {
if (!checkMemoryUpperLimits(jobKey,
JobConf.UPPER_LIMIT_ON_TASK_VMEM_PROPERTY, conf, true)) {
checkMemoryUpperLimits(jobKey, clusterMaxKey, conf, false);
}
}
/**
* Sets the high ram job properties in the simulated job's configuration.
*/
@SuppressWarnings("deprecation")
static void configureHighRamProperties(Configuration sourceConf,
Configuration destConf) {
// set the memory per map task
scaleConfigParameter(sourceConf, destConf,
MRConfig.MAPMEMORY_MB, MRJobConfig.MAP_MEMORY_MB,
MRJobConfig.DEFAULT_MAP_MEMORY_MB);
// validate and fail early
validateTaskMemoryLimits(destConf, MRJobConfig.MAP_MEMORY_MB,
JTConfig.JT_MAX_MAPMEMORY_MB);
// set the memory per reduce task
scaleConfigParameter(sourceConf, destConf,
MRConfig.REDUCEMEMORY_MB, MRJobConfig.REDUCE_MEMORY_MB,
MRJobConfig.DEFAULT_REDUCE_MEMORY_MB);
// validate and fail early
validateTaskMemoryLimits(destConf, MRJobConfig.REDUCE_MEMORY_MB,
JTConfig.JT_MAX_REDUCEMEMORY_MB);
}
/**
* Indicates whether this {@link GridmixJob} supports compression emulation.
*/
protected abstract boolean canEmulateCompression();
protected GridmixJob(final Configuration conf, long submissionMillis,
final String name) throws IOException {
submissionTimeNanos = TimeUnit.NANOSECONDS.convert(
submissionMillis, TimeUnit.MILLISECONDS);
jobdesc = null;
outdir = null;
seq = -1;
ugi = UserGroupInformation.getCurrentUser();
try {
job = this.ugi.doAs(new PrivilegedExceptionAction<Job>() {
public Job run() throws IOException {
Job ret = Job.getInstance(conf, name);
ret.getConfiguration().setInt(GRIDMIX_JOB_SEQ, seq);
setJobQueue(ret, conf.get(GRIDMIX_DEFAULT_QUEUE));
return ret;
}
});
} catch (InterruptedException e) {
throw new IOException(e);
}
}
public UserGroupInformation getUgi() {
return ugi;
}
public String toString() {
return job.getJobName();
}
public long getDelay(TimeUnit unit) {
return unit.convert(submissionTimeNanos - System.nanoTime(),
TimeUnit.NANOSECONDS);
}
@Override
public int compareTo(Delayed other) {
if (this == other) {
return 0;
}
if (other instanceof GridmixJob) {
final long otherNanos = ((GridmixJob)other).submissionTimeNanos;
if (otherNanos < submissionTimeNanos) {
return 1;
}
if (otherNanos > submissionTimeNanos) {
return -1;
}
return id() - ((GridmixJob)other).id();
}
final long diff =
getDelay(TimeUnit.NANOSECONDS) - other.getDelay(TimeUnit.NANOSECONDS);
return 0 == diff ? 0 : (diff > 0 ? 1 : -1);
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
// not possible unless job is cloned; all jobs should be unique
return other instanceof GridmixJob && id() == ((GridmixJob)other).id();
}
@Override
public int hashCode() {
return id();
}
int id() {
return seq;
}
Job getJob() {
return job;
}
JobStory getJobDesc() {
return jobdesc;
}
void setSubmitted() {
submitted = true;
}
boolean isSubmitted() {
return submitted;
}
static void pushDescription(int seq, List<InputSplit> splits) {
if (null != descCache.putIfAbsent(seq, splits)) {
throw new IllegalArgumentException("Description exists for id " + seq);
}
}
static List<InputSplit> pullDescription(JobContext jobCtxt) {
return pullDescription(GridmixJob.getJobSeqId(jobCtxt));
}
static List<InputSplit> pullDescription(int seq) {
return descCache.remove(seq);
}
static void clearAll() {
descCache.clear();
}
void buildSplits(FilePool inputDir) throws IOException {
}
static int getJobSeqId(JobContext job) {
return job.getConfiguration().getInt(GRIDMIX_JOB_SEQ,-1);
}
public static
|
GridmixJob
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/BCFile.java
|
{
"start": 27830,
"end": 28783
}
|
class ____ {
private final static byte[] AB_MAGIC_BCFILE =
{
// ... total of 16 bytes
(byte) 0xd1, (byte) 0x11, (byte) 0xd3, (byte) 0x68, (byte) 0x91,
(byte) 0xb5, (byte) 0xd7, (byte) 0xb6, (byte) 0x39, (byte) 0xdf,
(byte) 0x41, (byte) 0x40, (byte) 0x92, (byte) 0xba, (byte) 0xe1,
(byte) 0x50 };
public static void readAndVerify(DataInput in) throws IOException {
byte[] abMagic = new byte[size()];
in.readFully(abMagic);
// check against AB_MAGIC_BCFILE, if not matching, throw an
// Exception
if (!Arrays.equals(abMagic, AB_MAGIC_BCFILE)) {
throw new IOException("Not a valid BCFile.");
}
}
public static void write(DataOutput out) throws IOException {
out.write(AB_MAGIC_BCFILE);
}
public static int size() {
return AB_MAGIC_BCFILE.length;
}
}
/**
* Block region.
*/
static final
|
Magic
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/boot/archive/internal/JarProtocolArchiveDescriptor.java
|
{
"start": 644,
"end": 2053
}
|
class ____ implements ArchiveDescriptor {
private final ArchiveDescriptor delegateDescriptor;
/**
* Constructs a JarProtocolArchiveDescriptor
*
* @param archiveDescriptorFactory The factory creating this
* @param url The url to the JAR file
* @param incomingEntry The prefix for entries within the JAR url
*/
public JarProtocolArchiveDescriptor(
ArchiveDescriptorFactory archiveDescriptorFactory,
URL url,
String incomingEntry) {
if ( incomingEntry != null && incomingEntry.length() > 0 ) {
throw new IllegalArgumentException( "jar:jar: not supported: " + url );
}
final String urlFile = url.getFile();
final int subEntryIndex = urlFile.lastIndexOf( '!' );
if ( subEntryIndex == -1 ) {
throw new AssertionFailure( "JAR URL does not contain '!/' : " + url );
}
final String subEntry;
if ( subEntryIndex + 1 >= urlFile.length() ) {
subEntry = "";
}
else {
subEntry = urlFile.substring( subEntryIndex + 1 );
}
final URL fileUrl = archiveDescriptorFactory.getJarURLFromURLEntry( url, subEntry );
delegateDescriptor = archiveDescriptorFactory.buildArchiveDescriptor( fileUrl, subEntry );
}
@Override
public void visitArchive(ArchiveContext context) {
delegateDescriptor.visitArchive( context );
}
@Override
public @Nullable ArchiveEntry findEntry(String path) {
return delegateDescriptor.findEntry( path );
}
}
|
JarProtocolArchiveDescriptor
|
java
|
quarkusio__quarkus
|
independent-projects/qute/core/src/main/java/io/quarkus/qute/FragmentSectionHelper.java
|
{
"start": 2114,
"end": 5762
}
|
class ____ implements SectionHelperFactory<FragmentSectionHelper> {
static final Pattern FRAGMENT_PATTERN = Pattern.compile("[a-zA-Z0-9_]+");
static final String RENDERED = "rendered";
static final String HIDDEN = "_hidden";
static final String CAPTURE = "capture";
private final Map<String, Map<String, Origin>> templateToFragments = new ConcurrentHashMap<>();
@Override
public List<String> getDefaultAliases() {
return ImmutableList.of("fragment", CAPTURE);
}
@Override
public ParametersInfo getParameters() {
return ParametersInfo.builder()
.addParameter(ID)
.addParameter(Parameter.builder(RENDERED).ignoreUnnamedValues().optional().build())
.addParameter(Parameter.builder(HIDDEN).optional().valuePredicate(HIDDEN::equals).build())
.build();
}
@Override
public FragmentSectionHelper initialize(SectionInitContext context) {
String id = context.getParameter(ID);
if (LiteralSupport.isStringLiteralSeparator(id.charAt(0))) {
id = id.substring(1, id.length() - 1);
}
if (!FRAGMENT_PATTERN.matcher(id).matches()) {
throw context.error(
"found an invalid fragment identifier [{id}] - an identifier can only consist of alphanumeric characters and underscores")
.code(Code.INVALID_FRAGMENT_ID)
.argument("id", id)
.origin(context.getOrigin())
.build();
}
String generatedId = context.getOrigin().getTemplateGeneratedId();
Map<String, Origin> fragments = templateToFragments.get(generatedId);
if (fragments == null) {
// note that we don't need a concurrent map here because all fragments of a template are initialized sequentially
// and the map is only used to validate unique fragment ids
fragments = new HashMap<>();
fragments.put(id, context.getOrigin());
templateToFragments.put(generatedId, fragments);
} else {
Origin existing = fragments.put(id, context.getOrigin());
if (existing != null) {
throw context.error(
"found a non-unique fragment identifier: [{id}]")
.code(Code.NON_UNIQUE_FRAGMENT_ID)
.argument("id", id)
.origin(context.getOrigin())
.build();
}
}
Expression rendered = null;
if (context.getName().equals(CAPTURE)) {
rendered = ExpressionImpl.literalFrom(-1, "false");
} else if (context.hasParameter(RENDERED)) {
rendered = context.getExpression(RENDERED);
} else if (context.hasParameter(HIDDEN)) {
rendered = ExpressionImpl.literalFrom(-1, "false");
}
return new FragmentSectionHelper(id, rendered, generatedId);
}
@Override
public Scope initializeBlock(Scope previousScope, BlockInfo block) {
if (block.getLabel().equals(MAIN_BLOCK_NAME)) {
String visible = block.getParameter(RENDERED);
if (visible != null) {
block.addExpression(RENDERED, visible);
}
}
return previousScope;
}
}
|
Factory
|
java
|
quarkusio__quarkus
|
extensions/quartz/runtime/src/main/java/io/quarkus/quartz/runtime/QuartzMisfirePolicy.java
|
{
"start": 95,
"end": 1133
}
|
enum ____ {
SMART_POLICY,
IGNORE_MISFIRE_POLICY,
FIRE_NOW,
SIMPLE_TRIGGER_RESCHEDULE_NOW_WITH_EXISTING_REPEAT_COUNT,
SIMPLE_TRIGGER_RESCHEDULE_NOW_WITH_REMAINING_REPEAT_COUNT,
SIMPLE_TRIGGER_RESCHEDULE_NEXT_WITH_REMAINING_COUNT,
SIMPLE_TRIGGER_RESCHEDULE_NEXT_WITH_EXISTING_COUNT,
CRON_TRIGGER_DO_NOTHING;
String dashedName() {
return this.name().toLowerCase(Locale.ROOT).replace('_', '-');
}
static EnumSet<QuartzMisfirePolicy> validCronValues() {
return EnumSet.of(SMART_POLICY, IGNORE_MISFIRE_POLICY, FIRE_NOW, CRON_TRIGGER_DO_NOTHING);
}
static EnumSet<QuartzMisfirePolicy> validSimpleValues() {
return EnumSet.of(SMART_POLICY, IGNORE_MISFIRE_POLICY, FIRE_NOW,
SIMPLE_TRIGGER_RESCHEDULE_NOW_WITH_EXISTING_REPEAT_COUNT,
SIMPLE_TRIGGER_RESCHEDULE_NOW_WITH_REMAINING_REPEAT_COUNT, SIMPLE_TRIGGER_RESCHEDULE_NEXT_WITH_EXISTING_COUNT,
SIMPLE_TRIGGER_RESCHEDULE_NEXT_WITH_REMAINING_COUNT);
}
}
|
QuartzMisfirePolicy
|
java
|
apache__camel
|
core/camel-api/src/main/java/org/apache/camel/resume/ResumeStrategy.java
|
{
"start": 1900,
"end": 3944
}
|
class ____ the adapter
* @return the adapter or null if it can't be cast to the requested class
* @param <T> the type of the adapter
*/
default <T extends ResumeAdapter> T getAdapter(Class<T> clazz) {
return clazz.cast(getAdapter());
}
/**
* Loads the cache with the data currently available in this strategy
*
* @throws Exception
*/
default void loadCache() throws Exception {
}
/**
* Updates the last processed offset
*
* @param offset the offset to update
* @throws Exception if unable to update the offset
*/
<T extends Resumable> void updateLastOffset(T offset) throws Exception;
/**
* Updates the last processed offset
*
* @param offset the offset to update
* @param updateCallBack a callback to be executed after the updated has occurred (null if not available)
* @throws Exception if unable to update the offset
*/
<T extends Resumable> void updateLastOffset(T offset, UpdateCallBack updateCallBack) throws Exception;
/**
* Updates the last processed offset
*
* @param offsetKey the offset key to update
* @param offsetValue the offset value to update
* @throws Exception if unable to update the offset
*/
void updateLastOffset(OffsetKey<?> offsetKey, Offset<?> offsetValue) throws Exception;
/**
* Updates the last processed offset
*
* @param offsetKey the offset key to update
* @param offset the offset value to update
* @param updateCallBack a callback to be executed after the updated has occurred (null if not available)
* @throws Exception if unable to update the offset
*/
void updateLastOffset(OffsetKey<?> offsetKey, Offset<?> offset, UpdateCallBack updateCallBack) throws Exception;
void setResumeStrategyConfiguration(ResumeStrategyConfiguration resumeStrategyConfiguration);
ResumeStrategyConfiguration getResumeStrategyConfiguration();
}
|
of
|
java
|
apache__camel
|
components/camel-sql/src/main/java/org/apache/camel/component/sql/SqlConsumer.java
|
{
"start": 1974,
"end": 2916
}
|
class ____ extends ScheduledBatchPollingConsumer {
private static final Logger LOG = LoggerFactory.getLogger(SqlConsumer.class);
private final String query;
private String resolvedQuery;
private final ExchangeFactory exchangeFactory;
private final JdbcTemplate jdbcTemplate;
private final NamedParameterJdbcTemplate namedJdbcTemplate;
private final SqlParameterSource parameterSource;
private final SqlPrepareStatementStrategy sqlPrepareStatementStrategy;
private final SqlProcessingStrategy sqlProcessingStrategy;
private String onConsume;
private String onConsumeFailed;
private String onConsumeBatchComplete;
private boolean useIterator = true;
private boolean routeEmptyResultSet;
private int expectedUpdateCount = -1;
private boolean breakBatchOnConsumeFail;
private int parametersCount;
private boolean alwaysPopulateStatement;
private static final
|
SqlConsumer
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/DateFormatConstantTest.java
|
{
"start": 1755,
"end": 2651
}
|
class ____ {
private static final SimpleDateFormat NO_INITIALIZER;
static {
NO_INITIALIZER = new SimpleDateFormat("yyyy-MM-dd HH:mm");
}
private final SimpleDateFormat NON_STATIC = new SimpleDateFormat("yyyy-MM-dd HH:mm");
private static SimpleDateFormat NON_FINAL = new SimpleDateFormat("yyyy-MM-dd HH:mm");
private static final SimpleDateFormat lowerCamelCase = new SimpleDateFormat("yyyy-MM-dd HH:mm");
static void f() {
final SimpleDateFormat NOT_A_FIELD = new SimpleDateFormat("yyyy-MM-dd HH:mm");
}
private static final String NOT_A_SIMPLE_DATE_FORMAT = "";
}
""")
.doTest();
}
@Test
public void threadLocalFix() {
BugCheckerRefactoringTestHelper.newInstance(DateFormatConstant.class, getClass())
.addInputLines(
"in/Test.java",
"""
import java.text.SimpleDateFormat;
import java.text.DateFormat;
import java.util.Date;
|
Test
|
java
|
apache__kafka
|
streams/src/main/java/org/apache/kafka/streams/state/HostInfo.java
|
{
"start": 1310,
"end": 1813
}
|
class ____ be obtained by calling one of:
* {@link KafkaStreams#metadataForAllStreamsClients()}
* {@link KafkaStreams#streamsMetadataForStore(String)}
*
* The HostInfo is constructed during Partition Assignment
* see {@link StreamsPartitionAssignor}
* It is extracted from the config {@link org.apache.kafka.streams.StreamsConfig#APPLICATION_SERVER_CONFIG}
*
* If developers wish to expose an endpoint in their KafkaStreams applications they should provide the above
* config.
*/
public
|
can
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/expressions/converter/converters/CustomizedConverter.java
|
{
"start": 1338,
"end": 2201
}
|
class ____ {
public abstract RexNode convert(
CallExpression call, CallExpressionConvertRule.ConvertContext context);
// ---------------------------------------------------------------------------------------------
protected static void checkArgumentNumber(CallExpression call, int... validArgumentCounts) {
boolean hasValidArgumentCount = false;
for (int argumentCount : validArgumentCounts) {
if (call.getChildren().size() == argumentCount) {
hasValidArgumentCount = true;
break;
}
}
checkArgument(call, hasValidArgumentCount);
}
protected static void checkArgument(CallExpression call, boolean check) {
if (!check) {
throw new TableException("Invalid arguments for call: " + call);
}
}
}
|
CustomizedConverter
|
java
|
apache__camel
|
components/camel-telemetry/src/main/java/org/apache/camel/telemetry/SpanLifecycleManager.java
|
{
"start": 916,
"end": 1229
}
|
interface ____ {
Span create(String spanName, Span parent, SpanContextPropagationExtractor extractor);
void activate(Span span);
void deactivate(Span span);
void close(Span span);
void inject(Span span, SpanContextPropagationInjector injector, boolean includeTracing);
}
|
SpanLifecycleManager
|
java
|
apache__kafka
|
group-coordinator/src/test/java/org/apache/kafka/coordinator/group/GroupMetadataManagerTestContext.java
|
{
"start": 15861,
"end": 16405
}
|
class ____ {
CompletableFuture<JoinGroupResponseData> joinFuture;
List<CoordinatorRecord> records;
CompletableFuture<Void> appendFuture;
public JoinResult(
CompletableFuture<JoinGroupResponseData> joinFuture,
CoordinatorResult<Void, CoordinatorRecord> coordinatorResult
) {
this.joinFuture = joinFuture;
this.records = coordinatorResult.records();
this.appendFuture = coordinatorResult.appendFuture();
}
}
public static
|
JoinResult
|
java
|
dropwizard__dropwizard
|
dropwizard-testing/src/main/java/io/dropwizard/testing/DropwizardTestSupport.java
|
{
"start": 13908,
"end": 14235
}
|
class ____<T extends Configuration> {
public void onRun(T configuration, Environment environment, DropwizardTestSupport<T> rule) throws Exception {
// Default NOP
}
public void onStop(DropwizardTestSupport<T> rule) throws Exception {
// Default NOP
}
}
}
|
ServiceListener
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/io/network/partition/MockResultPartitionWriter.java
|
{
"start": 1402,
"end": 3585
}
|
class ____ implements ResultPartitionWriter {
private final ResultPartitionID partitionId = new ResultPartitionID();
@Override
public void setup() {}
@Override
public ResultPartitionID getPartitionId() {
return partitionId;
}
@Override
public int getNumberOfSubpartitions() {
return 1;
}
@Override
public int getNumTargetKeyGroups() {
return 1;
}
@Override
public void setMaxOverdraftBuffersPerGate(int maxOverdraftBuffersPerGate) {}
@Override
public void emitRecord(ByteBuffer record, int targetSubpartition) throws IOException {}
@Override
public void broadcastRecord(ByteBuffer record) throws IOException {}
@Override
public void broadcastEvent(AbstractEvent event, boolean isPriorityEvent) throws IOException {}
@Override
public void alignedBarrierTimeout(long checkpointId) throws IOException {}
@Override
public void abortCheckpoint(long checkpointId, CheckpointException cause) {}
@Override
public void notifyEndOfData(StopMode mode) throws IOException {}
@Override
public CompletableFuture<Void> getAllDataProcessedFuture() {
return CompletableFuture.completedFuture(null);
}
@Override
public ResultSubpartitionView createSubpartitionView(
ResultSubpartitionIndexSet indexSet, BufferAvailabilityListener availabilityListener)
throws IOException {
throw new UnsupportedOperationException();
}
@Override
public void setMetricGroup(TaskIOMetricGroup metrics) {}
@Override
public void flushAll() {}
@Override
public void flush(int subpartitionIndex) {}
@Override
public void fail(@Nullable Throwable throwable) {}
@Override
public void finish() {}
@Override
public boolean isFinished() {
return false;
}
@Override
public void release(Throwable cause) {}
@Override
public boolean isReleased() {
return false;
}
@Override
public CompletableFuture<?> getAvailableFuture() {
return AVAILABLE;
}
@Override
public void close() {}
}
|
MockResultPartitionWriter
|
java
|
quarkusio__quarkus
|
extensions/redis-client/runtime/src/test/java/io/quarkus/redis/datasource/BitMapCommandsTest.java
|
{
"start": 994,
"end": 7295
}
|
class ____ extends DatasourceTestBase {
private RedisDataSource ds;
private BitMapCommands<String> bitmap;
@BeforeEach
void initialize() {
ds = new BlockingRedisDataSourceImpl(vertx, redis, api, Duration.ofSeconds(1));
bitmap = ds.bitmap();
}
@AfterEach
void clear() {
ds.flushall();
}
@Test
void getDataSource() {
assertThat(ds).isEqualTo(bitmap.getDataSource());
}
@Test
void bitcount() {
assertThat(bitmap.bitcount(key)).isEqualTo(0);
bitmap.setbit(key, 0L, 1);
bitmap.setbit(key, 1L, 1);
bitmap.setbit(key, 2L, 1);
assertThat(bitmap.bitcount(key)).isEqualTo(3);
assertThat(bitmap.bitcount(key, 3, -1)).isEqualTo(0);
}
@Test
void bitfieldType() {
assertThat(signed(64).bits).isEqualTo(64);
assertThat(signed(64).signed).isTrue();
assertThat(unsigned(63).bits).isEqualTo(63);
assertThat(unsigned(63).signed).isFalse();
}
@Test
void bitfieldTypeSigned65() {
assertThatThrownBy(() -> signed(65)).isInstanceOf(IllegalArgumentException.class);
}
@Test
void bitfieldTypeUnsigned64() {
assertThatThrownBy(() -> unsigned(64)).isInstanceOf(IllegalArgumentException.class);
}
@Test
void bitfieldBuilderEmptyPreviousType() {
assertThatThrownBy(() -> new BitFieldArgs().overflow(WRAP).get()).isInstanceOf(IllegalStateException.class);
}
@Test
void bitfieldArgsTest() {
assertThat(signed(5).toString()).isEqualTo("i5");
assertThat(unsigned(5).toString()).isEqualTo("u5");
assertThat(Offset.offset(5).value).isEqualTo(5);
assertThat(typeWidthBasedOffset(5).toString()).isEqualTo("#5");
}
@Test
void bitfield() {
BitFieldArgs bitFieldArgs = new BitFieldArgs().set(signed(8), 0, 1).set(5, 1).incrBy(2, 3).get().get(2);
List<Long> values = bitmap.bitfield(key, bitFieldArgs);
assertThat(values).containsExactly(0L, 32L, 3L, 0L, 3L);
}
@Test
void bitfieldGetWithOffset() {
BitFieldArgs bitFieldArgs = new BitFieldArgs().set(signed(8), 0, 1).get(signed(2), typeWidthBasedOffset(1));
List<Long> values = bitmap.bitfield(key, bitFieldArgs);
assertThat(values).containsExactly(0L, 0L);
}
@Test
void bitfieldSet() {
BitFieldArgs bitFieldArgs = new BitFieldArgs().set(signed(8), 0, 5).set(5);
List<Long> values = bitmap.bitfield(key, bitFieldArgs);
assertThat(values).containsExactly(0L, 5L);
}
@Test
void bitfieldWithOffsetSet() {
bitmap.bitfield(key, new BitFieldArgs().set(signed(8), typeWidthBasedOffset(2), 5));
ds.key(String.class).del(key);
bitmap.bitfield(key, new BitFieldArgs().set(signed(8), offset(2), 5));
}
@Test
void bitfieldIncrBy() {
BitFieldArgs bitFieldArgs = new BitFieldArgs().set(signed(8), 0, 5).incrBy(1);
List<Long> values = bitmap.bitfield(key, bitFieldArgs);
assertThat(values).containsExactly(0L, 6L);
}
@Test
void bitfieldWithOffsetIncrBy() {
bitmap.bitfield(key, new BitFieldArgs().incrBy(signed(8), typeWidthBasedOffset(2), 1));
ds.key(String.class).del(key);
bitmap.bitfield(key, new BitFieldArgs().incrBy(signed(8), offset(2), 1));
}
@Test
void bitfieldOverflow() {
BitFieldArgs bitFieldArgs = new BitFieldArgs().overflow(WRAP).set(signed(8), 9, Integer.MAX_VALUE).get(signed(8));
List<Long> values = bitmap.bitfield(key, bitFieldArgs);
assertThat(values).containsExactly(0L, 0L);
}
@Test
void bitpos() {
assertThat(bitmap.bitcount(key)).isEqualTo(0);
bitmap.setbit(key, 0L, 0);
bitmap.setbit(key, 1L, 1);
assertThat(bitmap.bitpos(key, 1)).isEqualTo(1);
}
@Test
void bitposOffset() {
assertThat(bitmap.bitcount(key)).isEqualTo(0);
bitmap.setbit(key, 0, 1);
bitmap.setbit(key, 1, 1);
bitmap.setbit(key, 2, 0);
bitmap.setbit(key, 3, 0);
bitmap.setbit(key, 4, 0);
bitmap.setbit(key, 5, 1);
bitmap.setbit(key, 16, 1);
assertThat(bitmap.getbit(key, 1)).isEqualTo(1);
assertThat(bitmap.getbit(key, 4)).isEqualTo(0);
assertThat(bitmap.getbit(key, 5)).isEqualTo(1);
assertThat(bitmap.bitpos(key, 1, 1)).isEqualTo(16);
assertThat(bitmap.bitpos(key, 0, 0, 0)).isEqualTo(2);
}
@Test
void bitopAnd() {
bitmap.setbit("foo", 0, 1);
bitmap.setbit("bar", 1, 1);
bitmap.setbit("baz", 2, 1);
assertThat(bitmap.bitopAnd(key, "foo", "bar", "baz")).isEqualTo(1);
assertThat(bitmap.bitcount(key)).isEqualTo(0);
}
@Test
void bitopNot() {
bitmap.setbit("foo", 0, 1);
bitmap.setbit("foo", 2, 1);
assertThat(bitmap.bitopNot(key, "foo")).isEqualTo(1);
assertThat(bitmap.bitcount(key)).isEqualTo(6);
}
@Test
void bitopOr() {
bitmap.setbit("foo", 0, 1);
bitmap.setbit("bar", 1, 1);
bitmap.setbit("baz", 2, 1);
assertThat(bitmap.bitopOr(key, "foo", "bar", "baz")).isEqualTo(1);
}
@Test
void bitopXor() {
bitmap.setbit("foo", 0, 1);
bitmap.setbit("bar", 0, 1);
bitmap.setbit("baz", 2, 1);
assertThat(bitmap.bitopXor(key, "foo", "bar", "baz")).isEqualTo(1);
}
@Test
void getbit() {
assertThat(bitmap.getbit(key, 0)).isEqualTo(0);
bitmap.setbit(key, 0, 1);
assertThat(bitmap.getbit(key, 0)).isEqualTo(1);
}
@Test
void setbit() {
assertThat(bitmap.setbit(key, 0, 1)).isEqualTo(0);
assertThat(bitmap.setbit(key, 0, 0)).isEqualTo(1);
}
@Test
void bitcountWithTypeReference() {
var bitmap = ds.bitmap(new TypeReference<List<String>>() {
// Empty on purpose
});
List<String> key = List.of("a", "b", "c");
assertThat(bitmap.bitcount(key)).isEqualTo(0);
bitmap.setbit(key, 0L, 1);
bitmap.setbit(key, 1L, 1);
bitmap.setbit(key, 2L, 1);
assertThat(bitmap.bitcount(key)).isEqualTo(3);
assertThat(bitmap.bitcount(key, 3, -1)).isEqualTo(0);
}
}
|
BitMapCommandsTest
|
java
|
apache__camel
|
core/camel-base-engine/src/main/java/org/apache/camel/impl/engine/InternalServiceManager.java
|
{
"start": 1780,
"end": 10837
}
|
class ____ {
private static final Logger LOG = LoggerFactory.getLogger(InternalServiceManager.class);
private final InternalRouteStartupManager internalRouteStartupManager;
private final DeferServiceStartupListener deferStartupListener = new DeferServiceStartupListener();
private final List<Service> services = new CopyOnWriteArrayList<>();
InternalServiceManager(InternalRouteStartupManager internalRouteStartupManager, List<StartupListener> startupListeners) {
/*
Note: this is an internal API and not meant to be public, so it uses assertion for lightweight nullability
checking for extremely unlikely scenarios that should be found during development time.
*/
assert internalRouteStartupManager != null : "the internalRouteStartupManager cannot be null";
assert startupListeners != null : "the startupListeners cannot be null";
this.internalRouteStartupManager = internalRouteStartupManager;
startupListeners.add(deferStartupListener);
}
public <T> T addService(CamelContext camelContext, T object) {
return addService(camelContext, object, true);
}
public <T> T addService(CamelContext camelContext, T object, boolean stopOnShutdown) {
return addService(camelContext, object, stopOnShutdown, true, true);
}
public <T> T addService(
CamelContext camelContext, T object, boolean stopOnShutdown, boolean forceStart, boolean useLifecycleStrategies) {
try {
doAddService(camelContext, object, stopOnShutdown, forceStart, useLifecycleStrategies);
} catch (Exception e) {
throw RuntimeCamelException.wrapRuntimeCamelException(e);
}
return object;
}
public void doAddService(
CamelContext camelContext, Object object, boolean stopOnShutdown, boolean forceStart,
boolean useLifecycleStrategies)
throws Exception {
if (object == null) {
return;
}
// inject CamelContext
CamelContextAware.trySetCamelContext(object, camelContext);
if (object instanceof Service service) {
if (useLifecycleStrategies) {
for (LifecycleStrategy strategy : camelContext.getLifecycleStrategies()) {
Route route;
if (service instanceof RouteAware routeAware) {
route = routeAware.getRoute();
} else {
// if the service is added while creating a new route then grab the route from the startup manager
route = internalRouteStartupManager.getSetupRoute();
}
if (service instanceof Endpoint endpoint) {
// use specialized endpoint add
strategy.onEndpointAdd(endpoint);
} else {
strategy.onServiceAdd(camelContext, service, route);
}
}
}
if (!forceStart) {
ServiceHelper.initService(service);
// now start the service (and defer starting if CamelContext is
// starting up itself)
camelContext.deferStartService(object, stopOnShutdown);
} else {
// only add to services to close if its a singleton
// otherwise we could for example end up with a lot of prototype
// scope endpoints
boolean singleton = true; // assume singleton by default
if (service instanceof IsSingleton singletonService) {
singleton = singletonService.isSingleton();
}
// do not add endpoints as they have their own list
if (singleton && !(service instanceof Endpoint)) {
// only add to list of services to stop if its not already there
if (stopOnShutdown && !camelContext.hasService(service)) {
// special for type converter / type converter registry which is stopped manual later
boolean tc = service instanceof TypeConverter || service instanceof TypeConverterRegistry;
if (!tc) {
services.add(service);
}
}
}
if (camelContext instanceof BaseService baseService) {
if (baseService.isStartingOrStarted()) {
ServiceHelper.startService(service);
} else {
ServiceHelper.initService(service);
deferStartService(camelContext, object, stopOnShutdown, true);
}
}
}
}
}
public void deferStartService(CamelContext camelContext, Object object, boolean stopOnShutdown, boolean startEarly) {
if (object instanceof Service service) {
// only add to services to close if its a singleton
// otherwise we could for example end up with a lot of prototype
// scope endpoints
boolean singleton = true; // assume singleton by default
if (service instanceof IsSingleton singletonService) {
singleton = singletonService.isSingleton();
}
// do not add endpoints as they have their own list
if (singleton && !(service instanceof Endpoint)) {
// only add to list of services to stop if its not already there
if (stopOnShutdown && !camelContext.hasService(service)) {
services.add(service);
}
}
// are we already started?
if (camelContext.isStarted()) {
ServiceHelper.startService(service);
} else {
deferStartupListener.addService(service, startEarly);
}
}
}
public boolean removeService(Service service) {
return services.remove(service);
}
@SuppressWarnings("unchecked")
public <T> Set<T> hasServices(Class<T> type) {
if (services.isEmpty()) {
return Collections.emptySet();
}
Set<T> set = new HashSet<>();
for (Service service : services) {
if (type.isInstance(service)) {
set.add((T) service);
}
}
return set;
}
public boolean hasService(Object object) {
if (services.isEmpty()) {
return false;
}
if (object instanceof Service service) {
return services.contains(service);
}
return false;
}
public <T> T hasService(Class<T> type) {
if (services.isEmpty()) {
return null;
}
for (Service service : services) {
if (type.isInstance(service)) {
return type.cast(service);
}
}
return null;
}
public void stopConsumers(CamelContext camelContext) {
for (Service service : services) {
if (service instanceof Consumer) {
InternalServiceManager.shutdownServices(camelContext, service);
}
}
}
public void shutdownServices(CamelContext camelContext) {
InternalServiceManager.shutdownServices(camelContext, services);
services.clear();
}
public static void shutdownServices(CamelContext camelContext, Collection<?> services) {
// reverse stopping by default
shutdownServices(camelContext, services, true);
}
public List<Service> getServices() {
return Collections.unmodifiableList(services);
}
public static void shutdownServices(CamelContext camelContext, Collection<?> services, boolean reverse) {
Collection<?> list = services;
if (reverse) {
List<Object> reverseList = new ArrayList<>(services);
Collections.reverse(reverseList);
list = reverseList;
}
for (Object service : list) {
shutdownServices(camelContext, service);
}
}
public static void shutdownServices(CamelContext camelContext, Object service) {
// do not rethrow exception as we want to keep shutting down in case of
// problems
// allow us to do custom work before delegating to service helper
try {
if (service instanceof Service) {
ServiceHelper.stopAndShutdownService(service);
} else if (service instanceof Collection) {
ServiceHelper.stopAndShutdownServices((Collection<?>) service);
}
} catch (Exception e) {
LOG.warn("Error occurred while shutting down service: {}. This exception will be ignored.", service, e);
// fire event
EventHelper.notifyServiceStopFailure(camelContext, service, e);
}
}
}
|
InternalServiceManager
|
java
|
apache__rocketmq
|
remoting/src/main/java/org/apache/rocketmq/remoting/protocol/header/namesrv/RegisterOrderTopicRequestHeader.java
|
{
"start": 1165,
"end": 1755
}
|
class ____ implements CommandCustomHeader {
@CFNotNull
private String topic;
@CFNotNull
private String orderTopicString;
@Override
public void checkFields() throws RemotingCommandException {
}
public String getTopic() {
return topic;
}
public void setTopic(String topic) {
this.topic = topic;
}
public String getOrderTopicString() {
return orderTopicString;
}
public void setOrderTopicString(String orderTopicString) {
this.orderTopicString = orderTopicString;
}
}
|
RegisterOrderTopicRequestHeader
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/action/synonyms/PutSynonymsActionTests.java
|
{
"start": 855,
"end": 1560
}
|
class ____ extends ESTestCase {
public void testEmptyRequestBody() throws Exception {
RestPutSynonymsAction action = new RestPutSynonymsAction();
RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.PUT)
.withParams(Map.of("synonymsSet", "test"))
.build();
FakeRestChannel channel = new FakeRestChannel(request, randomBoolean(), 0);
try (var threadPool = createThreadPool()) {
final var nodeClient = new NoOpNodeClient(threadPool);
expectThrows(IllegalArgumentException.class, () -> action.handleRequest(request, channel, nodeClient));
}
}
}
|
PutSynonymsActionTests
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormat.java
|
{
"start": 33824,
"end": 44078
}
|
class ____ as legacy-mode of offline image viewer
// should only load legacy FSImages without newer features.
final long nsQuota = in.readLong();
final long dsQuota = in.readLong();
return nsQuota == -1L && dsQuota == -1L ? new INodeDirectoryAttributes.SnapshotCopy(
name, permissions, null, modificationTime, null)
: new INodeDirectoryAttributes.CopyWithQuota(name, permissions,
null, modificationTime, nsQuota, dsQuota, null, null);
}
private void loadFilesUnderConstruction(DataInput in,
boolean supportSnapshot, Counter counter) throws IOException {
FSDirectory fsDir = namesystem.dir;
int size = in.readInt();
LOG.info("Number of files under construction = " + size);
for (int i = 0; i < size; i++) {
INodeFile cons = FSImageSerialization.readINodeUnderConstruction(in,
namesystem, getLayoutVersion());
counter.increment();
// verify that file exists in namespace
String path = cons.getLocalName();
INodeFile oldnode = null;
boolean inSnapshot = false;
if (path != null && FSDirectory.isReservedName(path) &&
NameNodeLayoutVersion.supports(
LayoutVersion.Feature.ADD_INODE_ID, getLayoutVersion())) {
// TODO: for HDFS-5428, we use reserved path for those INodeFileUC in
// snapshot. If we support INode ID in the layout version, we can use
// the inode id to find the oldnode.
oldnode = namesystem.dir.getInode(cons.getId()).asFile();
inSnapshot = true;
} else {
path = renameReservedPathsOnUpgrade(path, getLayoutVersion());
final INodesInPath iip = fsDir.getINodesInPath(path, DirOp.WRITE);
oldnode = INodeFile.valueOf(iip.getLastINode(), path);
}
FileUnderConstructionFeature uc = cons.getFileUnderConstructionFeature();
oldnode.toUnderConstruction(uc.getClientName(), uc.getClientMachine());
if (oldnode.numBlocks() > 0) {
BlockInfo ucBlock = cons.getLastBlock();
// we do not replace the inode, just replace the last block of oldnode
BlockInfo info = namesystem.getBlockManager()
.addBlockCollectionWithCheck(ucBlock, oldnode);
oldnode.setBlock(oldnode.numBlocks() - 1, info);
}
if (!inSnapshot) {
namesystem.leaseManager.addLease(uc.getClientName(), oldnode.getId());
}
}
}
private void loadSecretManagerState(DataInput in)
throws IOException {
int imgVersion = getLayoutVersion();
if (!NameNodeLayoutVersion.supports(
LayoutVersion.Feature.DELEGATION_TOKEN, imgVersion)) {
//SecretManagerState is not available.
//This must not happen if security is turned on.
return;
}
namesystem.loadSecretManagerStateCompat(in);
}
private void loadCacheManagerState(DataInput in) throws IOException {
int imgVersion = getLayoutVersion();
if (!NameNodeLayoutVersion.supports(
LayoutVersion.Feature.CACHING, imgVersion)) {
return;
}
namesystem.getCacheManager().loadStateCompat(in);
}
private int getLayoutVersion() {
return namesystem.getFSImage().getStorage().getLayoutVersion();
}
private boolean isRoot(byte[][] path) {
return path.length == 1 &&
path[0] == null;
}
private boolean isParent(byte[][] path, byte[][] parent) {
if (path == null || parent == null)
return false;
if (parent.length == 0 || path.length != parent.length + 1)
return false;
boolean isParent = true;
for (int i = 0; i < parent.length; i++) {
isParent = isParent && Arrays.equals(path[i], parent[i]);
}
return isParent;
}
/**
* Return string representing the parent of the given path.
*/
String getParent(String path) {
return path.substring(0, path.lastIndexOf(Path.SEPARATOR));
}
byte[][] getParent(byte[][] path) {
byte[][] result = new byte[path.length - 1][];
for (int i = 0; i < result.length; i++) {
result[i] = new byte[path[i].length];
System.arraycopy(path[i], 0, result[i], 0, path[i].length);
}
return result;
}
public Snapshot getSnapshot(DataInput in) throws IOException {
return snapshotMap.get(in.readInt());
}
}
@VisibleForTesting
public static final TreeMap<String, String> renameReservedMap =
new TreeMap<String, String>();
/**
* Use the default key-value pairs that will be used to determine how to
* rename reserved paths on upgrade.
*/
@VisibleForTesting
public static void useDefaultRenameReservedPairs() {
renameReservedMap.clear();
for (String key: HdfsServerConstants.RESERVED_PATH_COMPONENTS) {
renameReservedMap.put(
key,
key + "." + HdfsServerConstants.NAMENODE_LAYOUT_VERSION + "."
+ "UPGRADE_RENAMED");
}
}
/**
* Set the key-value pairs that will be used to determine how to rename
* reserved paths on upgrade.
*/
@VisibleForTesting
public static void setRenameReservedPairs(String renameReserved) {
// Clear and set the default values
useDefaultRenameReservedPairs();
// Overwrite with provided values
setRenameReservedMapInternal(renameReserved);
}
private static void setRenameReservedMapInternal(String renameReserved) {
Collection<String> pairs =
StringUtils.getTrimmedStringCollection(renameReserved);
for (String p : pairs) {
String[] pair = StringUtils.split(p, '/', '=');
Preconditions.checkArgument(pair.length == 2,
"Could not parse key-value pair " + p);
String key = pair[0];
String value = pair[1];
Preconditions.checkArgument(DFSUtil.isReservedPathComponent(key),
"Unknown reserved path " + key);
Preconditions.checkArgument(DFSUtil.isValidNameForComponent(value),
"Invalid rename path for " + key + ": " + value);
LOG.info("Will rename reserved path " + key + " to " + value);
renameReservedMap.put(key, value);
}
}
/**
* When upgrading from an old version, the filesystem could contain paths
* that are now reserved in the new version (e.g. .snapshot). This renames
* these new reserved paths to a user-specified value to avoid collisions
* with the reserved name.
*
* @param path Old path potentially containing a reserved path
* @return New path with reserved path components renamed to user value
*/
static String renameReservedPathsOnUpgrade(String path,
final int layoutVersion) throws IllegalReservedPathException {
final String oldPath = path;
// If any known LVs aren't supported, we're doing an upgrade
if (!NameNodeLayoutVersion.supports(Feature.ADD_INODE_ID, layoutVersion)) {
String[] components = INode.getPathNames(path);
// Only need to worry about the root directory
if (components.length > 1) {
components[1] = DFSUtil.bytes2String(
renameReservedRootComponentOnUpgrade(
DFSUtil.string2Bytes(components[1]),
layoutVersion));
path = DFSUtil.strings2PathString(components);
}
}
if (!NameNodeLayoutVersion.supports(Feature.SNAPSHOT, layoutVersion)) {
String[] components = INode.getPathNames(path);
// Special case the root path
if (components.length == 0) {
return path;
}
for (int i=0; i<components.length; i++) {
components[i] = DFSUtil.bytes2String(
renameReservedComponentOnUpgrade(
DFSUtil.string2Bytes(components[i]),
layoutVersion));
}
path = DFSUtil.strings2PathString(components);
}
if (!path.equals(oldPath)) {
LOG.info("Upgrade process renamed reserved path " + oldPath + " to "
+ path);
}
return path;
}
private final static String RESERVED_ERROR_MSG =
FSDirectory.DOT_RESERVED_PATH_PREFIX + " is a reserved path and "
+ HdfsConstants.DOT_SNAPSHOT_DIR + " is a reserved path component in"
+ " this version of HDFS. Please rollback and delete or rename"
+ " this path, or upgrade with the "
+ StartupOption.RENAMERESERVED.getName()
+ " [key-value pairs]"
+ " option to automatically rename these paths during upgrade.";
/**
* Same as {@link #renameReservedPathsOnUpgrade}, but for a single
* byte array path component.
*/
private static byte[] renameReservedComponentOnUpgrade(byte[] component,
final int layoutVersion) throws IllegalReservedPathException {
// If the LV doesn't support snapshots, we're doing an upgrade
if (!NameNodeLayoutVersion.supports(Feature.SNAPSHOT, layoutVersion)) {
if (Arrays.equals(component, HdfsServerConstants.DOT_SNAPSHOT_DIR_BYTES)) {
if (!renameReservedMap.containsKey(HdfsConstants.DOT_SNAPSHOT_DIR)) {
throw new IllegalReservedPathException(RESERVED_ERROR_MSG);
}
component =
DFSUtil.string2Bytes(renameReservedMap
.get(HdfsConstants.DOT_SNAPSHOT_DIR));
}
}
return component;
}
/**
* Same as {@link #renameReservedPathsOnUpgrade}, but for a single
* byte array path component.
*/
private static byte[] renameReservedRootComponentOnUpgrade(byte[] component,
final int layoutVersion) throws IllegalReservedPathException {
// If the LV doesn't support inode IDs, we're doing an upgrade
if (!NameNodeLayoutVersion.supports(Feature.ADD_INODE_ID, layoutVersion)) {
if (Arrays.equals(component, FSDirectory.DOT_RESERVED)) {
if (!renameReservedMap.containsKey(HdfsConstants.DOT_SNAPSHOT_DIR)) {
throw new IllegalReservedPathException(RESERVED_ERROR_MSG);
}
final String renameString = renameReservedMap
.get(FSDirectory.DOT_RESERVED_STRING);
component =
DFSUtil.string2Bytes(renameString);
LOG.info("Renamed root path " + FSDirectory.DOT_RESERVED_STRING
+ " to " + renameString);
}
}
return component;
}
/**
* A one-shot
|
such
|
java
|
spring-projects__spring-boot
|
test-support/spring-boot-test-support/src/main/java/org/springframework/boot/testsupport/web/servlet/DirtiesUrlFactoriesExtension.java
|
{
"start": 1151,
"end": 2155
}
|
class ____ implements BeforeEachCallback, AfterEachCallback {
private static final String TOMCAT_URL_STREAM_HANDLER_FACTORY = "org.apache.catalina.webresources.TomcatURLStreamHandlerFactory";
@Override
public void afterEach(ExtensionContext context) throws Exception {
reset();
}
@Override
public void beforeEach(ExtensionContext context) throws Exception {
reset();
}
private void reset() {
try {
ClassLoader classLoader = getClass().getClassLoader();
if (ClassUtils.isPresent(TOMCAT_URL_STREAM_HANDLER_FACTORY, classLoader)) {
Class<?> factoryClass = ClassUtils.resolveClassName(TOMCAT_URL_STREAM_HANDLER_FACTORY, classLoader);
ReflectionTestUtils.setField(factoryClass, "instance", null);
}
ReflectionTestUtils.setField(URL.class, "factory", null);
}
catch (InaccessibleObjectException ex) {
throw new IllegalStateException(
"Unable to reset field. Please run with '--add-opens=java.base/java.net=ALL-UNNAMED'", ex);
}
}
}
|
DirtiesUrlFactoriesExtension
|
java
|
apache__camel
|
components/camel-kafka/src/main/java/org/apache/camel/component/kafka/transform/TimestampRouter.java
|
{
"start": 1159,
"end": 3069
}
|
class ____ {
public void process(
@ExchangeProperty("topicFormat") String topicFormat, @ExchangeProperty("timestampFormat") String timestampFormat,
@ExchangeProperty("timestampHeaderName") String timestampHeaderName, Exchange ex) {
final Pattern TOPIC = Pattern.compile("$[topic]", Pattern.LITERAL);
final Pattern TIMESTAMP = Pattern.compile("$[timestamp]", Pattern.LITERAL);
final SimpleDateFormat fmt = new SimpleDateFormat(timestampFormat);
fmt.setTimeZone(TimeZone.getTimeZone("UTC"));
Long timestamp = null;
String topicName = ex.getMessage().getHeader("kafka.TOPIC", String.class);
Object rawTimestamp = ex.getMessage().getHeader(timestampHeaderName);
if (rawTimestamp instanceof Long) {
timestamp = (Long) rawTimestamp;
} else if (rawTimestamp instanceof Instant) {
timestamp = ((Instant) rawTimestamp).toEpochMilli();
} else if (ObjectHelper.isNotEmpty(rawTimestamp)) {
timestamp = Long.parseLong(rawTimestamp.toString());
}
if (ObjectHelper.isNotEmpty(timestamp)) {
final String formattedTimestamp = fmt.format(new Date(timestamp));
String replace1;
String updatedTopic;
if (ObjectHelper.isNotEmpty(topicName)) {
replace1 = TOPIC.matcher(topicFormat).replaceAll(Matcher.quoteReplacement(topicName));
updatedTopic = TIMESTAMP.matcher(replace1).replaceAll(Matcher.quoteReplacement(formattedTimestamp));
} else {
replace1 = TOPIC.matcher(topicFormat).replaceAll(Matcher.quoteReplacement(""));
updatedTopic = TIMESTAMP.matcher(replace1).replaceAll(Matcher.quoteReplacement(formattedTimestamp));
}
ex.getMessage().setHeader("kafka.OVERRIDE_TOPIC", updatedTopic);
}
}
}
|
TimestampRouter
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/main/java/org/springframework/boot/context/config/SystemEnvironmentConfigDataResource.java
|
{
"start": 1250,
"end": 2888
}
|
class ____ extends ConfigDataResource {
private final String variableName;
private final PropertySourceLoader loader;
private final Function<String, @Nullable String> environment;
SystemEnvironmentConfigDataResource(String variableName, PropertySourceLoader loader,
Function<String, @Nullable String> environment) {
this.variableName = variableName;
this.loader = loader;
this.environment = environment;
}
String getVariableName() {
return this.variableName;
}
PropertySourceLoader getLoader() {
return this.loader;
}
@Nullable List<PropertySource<?>> load() throws IOException {
String content = this.environment.apply(this.variableName);
return (content != null) ? this.loader.load(StringUtils.capitalize(toString()), asResource(content)) : null;
}
private ByteArrayResource asResource(String content) {
return new ByteArrayResource(content.getBytes(StandardCharsets.UTF_8));
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
SystemEnvironmentConfigDataResource other = (SystemEnvironmentConfigDataResource) obj;
return Objects.equals(this.loader.getClass(), other.loader.getClass())
&& Objects.equals(this.variableName, other.variableName);
}
@Override
public int hashCode() {
return Objects.hash(this.variableName, this.loader.getClass());
}
@Override
public String toString() {
return "system environment variable [" + this.variableName + "] content loaded using "
+ ClassUtils.getShortName(this.loader.getClass());
}
}
|
SystemEnvironmentConfigDataResource
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/jsontype/GenericTypeId1735Test.java
|
{
"start": 654,
"end": 2140
}
|
class ____ {
public Nefarious1735() {
throw new Error("Never call this constructor");
}
public void setValue(String str) {
throw new Error("Never call this setter");
}
}
/*
/**********************************************************
/* Unit tests
/**********************************************************
*/
private final ObjectMapper MAPPER = newJsonMapper();
private final static String NEF_CLASS = Nefarious1735.class.getName();
// Existing checks should kick in fine
@Test
public void testSimpleTypeCheck1735() throws Exception
{
try {
MAPPER.readValue(a2q(
"{'w':{'type':'"+NEF_CLASS+"'}}"),
Wrapper1735.class);
fail("Should not pass");
} catch (InvalidTypeIdException e) {
verifyException(e, "could not resolve type id");
verifyException(e, "not a subtype");
}
}
// but this was not being verified early enough
@Test
public void testNestedTypeCheck1735() throws Exception
{
try {
MAPPER.readValue(a2q(
"{'w':{'type':'java.util.HashMap<java.lang.String,java.lang.String>'}}"),
Wrapper1735.class);
fail("Should not pass");
} catch (InvalidTypeIdException e) {
verifyException(e, "could not resolve type id");
verifyException(e, "not a subtype");
}
}
}
|
Nefarious1735
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/PathAccessDeniedException.java
|
{
"start": 859,
"end": 1318
}
|
class ____ extends PathIOException {
static final long serialVersionUID = 0L;
/** @param path for the exception */
public PathAccessDeniedException(String path) {
super(path, "Permission denied");
}
public PathAccessDeniedException(String path, Throwable cause) {
super(path, cause);
}
public PathAccessDeniedException(String path,
String error,
Throwable cause) {
super(path, error, cause);
}
}
|
PathAccessDeniedException
|
java
|
micronaut-projects__micronaut-core
|
test-suite/src/test/java/io/micronaut/docs/client/ThirdPartyClientFilterSpec.java
|
{
"start": 4980,
"end": 5097
}
|
class ____ {
public static final String URL = 'https://api.bintray.com'
}
//end::bintrayApiConstants[]
*/
|
BintrayApi
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/metadata/Flower.java
|
{
"start": 300,
"end": 574
}
|
class ____ {
private Long id;
private String name;
@Id @GeneratedValue
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
|
Flower
|
java
|
elastic__elasticsearch
|
x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/condition/WatcherConditionScript.java
|
{
"start": 632,
"end": 1350
}
|
class ____ {
public static final String[] PARAMETERS = {};
private final Map<String, Object> params;
// TODO: ctx should have its members extracted into execute parameters, but it needs to be a member for bwc access in params
private final Map<String, Object> ctx;
public WatcherConditionScript(Map<String, Object> params, WatchExecutionContext watcherContext) {
this.params = params;
this.ctx = Variables.createCtx(watcherContext, watcherContext.payload());
}
public abstract boolean execute();
public Map<String, Object> getParams() {
return params;
}
public Map<String, Object> getCtx() {
return ctx;
}
public
|
WatcherConditionScript
|
java
|
quarkusio__quarkus
|
extensions/grpc/deployment/src/test/java/io/quarkus/grpc/server/devmode/DevModeService.java
|
{
"start": 248,
"end": 736
}
|
class ____ implements devmodetest.v1.DevModeService {
@Override
public Uni<DevModeResponse> check(Devmodetest.DevModeRequest request) {
return Uni.createFrom().item(DevModeResponse.getDefaultInstance());
}
// test will add override here
public Multi<DevModeResponse> streamCheck(Devmodetest.DevModeRequest request) {
return Multi.createFrom().item(DevModeResponse.newBuilder().setStatus(DevModeResponse.Status.NOT_SERVING).build());
}
}
|
DevModeService
|
java
|
spring-projects__spring-boot
|
module/spring-boot-restclient/src/test/java/org/springframework/boot/restclient/RootUriTemplateHandlerTests.java
|
{
"start": 1525,
"end": 4152
}
|
class ____ {
private URI uri;
@Mock
@SuppressWarnings("NullAway.Init")
public UriTemplateHandler delegate;
public UriTemplateHandler handler;
@BeforeEach
void setup() throws URISyntaxException {
this.uri = new URI("https://example.com/hello");
this.handler = new RootUriTemplateHandler("https://example.com", this.delegate);
}
@Test
@SuppressWarnings("NullAway") // Test null check
void createWithNullRootUriShouldThrowException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> new RootUriTemplateHandler((String) null, mock(UriTemplateHandler.class)))
.withMessageContaining("'rootUri' must not be null");
}
@Test
@SuppressWarnings("NullAway") // Test null check
void createWithNullHandlerShouldThrowException() {
assertThatIllegalArgumentException().isThrownBy(() -> new RootUriTemplateHandler("https://example.com", null))
.withMessageContaining("'handler' must not be null");
}
@Test
@SuppressWarnings("unchecked")
void expandMapVariablesShouldPrefixRoot() {
given(this.delegate.expand(anyString(), any(Map.class))).willReturn(this.uri);
HashMap<String, Object> uriVariables = new HashMap<>();
URI expanded = this.handler.expand("/hello", uriVariables);
then(this.delegate).should().expand("https://example.com/hello", uriVariables);
assertThat(expanded).isEqualTo(this.uri);
}
@Test
@SuppressWarnings("unchecked")
void expandMapVariablesWhenPathDoesNotStartWithSlashShouldNotPrefixRoot() {
given(this.delegate.expand(anyString(), any(Map.class))).willReturn(this.uri);
HashMap<String, Object> uriVariables = new HashMap<>();
URI expanded = this.handler.expand("https://spring.io/hello", uriVariables);
then(this.delegate).should().expand("https://spring.io/hello", uriVariables);
assertThat(expanded).isEqualTo(this.uri);
}
@Test
void expandArrayVariablesShouldPrefixRoot() {
given(this.delegate.expand(anyString(), any(Object[].class))).willReturn(this.uri);
Object[] uriVariables = new Object[0];
URI expanded = this.handler.expand("/hello", uriVariables);
then(this.delegate).should().expand("https://example.com/hello", uriVariables);
assertThat(expanded).isEqualTo(this.uri);
}
@Test
void expandArrayVariablesWhenPathDoesNotStartWithSlashShouldNotPrefixRoot() {
given(this.delegate.expand(anyString(), any(Object[].class))).willReturn(this.uri);
Object[] uriVariables = new Object[0];
URI expanded = this.handler.expand("https://spring.io/hello", uriVariables);
then(this.delegate).should().expand("https://spring.io/hello", uriVariables);
assertThat(expanded).isEqualTo(this.uri);
}
}
|
RootUriTemplateHandlerTests
|
java
|
apache__flink
|
flink-tests/src/test/java/org/apache/flink/test/checkpointing/UnalignedCheckpointTestBase.java
|
{
"start": 21236,
"end": 24465
}
|
class ____
implements SplitEnumerator<LongSplit, EnumeratorState> {
private final SplitEnumeratorContext<LongSplit> context;
private final EnumeratorState state;
private final Map<Integer, Integer> subtaskRestarts = new HashMap<>();
private LongSplitSplitEnumerator(
SplitEnumeratorContext<LongSplit> context, EnumeratorState state) {
this.context = context;
this.state = state;
}
@Override
public void start() {}
@Override
public void handleSplitRequest(int subtaskId, @Nullable String requesterHostname) {}
@Override
public void addSplitsBack(List<LongSplit> splits, int subtaskId) {
LOG.info("addSplitsBack {}", splits);
// Called on recovery
subtaskRestarts.compute(
subtaskId,
(id, oldCount) -> oldCount == null ? state.numRestarts + 1 : oldCount + 1);
state.unassignedSplits.addAll(splits);
}
@Override
public void addReader(int subtaskId) {
if (context.registeredReaders().size() == context.currentParallelism()) {
if (!state.unassignedSplits.isEmpty()) {
Map<Integer, List<LongSplit>> assignment =
state.unassignedSplits.stream()
.collect(Collectors.groupingBy(LongSplit::getBaseNumber));
LOG.info("Assigning splits {}", assignment);
context.assignSplits(new SplitsAssignment<>(assignment));
state.unassignedSplits.clear();
}
context.registeredReaders().keySet().forEach(context::signalNoMoreSplits);
Optional<Integer> restarts =
subtaskRestarts.values().stream().max(Comparator.naturalOrder());
if (restarts.isPresent() && restarts.get() > state.numRestarts) {
state.numRestarts = restarts.get();
// Implicitly sync the restart count of all subtasks with state.numRestarts
subtaskRestarts.clear();
final SyncEvent event =
new SyncEvent(state.numRestarts, state.numCompletedCheckpoints);
context.registeredReaders()
.keySet()
.forEach(index -> context.sendEventToSourceReader(index, event));
}
}
}
@Override
public void notifyCheckpointComplete(long checkpointId) {
state.numCompletedCheckpoints++;
}
@Override
public EnumeratorState snapshotState(long checkpointId) throws Exception {
LOG.info("snapshotState {}", state);
return state;
}
@Override
public void close() throws IOException {}
}
private static
|
LongSplitSplitEnumerator
|
java
|
elastic__elasticsearch
|
x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherExecutorServiceBenchmark.java
|
{
"start": 7605,
"end": 9678
}
|
class ____ extends WatcherExecutorServiceBenchmark {
public static void main(String[] args) throws Exception {
start();
int numAlerts = 1000;
for (int i = 0; i < numAlerts; i++) {
final String name = "_name" + i;
PutWatchRequest putAlertRequest = new PutWatchRequest(
name,
new WatchSourceBuilder().trigger(schedule(interval("5s")))
.input(httpInput(HttpRequestTemplate.builder("localhost", 9200)))
.condition(
new ScriptCondition(
new Script(
ScriptType.INLINE,
Script.DEFAULT_SCRIPT_LANG,
"ctx.payload.tagline == \"You Know, for Search\"",
emptyMap()
)
)
)
.buildAsBytes(XContentType.JSON),
XContentType.JSON
);
putAlertRequest.setId(name);
client.execute(PutWatchAction.INSTANCE, putAlertRequest).actionGet();
}
int numThreads = 50;
int watchersPerThread = numAlerts / numThreads;
Thread[] threads = new Thread[numThreads];
for (int i = 0; i < numThreads; i++) {
final int begin = i * watchersPerThread;
final int end = (i + 1) * watchersPerThread;
Runnable r = () -> {
while (true) {
for (int j = begin; j < end; j++) {
scheduler.trigger("_name" + j);
}
}
};
threads[i] = new Thread(r);
threads[i].start();
}
for (Thread thread : threads) {
thread.join();
}
}
}
public static
|
HttpInput
|
java
|
spring-projects__spring-security
|
config/src/main/java/org/springframework/security/config/annotation/web/configuration/SecurityReactorContextConfiguration.java
|
{
"start": 6370,
"end": 7629
}
|
class ____<T> implements CoreSubscriber<T> {
static final String SECURITY_CONTEXT_ATTRIBUTES = "org.springframework.security.SECURITY_CONTEXT_ATTRIBUTES";
private final CoreSubscriber<T> delegate;
private final Context context;
SecurityReactorContextSubscriber(CoreSubscriber<T> delegate, Map<Object, Object> attributes) {
this.delegate = delegate;
Context context = getOrPutContext(attributes, this.delegate.currentContext());
this.context = context;
}
private Context getOrPutContext(Map<Object, Object> attributes, Context currentContext) {
if (currentContext.hasKey(SECURITY_CONTEXT_ATTRIBUTES)) {
return currentContext;
}
return currentContext.put(SECURITY_CONTEXT_ATTRIBUTES, attributes);
}
@Override
public Context currentContext() {
return this.context;
}
@Override
public void onSubscribe(Subscription s) {
this.delegate.onSubscribe(s);
}
@Override
public void onNext(T t) {
this.delegate.onNext(t);
}
@Override
public void onError(Throwable ex) {
this.delegate.onError(ex);
}
@Override
public void onComplete() {
this.delegate.onComplete();
}
}
/**
* A map that computes each value when {@link #get} is invoked
*/
static
|
SecurityReactorContextSubscriber
|
java
|
micronaut-projects__micronaut-core
|
core/src/main/java/io/micronaut/core/util/clhm/ConcurrentLinkedHashMap.java
|
{
"start": 43748,
"end": 44925
}
|
class ____<V> {
final int weight;
final V value;
WeightedValue(V value, int weight) {
this.weight = weight;
this.value = value;
}
boolean contains(Object o) {
return (o == value) || value.equals(o);
}
/**
* If the entry is available in the hash-table and page replacement policy.
*/
boolean isAlive() {
return weight > 0;
}
/**
* If the entry was removed from the hash-table and is awaiting removal from
* the page replacement policy.
*/
boolean isRetired() {
return weight < 0;
}
/**
* If the entry was removed from the hash-table and the page replacement
* policy.
*/
boolean isDead() {
return weight == 0;
}
}
/**
* A node contains the key, the weighted value, and the linkage pointers on
* the page-replacement algorithm's data structures.
*
* @param <K> The key type
* @param <V> The value type
*/
@SuppressWarnings("serial")
private static final
|
WeightedValue
|
java
|
apache__spark
|
sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/View.java
|
{
"start": 980,
"end": 1046
}
|
interface ____ a persisted view.
*/
@DeveloperApi
public
|
representing
|
java
|
apache__camel
|
components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsSimpleRequestReply2Test.java
|
{
"start": 1520,
"end": 3437
}
|
class ____ extends AbstractJMSTest {
@Order(2)
@RegisterExtension
public static CamelContextExtension camelContextExtension = new DefaultCamelContextExtension();
protected final String componentName = "activemq";
protected CamelContext context;
protected ProducerTemplate template;
protected ConsumerTemplate consumer;
@Test
public void testWithInOnly() throws Exception {
getMockEndpoint("mock:result").expectedBodiesReceived("Hello World");
// send an InOnly
template.sendBody("direct:start", "World");
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void testWithInOut() throws Exception {
getMockEndpoint("mock:result").expectedBodiesReceived("Hello World");
// send an InOut
String out = template.requestBody("direct:start", "World", String.class);
assertEquals("Hello World", out);
MockEndpoint.assertIsSatisfied(context);
}
@Override
public String getComponentName() {
return componentName;
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("direct:start")
.to(ExchangePattern.InOut, "activemq:queue:JmsSimpleRequestReply2Test")
.to("mock:result");
from("activemq:queue:JmsSimpleRequestReply2Test")
.transform(body().prepend("Hello "));
}
};
}
@Override
public CamelContextExtension getCamelContextExtension() {
return camelContextExtension;
}
@BeforeEach
void setUpRequirements() {
context = camelContextExtension.getContext();
template = camelContextExtension.getProducerTemplate();
consumer = camelContextExtension.getConsumerTemplate();
}
}
|
JmsSimpleRequestReply2Test
|
java
|
junit-team__junit5
|
junit-jupiter-api/src/main/java/org/junit/jupiter/api/IndicativeSentencesGeneration.java
|
{
"start": 1828,
"end": 2394
}
|
interface ____ {
String DEFAULT_SEPARATOR = ", ";
Class<? extends DisplayNameGenerator> DEFAULT_GENERATOR = DisplayNameGenerator.Standard.class;
/**
* Custom separator for sentence fragments.
*
* <p>Defaults to {@value #DEFAULT_SEPARATOR}.
*/
String separator() default DEFAULT_SEPARATOR;
/**
* Custom display name generator to use for sentence fragments.
*
* <p>Defaults to {@link DisplayNameGenerator.Standard}.
*/
Class<? extends DisplayNameGenerator> generator() default DisplayNameGenerator.Standard.class;
}
|
IndicativeSentencesGeneration
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedActionTests.java
|
{
"start": 699,
"end": 4544
}
|
class ____ extends ESTestCase {
public void testSortDatafeedIdsByTaskState_GivenDatafeedId() {
PersistentTasksCustomMetadata.Builder tasksBuilder = PersistentTasksCustomMetadata.builder();
addTask("datafeed_1", 0L, "node-1", DatafeedState.STARTED, tasksBuilder);
addTask("datafeed_2", 0L, "node-1", DatafeedState.STOPPED, tasksBuilder);
PersistentTasksCustomMetadata tasks = tasksBuilder.build();
List<String> startedDatafeeds = new ArrayList<>();
List<String> stoppingDatafeeds = new ArrayList<>();
List<String> notStoppedDatafeeds = new ArrayList<>();
TransportStopDatafeedAction.sortDatafeedIdsByTaskState(
Collections.singleton("datafeed_1"),
tasks,
startedDatafeeds,
stoppingDatafeeds,
notStoppedDatafeeds
);
assertEquals(Collections.singletonList("datafeed_1"), startedDatafeeds);
assertEquals(Collections.emptyList(), stoppingDatafeeds);
assertEquals(Collections.singletonList("datafeed_1"), notStoppedDatafeeds);
startedDatafeeds.clear();
stoppingDatafeeds.clear();
notStoppedDatafeeds.clear();
TransportStopDatafeedAction.sortDatafeedIdsByTaskState(
Collections.singleton("datafeed_2"),
tasks,
startedDatafeeds,
stoppingDatafeeds,
notStoppedDatafeeds
);
assertEquals(Collections.emptyList(), startedDatafeeds);
assertEquals(Collections.emptyList(), stoppingDatafeeds);
assertEquals(Collections.emptyList(), notStoppedDatafeeds);
}
public void testSortDatafeedIdsByTaskState_GivenAll() {
PersistentTasksCustomMetadata.Builder tasksBuilder = PersistentTasksCustomMetadata.builder();
addTask("datafeed_1", 0L, "node-1", DatafeedState.STARTED, tasksBuilder);
addTask("datafeed_2", 0L, "node-1", DatafeedState.STOPPED, tasksBuilder);
addTask("datafeed_3", 0L, "node-1", DatafeedState.STOPPING, tasksBuilder);
PersistentTasksCustomMetadata tasks = tasksBuilder.build();
List<String> startedDatafeeds = new ArrayList<>();
List<String> stoppingDatafeeds = new ArrayList<>();
List<String> notStoppedDatafeeds = new ArrayList<>();
TransportStopDatafeedAction.sortDatafeedIdsByTaskState(
Arrays.asList("datafeed_1", "datafeed_2", "datafeed_3"),
tasks,
startedDatafeeds,
stoppingDatafeeds,
notStoppedDatafeeds
);
assertEquals(Collections.singletonList("datafeed_1"), startedDatafeeds);
assertEquals(Collections.singletonList("datafeed_3"), stoppingDatafeeds);
assertEquals(Arrays.asList("datafeed_1", "datafeed_3"), notStoppedDatafeeds);
startedDatafeeds.clear();
stoppingDatafeeds.clear();
TransportStopDatafeedAction.sortDatafeedIdsByTaskState(
Collections.singleton("datafeed_2"),
tasks,
startedDatafeeds,
stoppingDatafeeds,
notStoppedDatafeeds
);
assertEquals(Collections.emptyList(), startedDatafeeds);
assertEquals(Collections.emptyList(), stoppingDatafeeds);
}
public static void addTask(
String datafeedId,
long startTime,
String nodeId,
DatafeedState state,
PersistentTasksCustomMetadata.Builder taskBuilder
) {
taskBuilder.addTask(
MlTasks.datafeedTaskId(datafeedId),
MlTasks.DATAFEED_TASK_NAME,
new StartDatafeedAction.DatafeedParams(datafeedId, startTime),
new PersistentTasksCustomMetadata.Assignment(nodeId, "test assignment")
);
taskBuilder.updateTaskState(MlTasks.datafeedTaskId(datafeedId), state);
}
}
|
TransportStopDatafeedActionTests
|
java
|
apache__dubbo
|
dubbo-common/src/main/java/org/apache/dubbo/common/beanutil/JavaBeanDescriptor.java
|
{
"start": 969,
"end": 2269
}
|
class ____ implements Serializable, Iterable<Map.Entry<Object, Object>> {
private static final long serialVersionUID = -8505586483570518029L;
public static final int TYPE_CLASS = 1;
public static final int TYPE_ENUM = 2;
public static final int TYPE_COLLECTION = 3;
public static final int TYPE_MAP = 4;
public static final int TYPE_ARRAY = 5;
/**
* @see org.apache.dubbo.common.utils.ReflectUtils#isPrimitive(Class)
*/
public static final int TYPE_PRIMITIVE = 6;
public static final int TYPE_BEAN = 7;
private static final String ENUM_PROPERTY_NAME = "name";
private static final String CLASS_PROPERTY_NAME = "name";
private static final String PRIMITIVE_PROPERTY_VALUE = "value";
/**
* Used to define a type is valid.
*
* @see #isValidType(int)
*/
private static final int TYPE_MAX = TYPE_BEAN;
/**
* Used to define a type is valid.
*
* @see #isValidType(int)
*/
private static final int TYPE_MIN = TYPE_CLASS;
private String className;
private int type;
private final Map<Object, Object> properties = new LinkedHashMap<>();
public JavaBeanDescriptor() {}
public JavaBeanDescriptor(String className, int type) {
notEmpty(className, "
|
JavaBeanDescriptor
|
java
|
apache__camel
|
core/camel-management/src/test/java/org/apache/camel/management/ManagedScheduledPollConsumerTest.java
|
{
"start": 1399,
"end": 5174
}
|
class ____ extends ManagementTestSupport {
@Test
public void testScheduledPollConsumer() throws Exception {
MBeanServer mbeanServer = getMBeanServer();
Set<ObjectName> set = mbeanServer.queryNames(new ObjectName("*:type=consumers,*"), null);
assertEquals(1, set.size());
ObjectName on = set.iterator().next();
assertTrue(mbeanServer.isRegistered(on), "Should be registered");
String uri = (String) mbeanServer.getAttribute(on, "EndpointUri");
assertEquals(
"file://" + testDirectory() + "?backoffErrorThreshold=3&backoffIdleThreshold=2&backoffMultiplier=4&delay=4000",
uri);
Long delay = (Long) mbeanServer.getAttribute(on, "Delay");
assertEquals(4000, delay.longValue());
Long initialDelay = (Long) mbeanServer.getAttribute(on, "InitialDelay");
assertEquals(1000, initialDelay.longValue());
Boolean fixedDelay = (Boolean) mbeanServer.getAttribute(on, "UseFixedDelay");
assertEquals(Boolean.TRUE, fixedDelay);
Boolean schedulerStarted = (Boolean) mbeanServer.getAttribute(on, "SchedulerStarted");
assertEquals(Boolean.TRUE, schedulerStarted);
String timeUnit = (String) mbeanServer.getAttribute(on, "TimeUnit");
assertEquals(TimeUnit.MILLISECONDS.toString(), timeUnit);
Integer backoffMultiplier = (Integer) mbeanServer.getAttribute(on, "BackoffMultiplier");
assertEquals(4, backoffMultiplier.longValue());
Integer backoffCounter = (Integer) mbeanServer.getAttribute(on, "BackoffCounter");
assertEquals(0, backoffCounter.longValue());
Integer backoffIdleThreshold = (Integer) mbeanServer.getAttribute(on, "BackoffIdleThreshold");
assertEquals(2, backoffIdleThreshold.longValue());
Integer backoffErrorThreshold = (Integer) mbeanServer.getAttribute(on, "BackoffErrorThreshold");
assertEquals(3, backoffErrorThreshold.longValue());
String routeId = (String) mbeanServer.getAttribute(on, "RouteId");
assertEquals("route1", routeId);
// stop it
mbeanServer.invoke(on, "stop", null, null);
schedulerStarted = (Boolean) mbeanServer.getAttribute(on, "SchedulerStarted");
assertEquals(Boolean.FALSE, schedulerStarted);
// change delay
mbeanServer.setAttribute(on, new Attribute("Delay", 2000));
// start it
mbeanServer.invoke(on, "start", null, null);
delay = (Long) mbeanServer.getAttribute(on, "Delay");
assertEquals(2000, delay.longValue());
// change some options
mbeanServer.setAttribute(on, new Attribute("UseFixedDelay", Boolean.FALSE));
fixedDelay = (Boolean) mbeanServer.getAttribute(on, "UseFixedDelay");
assertEquals(Boolean.FALSE, fixedDelay);
mbeanServer.setAttribute(on, new Attribute("TimeUnit", TimeUnit.SECONDS.name()));
timeUnit = (String) mbeanServer.getAttribute(on, "TimeUnit");
assertEquals(TimeUnit.SECONDS.toString(), timeUnit);
mbeanServer.setAttribute(on, new Attribute("InitialDelay", Long.valueOf("2000")));
initialDelay = (Long) mbeanServer.getAttribute(on, "InitialDelay");
assertEquals(2000, initialDelay.longValue());
context.stop();
assertFalse(mbeanServer.isRegistered(on), "Should no longer be registered");
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from(fileUri("?delay=4000&backoffMultiplier=4&backoffIdleThreshold=2&backoffErrorThreshold=3"))
.to("mock:result");
}
};
}
}
|
ManagedScheduledPollConsumerTest
|
java
|
apache__dubbo
|
dubbo-plugin/dubbo-rest-openapi/src/main/java/org/apache/dubbo/rpc/protocol/tri/rest/support/swagger/JavadocOpenAPIDefinitionResolver.java
|
{
"start": 10038,
"end": 13637
}
|
class ____ {
private static final Map<Field, Field> MAPPING = new LinkedHashMap<>();
private static Field PARAMS;
public final ClassJavadoc javadoc;
public Map<String, FieldJavadoc> fields;
public Map<MethodSignature, MethodJavadoc> methods;
public Map<String, ParamJavadoc> recordComponents;
static {
try {
Field[] fields = ClassJavadoc.class.getDeclaredFields();
Field[] wFields = ClassJavadocWrapper.class.getFields();
for (Field field : fields) {
field.setAccessible(true);
for (Field wField : wFields) {
if (wField.getName().equals(field.getName())) {
MAPPING.put(field, wField);
break;
}
}
}
PARAMS = MethodJavadoc.class.getDeclaredField("params");
PARAMS.setAccessible(true);
} catch (Throwable ignored) {
}
}
public ClassJavadocWrapper(ClassJavadoc javadoc) {
this.javadoc = javadoc;
try {
for (Map.Entry<Field, Field> entry : MAPPING.entrySet()) {
entry.getValue().set(this, entry.getKey().get(javadoc));
}
} catch (Throwable ignored) {
}
}
public boolean isEmpty() {
return javadoc.isEmpty();
}
public Comment getClassComment() {
return javadoc.getComment();
}
public FieldJavadoc getField(Field field) {
if (fields == null) {
return null;
}
FieldJavadoc fieldJavadoc = fields.get(field.getName());
return fieldJavadoc == null || fieldJavadoc.isEmpty() ? null : fieldJavadoc;
}
public MethodJavadoc getMethod(Method method) {
if (methods == null) {
return null;
}
MethodJavadoc methodJavadoc = methods.get(MethodSignature.from(method));
if (methodJavadoc != null && !methodJavadoc.isEmpty()) {
return methodJavadoc;
}
Method bridgeMethod = RuntimeJavadocHelper.findBridgeMethod(method);
if (bridgeMethod != null && bridgeMethod != method) {
methodJavadoc = methods.get(MethodSignature.from(bridgeMethod));
if (methodJavadoc != null && !methodJavadoc.isEmpty()) {
return methodJavadoc;
}
}
return null;
}
@SuppressWarnings("unchecked")
public Comment getParameter(Method method, String name) {
if (methods == null) {
return null;
}
MethodJavadoc methodJavadoc = methods.get(MethodSignature.from(method));
if (methodJavadoc == null || PARAMS == null) {
return null;
}
try {
Map<String, ParamJavadoc> params = (Map<String, ParamJavadoc>) PARAMS.get(methodJavadoc);
ParamJavadoc paramJavadoc = params.get(name);
if (paramJavadoc != null) {
return paramJavadoc.getComment();
}
} catch (Throwable ignored) {
}
return null;
}
public ParamJavadoc getRecordComponent(String name) {
return recordComponents == null ? null : recordComponents.get(name);
}
}
}
|
ClassJavadocWrapper
|
java
|
apache__camel
|
components/camel-elasticsearch/src/main/java/org/apache/camel/component/es/ElasticsearchProducer.java
|
{
"start": 25273,
"end": 25376
}
|
class ____ all the information that an asynchronous action could need.
*/
private static
|
providing
|
java
|
google__error-prone
|
core/src/main/java/com/google/errorprone/bugpatterns/threadsafety/HeldLockAnalyzer.java
|
{
"start": 3776,
"end": 5369
}
|
interface ____ {
/**
* Handles a guarded member access.
*
* @param tree The member access expression.
* @param guard The member's guard expression.
* @param locks The set of held locks.
*/
void handleGuardedAccess(ExpressionTree tree, GuardedByExpression guard, HeldLockSet locks);
}
/**
* Analyzes a method body, tracking the set of held locks and checking accesses to guarded
* members.
*/
public static void analyze(
VisitorState state, LockEventListener listener, Predicate<Tree> isSuppressed) {
HeldLockSet locks = HeldLockSet.empty();
locks = handleMonitorGuards(state, locks);
new LockScanner(state, listener, isSuppressed).scan(state.getPath(), locks);
}
// Don't use Class#getName() for inner classes, we don't want `Monitor$Guard`
private static final String MONITOR_GUARD_CLASS =
"com.google.common.util.concurrent.Monitor.Guard";
private static HeldLockSet handleMonitorGuards(VisitorState state, HeldLockSet locks) {
JCNewClass newClassTree = ASTHelpers.findEnclosingNode(state.getPath(), JCNewClass.class);
if (newClassTree == null) {
return locks;
}
if (!(ASTHelpers.getSymbol(newClassTree.clazz) instanceof ClassSymbol classSymbol)) {
return locks;
}
if (!classSymbol.fullname.contentEquals(MONITOR_GUARD_CLASS)) {
return locks;
}
return GuardedByBinder.bindExpression(
Iterables.getOnlyElement(newClassTree.getArguments()), state)
.map(le -> locks.plus(le))
.orElse(locks);
}
private static
|
LockEventListener
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/DepAnnTest.java
|
{
"start": 2144,
"end": 2731
}
|
interface ____ {}
/**
* @deprecated
*/
// BUG: Diagnostic contains: @Deprecated
public void deprecatedMethood() {}
}\
""")
.doTest();
}
@Test
public void negativeCase1() {
compilationHelper
.setArgs(JAVACOPTS)
.addSourceLines(
"DepAnnNegativeCase1.java",
"""
package com.google.errorprone.bugpatterns.testdata;
/**
* @deprecated
*/
@Deprecated
public
|
Interface
|
java
|
google__dagger
|
javatests/artifacts/dagger/build-tests/src/test/java/buildtests/TransitiveSubcomponentQualifierTest.java
|
{
"start": 8693,
"end": 8830
}
|
class ____ extends MyBaseSubcomponent {",
" @Subcomponent.Builder",
" public abstract static
|
MySubcomponent
|
java
|
square__retrofit
|
samples/src/main/java/com/example/retrofit/ChunkingConverter.java
|
{
"start": 2936,
"end": 3098
}
|
class ____ {
final String owner;
final String name;
Repo(String owner, String name) {
this.owner = owner;
this.name = name;
}
}
|
Repo
|
java
|
apache__commons-lang
|
src/test/java/org/apache/commons/lang3/function/MethodInvokersFailableBiConsumerTest.java
|
{
"start": 1388,
"end": 2556
}
|
class ____ extends MethodFixtures {
@Test
void testApply1Arg() throws Throwable {
MethodInvokers.asFailableBiConsumer(getMethodForSetString1Arg()).accept(INSTANCE, "A");
assertEquals("A", INSTANCE.getValue1());
}
@Test
void testApply1ArgThrowsChecked() throws Exception {
assertThrows(CustomCheckedException.class, () -> MethodInvokers.asFailableBiConsumer(getMethodForSetString1ArgThrowsChecked()).accept(INSTANCE, "A"));
}
@Test
void testApply1ArgThrowsUnchecked() throws Exception {
assertThrows(CustomUncheckedException.class, () -> MethodInvokers.asFailableBiConsumer(getMethodForSetString1ArgThrowsUnchecked()).accept(INSTANCE, "A"));
}
@Test
void testConstructorForNull() throws Exception {
assertNullPointerException(() -> MethodInvokers.asFailableBiConsumer(null));
}
@Test
void testToString() throws SecurityException, ReflectiveOperationException {
// Should not blow up and must return _something_
assertFalse(MethodInvokers.asFailableBiConsumer(getMethodForSetString1Arg()).toString().isEmpty());
}
}
|
MethodInvokersFailableBiConsumerTest
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/dispatcher/cleanup/TestingRetryStrategies.java
|
{
"start": 1188,
"end": 1580
}
|
class ____'t be instantiated
}
private static final Duration TESTING_DEFAULT_RETRY_DELAY = Duration.ofMillis(10);
public static final RetryStrategy NO_RETRY_STRATEGY = new FixedRetryStrategy(0, Duration.ZERO);
public static RetryStrategy createWithNumberOfRetries(int retryCount) {
return new FixedRetryStrategy(retryCount, TESTING_DEFAULT_RETRY_DELAY);
}
}
|
shouldn
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/cut/MonetoryAmountUserType.java
|
{
"start": 393,
"end": 2278
}
|
class ____ implements CompositeUserType<MonetoryAmount> {
@Override
public Object getPropertyValue(MonetoryAmount component, int property) throws HibernateException {
return property == 0 ? component.getAmount() : component.getCurrency();
}
@Override
public MonetoryAmount instantiate(ValueAccess valueAccess) {
final BigDecimal value = valueAccess.getValue( 0, BigDecimal.class );
final Currency currency = valueAccess.getValue( 1, Currency.class );
if ( value == null && currency == null ) {
return null;
}
return new MonetoryAmount( value, currency );
}
@Override
public Class<?> embeddable() {
return MonetoryAmount.class;
}
@Override
public Class returnedClass() {
return MonetoryAmount.class;
}
@Override
public boolean equals(MonetoryAmount x, MonetoryAmount y) throws HibernateException {
if ( x == y ) {
return true;
}
if ( x == null || y == null ) {
return false;
}
return x.getAmount().equals( y.getAmount() ) &&
x.getCurrency().equals( y.getCurrency() );
}
@Override
public int hashCode(MonetoryAmount x) throws HibernateException {
return x.getAmount().hashCode();
}
@Override
public MonetoryAmount deepCopy(MonetoryAmount value) throws HibernateException {
return new MonetoryAmount( value.getAmount(), value.getCurrency() );
}
@Override
public boolean isMutable() {
return true;
}
@Override
public Serializable disassemble(MonetoryAmount value)
throws HibernateException {
return deepCopy( value );
}
@Override
public MonetoryAmount assemble(Serializable cached, Object owner)
throws HibernateException {
return deepCopy( (MonetoryAmount) cached );
}
@Override
public MonetoryAmount replace(MonetoryAmount original, MonetoryAmount target, Object owner)
throws HibernateException {
return deepCopy( original ); //TODO: improve
}
// public static
|
MonetoryAmountUserType
|
java
|
apache__camel
|
core/camel-base/src/main/java/org/apache/camel/component/properties/EnvPropertiesFunction.java
|
{
"start": 1091,
"end": 1646
}
|
class ____ implements PropertiesFunction {
@Override
public String getName() {
return "env";
}
@Override
public String apply(String remainder) {
String key = remainder;
String defaultValue = null;
if (remainder.contains(":")) {
key = StringHelper.before(remainder, ":");
defaultValue = StringHelper.after(remainder, ":");
}
String value = IOHelper.lookupEnvironmentVariable(key);
return value != null ? value : defaultValue;
}
}
|
EnvPropertiesFunction
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/onetoone/OverrideOneToOneJoinColumnTest.java
|
{
"start": 3734,
"end": 4282
}
|
class ____ {
private String street;
private String city;
private State state;
@OneToOne
public State getState() {
return state;
}
public void setState(State state) {
this.state = state;
}
public String getStreet() {
return street;
}
public void setStreet(String street) {
this.street = street;
}
public String getCity() {
return city;
}
public void setCity(String city) {
this.city = city;
}
}
@Entity(name = "State")
@jakarta.persistence.Table(name = "STATE_TABLE")
public static
|
Address
|
java
|
elastic__elasticsearch
|
test/fixtures/gcs-fixture/src/main/java/fixture/gcs/GoogleCloudStorageHttpFixture.java
|
{
"start": 647,
"end": 1798
}
|
class ____ extends ExternalResource {
private final boolean enabled;
private final String bucket;
private final String token;
private HttpServer server;
public GoogleCloudStorageHttpFixture(boolean enabled, final String bucket, final String token) {
this.enabled = enabled;
this.bucket = bucket;
this.token = token;
}
public String getAddress() {
return "http://" + server.getAddress().getHostString() + ":" + server.getAddress().getPort();
}
@Override
protected void before() throws Throwable {
if (enabled) {
this.server = HttpServer.create(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 0);
server.createContext("/" + token, new FakeOAuth2HttpHandler());
server.createContext("/computeMetadata/v1/project/project-id", new FakeProjectIdHttpHandler());
server.createContext("/", new GoogleCloudStorageHttpHandler(bucket));
server.start();
}
}
@Override
protected void after() {
if (enabled) {
server.stop(0);
}
}
}
|
GoogleCloudStorageHttpFixture
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/DoNotCallCheckerTest.java
|
{
"start": 2407,
"end": 2904
}
|
class ____ {
void foo() {
List<Integer> xs = ImmutableList.of();
// BUG: Diagnostic contains:
xs.add(1);
xs.get(1);
}
}
""")
.doTest();
}
@Test
public void positiveWhereDeclaredTypeIsSuper_butAssignedMultipleTimes() {
testHelperWithImmutableList()
.addSourceLines(
"Test.java",
"""
import java.util.List;
|
Test
|
java
|
apache__logging-log4j2
|
log4j-core-test/src/main/java/org/apache/logging/log4j/core/test/junit/ReconfigurationPolicy.java
|
{
"start": 1082,
"end": 1414
}
|
enum ____ {
/** Performs no reconfiguration of the logging system for the entire run of tests in a test class. This is the default. */
NEVER,
/** Performs a reconfiguration before executing each test. */
BEFORE_EACH,
/** Performs a reconfiguration after executing each test. */
AFTER_EACH
}
|
ReconfigurationPolicy
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/deser/dos/DeepNestingUntypedDeserTest.java
|
{
"start": 472,
"end": 2511
}
|
class ____
{
// 28-Mar-2021, tatu: Currently 3000 fails for untyped/Object,
// 4000 for untyped/Array
// 31-May-2022, tatu: But no more! Can handle much much larger
// nesting levels, bounded by memory usage not stack. Tested with
// 1 million (!) nesting levels, but to keep tests fast use 100k
private final static int TOO_DEEP_NESTING = StreamReadConstraints.DEFAULT_MAX_DEPTH * 100;
private final JsonFactory jsonFactory = JsonFactory.builder()
.streamReadConstraints(StreamReadConstraints.builder().maxNestingDepth(Integer.MAX_VALUE).build())
.build();
private final ObjectMapper MAPPER = JsonMapper.builder(jsonFactory).build();
@Test
public void testFormerlyTooDeepUntypedWithArray() throws Exception
{
final String doc = _nestedDoc(TOO_DEEP_NESTING, "[ ", "] ");
Object ob = MAPPER.readValue(doc, Object.class);
assertTrue(ob instanceof List<?>);
// ... but also work with Java array
ob = MAPPER.readerFor(Object.class)
.with(DeserializationFeature.USE_JAVA_ARRAY_FOR_JSON_ARRAY)
.readValue(doc);
assertTrue(ob instanceof Object[]);
}
@Test
public void testFormerlyTooDeepUntypedWithObject() throws Exception
{
final String doc = "{"+_nestedDoc(TOO_DEEP_NESTING, "\"x\":{", "} ") + "}";
Object ob = MAPPER.readValue(doc, Object.class);
assertTrue(ob instanceof Map<?, ?>);
}
private String _nestedDoc(int nesting, String open, String close) {
StringBuilder sb = new StringBuilder(nesting * (open.length() + close.length()));
for (int i = 0; i < nesting; ++i) {
sb.append(open);
if ((i & 31) == 0) {
sb.append("\n");
}
}
for (int i = 0; i < nesting; ++i) {
sb.append(close);
if ((i & 31) == 0) {
sb.append("\n");
}
}
return sb.toString();
}
}
|
DeepNestingUntypedDeserTest
|
java
|
alibaba__nacos
|
api/src/test/java/com/alibaba/nacos/api/ai/listener/NacosAiListenerDefaultMethodTest.java
|
{
"start": 798,
"end": 1062
}
|
class ____ {
NacosAiListener aiListener;
@BeforeEach
void setUp() {
aiListener = event -> {
};
}
@Test
void testGetExecutor() {
assertNull(aiListener.getExecutor());
}
}
|
NacosAiListenerDefaultMethodTest
|
java
|
apache__avro
|
lang/java/avro/src/main/java/org/apache/avro/io/BinaryDecoder.java
|
{
"start": 25497,
"end": 28789
}
|
class ____ extends ByteSource {
private final InputStream in;
protected boolean isEof = false;
private InputStreamByteSource(InputStream in) {
super();
this.in = in;
}
@Override
protected void skipSourceBytes(long length) throws IOException {
boolean readZero = false;
while (length > 0) {
long n = in.skip(length);
if (n > 0) {
length -= n;
continue;
}
// The inputStream contract is evil.
// zero "might" mean EOF. So check for 2 in a row, we will
// infinite loop waiting for -1 with some classes others
// spuriously will return 0 on occasion without EOF
if (n == 0) {
if (readZero) {
isEof = true;
throw new EOFException();
}
readZero = true;
continue;
}
// read negative
isEof = true;
throw new EOFException();
}
}
@Override
protected long trySkipBytes(long length) throws IOException {
long leftToSkip = length;
try {
boolean readZero = false;
while (leftToSkip > 0) {
long n = in.skip(length);
if (n > 0) {
leftToSkip -= n;
continue;
}
// The inputStream contract is evil.
// zero "might" mean EOF. So check for 2 in a row, we will
// infinite loop waiting for -1 with some classes others
// spuriously will return 0 on occasion without EOF
if (n == 0) {
if (readZero) {
isEof = true;
break;
}
readZero = true;
continue;
}
// read negative
isEof = true;
break;
}
} catch (EOFException eof) {
isEof = true;
}
return length - leftToSkip;
}
@Override
protected void readRaw(byte[] data, int off, int len) throws IOException {
while (len > 0) {
int read = in.read(data, off, len);
if (read < 0) {
isEof = true;
throw new EOFException();
}
len -= read;
off += read;
}
}
@Override
protected int tryReadRaw(byte[] data, int off, int len) throws IOException {
int leftToCopy = len;
try {
while (leftToCopy > 0) {
int read = in.read(data, off, leftToCopy);
if (read < 0) {
isEof = true;
break;
}
leftToCopy -= read;
off += read;
}
} catch (EOFException eof) {
isEof = true;
}
return len - leftToCopy;
}
@Override
public int read() throws IOException {
if (ba.getLim() - ba.getPos() == 0) {
return in.read();
} else {
int position = ba.getPos();
int result = ba.getBuf()[position] & 0xff;
ba.setPos(position + 1);
return result;
}
}
@Override
public boolean isEof() {
return isEof;
}
@Override
public void close() throws IOException {
in.close();
}
}
/**
* This byte source is special. It will avoid copying data by using the source's
* byte[] as a buffer in the decoder.
*/
private static
|
InputStreamByteSource
|
java
|
quarkusio__quarkus
|
docs/src/test/java/io/quarkus/docs/LintException.java
|
{
"start": 33,
"end": 309
}
|
class ____ extends RuntimeException {
// Exception that has no stacktrace
public LintException(String fileName) {
super("Found errors in document metadata. See test output or " + fileName + " for details.",
null, false, false);
}
}
|
LintException
|
java
|
google__guava
|
android/guava/src/com/google/common/collect/ImmutableMapKeySet.java
|
{
"start": 1017,
"end": 1802
}
|
class ____<K, V> extends IndexedImmutableSet<K> {
private final ImmutableMap<K, V> map;
ImmutableMapKeySet(ImmutableMap<K, V> map) {
this.map = map;
}
@Override
public int size() {
return map.size();
}
@Override
public UnmodifiableIterator<K> iterator() {
return map.keyIterator();
}
@Override
public boolean contains(@Nullable Object object) {
return map.containsKey(object);
}
@Override
K get(int index) {
return map.entrySet().asList().get(index).getKey();
}
@Override
boolean isPartialView() {
return true;
}
@Override
@J2ktIncompatible
@GwtIncompatible
Object writeReplace() {
return new KeySetSerializedForm<K>(map);
}
@GwtIncompatible
@J2ktIncompatible
private static final
|
ImmutableMapKeySet
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/cache/SingleRegisteredProviderTest.java
|
{
"start": 916,
"end": 3651
}
|
class ____ {
@Test
public void testCachingExplicitlyDisabled() {
try (final StandardServiceRegistry registry = ServiceRegistryUtil.serviceRegistryBuilder()
.applySetting( AvailableSettings.USE_SECOND_LEVEL_CACHE, "false" )
.build()) {
assertThat( registry.getService( RegionFactory.class ) ).isInstanceOf( NoCachingRegionFactory.class );
}
}
@Test
public void testCachingImplicitlyEnabledRegistered() {
try (final BootstrapServiceRegistry bsr = new BootstrapServiceRegistryBuilder()
.build()) {
final Collection<Class<? extends RegionFactory>> implementors = bsr
.getService( StrategySelector.class )
.getRegisteredStrategyImplementors( RegionFactory.class );
assertThat( implementors.size() ).isEqualTo( 1 );
final StandardServiceRegistry ssr = ServiceRegistryUtil.serviceRegistryBuilder( bsr )
.applySetting( AvailableSettings.USE_SECOND_LEVEL_CACHE, "" )
.build();
assertThat( ssr.getService( RegionFactory.class ) ).isInstanceOf( NoCachingRegionFactory.class );
}
}
@Test
public void testCachingImplicitlyEnabledNoRegistered() {
try (final BootstrapServiceRegistry bsr = new BootstrapServiceRegistryBuilder()
.build()) {
final Collection<Class<? extends RegionFactory>> implementors = bsr
.getService( StrategySelector.class )
.getRegisteredStrategyImplementors( RegionFactory.class );
assertThat( implementors.size() ).isEqualTo( 1 );
bsr.getService( StrategySelector.class ).unRegisterStrategyImplementor(
RegionFactory.class,
implementors.iterator().next()
);
final StandardServiceRegistry ssr = ServiceRegistryUtil.serviceRegistryBuilder( bsr )
.applySetting( AvailableSettings.USE_SECOND_LEVEL_CACHE, "" )
.build();
assertThat( ssr.getService( RegionFactory.class ) ).isInstanceOf( NoCachingRegionFactory.class );
}
}
@Test
public void testConnectionsRegistered() {
try (final BootstrapServiceRegistry bsr = new BootstrapServiceRegistryBuilder()
.build()) {
final Collection<Class<? extends ConnectionProvider>> implementors = bsr
.getService( StrategySelector.class )
.getRegisteredStrategyImplementors( ConnectionProvider.class );
assertThat( implementors.size() ).isEqualTo( 0 );
bsr.getService( StrategySelector.class ).registerStrategyImplementor(
ConnectionProvider.class,
"testing",
DriverManagerConnectionProvider.class
);
final StandardServiceRegistry ssr = ServiceRegistryUtil.serviceRegistryBuilder( bsr ).build();
final ConnectionProvider configuredProvider = ssr.getService( ConnectionProvider.class );
assertThat( configuredProvider ).isInstanceOf( DriverManagerConnectionProvider.class );
}
}
}
|
SingleRegisteredProviderTest
|
java
|
spring-projects__spring-framework
|
spring-beans/src/main/java/org/springframework/beans/factory/xml/PluggableSchemaResolver.java
|
{
"start": 2265,
"end": 6524
}
|
class ____ implements EntityResolver {
/**
* The location of the file that defines schema mappings.
* Can be present in multiple JAR files.
*/
public static final String DEFAULT_SCHEMA_MAPPINGS_LOCATION = "META-INF/spring.schemas";
private static final Log logger = LogFactory.getLog(PluggableSchemaResolver.class);
private final @Nullable ClassLoader classLoader;
private final String schemaMappingsLocation;
/** Stores the mapping of schema URL → local schema path. */
private volatile @Nullable Map<String, String> schemaMappings;
/**
* Loads the schema URL → schema file location mappings using the default
* mapping file pattern "META-INF/spring.schemas".
* @param classLoader the ClassLoader to use for loading
* (can be {@code null} to use the default ClassLoader)
* @see PropertiesLoaderUtils#loadAllProperties(String, ClassLoader)
*/
public PluggableSchemaResolver(@Nullable ClassLoader classLoader) {
this.classLoader = classLoader;
this.schemaMappingsLocation = DEFAULT_SCHEMA_MAPPINGS_LOCATION;
}
/**
* Loads the schema URL → schema file location mappings using the given
* mapping file pattern.
* @param classLoader the ClassLoader to use for loading
* (can be {@code null} to use the default ClassLoader)
* @param schemaMappingsLocation the location of the file that defines schema mappings
* (must not be empty)
* @see PropertiesLoaderUtils#loadAllProperties(String, ClassLoader)
*/
public PluggableSchemaResolver(@Nullable ClassLoader classLoader, String schemaMappingsLocation) {
Assert.hasText(schemaMappingsLocation, "'schemaMappingsLocation' must not be empty");
this.classLoader = classLoader;
this.schemaMappingsLocation = schemaMappingsLocation;
}
@Override
public @Nullable InputSource resolveEntity(@Nullable String publicId, @Nullable String systemId) throws IOException {
if (logger.isTraceEnabled()) {
logger.trace("Trying to resolve XML entity with public id [" + publicId +
"] and system id [" + systemId + "]");
}
if (systemId != null) {
String resourceLocation = getSchemaMappings().get(systemId);
if (resourceLocation == null && systemId.startsWith("https:")) {
// Retrieve canonical http schema mapping even for https declaration
resourceLocation = getSchemaMappings().get("http:" + systemId.substring(6));
}
if (resourceLocation != null) {
Resource resource = new ClassPathResource(resourceLocation, this.classLoader);
try {
InputSource source = new InputSource(resource.getInputStream());
source.setPublicId(publicId);
source.setSystemId(systemId);
if (logger.isTraceEnabled()) {
logger.trace("Found XML schema [" + systemId + "] in classpath: " + resourceLocation);
}
return source;
}
catch (FileNotFoundException ex) {
if (logger.isDebugEnabled()) {
logger.debug("Could not find XML schema [" + systemId + "]: " + resource, ex);
}
}
}
}
// Fall back to the parser's default behavior.
return null;
}
/**
* Load the specified schema mappings lazily.
*/
private Map<String, String> getSchemaMappings() {
Map<String, String> schemaMappings = this.schemaMappings;
if (schemaMappings == null) {
synchronized (this) {
schemaMappings = this.schemaMappings;
if (schemaMappings == null) {
if (logger.isTraceEnabled()) {
logger.trace("Loading schema mappings from [" + this.schemaMappingsLocation + "]");
}
try {
Properties mappings =
PropertiesLoaderUtils.loadAllProperties(this.schemaMappingsLocation, this.classLoader);
if (logger.isTraceEnabled()) {
logger.trace("Loaded schema mappings: " + mappings);
}
schemaMappings = new ConcurrentHashMap<>(mappings.size());
CollectionUtils.mergePropertiesIntoMap(mappings, schemaMappings);
this.schemaMappings = schemaMappings;
}
catch (IOException ex) {
throw new IllegalStateException(
"Unable to load schema mappings from location [" + this.schemaMappingsLocation + "]", ex);
}
}
}
}
return schemaMappings;
}
@Override
public String toString() {
return "EntityResolver using schema mappings " + getSchemaMappings();
}
}
|
PluggableSchemaResolver
|
java
|
alibaba__fastjson
|
src/main/java/com/alibaba/fastjson/support/spring/FastJsonpResponseBodyAdvice.java
|
{
"start": 770,
"end": 1176
}
|
class ____ {@code ResponseBodyAdvice} implementations
* that customize the response before JSON serialization with {@link FastJsonpHttpMessageConverter4}'s concrete
* subclasses.
* <p>
* Compatible Spring MVC version 4.2+
*
* @author Jerry.Chen
* @see JSONPResponseBodyAdvice
* @since 1.2.20
*/
@Deprecated
@Order(Integer.MIN_VALUE) //before FastJsonViewResponseBodyAdvice
@ControllerAdvice
public
|
for
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.