language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/identifier/NaturalIdEqualsHashCodeEntityTest.java | {
"start": 2501,
"end": 3618
} | class ____ {
@Id
@GeneratedValue
private Long id;
private String title;
private String author;
@NaturalId
private String isbn;
//Getters and setters are omitted for brevity
//end::entity-pojo-natural-id-equals-hashcode-example[]
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public String getAuthor() {
return author;
}
public void setAuthor(String author) {
this.author = author;
}
public String getIsbn() {
return isbn;
}
public void setIsbn(String isbn) {
this.isbn = isbn;
}
//tag::entity-pojo-natural-id-equals-hashcode-example[]
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!(o instanceof Book)) {
return false;
}
Book book = (Book) o;
return Objects.equals(isbn, book.getIsbn());
}
@Override
public int hashCode() {
return Objects.hash(isbn);
}
}
//end::entity-pojo-natural-id-equals-hashcode-example[]
}
| Book |
java | google__dagger | javatests/dagger/internal/codegen/XExecutableTypesTest.java | {
"start": 13281,
"end": 13547
} | class ____ {",
" void toList(Collection<Foo> c) { throw new RuntimeException(); }",
"}");
Source bar =
CompilerTests.javaSource(
"test.Bar",
"package test;",
"import java.util.*;",
" | Foo |
java | elastic__elasticsearch | x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankShardCanMatchIT.java | {
"start": 1648,
"end": 1734
} | class ____ extends Plugin implements SearchPlugin {
public static | SkipShardPlugin |
java | google__guava | guava/src/com/google/common/hash/ChecksumHashFunction.java | {
"start": 3238,
"end": 4536
} | class ____ {
private static final @Nullable MethodHandle UPDATE_BB = updateByteBuffer();
@IgnoreJRERequirement // https://github.com/mojohaus/animal-sniffer/issues/67
static boolean updateByteBuffer(Checksum cs, ByteBuffer bb) {
if (UPDATE_BB != null) {
try {
UPDATE_BB.invokeExact(cs, bb);
} catch (Throwable e) {
// `update` has no `throws` clause.
sneakyThrow(e);
}
return true;
} else {
return false;
}
}
private static @Nullable MethodHandle updateByteBuffer() {
try {
Class<?> clazz = Class.forName("java.util.zip.Checksum");
return MethodHandles.lookup()
.findVirtual(clazz, "update", MethodType.methodType(void.class, ByteBuffer.class));
} catch (ClassNotFoundException e) {
throw new AssertionError(e);
} catch (IllegalAccessException e) {
// That API is public.
throw newLinkageError(e);
} catch (NoSuchMethodException e) {
// Only introduced in Java 9.
return null;
}
}
private static LinkageError newLinkageError(Throwable cause) {
return new LinkageError(cause.toString(), cause);
}
}
private static final long serialVersionUID = 0L;
}
| ChecksumMethodHandles |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/web/method/annotation/ExceptionHandlerMappingInfo.java | {
"start": 1096,
"end": 2242
} | class ____ {
private final Set<Class<? extends Throwable>> exceptionTypes;
private final Set<MediaType> producibleTypes;
private final Method handlerMethod;
ExceptionHandlerMappingInfo(Set<Class<? extends Throwable>> exceptionTypes, Set<MediaType> producibleMediaTypes, Method handlerMethod) {
Assert.notNull(exceptionTypes, "exceptionTypes should not be null");
Assert.notNull(producibleMediaTypes, "producibleMediaTypes should not be null");
Assert.notNull(handlerMethod, "handlerMethod should not be null");
this.exceptionTypes = exceptionTypes;
this.producibleTypes = producibleMediaTypes;
this.handlerMethod = handlerMethod;
}
/**
* Return the method responsible for handling the exception.
*/
public Method getHandlerMethod() {
return this.handlerMethod;
}
/**
* Return the exception types supported by this handler.
*/
public Set<Class<? extends Throwable>> getExceptionTypes() {
return this.exceptionTypes;
}
/**
* Return the producible media types by this handler. Can be empty.
*/
public Set<MediaType> getProducibleTypes() {
return this.producibleTypes;
}
}
| ExceptionHandlerMappingInfo |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/bugs/_1933/Issue1933Config.java | {
"start": 446,
"end": 555
} | interface ____ {
@BeanMapping(ignoreByDefault = true)
Entity updateEntity(Dto dto);
| Issue1933Config |
java | netty__netty | codec-memcache/src/main/java/io/netty/handler/codec/memcache/binary/AbstractBinaryMemcacheDecoder.java | {
"start": 1505,
"end": 8099
} | class ____<M extends BinaryMemcacheMessage>
extends AbstractMemcacheObjectDecoder {
public static final int DEFAULT_MAX_CHUNK_SIZE = 8192;
private final int chunkSize;
private M currentMessage;
private int alreadyReadChunkSize;
private State state = State.READ_HEADER;
/**
* Create a new {@link AbstractBinaryMemcacheDecoder} with default settings.
*/
protected AbstractBinaryMemcacheDecoder() {
this(DEFAULT_MAX_CHUNK_SIZE);
}
/**
* Create a new {@link AbstractBinaryMemcacheDecoder} with custom settings.
*
* @param chunkSize the maximum chunk size of the payload.
*/
protected AbstractBinaryMemcacheDecoder(int chunkSize) {
checkPositiveOrZero(chunkSize, "chunkSize");
this.chunkSize = chunkSize;
}
@Override
protected void decode(ChannelHandlerContext ctx, ByteBuf in, List<Object> out) throws Exception {
switch (state) {
case READ_HEADER: try {
if (in.readableBytes() < 24) {
return;
}
resetDecoder();
currentMessage = decodeHeader(in);
state = State.READ_EXTRAS;
} catch (Exception e) {
resetDecoder();
out.add(invalidMessage(e));
return;
}
case READ_EXTRAS: try {
byte extrasLength = currentMessage.extrasLength();
if (extrasLength > 0) {
if (in.readableBytes() < extrasLength) {
return;
}
currentMessage.setExtras(in.readRetainedSlice(extrasLength));
}
state = State.READ_KEY;
} catch (Exception e) {
resetDecoder();
out.add(invalidMessage(e));
return;
}
case READ_KEY: try {
short keyLength = currentMessage.keyLength();
if (keyLength > 0) {
if (in.readableBytes() < keyLength) {
return;
}
currentMessage.setKey(in.readRetainedSlice(keyLength));
}
out.add(currentMessage.retain());
state = State.READ_CONTENT;
} catch (Exception e) {
resetDecoder();
out.add(invalidMessage(e));
return;
}
case READ_CONTENT: try {
int valueLength = currentMessage.totalBodyLength()
- currentMessage.keyLength()
- currentMessage.extrasLength();
int toRead = in.readableBytes();
if (valueLength > 0) {
if (toRead == 0) {
return;
}
if (toRead > chunkSize) {
toRead = chunkSize;
}
int remainingLength = valueLength - alreadyReadChunkSize;
if (toRead > remainingLength) {
toRead = remainingLength;
}
ByteBuf chunkBuffer = in.readRetainedSlice(toRead);
MemcacheContent chunk;
if ((alreadyReadChunkSize += toRead) >= valueLength) {
chunk = new DefaultLastMemcacheContent(chunkBuffer);
} else {
chunk = new DefaultMemcacheContent(chunkBuffer);
}
out.add(chunk);
if (alreadyReadChunkSize < valueLength) {
return;
}
} else {
out.add(LastMemcacheContent.EMPTY_LAST_CONTENT);
}
resetDecoder();
state = State.READ_HEADER;
return;
} catch (Exception e) {
resetDecoder();
out.add(invalidChunk(e));
return;
}
case BAD_MESSAGE:
in.skipBytes(actualReadableBytes());
return;
default:
throw new Error("Unexpected state reached: " + state);
}
}
/**
* Helper method to create a message indicating a invalid decoding result.
*
* @param cause the cause of the decoding failure.
* @return a valid message indicating failure.
*/
private M invalidMessage(Exception cause) {
state = State.BAD_MESSAGE;
M message = buildInvalidMessage();
message.setDecoderResult(DecoderResult.failure(cause));
return message;
}
/**
* Helper method to create a content chunk indicating a invalid decoding result.
*
* @param cause the cause of the decoding failure.
* @return a valid content chunk indicating failure.
*/
private MemcacheContent invalidChunk(Exception cause) {
state = State.BAD_MESSAGE;
MemcacheContent chunk = new DefaultLastMemcacheContent(Unpooled.EMPTY_BUFFER);
chunk.setDecoderResult(DecoderResult.failure(cause));
return chunk;
}
/**
* When the channel goes inactive, release all frames to prevent data leaks.
*
* @param ctx handler context
* @throws Exception
*/
@Override
public void channelInactive(ChannelHandlerContext ctx) throws Exception {
super.channelInactive(ctx);
resetDecoder();
}
/**
* Prepare for next decoding iteration.
*/
protected void resetDecoder() {
if (currentMessage != null) {
currentMessage.release();
currentMessage = null;
}
alreadyReadChunkSize = 0;
}
/**
* Decode and return the parsed {@link BinaryMemcacheMessage}.
*
* @param in the incoming buffer.
* @return the decoded header.
*/
protected abstract M decodeHeader(ByteBuf in);
/**
* Helper method to create a upstream message when the incoming parsing did fail.
*
* @return a message indicating a decoding failure.
*/
protected abstract M buildInvalidMessage();
/**
* Contains all states this decoder can possibly be in.
* <p/>
* Note that most of the states can be optional, the only one required is reading
* the header ({@link #READ_HEADER}. All other steps depend on the length fields
* in the header and will be executed conditionally.
*/
| AbstractBinaryMemcacheDecoder |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/performance/ITestS3AMiscOperationCost.java | {
"start": 2166,
"end": 5241
} | class ____ extends AbstractS3ACostTest {
private static final Logger LOG =
LoggerFactory.getLogger(ITestS3AMiscOperationCost.class);
@Override
public Configuration createConfiguration() {
final Configuration conf = super.createConfiguration();
removeBaseAndBucketOverrides(conf, AUDIT_ENABLED);
conf.setBoolean(AUDIT_ENABLED, true);
return conf;
}
/**
* Expected audit count.
* @param expected expected value.
* @return the probe.
*/
protected OperationCostValidator.ExpectedProbe withAuditCount(
final int expected) {
return probe(AUDIT_SPAN_CREATION, expected);
}
/**
* Common operation which should be low cost as possible.
*/
@Test
public void testMkdirOverDir() throws Throwable {
describe("create a dir over a dir");
S3AFileSystem fs = getFileSystem();
// create base dir with marker
Path baseDir = dir(methodPath());
// create the child; only assert on HEAD/GET IO
verifyMetrics(() -> fs.mkdirs(baseDir),
withAuditCount(1),
// full probe on dest plus list only on parent.
with(OBJECT_METADATA_REQUESTS, 0),
with(OBJECT_LIST_REQUEST, FILESTATUS_DIR_PROBE_L));
}
@Test
public void testGetContentSummaryRoot() throws Throwable {
describe("getContentSummary on Root");
S3AFileSystem fs = getFileSystem();
Path root = new Path("/");
verifyMetrics(() -> getContentSummary(root),
with(INVOCATION_GET_CONTENT_SUMMARY, 1));
}
@Test
public void testGetContentSummaryDir() throws Throwable {
describe("getContentSummary on test dir with children");
S3AFileSystem fs = getFileSystem();
Path baseDir = methodPath();
Path childDir = new Path(baseDir, "subdir/child");
touch(fs, childDir);
// look at path to see if it is a file
// it is not: so LIST
final ContentSummary summary = verifyMetrics(
() -> getContentSummary(baseDir),
with(INVOCATION_GET_CONTENT_SUMMARY, 1),
withAuditCount(1),
always(FILE_STATUS_FILE_PROBE // look at path to see if it is a file
.plus(LIST_OPERATION))); // it is not: so LIST
Assertions.assertThat(summary.getDirectoryCount())
.as("Summary " + summary)
.isEqualTo(2);
Assertions.assertThat(summary.getFileCount())
.as("Summary " + summary)
.isEqualTo(1);
}
@Test
public void testGetContentMissingPath() throws Throwable {
describe("getContentSummary on a missing path");
Path baseDir = methodPath();
verifyMetricsIntercepting(FileNotFoundException.class,
"", () -> getContentSummary(baseDir),
with(INVOCATION_GET_CONTENT_SUMMARY, 1),
withAuditCount(1),
always(FILE_STATUS_FILE_PROBE
.plus(FILE_STATUS_FILE_PROBE)
.plus(LIST_OPERATION)
.plus(LIST_OPERATION)));
}
private ContentSummary getContentSummary(final Path baseDir)
throws IOException {
S3AFileSystem fs = getFileSystem();
return fs.getContentSummary(baseDir);
}
}
| ITestS3AMiscOperationCost |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/http/client/ClientHttpRequestFactory.java | {
"start": 970,
"end": 1479
} | interface ____ {
/**
* Create a new {@link ClientHttpRequest} for the specified URI and HTTP method.
* <p>The returned request can be written to, and then executed by calling
* {@link ClientHttpRequest#execute()}.
* @param uri the URI to create a request for
* @param httpMethod the HTTP method to execute
* @return the created request
* @throws IOException in case of I/O errors
*/
ClientHttpRequest createRequest(URI uri, HttpMethod httpMethod) throws IOException;
}
| ClientHttpRequestFactory |
java | hibernate__hibernate-orm | hibernate-testing/src/main/java/org/hibernate/testing/bytecode/enhancement/extension/BytecodeEnhanced.java | {
"start": 436,
"end": 805
} | interface ____ {
/**
* If set to true, the test will be executed with and without bytecode enhancement within the same execution.
*/
boolean runNotEnhancedAsWell() default false;
/**
* Entity classes will be checked whether they were enhanced or not depending on the context the test is executed in.
* Enhancement check simply verifies that the | BytecodeEnhanced |
java | spring-projects__spring-boot | configuration-metadata/spring-boot-configuration-processor/src/test/java/org/springframework/boot/configurationprocessor/InheritanceMetadataGenerationTests.java | {
"start": 1293,
"end": 3193
} | class ____ extends AbstractMetadataGenerationTests {
@Test
void childProperties() {
ConfigurationMetadata metadata = compile(ChildPropertiesConfig.class);
assertThat(metadata).has(Metadata.withGroup("inheritance").fromSource(ChildPropertiesConfig.class));
assertThat(metadata).has(Metadata.withGroup("inheritance.nest").fromSource(ChildProperties.class));
assertThat(metadata).has(Metadata.withGroup("inheritance.child-nest").fromSource(ChildProperties.class));
assertThat(metadata).has(Metadata.withProperty("inheritance.bool-value"));
assertThat(metadata).has(Metadata.withProperty("inheritance.int-value"));
assertThat(metadata).has(Metadata.withProperty("inheritance.long-value"));
assertThat(metadata).has(Metadata.withProperty("inheritance.nest.bool-value"));
assertThat(metadata).has(Metadata.withProperty("inheritance.nest.int-value"));
assertThat(metadata).has(Metadata.withProperty("inheritance.child-nest.bool-value"));
assertThat(metadata).has(Metadata.withProperty("inheritance.child-nest.int-value"));
}
@Test
void overrideChildProperties() {
ConfigurationMetadata metadata = compile(OverrideChildPropertiesConfig.class);
assertThat(metadata).has(Metadata.withGroup("inheritance").fromSource(OverrideChildPropertiesConfig.class));
assertThat(metadata).has(Metadata.withGroup("inheritance.nest").fromSource(OverrideChildProperties.class));
assertThat(metadata).has(Metadata.withProperty("inheritance.bool-value"));
assertThat(metadata).has(Metadata.withProperty("inheritance.int-value"));
assertThat(metadata).has(Metadata.withProperty("inheritance.long-value"));
assertThat(metadata).has(Metadata.withProperty("inheritance.nest.bool-value"));
assertThat(metadata).has(Metadata.withProperty("inheritance.nest.int-value"));
assertThat(metadata).has(Metadata.withProperty("inheritance.nest.long-value"));
}
}
| InheritanceMetadataGenerationTests |
java | quarkusio__quarkus | extensions/hibernate-validator/deployment/src/test/java/io/quarkus/hibernate/validator/test/ValidatorFromValidationTest.java | {
"start": 767,
"end": 1575
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest test = new QuarkusUnitTest().setArchiveProducer(() -> ShrinkWrap
.create(JavaArchive.class)
.addClasses(TestBean.class, TestConstraint.class, TestInjectedBean.class, TestInjectionValidator.class));
@Test
public void testValidationWithInjection() {
Validator validator = Validation.buildDefaultValidatorFactory().getValidator();
Set<ConstraintViolation<TestBean>> constraintViolations = validator.validate(new TestBean());
assertThat(constraintViolations).isNotEmpty();
TestBean bean = new TestBean();
bean.name = "Alpha";
constraintViolations = validator.validate(bean);
assertThat(constraintViolations).isEmpty();
}
}
| ValidatorFromValidationTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/SuppressWarningsWithoutExplanationTest.java | {
"start": 997,
"end": 1377
} | class ____ {
private final BugCheckerRefactoringTestHelper helper =
BugCheckerRefactoringTestHelper.newInstance(
new SuppressWarningsWithoutExplanation(/* emitDummyFixes= */ true), getClass());
@Test
public void rawTypesSuppressed() {
helper
.addInputLines(
"Test.java",
"""
| SuppressWarningsWithoutExplanationTest |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregatorTests.java | {
"start": 2058,
"end": 2310
} | class ____ extends AggregatorTestCase {
private static final Comparator<InternalIpPrefix.Bucket> IP_ADDRESS_KEY_COMPARATOR = Comparator.comparing(
InternalIpPrefix.Bucket::getKeyAsString
);
private static final | IpPrefixAggregatorTests |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/GoogleCloudStorageEndpointBuilderFactory.java | {
"start": 54731,
"end": 55088
} | interface ____
extends
AdvancedGoogleCloudStorageEndpointConsumerBuilder,
AdvancedGoogleCloudStorageEndpointProducerBuilder {
default GoogleCloudStorageEndpointBuilder basic() {
return (GoogleCloudStorageEndpointBuilder) this;
}
}
public | AdvancedGoogleCloudStorageEndpointBuilder |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/query/sql/NativeQueryResultBuilderTests.java | {
"start": 12369,
"end": 12653
} | class ____ {
private Integer key;
private String text;
public Integer getKey() {
return key;
}
public void setKey(Integer key) {
this.key = key;
}
public String getText() {
return text;
}
public void setText(String text) {
this.text = text;
}
}
}
| Bean |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/io/network/partition/hybrid/tiered/shuffle/TieredResultPartitionFactory.java | {
"start": 3520,
"end": 12382
} | class ____ {
private final TieredStorageConfiguration tieredStorageConfiguration;
private final TieredStorageNettyServiceImpl tieredStorageNettyService;
private final TieredStorageResourceRegistry tieredStorageResourceRegistry;
public TieredResultPartitionFactory(
TieredStorageConfiguration tieredStorageConfiguration,
TieredStorageNettyServiceImpl tieredStorageNettyService,
TieredStorageResourceRegistry tieredStorageResourceRegistry) {
this.tieredStorageConfiguration = tieredStorageConfiguration;
this.tieredStorageNettyService = tieredStorageNettyService;
this.tieredStorageResourceRegistry = tieredStorageResourceRegistry;
}
public TieredStorageConfiguration getTieredStorageConfiguration() {
return tieredStorageConfiguration;
}
public TieredStorageNettyServiceImpl getTieredStorageNettyService() {
return tieredStorageNettyService;
}
public TieredStorageResourceRegistry getTieredStorageResourceRegistry() {
return tieredStorageResourceRegistry;
}
public TieredResultPartition createTieredResultPartition(
String owningTaskName,
int partitionIndex,
ResultPartitionID partitionId,
ResultPartitionType partitionType,
int numPartitions,
int numSubpartitions,
int maxParallelism,
int bufferSizeBytes,
Boolean isBroadCastOnly,
ResultPartitionManager partitionManager,
@Nullable BufferCompressor bufferCompressor,
List<TierShuffleDescriptor> tierShuffleDescriptors,
SupplierWithException<BufferPool, IOException> bufferPoolFactory,
FileChannelManager fileChannelManager,
BatchShuffleReadBufferPool batchShuffleReadBufferPool,
ScheduledExecutorService batchShuffleReadIOExecutor,
boolean isNumberOfPartitionConsumerUndefined) {
// Create memory manager.
TieredStorageMemoryManager memoryManager =
new TieredStorageMemoryManagerImpl(
TieredStorageUtils.getNumBuffersTriggerFlushRatio(), true);
// Create buffer accumulator.
int numAccumulatorExclusiveBuffers = TieredStorageUtils.getAccumulatorExclusiveBuffers();
BufferAccumulator bufferAccumulator =
createBufferAccumulator(
numSubpartitions,
bufferSizeBytes,
numAccumulatorExclusiveBuffers,
memoryManager,
isNumberOfPartitionConsumerUndefined);
// Create producer agents and memory specs.
Tuple2<List<TierProducerAgent>, List<TieredStorageMemorySpec>>
producerAgentsAndMemorySpecs =
createTierProducerAgentsAndMemorySpecs(
numPartitions,
numSubpartitions,
isBroadCastOnly,
TieredStorageIdMappingUtils.convertId(partitionId),
memoryManager,
bufferAccumulator,
partitionType == ResultPartitionType.HYBRID_SELECTIVE,
tierShuffleDescriptors,
fileChannelManager,
batchShuffleReadBufferPool,
batchShuffleReadIOExecutor,
bufferCompressor);
// Create producer client.
TieredStorageProducerClient tieredStorageProducerClient =
new TieredStorageProducerClient(
numSubpartitions,
isBroadCastOnly,
bufferAccumulator,
bufferCompressor,
producerAgentsAndMemorySpecs.f0);
// Create tiered result partition.
return new TieredResultPartition(
owningTaskName,
partitionIndex,
partitionId,
partitionType,
numSubpartitions,
maxParallelism,
partitionManager,
bufferCompressor,
bufferPoolFactory,
tieredStorageProducerClient,
tieredStorageResourceRegistry,
tieredStorageNettyService,
producerAgentsAndMemorySpecs.f1,
memoryManager);
}
private BufferAccumulator createBufferAccumulator(
int numSubpartitions,
int bufferSizeBytes,
int numAccumulatorExclusiveBuffers,
TieredStorageMemoryManager storageMemoryManager,
boolean isNumberOfPartitionConsumerUndefined) {
BufferAccumulator bufferAccumulator;
if ((numSubpartitions + 1) > numAccumulatorExclusiveBuffers) {
bufferAccumulator =
new SortBufferAccumulator(
numSubpartitions,
numAccumulatorExclusiveBuffers,
bufferSizeBytes,
storageMemoryManager,
!isNumberOfPartitionConsumerUndefined);
} else {
bufferAccumulator =
new HashBufferAccumulator(
numSubpartitions,
bufferSizeBytes,
storageMemoryManager,
!isNumberOfPartitionConsumerUndefined);
}
return bufferAccumulator;
}
private Tuple2<List<TierProducerAgent>, List<TieredStorageMemorySpec>>
createTierProducerAgentsAndMemorySpecs(
int numberOfPartitions,
int numberOfSubpartitions,
boolean isBroadcastOnly,
TieredStoragePartitionId partitionID,
TieredStorageMemoryManager memoryManager,
BufferAccumulator bufferAccumulator,
boolean isHybridSelective,
List<TierShuffleDescriptor> tierShuffleDescriptors,
FileChannelManager fileChannelManager,
BatchShuffleReadBufferPool batchShuffleReadBufferPool,
ScheduledExecutorService batchShuffleReadIOExecutor,
@Nullable BufferCompressor bufferCompressor) {
List<TierProducerAgent> tierProducerAgents = new ArrayList<>();
List<TieredStorageMemorySpec> tieredStorageMemorySpecs = new ArrayList<>();
tieredStorageMemorySpecs.add(
// Accumulators are also treated as {@code guaranteedReclaimable}, since these
// buffers can always be transferred to the other tiers.
new TieredStorageMemorySpec(
bufferAccumulator,
2
* Math.min(
numberOfSubpartitions + 1,
TieredStorageUtils.getAccumulatorExclusiveBuffers()),
true));
List<TierFactory> tierFactories = tieredStorageConfiguration.getTierFactories();
checkState(tierFactories.size() == tierShuffleDescriptors.size());
for (int index = 0; index < tierFactories.size(); ++index) {
TierFactory tierFactory = tierFactories.get(index);
if (!isHybridSelective && tierFactory.getClass() == MemoryTierFactory.class) {
continue;
}
TierProducerAgent producerAgent =
tierFactory.createProducerAgent(
numberOfPartitions,
numberOfSubpartitions,
partitionID,
fileChannelManager.createChannel().getPath(),
isBroadcastOnly,
memoryManager,
tieredStorageNettyService,
tieredStorageResourceRegistry,
batchShuffleReadBufferPool,
batchShuffleReadIOExecutor,
Collections.singletonList(tierShuffleDescriptors.get(index)),
Math.max(
2 * batchShuffleReadBufferPool.getNumBuffersPerRequest(),
numberOfSubpartitions),
bufferCompressor);
tierProducerAgents.add(producerAgent);
tieredStorageMemorySpecs.add(tierFactory.getProducerAgentMemorySpec());
}
return Tuple2.of(tierProducerAgents, tieredStorageMemorySpecs);
}
}
| TieredResultPartitionFactory |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/main/java/org/springframework/data/jpa/repository/query/AbstractStringBasedJpaQuery.java | {
"start": 1572,
"end": 1860
} | class ____ {@link String} based JPA queries.
*
* @author Oliver Gierke
* @author Thomas Darimont
* @author Jens Schauder
* @author Tom Hombergs
* @author David Madden
* @author Mark Paluch
* @author Diego Krupitza
* @author Greg Turnquist
* @author Christoph Strobl
*/
abstract | for |
java | lettuce-io__lettuce-core | src/test/java/io/lettuce/core/cluster/ClusterCommandUnitTests.java | {
"start": 853,
"end": 2830
} | class ____ {
@Mock
private RedisChannelWriter writerMock;
private ClusterCommand<String, String, String> sut;
private Command<String, String, String> command = new Command<>(CommandType.TYPE, new StatusOutput<>(StringCodec.UTF8),
null);
@BeforeEach
void before() {
sut = new ClusterCommand<>(command, writerMock, 1);
}
@Test
void testException() {
sut.completeExceptionally(new Exception());
assertThat(sut.isCompleted());
}
@Test
void testCancel() {
assertThat(command.isCancelled()).isFalse();
sut.cancel();
assertThat(command.isCancelled()).isTrue();
}
@Test
void testComplete() {
sut.complete();
assertThat(sut.isCompleted()).isTrue();
assertThat(sut.isCancelled()).isFalse();
}
@Test
void testRedirect() {
sut.getOutput().setError("MOVED 1234-2020 127.0.0.1:1000");
sut.complete();
assertThat(sut.isCompleted()).isFalse();
assertThat(sut.isCancelled()).isFalse();
verify(writerMock).write(sut);
}
@Test
void testRedirectLimit() {
sut.getOutput().setError("MOVED 1234-2020 127.0.0.1:1000");
sut.complete();
sut.getOutput().setError("MOVED 1234-2020 127.0.0.1:1000");
sut.complete();
assertThat(sut.isCompleted()).isTrue();
assertThat(sut.isCancelled()).isFalse();
verify(writerMock).write(sut);
}
@Test
void testCompleteListener() {
final List<String> someList = new ArrayList<>();
AsyncCommand<?, ?, ?> asyncCommand = new AsyncCommand<>(sut);
asyncCommand.thenRun(() -> someList.add(""));
asyncCommand.complete();
asyncCommand.await(1, TimeUnit.MINUTES);
assertThat(sut.isCompleted()).isTrue();
assertThat(someList.size()).describedAs("Inner listener has to add one element").isEqualTo(1);
}
}
| ClusterCommandUnitTests |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/models/role/RedisMasterInstance.java | {
"start": 189,
"end": 821
} | class ____ extends RedisUpstreamInstance {
public RedisMasterInstance() {
}
/**
* Constructs a {@link RedisMasterInstance}
*
* @param replicationOffset the replication offset
* @param replicas list of replicas, must not be {@code null} but may be empty
*/
public RedisMasterInstance(long replicationOffset, List<ReplicationPartner> replicas) {
super(replicationOffset, replicas);
}
/**
* @return always {@link io.lettuce.core.models.role.RedisInstance.Role#MASTER}
*/
@Override
public Role getRole() {
return Role.MASTER;
}
}
| RedisMasterInstance |
java | elastic__elasticsearch | modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java | {
"start": 14823,
"end": 46974
} | class ____",
canonicalClassName
);
}
PainlessClassBuilder existingPainlessClassBuilder = classesToPainlessClassBuilders.get(clazz);
if (existingPainlessClassBuilder == null) {
PainlessClassBuilder painlessClassBuilder = new PainlessClassBuilder();
painlessClassBuilder.annotations.putAll(annotations);
canonicalClassNamesToClasses.put(canonicalClassName.intern(), clazz);
classesToPainlessClassBuilders.put(clazz, painlessClassBuilder);
}
String javaClassName = clazz.getName();
String importedCanonicalClassName = javaClassName.substring(javaClassName.lastIndexOf('.') + 1).replace('$', '.');
boolean importClassName = annotations.containsKey(NoImportAnnotation.class) == false;
if (canonicalClassName.equals(importedCanonicalClassName)) {
if (importClassName) {
throw new IllegalArgumentException("must use no_import parameter on class [" + canonicalClassName + "] with no package");
}
} else {
Class<?> importedClass = canonicalClassNamesToClasses.get(importedCanonicalClassName);
if (importedClass == null) {
if (importClassName) {
if (existingPainlessClassBuilder != null) {
throw new IllegalArgumentException("inconsistent no_import parameter found for class [" + canonicalClassName + "]");
}
canonicalClassNamesToClasses.put(importedCanonicalClassName.intern(), clazz);
if (annotations.get(AliasAnnotation.class) instanceof AliasAnnotation alias) {
Class<?> existing = canonicalClassNamesToClasses.put(alias.alias(), clazz);
if (existing != null) {
throw lookupException("Cannot add alias [%s] for [%s] that shadows class [%s]", alias.alias(), clazz, existing);
}
}
}
} else if (importedClass != clazz) {
throw lookupException(
"imported class [%s] cannot represent multiple classes [%s] and [%s]",
importedCanonicalClassName,
canonicalClassName,
typeToCanonicalTypeName(importedClass)
);
} else if (importClassName == false) {
throw new IllegalArgumentException("inconsistent no_import parameter found for class [" + canonicalClassName + "]");
}
}
}
private void addPainlessConstructor(
String targetCanonicalClassName,
List<String> canonicalTypeNameParameters,
Map<Class<?>, Object> annotations,
Map<Object, Object> dedup
) {
Objects.requireNonNull(targetCanonicalClassName);
Objects.requireNonNull(canonicalTypeNameParameters);
Class<?> targetClass = canonicalClassNamesToClasses.get(targetCanonicalClassName);
if (targetClass == null) {
throw lookupException(
"target class [%s] not found for constructor [[%s], %s]",
targetCanonicalClassName,
targetCanonicalClassName,
canonicalTypeNameParameters
);
}
List<Class<?>> typeParameters = new ArrayList<>(canonicalTypeNameParameters.size());
for (String canonicalTypeNameParameter : canonicalTypeNameParameters) {
Class<?> typeParameter = canonicalTypeNameToType(canonicalTypeNameParameter);
if (typeParameter == null) {
throw lookupException(
"type parameter [%s] not found for constructor [[%s], %s]",
canonicalTypeNameParameter,
targetCanonicalClassName,
canonicalTypeNameParameters
);
}
typeParameters.add(typeParameter);
}
addPainlessConstructor(targetClass, typeParameters, annotations, dedup);
}
private void addPainlessConstructor(
Class<?> targetClass,
List<Class<?>> typeParameters,
Map<Class<?>, Object> annotations,
Map<Object, Object> dedup
) {
Objects.requireNonNull(targetClass);
Objects.requireNonNull(typeParameters);
if (targetClass == def.class) {
throw new IllegalArgumentException("cannot add constructor to reserved class [" + DEF_CLASS_NAME + "]");
}
String targetCanonicalClassName = targetClass.getCanonicalName();
PainlessClassBuilder painlessClassBuilder = classesToPainlessClassBuilders.get(targetClass);
if (painlessClassBuilder == null) {
throw lookupException(
"target class [%s] not found for constructor [[%s], %s]",
targetCanonicalClassName,
targetCanonicalClassName,
typesToCanonicalTypeNames(typeParameters)
);
}
int typeParametersSize = typeParameters.size();
List<Class<?>> javaTypeParameters = new ArrayList<>(typeParametersSize);
for (Class<?> typeParameter : typeParameters) {
if (isValidType(typeParameter) == false) {
throw lookupException(
"type parameter [%s] not found for constructor [[%s], %s]",
typeToCanonicalTypeName(typeParameter),
targetCanonicalClassName,
typesToCanonicalTypeNames(typeParameters)
);
}
javaTypeParameters.add(typeToJavaType(typeParameter));
}
Constructor<?> javaConstructor;
try {
javaConstructor = targetClass.getConstructor(javaTypeParameters.toArray(Class<?>[]::new));
} catch (NoSuchMethodException nsme) {
throw lookupException(
nsme,
"reflection object not found for constructor [[%s], %s]",
targetCanonicalClassName,
typesToCanonicalTypeNames(typeParameters)
);
}
MethodHandle methodHandle;
try {
methodHandle = lookup(targetClass).unreflectConstructor(javaConstructor);
} catch (IllegalAccessException iae) {
throw lookupException(
iae,
"method handle not found for constructor [[%s], %s]",
targetCanonicalClassName,
typesToCanonicalTypeNames(typeParameters)
);
}
if (annotations.containsKey(CompileTimeOnlyAnnotation.class)) {
throw new IllegalArgumentException("constructors can't have @" + CompileTimeOnlyAnnotation.NAME);
}
MethodType methodType = methodHandle.type();
String painlessConstructorKey = buildPainlessConstructorKey(typeParametersSize);
PainlessConstructor existingPainlessConstructor = painlessClassBuilder.constructors.get(painlessConstructorKey);
PainlessConstructor newPainlessConstructor = new PainlessConstructor(
javaConstructor,
typeParameters,
methodHandle,
methodType,
annotations
);
if (existingPainlessConstructor == null) {
newPainlessConstructor = (PainlessConstructor) dedup.computeIfAbsent(newPainlessConstructor, Function.identity());
painlessClassBuilder.constructors.put(painlessConstructorKey.intern(), newPainlessConstructor);
} else if (newPainlessConstructor.equals(existingPainlessConstructor) == false) {
throw lookupException(
"cannot add constructors with the same arity but are not equivalent for constructors [[%s], %s] and [[%s], %s]",
targetCanonicalClassName,
typesToCanonicalTypeNames(typeParameters),
targetCanonicalClassName,
typesToCanonicalTypeNames(existingPainlessConstructor.typeParameters())
);
}
}
private void addPainlessMethod(
ClassLoader classLoader,
String targetCanonicalClassName,
String augmentedCanonicalClassName,
String methodName,
String returnCanonicalTypeName,
List<String> canonicalTypeNameParameters,
Map<Class<?>, Object> annotations,
Map<Object, Object> dedup
) {
Objects.requireNonNull(classLoader);
Objects.requireNonNull(targetCanonicalClassName);
Objects.requireNonNull(methodName);
Objects.requireNonNull(returnCanonicalTypeName);
Objects.requireNonNull(canonicalTypeNameParameters);
Objects.requireNonNull(annotations);
Class<?> targetClass = canonicalClassNamesToClasses.get(targetCanonicalClassName);
if (targetClass == null) {
throw lookupException(
"target class [%s] not found for method [[%s], [%s], %s]",
targetCanonicalClassName,
targetCanonicalClassName,
methodName,
canonicalTypeNameParameters
);
}
Class<?> augmentedClass = null;
if (augmentedCanonicalClassName != null) {
augmentedClass = loadClass(
classLoader,
augmentedCanonicalClassName,
() -> Strings.format(
"augmented class [%s] not found for method [[%s], [%s], %s]",
augmentedCanonicalClassName,
targetCanonicalClassName,
methodName,
canonicalTypeNameParameters
)
);
}
List<Class<?>> typeParameters = new ArrayList<>(canonicalTypeNameParameters.size());
for (String canonicalTypeNameParameter : canonicalTypeNameParameters) {
Class<?> typeParameter = canonicalTypeNameToType(canonicalTypeNameParameter);
if (typeParameter == null) {
throw lookupException(
"type parameter [%s] not found for method [[%s], [%s], %s]",
canonicalTypeNameParameter,
targetCanonicalClassName,
methodName,
canonicalTypeNameParameters
);
}
typeParameters.add(typeParameter);
}
Class<?> returnType = canonicalTypeNameToType(returnCanonicalTypeName);
if (returnType == null) {
throw lookupException(
"return type [%s] not found for method [[%s], [%s], %s]",
returnCanonicalTypeName,
targetCanonicalClassName,
methodName,
canonicalTypeNameParameters
);
}
addPainlessMethod(targetClass, augmentedClass, methodName, returnType, typeParameters, annotations, dedup);
}
    /**
     * Registers a method on {@code targetClass}. When {@code augmentedClass} is non-null the
     * implementation is a static method on that class whose first parameter receives the target
     * instance. The reflected method is unreflected into a {@link MethodHandle} and stored as a
     * {@link PainlessMethod} on the class's builder (static vs. instance map), deduplicated via
     * {@code dedup}.
     *
     * @throws IllegalArgumentException on any resolution failure, on a compile-time-only
     *         annotation, or when a non-equivalent method with the same name/arity exists
     */
    public void addPainlessMethod(
        Class<?> targetClass,
        Class<?> augmentedClass,
        String methodName,
        Class<?> returnType,
        List<Class<?>> typeParameters,
        Map<Class<?>, Object> annotations,
        Map<Object, Object> dedup
    ) {
        Objects.requireNonNull(targetClass);
        Objects.requireNonNull(methodName);
        Objects.requireNonNull(returnType);
        Objects.requireNonNull(typeParameters);
        Objects.requireNonNull(annotations);

        // methods may never be added to the reserved dynamic type
        if (targetClass == def.class) {
            throw new IllegalArgumentException("cannot add method to reserved class [" + DEF_CLASS_NAME + "]");
        }

        String targetCanonicalClassName = typeToCanonicalTypeName(targetClass);

        if (METHOD_AND_FIELD_NAME_PATTERN.matcher(methodName).matches() == false) {
            throw new IllegalArgumentException(
                "invalid method name [" + methodName + "] for target class [" + targetCanonicalClassName + "]."
            );
        }

        PainlessClassBuilder painlessClassBuilder = classesToPainlessClassBuilders.get(targetClass);

        if (painlessClassBuilder == null) {
            throw lookupException(
                "target class [%s] not found for method [[%s], [%s], %s]",
                targetCanonicalClassName,
                targetCanonicalClassName,
                methodName,
                typesToCanonicalTypeNames(typeParameters)
            );
        }

        int typeParametersSize = typeParameters.size();
        // augmented methods take the target instance as an extra leading java parameter
        int augmentedParameterOffset = augmentedClass == null ? 0 : 1;
        List<Class<?>> javaTypeParameters = new ArrayList<>(typeParametersSize + augmentedParameterOffset);

        if (augmentedClass != null) {
            javaTypeParameters.add(targetClass);
        }

        for (Class<?> typeParameter : typeParameters) {
            if (isValidType(typeParameter) == false) {
                throw lookupException(
                    "type parameter [%s] not found for method [[%s], [%s], %s]",
                    typeToCanonicalTypeName(typeParameter),
                    targetCanonicalClassName,
                    methodName,
                    typesToCanonicalTypeNames(typeParameters)
                );
            }

            javaTypeParameters.add(typeToJavaType(typeParameter));
        }

        if (isValidType(returnType) == false) {
            throw lookupException(
                "return type [%s] not found for method [[%s], [%s], %s]",
                typeToCanonicalTypeName(returnType),
                targetCanonicalClassName,
                methodName,
                typesToCanonicalTypeNames(typeParameters)
            );
        }

        // reflect from either the target class or the augmented class; augmented
        // implementations are required to be static
        Method javaMethod;

        if (augmentedClass == null) {
            try {
                javaMethod = targetClass.getMethod(methodName, javaTypeParameters.toArray(Class<?>[]::new));
            } catch (NoSuchMethodException nsme) {
                throw lookupException(
                    nsme,
                    "reflection object not found for method [[%s], [%s], %s]",
                    targetCanonicalClassName,
                    methodName,
                    typesToCanonicalTypeNames(typeParameters)
                );
            }
        } else {
            try {
                javaMethod = augmentedClass.getMethod(methodName, javaTypeParameters.toArray(Class<?>[]::new));

                if (Modifier.isStatic(javaMethod.getModifiers()) == false) {
                    throw lookupException(
                        "method [[%s], [%s], %s] with augmented class [%s] must be static",
                        targetCanonicalClassName,
                        methodName,
                        typesToCanonicalTypeNames(typeParameters),
                        typeToCanonicalTypeName(augmentedClass)
                    );
                }
            } catch (NoSuchMethodException nsme) {
                throw lookupException(
                    nsme,
                    "reflection object not found for method [[%s], [%s], %s] with augmented class [%s]",
                    targetCanonicalClassName,
                    methodName,
                    typesToCanonicalTypeNames(typeParameters),
                    typeToCanonicalTypeName(augmentedClass)
                );
            }
        }

        // injections alter the type parameters required for the user to call this method, since some are injected by compiler
        // NOTE(review): subList(...).clear() mutates the caller-supplied typeParameters list in place
        InjectConstantAnnotation inject = (InjectConstantAnnotation) annotations.get(InjectConstantAnnotation.class);
        if (inject != null) {
            int numInjections = inject.injects().size();

            if (numInjections > 0) {
                typeParameters.subList(0, numInjections).clear();
            }

            typeParametersSize = typeParameters.size();
        }

        // the declared return type must match the reflected method's java return type exactly
        if (javaMethod.getReturnType() != typeToJavaType(returnType)) {
            throw lookupException(
                "return type [%s] does not match the specified returned type [%s] for method [[%s], [%s], %s]",
                typeToCanonicalTypeName(javaMethod.getReturnType()),
                typeToCanonicalTypeName(returnType),
                targetClass.getCanonicalName(),
                methodName,
                typesToCanonicalTypeNames(typeParameters)
            );
        }

        // unreflect against the lookup of whichever class actually declares the implementation
        MethodHandle methodHandle;

        if (augmentedClass == null) {
            try {
                methodHandle = lookup(targetClass).unreflect(javaMethod);
            } catch (IllegalAccessException iae) {
                throw lookupException(
                    iae,
                    "method handle not found for method [[%s], [%s], %s], with lookup [%s]",
                    targetClass.getCanonicalName(),
                    methodName,
                    typesToCanonicalTypeNames(typeParameters),
                    lookup(targetClass)
                );
            }
        } else {
            try {
                methodHandle = lookup(augmentedClass).unreflect(javaMethod);
            } catch (IllegalAccessException iae) {
                throw lookupException(
                    iae,
                    "method handle not found for method [[%s], [%s], %s] with augmented class [%s]",
                    targetClass.getCanonicalName(),
                    methodName,
                    typesToCanonicalTypeNames(typeParameters),
                    typeToCanonicalTypeName(augmentedClass)
                );
            }
        }

        if (annotations.containsKey(CompileTimeOnlyAnnotation.class)) {
            throw new IllegalArgumentException("regular methods can't have @" + CompileTimeOnlyAnnotation.NAME);
        }

        MethodType methodType = methodHandle.type();

        // augmented methods are always registered as instance methods on the target class
        boolean isStatic = augmentedClass == null && Modifier.isStatic(javaMethod.getModifiers());
        String painlessMethodKey = buildPainlessMethodKey(methodName, typeParametersSize);
        PainlessMethod existingPainlessMethod = isStatic
            ? painlessClassBuilder.staticMethods.get(painlessMethodKey)
            : painlessClassBuilder.methods.get(painlessMethodKey);
        PainlessMethod newPainlessMethod = new PainlessMethod(
            javaMethod,
            targetClass,
            returnType,
            typeParameters,
            methodHandle,
            methodType,
            annotations
        );

        if (existingPainlessMethod == null) {
            // intern through the shared dedup map so equivalent entries share one instance
            newPainlessMethod = (PainlessMethod) dedup.computeIfAbsent(newPainlessMethod, Function.identity());
            if (isStatic) {
                painlessClassBuilder.staticMethods.put(painlessMethodKey.intern(), newPainlessMethod);
            } else {
                painlessClassBuilder.methods.put(painlessMethodKey.intern(), newPainlessMethod);
            }
        } else if (newPainlessMethod.equals(existingPainlessMethod) == false) {
            throw lookupException(
                "cannot add methods with the same name and arity but are not equivalent for methods "
                    + "[[%s], [%s], [%s], %s] and [[%s], [%s], [%s], %s]",
                targetCanonicalClassName,
                methodName,
                typeToCanonicalTypeName(returnType),
                typesToCanonicalTypeNames(typeParameters),
                targetCanonicalClassName,
                methodName,
                typeToCanonicalTypeName(existingPainlessMethod.returnType()),
                typesToCanonicalTypeNames(existingPainlessMethod.typeParameters())
            );
        }
    }
private void addPainlessField(
ClassLoader classLoader,
String targetCanonicalClassName,
String fieldName,
String canonicalTypeNameParameter,
Map<Class<?>, Object> annotations,
Map<Object, Object> dedup
) {
Objects.requireNonNull(classLoader);
Objects.requireNonNull(targetCanonicalClassName);
Objects.requireNonNull(fieldName);
Objects.requireNonNull(canonicalTypeNameParameter);
Objects.requireNonNull(annotations);
Class<?> targetClass = canonicalClassNamesToClasses.get(targetCanonicalClassName);
if (targetClass == null) {
throw lookupException(
"target class [%s] not found for field [[%s], [%s], [%s]]",
targetCanonicalClassName,
targetCanonicalClassName,
fieldName,
canonicalTypeNameParameter
);
}
String augmentedCanonicalClassName = annotations.containsKey(AugmentedAnnotation.class)
? ((AugmentedAnnotation) annotations.get(AugmentedAnnotation.class)).augmentedCanonicalClassName()
: null;
Class<?> augmentedClass = null;
if (augmentedCanonicalClassName != null) {
augmentedClass = loadClass(
classLoader,
augmentedCanonicalClassName,
() -> Strings.format(
"augmented class [%s] not found for field [[%s], [%s]]",
augmentedCanonicalClassName,
targetCanonicalClassName,
fieldName
)
);
}
Class<?> typeParameter = canonicalTypeNameToType(canonicalTypeNameParameter);
if (typeParameter == null) {
throw lookupException(
"type parameter [%s] not found for field [[%s], [%s]]",
canonicalTypeNameParameter,
targetCanonicalClassName,
fieldName
);
}
addPainlessField(targetClass, augmentedClass, fieldName, typeParameter, annotations, dedup);
}
    /**
     * Registers a field on {@code targetClass}. When {@code augmentedClass} is non-null the
     * field is reflected from that class instead and must be static and final. Getter (and,
     * for instance fields, setter) {@link MethodHandle}s are unreflected and stored as a
     * {@link PainlessField} on the class's builder, deduplicated via {@code dedup}.
     *
     * @throws IllegalArgumentException on any resolution failure, on a non-final static field,
     *         or when a non-equivalent field with the same name was already registered
     */
    private void addPainlessField(
        Class<?> targetClass,
        Class<?> augmentedClass,
        String fieldName,
        Class<?> typeParameter,
        Map<Class<?>, Object> annotations,
        Map<Object, Object> dedup
    ) {
        Objects.requireNonNull(targetClass);
        Objects.requireNonNull(fieldName);
        Objects.requireNonNull(typeParameter);
        Objects.requireNonNull(annotations);

        // fields may never be added to the reserved dynamic type
        if (targetClass == def.class) {
            throw new IllegalArgumentException("cannot add field to reserved class [" + DEF_CLASS_NAME + "]");
        }

        String targetCanonicalClassName = typeToCanonicalTypeName(targetClass);

        if (METHOD_AND_FIELD_NAME_PATTERN.matcher(fieldName).matches() == false) {
            throw new IllegalArgumentException(
                "invalid field name [" + fieldName + "] for target class [" + targetCanonicalClassName + "]."
            );
        }

        PainlessClassBuilder painlessClassBuilder = classesToPainlessClassBuilders.get(targetClass);

        if (painlessClassBuilder == null) {
            throw lookupException(
                "target class [%s] not found for field [[%s], [%s], [%s]]",
                targetCanonicalClassName,
                targetCanonicalClassName,
                fieldName,
                typeToCanonicalTypeName(typeParameter)
            );
        }

        if (isValidType(typeParameter) == false) {
            throw lookupException(
                "type parameter [%s] not found for field [[%s], [%s], [%s]]",
                typeToCanonicalTypeName(typeParameter),
                targetCanonicalClassName,
                fieldName,
                typeToCanonicalTypeName(typeParameter)
            );
        }

        // reflect from the target class directly, or from the augmented class, in which
        // case the field must be a static final constant
        Field javaField;

        if (augmentedClass == null) {
            try {
                javaField = targetClass.getField(fieldName);
            } catch (NoSuchFieldException nsfe) {
                throw lookupException(
                    nsfe,
                    "reflection object not found for field [[%s], [%s], [%s]]",
                    targetCanonicalClassName,
                    fieldName,
                    typeToCanonicalTypeName(typeParameter)
                );
            }
        } else {
            try {
                javaField = augmentedClass.getField(fieldName);

                if (Modifier.isStatic(javaField.getModifiers()) == false || Modifier.isFinal(javaField.getModifiers()) == false) {
                    throw lookupException(
                        "field [[%s], [%s]] with augmented class [%s] must be static and final",
                        targetCanonicalClassName,
                        fieldName,
                        typeToCanonicalTypeName(augmentedClass)
                    );
                }
            } catch (NoSuchFieldException nsfe) {
                throw lookupException(
                    nsfe,
                    "reflection object not found for field [[%s], [%s], [%s]] with augmented class [%s]",
                    targetCanonicalClassName,
                    fieldName,
                    typeToCanonicalTypeName(typeParameter),
                    typeToCanonicalTypeName(augmentedClass)
                );
            }
        }

        // the declared painless type must map exactly onto the reflected field's java type
        if (javaField.getType() != typeToJavaType(typeParameter)) {
            throw lookupException(
                "type parameter [%s] does not match the specified type parameter [%s] for field [[%s], [%s]]",
                typeToCanonicalTypeName(javaField.getType()),
                typeToCanonicalTypeName(typeParameter),
                targetCanonicalClassName,
                fieldName
            );
        }

        MethodHandle methodHandleGetter;

        try {
            methodHandleGetter = MethodHandles.publicLookup().unreflectGetter(javaField);
        } catch (IllegalAccessException iae) {
            throw new IllegalArgumentException(
                "getter method handle not found for field [[" + targetCanonicalClassName + "], [" + fieldName + "]]"
            );
        }

        String painlessFieldKey = buildPainlessFieldKey(fieldName);

        if (Modifier.isStatic(javaField.getModifiers())) {
            // static fields are read-only constants: no setter handle is created
            if (Modifier.isFinal(javaField.getModifiers()) == false) {
                throw new IllegalArgumentException("static field [[" + targetCanonicalClassName + "], [" + fieldName + "]] must be final");
            }

            PainlessField existingPainlessField = painlessClassBuilder.staticFields.get(painlessFieldKey);
            PainlessField newPainlessField = new PainlessField(javaField, typeParameter, annotations, methodHandleGetter, null);

            if (existingPainlessField == null) {
                // intern through the shared dedup map so equivalent entries share one instance
                newPainlessField = (PainlessField) dedup.computeIfAbsent(newPainlessField, Function.identity());
                painlessClassBuilder.staticFields.put(painlessFieldKey.intern(), newPainlessField);
            } else if (newPainlessField.equals(existingPainlessField) == false) {
                throw lookupException(
                    "cannot add fields with the same name but are not equivalent for fields [[%s], [%s], [%s]] and [[%s], [%s], [%s]]"
                        + " with the same name and different type parameters",
                    targetCanonicalClassName,
                    fieldName,
                    typeToCanonicalTypeName(typeParameter),
                    targetCanonicalClassName,
                    existingPainlessField.javaField().getName(),
                    typeToCanonicalTypeName(existingPainlessField.typeParameter())
                );
            }
        } else {
            // instance fields are writable: unreflect a setter handle as well
            MethodHandle methodHandleSetter;

            try {
                methodHandleSetter = MethodHandles.publicLookup().unreflectSetter(javaField);
            } catch (IllegalAccessException iae) {
                throw new IllegalArgumentException(
                    "setter method handle not found for field [[" + targetCanonicalClassName + "], [" + fieldName + "]]"
                );
            }

            PainlessField existingPainlessField = painlessClassBuilder.fields.get(painlessFieldKey);
            PainlessField newPainlessField = new PainlessField(
                javaField,
                typeParameter,
                annotations,
                methodHandleGetter,
                methodHandleSetter
            );

            if (existingPainlessField == null) {
                // intern through the shared dedup map so equivalent entries share one instance
                newPainlessField = (PainlessField) dedup.computeIfAbsent(newPainlessField, Function.identity());
                painlessClassBuilder.fields.put(painlessFieldKey.intern(), newPainlessField);
            } else if (newPainlessField.equals(existingPainlessField) == false) {
                throw lookupException(
                    "cannot add fields with the same name but are not equivalent for fields [[%s], [%s], [%s]] and [[%s], [%s], [%s]]"
                        + " with the same name and different type parameters",
                    targetCanonicalClassName,
                    fieldName,
                    typeToCanonicalTypeName(typeParameter),
                    targetCanonicalClassName,
                    existingPainlessField.javaField().getName(),
                    typeToCanonicalTypeName(existingPainlessField.typeParameter())
                );
            }
        }
    }
public void addImportedPainlessMethod(
ClassLoader classLoader,
String targetJavaClassName,
String methodName,
String returnCanonicalTypeName,
List<String> canonicalTypeNameParameters,
Map<Class<?>, Object> annotations,
Map<Object, Object> dedup
) {
Objects.requireNonNull(classLoader);
Objects.requireNonNull(targetJavaClassName);
Objects.requireNonNull(methodName);
Objects.requireNonNull(returnCanonicalTypeName);
Objects.requireNonNull(canonicalTypeNameParameters);
Class<?> targetClass = loadClass(classLoader, targetJavaClassName, () -> "class [" + targetJavaClassName + "] not found");
String targetCanonicalClassName = typeToCanonicalTypeName(targetClass);
List<Class<?>> typeParameters = new ArrayList<>(canonicalTypeNameParameters.size());
for (String canonicalTypeNameParameter : canonicalTypeNameParameters) {
Class<?> typeParameter = canonicalTypeNameToType(canonicalTypeNameParameter);
if (typeParameter == null) {
throw lookupException(
"type parameter [%s] not found for imported method [[%s], [%s], %s]",
canonicalTypeNameParameter,
targetCanonicalClassName,
methodName,
canonicalTypeNameParameters
);
}
typeParameters.add(typeParameter);
}
Class<?> returnType = canonicalTypeNameToType(returnCanonicalTypeName);
if (returnType == null) {
throw lookupException(
"return type [%s] not found for imported method [[%s], [%s], %s]",
returnCanonicalTypeName,
targetCanonicalClassName,
methodName,
canonicalTypeNameParameters
);
}
addImportedPainlessMethod(targetClass, methodName, returnType, typeParameters, annotations, dedup);
}
public void addImportedPainlessMethod(
Class<?> targetClass,
String methodName,
Class<?> returnType,
List<Class<?>> typeParameters,
Map<Class<?>, Object> annotations,
Map<Object, Object> dedup
) {
Objects.requireNonNull(targetClass);
Objects.requireNonNull(methodName);
Objects.requireNonNull(returnType);
Objects.requireNonNull(typeParameters);
if (targetClass == def.class) {
throw new IllegalArgumentException("cannot add imported method from reserved class [" + DEF_CLASS_NAME + "]");
}
String targetCanonicalClassName = typeToCanonicalTypeName(targetClass);
Class<?> existingTargetClass = javaClassNamesToClasses.get(targetClass.getName());
if (existingTargetClass == null) {
javaClassNamesToClasses.put(targetClass.getName().intern(), targetClass);
} else if (existingTargetClass != targetClass) {
throw lookupException(
"class [%s] cannot represent multiple java classes with the same name from different | loaders |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/misc/CaseInsensitiveDeser953Test.java | {
"start": 387,
"end": 2458
} | class ____ {
@JsonProperty("someId")
public int someId;
}
@SuppressWarnings("deprecation") // Locale constructors deprecated in JDK 19
private final Locale LOCALE_EN = new Locale("en", "US");
private final ObjectMapper INSENSITIVE_MAPPER_EN = jsonMapperBuilder()
.enable(MapperFeature.ACCEPT_CASE_INSENSITIVE_PROPERTIES)
.defaultLocale(LOCALE_EN)
.build();
@SuppressWarnings("deprecation") // Locale constructors deprecated in JDK 19
private final Locale LOCALE_TR = new Locale("tr", "TR");
private final ObjectMapper INSENSITIVE_MAPPER_TR = jsonMapperBuilder()
.enable(MapperFeature.ACCEPT_CASE_INSENSITIVE_PROPERTIES)
.defaultLocale(LOCALE_TR)
.build();
@Test
public void testTurkishILetterDeserializationWithEn() throws Exception {
_testTurkishILetterDeserialization(INSENSITIVE_MAPPER_EN, LOCALE_EN);
}
@Test
public void testTurkishILetterDeserializationWithTr() throws Exception {
_testTurkishILetterDeserialization(INSENSITIVE_MAPPER_TR, LOCALE_TR);
}
private void _testTurkishILetterDeserialization(ObjectMapper mapper, Locale locale) throws Exception
{
// Sanity check first
assertEquals(locale, mapper.deserializationConfig().getLocale());
final String ORIGINAL_KEY = "someId";
Id953 result;
result = mapper.readValue("{\""+ORIGINAL_KEY+"\":1}", Id953.class);
assertEquals(1, result.someId);
result = mapper.readValue("{\""+ORIGINAL_KEY.toUpperCase(locale)+"\":1}", Id953.class);
assertEquals(1, result.someId);
result = mapper.readValue("{\""+ORIGINAL_KEY.toLowerCase(locale)+"\":1}", Id953.class);
assertEquals(1, result.someId);
// and finally round-trip too...
final Id953 input = new Id953();
input.someId = 1;
final String json = mapper.writeValueAsString(input);
result = mapper.readValue(json, Id953.class);
assertEquals(1, result.someId);
}
}
| Id953 |
java | micronaut-projects__micronaut-core | http-client-core/src/main/java/io/micronaut/http/client/exceptions/HttpClientException.java | {
"start": 802,
"end": 893
} | class ____ all HTTP client exceptions.
*
* @author Graeme Rocher
* @since 1.0
*/
public | for |
java | google__truth | extensions/proto/src/main/java/com/google/common/truth/extensions/proto/ProtoTruth.java | {
"start": 1476,
"end": 3905
} | class ____ {
/**
* Returns a {@link CustomSubjectBuilder.Factory}, akin to a {@link
* com.google.common.truth.Subject.Factory}, which can be used to assert on multiple types of
* Protos and collections containing them.
*/
public static CustomSubjectBuilder.Factory<ProtoSubjectBuilder> protos() {
return ProtoSubjectBuilder.factory();
}
/** Assert on a single {@link MessageLite} instance. */
public static LiteProtoSubject assertThat(@Nullable MessageLite messageLite) {
return assertAbout(protos()).that(messageLite);
}
/** Assert on a single {@link Message} instance. */
public static ProtoSubject assertThat(@Nullable Message message) {
return assertAbout(protos()).that(message);
}
/**
* Assert on a sequence of {@link Message}s.
*
* <p>This allows for the equality configurations on {@link ProtoSubject} to be applied to all
* comparison tests available on {@link IterableSubject.UsingCorrespondence}.
*/
// Note: We must specify M explicitly here. The presence of the type parameter makes this method
// signature distinct from Truth.assertThat(Iterable<?>), and allows users to import both static
// methods without conflict. If this method instead accepted Iterable<? extends Message>, this
// would result in method ambiguity errors.
// See http://stackoverflow.com/a/8467804 for a more thorough explanation.
public static <M extends Message> IterableOfProtosSubject<M> assertThat(
@Nullable Iterable<M> messages) {
return assertAbout(protos()).that(messages);
}
/**
* Assert on a map with {@link Message} values.
*
* <p>This allows for the equality configurations on {@link ProtoSubject} to be applied to all
* comparison tests available on {@link MapSubject.UsingCorrespondence}.
*/
public static <M extends Message> MapWithProtoValuesSubject<M> assertThat(
@Nullable Map<?, M> map) {
return assertAbout(protos()).that(map);
}
/**
* Assert on a {@link Multimap} with {@link Message} values.
*
* <p>This allows for the equality configurations on {@link ProtoSubject} to be applied to all
* comparison tests available on {@link MultimapSubject.UsingCorrespondence}.
*/
public static <M extends Message> MultimapWithProtoValuesSubject<M> assertThat(
@Nullable Multimap<?, M> multimap) {
return assertAbout(protos()).that(multimap);
}
private ProtoTruth() {}
}
| ProtoTruth |
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/factories/TestValuesRuntimeFunctions.java | {
"start": 49276,
"end": 50940
} | class ____ extends AsyncVectorSearchFunction {
private final TestValueVectorSearchFunction impl;
private final @Nullable Integer latency;
private transient ExecutorService executors;
private transient Random random;
public TestValueAsyncVectorSearchFunction(
List<Row> data,
int[] searchIndices,
DataType physicalRowType,
@Nullable Integer latency) {
this.impl = new TestValueVectorSearchFunction(data, searchIndices, physicalRowType);
this.latency = latency;
}
@Override
public void open(FunctionContext context) throws Exception {
super.open(context);
impl.open(context);
executors = Executors.newCachedThreadPool();
random = new Random();
}
@Override
public CompletableFuture<Collection<RowData>> asyncVectorSearch(
int topK, RowData queryData) {
return CompletableFuture.supplyAsync(
() -> {
try {
Thread.sleep(latency == null ? random.nextInt(1000) : latency);
return impl.vectorSearch(topK, queryData);
} catch (Exception e) {
throw new RuntimeException(e);
}
},
executors);
}
@Override
public void close() throws Exception {
super.close();
impl.close();
executors.shutdown();
}
}
}
| TestValueAsyncVectorSearchFunction |
java | apache__kafka | group-coordinator/src/test/java/org/apache/kafka/coordinator/group/modern/consumer/CurrentAssignmentBuilderTest.java | {
"start": 1842,
"end": 48907
} | class ____ {
@Test
public void testStableToStable() {
String topic1 = "topic1";
String topic2 = "topic2";
Uuid topicId1 = Uuid.randomUuid();
Uuid topicId2 = Uuid.randomUuid();
CoordinatorMetadataImage metadataImage = new MetadataImageBuilder()
.addTopic(topicId1, topic1, 10)
.addTopic(topicId2, topic2, 10)
.buildCoordinatorMetadataImage();
ConsumerGroupMember member = new ConsumerGroupMember.Builder("member")
.setState(MemberState.STABLE)
.setMemberEpoch(10)
.setPreviousMemberEpoch(10)
.setSubscribedTopicNames(List.of(topic1, topic2))
.setAssignedPartitions(mkAssignment(
mkTopicAssignment(topicId1, 1, 2, 3),
mkTopicAssignment(topicId2, 4, 5, 6)))
.build();
ConsumerGroupMember updatedMember = new CurrentAssignmentBuilder(member)
.withMetadataImage(metadataImage)
.withTargetAssignment(11, new Assignment(mkAssignment(
mkTopicAssignment(topicId1, 1, 2, 3),
mkTopicAssignment(topicId2, 4, 5, 6))))
.withCurrentPartitionEpoch((topicId, partitionId) -> 10)
.build();
assertEquals(
new ConsumerGroupMember.Builder("member")
.setState(MemberState.STABLE)
.setMemberEpoch(11)
.setPreviousMemberEpoch(10)
.setSubscribedTopicNames(List.of(topic1, topic2))
.setAssignedPartitions(mkAssignment(
mkTopicAssignment(topicId1, 1, 2, 3),
mkTopicAssignment(topicId2, 4, 5, 6)))
.build(),
updatedMember
);
}
@Test
public void testStableToStableWithNewPartitions() {
String topic1 = "topic1";
String topic2 = "topic2";
Uuid topicId1 = Uuid.randomUuid();
Uuid topicId2 = Uuid.randomUuid();
CoordinatorMetadataImage metadataImage = new MetadataImageBuilder()
.addTopic(topicId1, topic1, 10)
.addTopic(topicId2, topic2, 10)
.buildCoordinatorMetadataImage();
ConsumerGroupMember member = new ConsumerGroupMember.Builder("member")
.setState(MemberState.STABLE)
.setMemberEpoch(10)
.setPreviousMemberEpoch(10)
.setSubscribedTopicNames(List.of(topic1, topic2))
.setAssignedPartitions(mkAssignment(
mkTopicAssignment(topicId1, 1, 2, 3),
mkTopicAssignment(topicId2, 4, 5, 6)))
.build();
ConsumerGroupMember updatedMember = new CurrentAssignmentBuilder(member)
.withMetadataImage(metadataImage)
.withTargetAssignment(11, new Assignment(mkAssignment(
mkTopicAssignment(topicId1, 1, 2, 3, 4),
mkTopicAssignment(topicId2, 4, 5, 6, 7))))
.withCurrentPartitionEpoch((topicId, partitionId) -> -1)
.build();
assertEquals(
new ConsumerGroupMember.Builder("member")
.setState(MemberState.STABLE)
.setMemberEpoch(11)
.setPreviousMemberEpoch(10)
.setSubscribedTopicNames(List.of(topic1, topic2))
.setAssignedPartitions(mkAssignment(
mkTopicAssignment(topicId1, 1, 2, 3, 4),
mkTopicAssignment(topicId2, 4, 5, 6, 7)))
.build(),
updatedMember
);
}
@Test
public void testStableToUnrevokedPartitions() {
String topic1 = "topic1";
String topic2 = "topic2";
Uuid topicId1 = Uuid.randomUuid();
Uuid topicId2 = Uuid.randomUuid();
CoordinatorMetadataImage metadataImage = new MetadataImageBuilder()
.addTopic(topicId1, topic1, 10)
.addTopic(topicId2, topic2, 10)
.buildCoordinatorMetadataImage();
ConsumerGroupMember member = new ConsumerGroupMember.Builder("member")
.setState(MemberState.STABLE)
.setMemberEpoch(10)
.setPreviousMemberEpoch(10)
.setSubscribedTopicNames(List.of(topic1, topic2))
.setAssignedPartitions(mkAssignment(
mkTopicAssignment(topicId1, 1, 2, 3),
mkTopicAssignment(topicId2, 4, 5, 6)))
.build();
ConsumerGroupMember updatedMember = new CurrentAssignmentBuilder(member)
.withMetadataImage(metadataImage)
.withTargetAssignment(11, new Assignment(mkAssignment(
mkTopicAssignment(topicId1, 2, 3, 4),
mkTopicAssignment(topicId2, 5, 6, 7))))
.withCurrentPartitionEpoch((topicId, partitionId) -> -1)
.build();
assertEquals(
new ConsumerGroupMember.Builder("member")
.setState(MemberState.UNREVOKED_PARTITIONS)
.setMemberEpoch(10)
.setPreviousMemberEpoch(10)
.setSubscribedTopicNames(List.of(topic1, topic2))
.setAssignedPartitions(mkAssignment(
mkTopicAssignment(topicId1, 2, 3),
mkTopicAssignment(topicId2, 5, 6)))
.setPartitionsPendingRevocation(mkAssignment(
mkTopicAssignment(topicId1, 1),
mkTopicAssignment(topicId2, 4)))
.build(),
updatedMember
);
}
@Test
public void testStableToUnreleasedPartitions() {
String topic1 = "topic1";
String topic2 = "topic2";
Uuid topicId1 = Uuid.randomUuid();
Uuid topicId2 = Uuid.randomUuid();
CoordinatorMetadataImage metadataImage = new MetadataImageBuilder()
.addTopic(topicId1, topic1, 10)
.addTopic(topicId2, topic2, 10)
.buildCoordinatorMetadataImage();
ConsumerGroupMember member = new ConsumerGroupMember.Builder("member")
.setState(MemberState.STABLE)
.setMemberEpoch(10)
.setPreviousMemberEpoch(10)
.setSubscribedTopicNames(List.of(topic1, topic2))
.setAssignedPartitions(mkAssignment(
mkTopicAssignment(topicId1, 1, 2, 3),
mkTopicAssignment(topicId2, 4, 5, 6)))
.build();
ConsumerGroupMember updatedMember = new CurrentAssignmentBuilder(member)
.withMetadataImage(metadataImage)
.withTargetAssignment(11, new Assignment(mkAssignment(
mkTopicAssignment(topicId1, 1, 2, 3, 4),
mkTopicAssignment(topicId2, 4, 5, 6, 7))))
.withCurrentPartitionEpoch((topicId, partitionId) -> 10)
.build();
assertEquals(
new ConsumerGroupMember.Builder("member")
.setState(MemberState.UNRELEASED_PARTITIONS)
.setMemberEpoch(11)
.setPreviousMemberEpoch(10)
.setSubscribedTopicNames(List.of(topic1, topic2))
.setAssignedPartitions(mkAssignment(
mkTopicAssignment(topicId1, 1, 2, 3),
mkTopicAssignment(topicId2, 4, 5, 6)))
.build(),
updatedMember
);
}
    /**
     * STABLE -> UNRELEASED_PARTITIONS with an empty owned-partitions list:
     * the target (epoch 11) drops topic2-6 and adds topic2-7. Topic2
     * partitions still report epoch 10 (unreleased) while topic1 partitions
     * report -1 (free). Because the member reports no owned partitions, the
     * revocation of topic2-6 takes effect immediately, but topic2-7 stays
     * out of the assignment until it is released.
     */
    @Test
    public void testStableToUnreleasedPartitionsWithOwnedPartitionsNotHavingRevokedPartitions() {
        String topic1 = "topic1";
        String topic2 = "topic2";
        Uuid topicId1 = Uuid.randomUuid();
        Uuid topicId2 = Uuid.randomUuid();
        CoordinatorMetadataImage metadataImage = new MetadataImageBuilder()
            .addTopic(topicId1, topic1, 10)
            .addTopic(topicId2, topic2, 10)
            .buildCoordinatorMetadataImage();
        ConsumerGroupMember member = new ConsumerGroupMember.Builder("member")
            .setState(MemberState.STABLE)
            .setMemberEpoch(10)
            .setPreviousMemberEpoch(10)
            .setSubscribedTopicNames(List.of(topic1, topic2))
            .setAssignedPartitions(mkAssignment(
                mkTopicAssignment(topicId1, 1, 2, 3),
                mkTopicAssignment(topicId2, 4, 5, 6)))
            .build();
        ConsumerGroupMember updatedMember = new CurrentAssignmentBuilder(member)
            .withMetadataImage(metadataImage)
            .withTargetAssignment(11, new Assignment(mkAssignment(
                mkTopicAssignment(topicId1, 1, 2, 3),
                mkTopicAssignment(topicId2, 4, 5, 7))))
            .withCurrentPartitionEpoch((topicId, __) ->
                topicId2.equals(topicId) ? 10 : -1
            )
            .withOwnedTopicPartitions(List.of())
            .build();
        assertEquals(
            new ConsumerGroupMember.Builder("member")
                .setState(MemberState.UNRELEASED_PARTITIONS)
                .setMemberEpoch(11)
                .setPreviousMemberEpoch(10)
                .setSubscribedTopicNames(List.of(topic1, topic2))
                .setAssignedPartitions(mkAssignment(
                    mkTopicAssignment(topicId1, 1, 2, 3),
                    mkTopicAssignment(topicId2, 4, 5)))
                .build(),
            updatedMember
        );
    }
    /**
     * UNREVOKED_PARTITIONS -> STABLE: the member reports owned partitions
     * that no longer contain the partitions pending revocation (topic1-1,
     * topic2-4), so the revocation completes, the pending set is cleared and
     * the member advances to the target epoch (11).
     */
    @Test
    public void testUnrevokedPartitionsToStable() {
        String topic1 = "topic1";
        String topic2 = "topic2";
        Uuid topicId1 = Uuid.randomUuid();
        Uuid topicId2 = Uuid.randomUuid();
        CoordinatorMetadataImage metadataImage = new MetadataImageBuilder()
            .addTopic(topicId1, topic1, 10)
            .addTopic(topicId2, topic2, 10)
            .buildCoordinatorMetadataImage();
        ConsumerGroupMember member = new ConsumerGroupMember.Builder("member")
            .setState(MemberState.UNREVOKED_PARTITIONS)
            .setMemberEpoch(10)
            .setPreviousMemberEpoch(10)
            .setSubscribedTopicNames(List.of(topic1, topic2))
            .setAssignedPartitions(mkAssignment(
                mkTopicAssignment(topicId1, 2, 3),
                mkTopicAssignment(topicId2, 5, 6)))
            .setPartitionsPendingRevocation(mkAssignment(
                mkTopicAssignment(topicId1, 1),
                mkTopicAssignment(topicId2, 4)))
            .build();
        ConsumerGroupMember updatedMember = new CurrentAssignmentBuilder(member)
            .withMetadataImage(metadataImage)
            .withTargetAssignment(11, new Assignment(mkAssignment(
                mkTopicAssignment(topicId1, 2, 3),
                mkTopicAssignment(topicId2, 5, 6))))
            .withCurrentPartitionEpoch((topicId, partitionId) -> -1)
            .withOwnedTopicPartitions(Arrays.asList(
                new ConsumerGroupHeartbeatRequestData.TopicPartitions()
                    .setTopicId(topicId1)
                    .setPartitions(Arrays.asList(2, 3)),
                new ConsumerGroupHeartbeatRequestData.TopicPartitions()
                    .setTopicId(topicId2)
                    .setPartitions(Arrays.asList(5, 6))))
            .build();
        assertEquals(
            new ConsumerGroupMember.Builder("member")
                .setState(MemberState.STABLE)
                .setMemberEpoch(11)
                .setPreviousMemberEpoch(10)
                .setSubscribedTopicNames(List.of(topic1, topic2))
                .setAssignedPartitions(mkAssignment(
                    mkTopicAssignment(topicId1, 2, 3),
                    mkTopicAssignment(topicId2, 5, 6)))
                .build(),
            updatedMember
        );
    }
    /**
     * The member remains in UNREVOKED_PARTITIONS (returned unchanged) as long
     * as the revocation is not acknowledged: owned partitions not reported at
     * all (null), or still containing a partition pending revocation
     * (topic1-1 in the second case, topic2-4 in the third).
     */
    @Test
    public void testRemainsInUnrevokedPartitions() {
        String topic1 = "topic1";
        String topic2 = "topic2";
        Uuid topicId1 = Uuid.randomUuid();
        Uuid topicId2 = Uuid.randomUuid();
        CoordinatorMetadataImage metadataImage = new MetadataImageBuilder()
            .addTopic(topicId1, topic1, 10)
            .addTopic(topicId2, topic2, 10)
            .buildCoordinatorMetadataImage();
        ConsumerGroupMember member = new ConsumerGroupMember.Builder("member")
            .setState(MemberState.UNREVOKED_PARTITIONS)
            .setMemberEpoch(10)
            .setPreviousMemberEpoch(10)
            .setSubscribedTopicNames(List.of(topic1, topic2))
            .setAssignedPartitions(mkAssignment(
                mkTopicAssignment(topicId1, 2, 3),
                mkTopicAssignment(topicId2, 5, 6)))
            .setPartitionsPendingRevocation(mkAssignment(
                mkTopicAssignment(topicId1, 1),
                mkTopicAssignment(topicId2, 4)))
            .build();
        CurrentAssignmentBuilder currentAssignmentBuilder = new CurrentAssignmentBuilder(member)
            .withMetadataImage(metadataImage)
            .withTargetAssignment(12, new Assignment(mkAssignment(
                mkTopicAssignment(topicId1, 3),
                mkTopicAssignment(topicId2, 6))))
            .withCurrentPartitionEpoch((topicId, partitionId) -> -1);
        // Owned partitions not reported: no transition.
        assertEquals(
            member,
            currentAssignmentBuilder
                .withOwnedTopicPartitions(null)
                .build()
        );
        // Owned partitions still include topic1-1 (pending revocation): no transition.
        assertEquals(
            member,
            currentAssignmentBuilder
                .withOwnedTopicPartitions(Arrays.asList(
                    new ConsumerGroupHeartbeatRequestData.TopicPartitions()
                        .setTopicId(topicId1)
                        .setPartitions(Arrays.asList(1, 2, 3)),
                    new ConsumerGroupHeartbeatRequestData.TopicPartitions()
                        .setTopicId(topicId2)
                        .setPartitions(Arrays.asList(5, 6))))
                .build()
        );
        // Owned partitions still include topic2-4 (pending revocation): no transition.
        assertEquals(
            member,
            currentAssignmentBuilder
                .withOwnedTopicPartitions(Arrays.asList(
                    new ConsumerGroupHeartbeatRequestData.TopicPartitions()
                        .setTopicId(topicId1)
                        .setPartitions(Arrays.asList(2, 3)),
                    new ConsumerGroupHeartbeatRequestData.TopicPartitions()
                        .setTopicId(topicId2)
                        .setPartitions(Arrays.asList(4, 5, 6))))
                .build()
        );
    }
    /**
     * UNREVOKED_PARTITIONS -> UNREVOKED_PARTITIONS: the member acknowledges
     * the previous revocation (owned partitions no longer contain topic1-1 /
     * topic2-4), but the newer target requires revoking topic1-2 and
     * topic2-5 as well, so a new revocation round starts. The member epoch
     * advances but, per the CsvSource cases, never past the target
     * assignment epoch.
     */
    @ParameterizedTest
    @CsvSource({
        "10, 12, 11",
        "10, 10, 10", // The member epoch must not advance past the target assignment epoch.
    })
    public void testUnrevokedPartitionsToUnrevokedPartitions(int memberEpoch, int targetAssignmentEpoch, int expectedMemberEpoch) {
        String topic1 = "topic1";
        String topic2 = "topic2";
        Uuid topicId1 = Uuid.randomUuid();
        Uuid topicId2 = Uuid.randomUuid();
        CoordinatorMetadataImage metadataImage = new MetadataImageBuilder()
            .addTopic(topicId1, topic1, 10)
            .addTopic(topicId2, topic2, 10)
            .buildCoordinatorMetadataImage();
        ConsumerGroupMember member = new ConsumerGroupMember.Builder("member")
            .setState(MemberState.UNREVOKED_PARTITIONS)
            .setMemberEpoch(memberEpoch)
            .setPreviousMemberEpoch(memberEpoch)
            .setSubscribedTopicNames(List.of(topic1, topic2))
            .setAssignedPartitions(mkAssignment(
                mkTopicAssignment(topicId1, 2, 3),
                mkTopicAssignment(topicId2, 5, 6)))
            .setPartitionsPendingRevocation(mkAssignment(
                mkTopicAssignment(topicId1, 1),
                mkTopicAssignment(topicId2, 4)))
            .build();
        ConsumerGroupMember updatedMember = new CurrentAssignmentBuilder(member)
            .withMetadataImage(metadataImage)
            .withTargetAssignment(targetAssignmentEpoch, new Assignment(mkAssignment(
                mkTopicAssignment(topicId1, 3),
                mkTopicAssignment(topicId2, 6))))
            .withCurrentPartitionEpoch((topicId, partitionId) -> -1)
            .withOwnedTopicPartitions(Arrays.asList(
                new ConsumerGroupHeartbeatRequestData.TopicPartitions()
                    .setTopicId(topicId1)
                    .setPartitions(Arrays.asList(2, 3)),
                new ConsumerGroupHeartbeatRequestData.TopicPartitions()
                    .setTopicId(topicId2)
                    .setPartitions(Arrays.asList(5, 6))))
            .build();
        assertEquals(
            new ConsumerGroupMember.Builder("member")
                .setState(MemberState.UNREVOKED_PARTITIONS)
                .setMemberEpoch(expectedMemberEpoch)
                .setPreviousMemberEpoch(memberEpoch)
                .setSubscribedTopicNames(List.of(topic1, topic2))
                .setAssignedPartitions(mkAssignment(
                    mkTopicAssignment(topicId1, 3),
                    mkTopicAssignment(topicId2, 6)))
                .setPartitionsPendingRevocation(mkAssignment(
                    mkTopicAssignment(topicId1, 2),
                    mkTopicAssignment(topicId2, 5)))
                .build(),
            updatedMember
        );
    }
    /**
     * UNREVOKED_PARTITIONS -> UNRELEASED_PARTITIONS: the member acknowledges
     * its current assignment, but the extra target partitions (topic1-4,
     * topic2-7) still report a current partition epoch (10), so they cannot
     * be handed out yet; the member keeps its current partitions.
     */
    @Test
    public void testUnrevokedPartitionsToUnreleasedPartitions() {
        String topic1 = "topic1";
        String topic2 = "topic2";
        Uuid topicId1 = Uuid.randomUuid();
        Uuid topicId2 = Uuid.randomUuid();
        CoordinatorMetadataImage metadataImage = new MetadataImageBuilder()
            .addTopic(topicId1, topic1, 10)
            .addTopic(topicId2, topic2, 10)
            .buildCoordinatorMetadataImage();
        ConsumerGroupMember member = new ConsumerGroupMember.Builder("member")
            .setState(MemberState.UNREVOKED_PARTITIONS)
            .setMemberEpoch(11)
            .setPreviousMemberEpoch(10)
            .setSubscribedTopicNames(List.of(topic1, topic2))
            .setAssignedPartitions(mkAssignment(
                mkTopicAssignment(topicId1, 2, 3),
                mkTopicAssignment(topicId2, 5, 6)))
            .build();
        ConsumerGroupMember updatedMember = new CurrentAssignmentBuilder(member)
            .withMetadataImage(metadataImage)
            .withTargetAssignment(11, new Assignment(mkAssignment(
                mkTopicAssignment(topicId1, 2, 3, 4),
                mkTopicAssignment(topicId2, 5, 6, 7))))
            .withCurrentPartitionEpoch((topicId, partitionId) -> 10)
            .withOwnedTopicPartitions(Arrays.asList(
                new ConsumerGroupHeartbeatRequestData.TopicPartitions()
                    .setTopicId(topicId1)
                    .setPartitions(Arrays.asList(2, 3)),
                new ConsumerGroupHeartbeatRequestData.TopicPartitions()
                    .setTopicId(topicId2)
                    .setPartitions(Arrays.asList(5, 6))))
            .build();
        assertEquals(
            new ConsumerGroupMember.Builder("member")
                .setState(MemberState.UNRELEASED_PARTITIONS)
                .setMemberEpoch(11)
                .setPreviousMemberEpoch(11)
                .setSubscribedTopicNames(List.of(topic1, topic2))
                .setAssignedPartitions(mkAssignment(
                    mkTopicAssignment(topicId1, 2, 3),
                    mkTopicAssignment(topicId2, 5, 6)))
                .build(),
            updatedMember
        );
    }
    /**
     * UNREVOKED_PARTITIONS -> STABLE with a returned partition: topic1-4 was
     * pending revocation by the member but reappears in the latest target
     * assignment (epoch 12), so it is kept. The member jumps straight to the
     * target epoch with the full target assignment and an empty
     * pending-revocation set.
     */
    @Test
    public void testUnrevokedPartitionsToStableWithReturnedPartitionsPendingRevocation() {
        String topic1 = "topic1";
        String topic2 = "topic2";
        Uuid topicId1 = Uuid.randomUuid();
        Uuid topicId2 = Uuid.randomUuid();
        CoordinatorMetadataImage metadataImage = new MetadataImageBuilder()
            .addTopic(topicId1, topic1, 10)
            .addTopic(topicId2, topic2, 10)
            .buildCoordinatorMetadataImage();
        ConsumerGroupMember member = new ConsumerGroupMember.Builder("member")
            .setState(MemberState.UNREVOKED_PARTITIONS)
            .setMemberEpoch(10)
            .setPreviousMemberEpoch(10)
            .setSubscribedTopicNames(List.of(topic1, topic2))
            .setAssignedPartitions(mkAssignment(
                mkTopicAssignment(topicId1, 2, 3),
                mkTopicAssignment(topicId2, 5, 6)))
            .setPartitionsPendingRevocation(mkAssignment(
                // Partition 4 is pending revocation by the member but is back in the latest target
                // assignment.
                mkTopicAssignment(topicId1, 4)))
            .build();
        ConsumerGroupMember updatedMember = new CurrentAssignmentBuilder(member)
            .withMetadataImage(metadataImage)
            .withTargetAssignment(12, new Assignment(mkAssignment(
                mkTopicAssignment(topicId1, 2, 3, 4),
                mkTopicAssignment(topicId2, 5, 6, 7))))
            .withCurrentPartitionEpoch((topicId, partitionId) -> {
                if (topicId.equals(topicId1)) {
                    // Partitions 2 and 3 are in the member's current assignment.
                    // Partition 4 is pending revocation by the member.
                    switch (partitionId) {
                        case 2:
                        case 3:
                        case 4:
                            return 10;
                    }
                } else if (topicId.equals(topicId2)) {
                    // Partitions 5 and 6 are in the member's current assignment.
                    switch (partitionId) {
                        case 5:
                        case 6:
                            return 10;
                    }
                }
                return -1;
            })
            .withOwnedTopicPartitions(Arrays.asList(
                new ConsumerGroupHeartbeatRequestData.TopicPartitions()
                    .setTopicId(topicId1)
                    .setPartitions(Arrays.asList(2, 3)),
                new ConsumerGroupHeartbeatRequestData.TopicPartitions()
                    .setTopicId(topicId2)
                    .setPartitions(Arrays.asList(5, 6))))
            .build();
        assertEquals(
            new ConsumerGroupMember.Builder("member")
                .setState(MemberState.STABLE)
                .setMemberEpoch(12)
                .setPreviousMemberEpoch(10)
                .setSubscribedTopicNames(List.of(topic1, topic2))
                .setAssignedPartitions(mkAssignment(
                    mkTopicAssignment(topicId1, 2, 3, 4),
                    mkTopicAssignment(topicId2, 5, 6, 7)))
                .setPartitionsPendingRevocation(Map.of())
                .build(),
            updatedMember
        );
    }
@Test
public void testUnreleasedPartitionsToStable() {
String topic1 = "topic1";
String topic2 = "topic2";
Uuid topicId1 = Uuid.randomUuid();
Uuid topicId2 = Uuid.randomUuid();
CoordinatorMetadataImage metadataImage = new MetadataImageBuilder()
.addTopic(topicId1, topic1, 10)
.addTopic(topicId2, topic2, 10)
.buildCoordinatorMetadataImage();
ConsumerGroupMember member = new ConsumerGroupMember.Builder("member")
.setState(MemberState.UNRELEASED_PARTITIONS)
.setMemberEpoch(11)
.setPreviousMemberEpoch(11)
.setSubscribedTopicNames(List.of(topic1, topic2))
.setAssignedPartitions(mkAssignment(
mkTopicAssignment(topicId1, 2, 3),
mkTopicAssignment(topicId2, 5, 6)))
.build();
ConsumerGroupMember updatedMember = new CurrentAssignmentBuilder(member)
.withMetadataImage(metadataImage)
.withTargetAssignment(12, new Assignment(mkAssignment(
mkTopicAssignment(topicId1, 2, 3),
mkTopicAssignment(topicId2, 5, 6))))
.withCurrentPartitionEpoch((topicId, partitionId) -> 10)
.build();
assertEquals(
new ConsumerGroupMember.Builder("member")
.setState(MemberState.STABLE)
.setMemberEpoch(12)
.setPreviousMemberEpoch(11)
.setSubscribedTopicNames(List.of(topic1, topic2))
.setAssignedPartitions(mkAssignment(
mkTopicAssignment(topicId1, 2, 3),
mkTopicAssignment(topicId2, 5, 6)))
.build(),
updatedMember
);
}
    /**
     * UNRELEASED_PARTITIONS -> STABLE with new partitions: the previously
     * unreleased partitions (topic1-4, topic2-7) now report epoch -1 (free),
     * so they are added to the assignment and the member becomes stable at
     * the unchanged target epoch (11).
     */
    @Test
    public void testUnreleasedPartitionsToStableWithNewPartitions() {
        String topic1 = "topic1";
        String topic2 = "topic2";
        Uuid topicId1 = Uuid.randomUuid();
        Uuid topicId2 = Uuid.randomUuid();
        CoordinatorMetadataImage metadataImage = new MetadataImageBuilder()
            .addTopic(topicId1, topic1, 10)
            .addTopic(topicId2, topic2, 10)
            .buildCoordinatorMetadataImage();
        ConsumerGroupMember member = new ConsumerGroupMember.Builder("member")
            .setState(MemberState.UNRELEASED_PARTITIONS)
            .setMemberEpoch(11)
            .setPreviousMemberEpoch(11)
            .setSubscribedTopicNames(List.of(topic1, topic2))
            .setAssignedPartitions(mkAssignment(
                mkTopicAssignment(topicId1, 2, 3),
                mkTopicAssignment(topicId2, 5, 6)))
            .build();
        ConsumerGroupMember updatedMember = new CurrentAssignmentBuilder(member)
            .withMetadataImage(metadataImage)
            .withTargetAssignment(11, new Assignment(mkAssignment(
                mkTopicAssignment(topicId1, 2, 3, 4),
                mkTopicAssignment(topicId2, 5, 6, 7))))
            .withCurrentPartitionEpoch((topicId, partitionId) -> -1)
            .build();
        assertEquals(
            new ConsumerGroupMember.Builder("member")
                .setState(MemberState.STABLE)
                .setMemberEpoch(11)
                .setPreviousMemberEpoch(11)
                .setSubscribedTopicNames(List.of(topic1, topic2))
                .setAssignedPartitions(mkAssignment(
                    mkTopicAssignment(topicId1, 2, 3, 4),
                    mkTopicAssignment(topicId2, 5, 6, 7)))
                .build(),
            updatedMember
        );
    }
@Test
public void testUnreleasedPartitionsToUnreleasedPartitions() {
String topic1 = "topic1";
String topic2 = "topic2";
Uuid topicId1 = Uuid.randomUuid();
Uuid topicId2 = Uuid.randomUuid();
CoordinatorMetadataImage metadataImage = new MetadataImageBuilder()
.addTopic(topicId1, topic1, 10)
.addTopic(topicId2, topic2, 10)
.buildCoordinatorMetadataImage();
ConsumerGroupMember member = new ConsumerGroupMember.Builder("member")
.setState(MemberState.UNRELEASED_PARTITIONS)
.setMemberEpoch(11)
.setPreviousMemberEpoch(11)
.setSubscribedTopicNames(List.of(topic1, topic2))
.setAssignedPartitions(mkAssignment(
mkTopicAssignment(topicId1, 2, 3),
mkTopicAssignment(topicId2, 5, 6)))
.build();
ConsumerGroupMember updatedMember = new CurrentAssignmentBuilder(member)
.withMetadataImage(metadataImage)
.withTargetAssignment(11, new Assignment(mkAssignment(
mkTopicAssignment(topicId1, 2, 3, 4),
mkTopicAssignment(topicId2, 5, 6, 7))))
.withCurrentPartitionEpoch((topicId, partitionId) -> 10)
.build();
assertEquals(member, updatedMember);
}
    /**
     * UNRELEASED_PARTITIONS -> UNREVOKED_PARTITIONS: a newer target
     * (epoch 12) shrinks the assignment, so topic1-2 and topic2-5 move to
     * pending revocation; the member epoch stays at 11 until the revocation
     * is acknowledged.
     */
    @Test
    public void testUnreleasedPartitionsToUnrevokedPartitions() {
        String topic1 = "topic1";
        String topic2 = "topic2";
        Uuid topicId1 = Uuid.randomUuid();
        Uuid topicId2 = Uuid.randomUuid();
        CoordinatorMetadataImage metadataImage = new MetadataImageBuilder()
            .addTopic(topicId1, topic1, 10)
            .addTopic(topicId2, topic2, 10)
            .buildCoordinatorMetadataImage();
        ConsumerGroupMember member = new ConsumerGroupMember.Builder("member")
            .setState(MemberState.UNRELEASED_PARTITIONS)
            .setMemberEpoch(11)
            .setPreviousMemberEpoch(11)
            .setSubscribedTopicNames(List.of(topic1, topic2))
            .setAssignedPartitions(mkAssignment(
                mkTopicAssignment(topicId1, 2, 3),
                mkTopicAssignment(topicId2, 5, 6)))
            .build();
        ConsumerGroupMember updatedMember = new CurrentAssignmentBuilder(member)
            .withMetadataImage(metadataImage)
            .withTargetAssignment(12, new Assignment(mkAssignment(
                mkTopicAssignment(topicId1, 3),
                mkTopicAssignment(topicId2, 6))))
            .withCurrentPartitionEpoch((topicId, partitionId) -> 10)
            .build();
        assertEquals(
            new ConsumerGroupMember.Builder("member")
                .setState(MemberState.UNREVOKED_PARTITIONS)
                .setMemberEpoch(11)
                .setPreviousMemberEpoch(11)
                .setSubscribedTopicNames(List.of(topic1, topic2))
                .setAssignedPartitions(mkAssignment(
                    mkTopicAssignment(topicId1, 3),
                    mkTopicAssignment(topicId2, 6)))
                .setPartitionsPendingRevocation(mkAssignment(
                    mkTopicAssignment(topicId1, 2),
                    mkTopicAssignment(topicId2, 5)))
                .build(),
            updatedMember
        );
    }
    /**
     * UNKNOWN -> (fenced) -> STABLE: a member in an unknown state is fenced
     * with {@link FencedMemberEpochException} to force the client to reset
     * its state; after rejoining with an empty owned-partitions list it
     * converges to the target assignment at the target epoch.
     */
    @Test
    public void testUnknownState() {
        String topic1 = "topic1";
        String topic2 = "topic2";
        Uuid topicId1 = Uuid.randomUuid();
        Uuid topicId2 = Uuid.randomUuid();
        CoordinatorMetadataImage metadataImage = new MetadataImageBuilder()
            .addTopic(topicId1, topic1, 10)
            .addTopic(topicId2, topic2, 10)
            .buildCoordinatorMetadataImage();
        ConsumerGroupMember member = new ConsumerGroupMember.Builder("member")
            .setState(MemberState.UNKNOWN)
            .setMemberEpoch(11)
            .setPreviousMemberEpoch(11)
            .setSubscribedTopicNames(List.of(topic1, topic2))
            .setAssignedPartitions(mkAssignment(
                mkTopicAssignment(topicId1, 3),
                mkTopicAssignment(topicId2, 6)))
            .setPartitionsPendingRevocation(mkAssignment(
                mkTopicAssignment(topicId1, 2),
                mkTopicAssignment(topicId2, 5)))
            .build();
        // When the member is in an unknown state, the member is first fenced
        // to force a reset of the client side member state.
        assertThrows(FencedMemberEpochException.class, () -> new CurrentAssignmentBuilder(member)
            .withMetadataImage(metadataImage)
            .withTargetAssignment(12, new Assignment(mkAssignment(
                mkTopicAssignment(topicId1, 3),
                mkTopicAssignment(topicId2, 6))))
            .withCurrentPartitionEpoch((topicId, partitionId) -> 10)
            .build());
        // Then the member rejoins with no owned partitions.
        ConsumerGroupMember updatedMember = new CurrentAssignmentBuilder(member)
            .withMetadataImage(metadataImage)
            .withTargetAssignment(12, new Assignment(mkAssignment(
                mkTopicAssignment(topicId1, 3),
                mkTopicAssignment(topicId2, 6))))
            .withCurrentPartitionEpoch((topicId, partitionId) -> 11)
            .withOwnedTopicPartitions(List.of())
            .build();
        assertEquals(
            new ConsumerGroupMember.Builder("member")
                .setState(MemberState.STABLE)
                .setMemberEpoch(12)
                .setPreviousMemberEpoch(11)
                .setSubscribedTopicNames(List.of(topic1, topic2))
                .setAssignedPartitions(mkAssignment(
                    mkTopicAssignment(topicId1, 3),
                    mkTopicAssignment(topicId2, 6)))
                .build(),
            updatedMember
        );
    }
    /**
     * STABLE -> STABLE dropping topics no longer subscribed: the member is
     * only subscribed to topic2, and it reports owning only topic2
     * partitions, so the topic1 partitions are removed from the assignment
     * without a revocation round.
     */
    @ParameterizedTest
    @CsvSource({
        "10, 11, 11, false", // When advancing to a new target assignment, the assignment should
        "10, 11, 11, true",  // always take the subscription into account.
        "10, 10, 10, true",
    })
    public void testStableToStableWithAssignmentTopicsNoLongerInSubscription(
        int memberEpoch,
        int targetAssignmentEpoch,
        int expectedMemberEpoch,
        boolean hasSubscriptionChanged
    ) {
        String topic1 = "topic1";
        String topic2 = "topic2";
        Uuid topicId1 = Uuid.randomUuid();
        Uuid topicId2 = Uuid.randomUuid();
        CoordinatorMetadataImage metadataImage = new MetadataImageBuilder()
            .addTopic(topicId1, topic1, 10)
            .addTopic(topicId2, topic2, 10)
            .buildCoordinatorMetadataImage();
        ConsumerGroupMember member = new ConsumerGroupMember.Builder("member")
            .setState(MemberState.STABLE)
            .setMemberEpoch(memberEpoch)
            .setPreviousMemberEpoch(memberEpoch)
            .setSubscribedTopicNames(List.of(topic2))
            .setAssignedPartitions(mkAssignment(
                mkTopicAssignment(topicId1, 1, 2, 3),
                mkTopicAssignment(topicId2, 4, 5, 6)))
            .build();
        ConsumerGroupMember updatedMember = new CurrentAssignmentBuilder(member)
            .withMetadataImage(metadataImage)
            .withTargetAssignment(targetAssignmentEpoch, new Assignment(mkAssignment(
                mkTopicAssignment(topicId1, 1, 2, 3),
                mkTopicAssignment(topicId2, 4, 5, 6))))
            .withHasSubscriptionChanged(hasSubscriptionChanged)
            .withCurrentPartitionEpoch((topicId, partitionId) -> -1)
            .withOwnedTopicPartitions(Arrays.asList(
                new ConsumerGroupHeartbeatRequestData.TopicPartitions()
                    .setTopicId(topicId2)
                    .setPartitions(Arrays.asList(4, 5, 6))))
            .build();
        assertEquals(
            new ConsumerGroupMember.Builder("member")
                .setState(MemberState.STABLE)
                .setMemberEpoch(expectedMemberEpoch)
                .setPreviousMemberEpoch(memberEpoch)
                .setSubscribedTopicNames(List.of(topic2))
                .setAssignedPartitions(mkAssignment(
                    mkTopicAssignment(topicId2, 4, 5, 6)))
                .build(),
            updatedMember
        );
    }
    /**
     * STABLE -> UNREVOKED_PARTITIONS dropping topics no longer subscribed:
     * the member is only subscribed to topic2 but still reports owning the
     * topic1 partitions, so those move to pending revocation and the member
     * epoch does not advance until the revocation completes.
     */
    @ParameterizedTest
    @CsvSource({
        "10, 11, 10, false", // When advancing to a new target assignment, the assignment should always
        "10, 11, 10, true",  // take the subscription into account.
        "10, 10, 10, true"
    })
    public void testStableToUnrevokedPartitionsWithAssignmentTopicsNoLongerInSubscription(
        int memberEpoch,
        int targetAssignmentEpoch,
        int expectedMemberEpoch,
        boolean hasSubscriptionChanged
    ) {
        String topic1 = "topic1";
        String topic2 = "topic2";
        Uuid topicId1 = Uuid.randomUuid();
        Uuid topicId2 = Uuid.randomUuid();
        CoordinatorMetadataImage metadataImage = new MetadataImageBuilder()
            .addTopic(topicId1, topic1, 10)
            .addTopic(topicId2, topic2, 10)
            .buildCoordinatorMetadataImage();
        ConsumerGroupMember member = new ConsumerGroupMember.Builder("member")
            .setState(MemberState.STABLE)
            .setMemberEpoch(memberEpoch)
            .setPreviousMemberEpoch(memberEpoch)
            .setSubscribedTopicNames(List.of(topic2))
            .setAssignedPartitions(mkAssignment(
                mkTopicAssignment(topicId1, 1, 2, 3),
                mkTopicAssignment(topicId2, 4, 5, 6)))
            .build();
        ConsumerGroupMember updatedMember = new CurrentAssignmentBuilder(member)
            .withMetadataImage(metadataImage)
            .withTargetAssignment(targetAssignmentEpoch, new Assignment(mkAssignment(
                mkTopicAssignment(topicId1, 1, 2, 3),
                mkTopicAssignment(topicId2, 4, 5, 6))))
            .withHasSubscriptionChanged(hasSubscriptionChanged)
            .withCurrentPartitionEpoch((topicId, partitionId) -> -1)
            .withOwnedTopicPartitions(Arrays.asList(
                new ConsumerGroupHeartbeatRequestData.TopicPartitions()
                    .setTopicId(topicId1)
                    .setPartitions(Arrays.asList(1, 2, 3)),
                new ConsumerGroupHeartbeatRequestData.TopicPartitions()
                    .setTopicId(topicId2)
                    .setPartitions(Arrays.asList(4, 5, 6))))
            .build();
        assertEquals(
            new ConsumerGroupMember.Builder("member")
                .setState(MemberState.UNREVOKED_PARTITIONS)
                .setMemberEpoch(expectedMemberEpoch)
                .setPreviousMemberEpoch(memberEpoch)
                .setSubscribedTopicNames(List.of(topic2))
                .setAssignedPartitions(mkAssignment(
                    mkTopicAssignment(topicId2, 4, 5, 6)))
                .setPartitionsPendingRevocation(mkAssignment(
                    mkTopicAssignment(topicId1, 1, 2, 3)))
                .build(),
            updatedMember
        );
    }
    /**
     * Remains in UNREVOKED_PARTITIONS when the subscription drops topic1:
     * the member still reports owning topic1 partitions (and topic2-4, which
     * was already pending revocation), so the pending-revocation set grows
     * to cover all topic1 partitions while the epoch stays at 10.
     */
    @Test
    public void testRemainsInUnrevokedPartitionsWithAssignmentTopicsNoLongerInSubscription() {
        String topic1 = "topic1";
        String topic2 = "topic2";
        Uuid topicId1 = Uuid.randomUuid();
        Uuid topicId2 = Uuid.randomUuid();
        CoordinatorMetadataImage metadataImage = new MetadataImageBuilder()
            .addTopic(topicId1, topic1, 10)
            .addTopic(topicId2, topic2, 10)
            .buildCoordinatorMetadataImage();
        ConsumerGroupMember member = new ConsumerGroupMember.Builder("member")
            .setState(MemberState.UNREVOKED_PARTITIONS)
            .setMemberEpoch(10)
            .setPreviousMemberEpoch(10)
            .setSubscribedTopicNames(List.of(topic2))
            .setAssignedPartitions(mkAssignment(
                mkTopicAssignment(topicId1, 2, 3),
                mkTopicAssignment(topicId2, 5, 6)))
            .setPartitionsPendingRevocation(mkAssignment(
                mkTopicAssignment(topicId1, 1),
                mkTopicAssignment(topicId2, 4)))
            .build();
        ConsumerGroupMember updatedMember = new CurrentAssignmentBuilder(member)
            .withMetadataImage(metadataImage)
            .withTargetAssignment(12, new Assignment(mkAssignment(
                mkTopicAssignment(topicId1, 1, 3, 4),
                mkTopicAssignment(topicId2, 6, 7))))
            .withHasSubscriptionChanged(true)
            .withCurrentPartitionEpoch((topicId, partitionId) -> -1)
            .withOwnedTopicPartitions(Arrays.asList(
                new ConsumerGroupHeartbeatRequestData.TopicPartitions()
                    .setTopicId(topicId1)
                    .setPartitions(Arrays.asList(1, 2, 3)),
                new ConsumerGroupHeartbeatRequestData.TopicPartitions()
                    .setTopicId(topicId2)
                    .setPartitions(Arrays.asList(4, 5, 6))))
            .build();
        assertEquals(
            new ConsumerGroupMember.Builder("member")
                .setState(MemberState.UNREVOKED_PARTITIONS)
                .setMemberEpoch(10)
                .setPreviousMemberEpoch(10)
                .setSubscribedTopicNames(List.of(topic2))
                .setAssignedPartitions(mkAssignment(
                    mkTopicAssignment(topicId2, 5, 6)))
                .setPartitionsPendingRevocation(mkAssignment(
                    mkTopicAssignment(topicId1, 1, 2, 3),
                    mkTopicAssignment(topicId2, 4)))
                .build(),
            updatedMember
        );
    }
    /**
     * A topic matched only by a not-yet-resolved regular expression is
     * treated as unsubscribed: with an empty resolved-regex map, the bar
     * partitions move to pending revocation while the explicitly subscribed
     * foo partitions are kept; the member enters UNREVOKED_PARTITIONS.
     */
    @Test
    public void testSubscribedTopicNameAndUnresolvedRegularExpression() {
        String fooTopic = "foo";
        String barTopic = "bar";
        Uuid fooTopicId = Uuid.randomUuid();
        Uuid barTopicId = Uuid.randomUuid();
        CoordinatorMetadataImage metadataImage = new MetadataImageBuilder()
            .addTopic(fooTopicId, fooTopic, 10)
            .addTopic(barTopicId, barTopic, 10)
            .buildCoordinatorMetadataImage();
        ConsumerGroupMember member = new ConsumerGroupMember.Builder("member")
            .setState(MemberState.STABLE)
            .setMemberEpoch(10)
            .setPreviousMemberEpoch(10)
            .setSubscribedTopicNames(List.of(fooTopic))
            .setSubscribedTopicRegex("bar*")
            .setAssignedPartitions(mkAssignment(
                mkTopicAssignment(fooTopicId, 1, 2, 3),
                mkTopicAssignment(barTopicId, 4, 5, 6)))
            .build();
        ConsumerGroupMember updatedMember = new CurrentAssignmentBuilder(member)
            .withMetadataImage(metadataImage)
            .withTargetAssignment(10, new Assignment(mkAssignment(
                mkTopicAssignment(fooTopicId, 1, 2, 3),
                mkTopicAssignment(barTopicId, 4, 5, 6))))
            .withHasSubscriptionChanged(true)
            .withResolvedRegularExpressions(Map.of())
            .withCurrentPartitionEpoch((topicId, partitionId) -> -1)
            .withOwnedTopicPartitions(Arrays.asList(
                new ConsumerGroupHeartbeatRequestData.TopicPartitions()
                    .setTopicId(fooTopicId)
                    .setPartitions(Arrays.asList(1, 2, 3)),
                new ConsumerGroupHeartbeatRequestData.TopicPartitions()
                    .setTopicId(barTopicId)
                    .setPartitions(Arrays.asList(4, 5, 6))))
            .build();
        assertEquals(
            new ConsumerGroupMember.Builder("member")
                .setState(MemberState.UNREVOKED_PARTITIONS)
                .setMemberEpoch(10)
                .setPreviousMemberEpoch(10)
                .setSubscribedTopicNames(List.of(fooTopic))
                .setSubscribedTopicRegex("bar*")
                .setAssignedPartitions(mkAssignment(
                    mkTopicAssignment(fooTopicId, 1, 2, 3)))
                .setPartitionsPendingRevocation(mkAssignment(
                    mkTopicAssignment(barTopicId, 4, 5, 6)))
                .build(),
            updatedMember
        );
    }
    /**
     * With no subscribed topic names and an unresolved regular expression,
     * no topic counts as subscribed: every assigned partition moves to
     * pending revocation and the member enters UNREVOKED_PARTITIONS with an
     * empty assignment.
     */
    @Test
    public void testUnresolvedRegularExpression() {
        String fooTopic = "foo";
        String barTopic = "bar";
        Uuid fooTopicId = Uuid.randomUuid();
        Uuid barTopicId = Uuid.randomUuid();
        CoordinatorMetadataImage metadataImage = new MetadataImageBuilder()
            .addTopic(fooTopicId, fooTopic, 10)
            .addTopic(barTopicId, barTopic, 10)
            .buildCoordinatorMetadataImage();
        ConsumerGroupMember member = new ConsumerGroupMember.Builder("member")
            .setState(MemberState.STABLE)
            .setMemberEpoch(10)
            .setPreviousMemberEpoch(10)
            .setSubscribedTopicNames(List.of())
            .setSubscribedTopicRegex("bar*")
            .setAssignedPartitions(mkAssignment(
                mkTopicAssignment(fooTopicId, 1, 2, 3),
                mkTopicAssignment(barTopicId, 4, 5, 6)))
            .build();
        ConsumerGroupMember updatedMember = new CurrentAssignmentBuilder(member)
            .withMetadataImage(metadataImage)
            .withTargetAssignment(10, new Assignment(mkAssignment(
                mkTopicAssignment(fooTopicId, 1, 2, 3),
                mkTopicAssignment(barTopicId, 4, 5, 6))))
            .withHasSubscriptionChanged(true)
            .withResolvedRegularExpressions(Map.of())
            .withCurrentPartitionEpoch((topicId, partitionId) -> -1)
            .withOwnedTopicPartitions(Arrays.asList(
                new ConsumerGroupHeartbeatRequestData.TopicPartitions()
                    .setTopicId(fooTopicId)
                    .setPartitions(Arrays.asList(1, 2, 3)),
                new ConsumerGroupHeartbeatRequestData.TopicPartitions()
                    .setTopicId(barTopicId)
                    .setPartitions(Arrays.asList(4, 5, 6))))
            .build();
        assertEquals(
            new ConsumerGroupMember.Builder("member")
                .setState(MemberState.UNREVOKED_PARTITIONS)
                .setMemberEpoch(10)
                .setPreviousMemberEpoch(10)
                .setSubscribedTopicNames(List.of())
                .setSubscribedTopicRegex("bar*")
                .setAssignedPartitions(mkAssignment())
                .setPartitionsPendingRevocation(mkAssignment(
                    mkTopicAssignment(fooTopicId, 1, 2, 3),
                    mkTopicAssignment(barTopicId, 4, 5, 6)))
                .build(),
            updatedMember
        );
    }
    /**
     * Once the regular expression "bar*" is resolved to the bar topic, both
     * foo (subscribed by name) and bar (subscribed via regex) remain valid,
     * so the member keeps its full assignment and stays STABLE.
     */
    @Test
    public void testSubscribedTopicNameAndResolvedRegularExpression() {
        String fooTopic = "foo";
        String barTopic = "bar";
        Uuid fooTopicId = Uuid.randomUuid();
        Uuid barTopicId = Uuid.randomUuid();
        CoordinatorMetadataImage metadataImage = new MetadataImageBuilder()
            .addTopic(fooTopicId, fooTopic, 10)
            .addTopic(barTopicId, barTopic, 10)
            .buildCoordinatorMetadataImage();
        ConsumerGroupMember member = new ConsumerGroupMember.Builder("member")
            .setState(MemberState.STABLE)
            .setMemberEpoch(10)
            .setPreviousMemberEpoch(10)
            .setSubscribedTopicNames(List.of(fooTopic))
            .setSubscribedTopicRegex("bar*")
            .setAssignedPartitions(mkAssignment(
                mkTopicAssignment(fooTopicId, 1, 2, 3),
                mkTopicAssignment(barTopicId, 4, 5, 6)))
            .build();
        ConsumerGroupMember updatedMember = new CurrentAssignmentBuilder(member)
            .withMetadataImage(metadataImage)
            .withTargetAssignment(10, new Assignment(mkAssignment(
                mkTopicAssignment(fooTopicId, 1, 2, 3),
                mkTopicAssignment(barTopicId, 4, 5, 6))))
            .withHasSubscriptionChanged(true)
            .withResolvedRegularExpressions(Map.of(
                "bar*", new ResolvedRegularExpression(
                    Set.of("bar"),
                    12345L,
                    0L
                )
            ))
            .withCurrentPartitionEpoch((topicId, partitionId) -> -1)
            .withOwnedTopicPartitions(Arrays.asList(
                new ConsumerGroupHeartbeatRequestData.TopicPartitions()
                    .setTopicId(fooTopicId)
                    .setPartitions(Arrays.asList(1, 2, 3)),
                new ConsumerGroupHeartbeatRequestData.TopicPartitions()
                    .setTopicId(barTopicId)
                    .setPartitions(Arrays.asList(4, 5, 6))))
            .build();
        assertEquals(
            new ConsumerGroupMember.Builder("member")
                .setState(MemberState.STABLE)
                .setMemberEpoch(10)
                .setPreviousMemberEpoch(10)
                .setSubscribedTopicNames(List.of(fooTopic))
                .setSubscribedTopicRegex("bar*")
                .setAssignedPartitions(mkAssignment(
                    mkTopicAssignment(fooTopicId, 1, 2, 3),
                    mkTopicAssignment(barTopicId, 4, 5, 6)))
                .build(),
            updatedMember
        );
    }
}
| CurrentAssignmentBuilderTest |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/completable/CompletableTimeout.java | {
"start": 2964,
"end": 3736
} | class ____ implements Runnable {
private final AtomicBoolean once;
final CompositeDisposable set;
final CompletableObserver downstream;
DisposeTask(AtomicBoolean once, CompositeDisposable set, CompletableObserver observer) {
this.once = once;
this.set = set;
this.downstream = observer;
}
@Override
public void run() {
if (once.compareAndSet(false, true)) {
set.clear();
if (other == null) {
downstream.onError(new TimeoutException(timeoutMessage(timeout, unit)));
} else {
other.subscribe(new DisposeObserver());
}
}
}
final | DisposeTask |
java | spring-projects__spring-boot | module/spring-boot-webflux/src/main/java/org/springframework/boot/webflux/autoconfigure/error/AbstractErrorWebExceptionHandler.java | {
"start": 13461,
"end": 13788
} | class ____ implements ServerResponse.Context {
@Override
public List<HttpMessageWriter<?>> messageWriters() {
return AbstractErrorWebExceptionHandler.this.messageWriters;
}
@Override
public List<ViewResolver> viewResolvers() {
return AbstractErrorWebExceptionHandler.this.viewResolvers;
}
}
}
| ResponseContext |
java | micronaut-projects__micronaut-core | http-server-tck/src/main/java/io/micronaut/http/server/tck/tests/ErrorHandlerStringTest.java | {
"start": 2415,
"end": 2649
} | class ____ implements ExceptionHandler<MyException, String> {
@Override
public String handle(HttpRequest request, MyException exception) {
return "{\"message\":\"hello\"}";
}
}
}
| MyExceptionHandler |
java | apache__logging-log4j2 | log4j-core-test/src/test/java/org/apache/logging/log4j/core/net/ssl/SslKeyStoreConstants.java | {
"start": 864,
"end": 3186
} | class ____ {
private static final String PATH = "src/test/resources/org/apache/logging/log4j/core/net/ssl/";
/// Trust store (JKS) /////////////////////////////////////////////////////
public static final String TRUSTSTORE_LOCATION = PATH + "trustStore.jks";
public static char[] TRUSTSTORE_PWD() {
return "aTrustStoreSecret".toCharArray();
}
public static final String TRUSTSTORE_TYPE = "JKS";
/// Trust store #2 (JKS) //////////////////////////////////////////////////
public static final String TRUSTSTORE2_LOCATION = PATH + "trustStore2.jks";
public static char[] TRUSTSTORE2_PWD() {
return "aTrustStoreSecret2".toCharArray();
}
public static final String TRUSTSTORE2_TYPE = "JKS";
/// Key store (JKS) ///////////////////////////////////////////////////////
public static final String KEYSTORE_LOCATION = PATH + "keyStore.jks";
public static char[] KEYSTORE_PWD() {
return "aKeyStoreSecret".toCharArray();
}
public static final String KEYSTORE_TYPE = "JKS";
/// Key store #2 (JKS) ////////////////////////////////////////////////////
public static final String KEYSTORE2_LOCATION = PATH + "keyStore2.jks";
public static char[] KEYSTORE2_PWD() {
return "aKeyStoreSecret2".toCharArray();
}
public static final String KEYSTORE2_TYPE = "JKS";
/// Key store (P12) ///////////////////////////////////////////////////////
public static final String KEYSTORE_P12_LOCATION = PATH + "keyStore.p12";
public static char[] KEYSTORE_P12_PWD() {
return "aKeyStoreSecret".toCharArray();
}
public static final String KEYSTORE_P12_TYPE = "PKCS12";
/// Key store (P12 without password) //////////////////////////////////////
public static final String KEYSTORE_P12_NOPASS_LOCATION = PATH + "keyStore-nopass.p12";
public static char[] KEYSTORE_P12_NOPASS_PWD() {
return new char[0];
}
public static final String KEYSTORE_P12_NOPASS_TYPE = "PKCS12";
/// Other /////////////////////////////////////////////////////////////////
public static final char[] NULL_PWD = null;
public static final String WINDOWS_KEYSTORE_TYPE = "Windows-MY";
public static final String WINDOWS_TRUSTSTORE_TYPE = "Windows-ROOT";
}
| SslKeyStoreConstants |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/bug/Issue_685.java | {
"start": 198,
"end": 475
} | class ____ extends TestCase {
public void test_for_issue() throws Exception {
OracleStatementParser parser = new OracleStatementParser("select upper(*) from aa order by now()");
SQLStatement st = parser.parseStatement();
st.toString();
}
}
| Issue_685 |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/cache/interceptor/AbstractFallbackCacheOperationSource.java | {
"start": 3554,
"end": 5646
} | class ____ this invocation (can be {@code null})
* @param cacheNull whether {@code null} results should be cached as well
* @return {@link CacheOperation} for this method, or {@code null} if the method
* is not cacheable
*/
private @Nullable Collection<CacheOperation> getCacheOperations(
Method method, @Nullable Class<?> targetClass, boolean cacheNull) {
if (ReflectionUtils.isObjectMethod(method)) {
return null;
}
Object cacheKey = getCacheKey(method, targetClass);
Collection<CacheOperation> cached = this.operationCache.get(cacheKey);
if (cached != null) {
return (cached != NULL_CACHING_MARKER ? cached : null);
}
else {
Collection<CacheOperation> cacheOps = computeCacheOperations(method, targetClass);
if (cacheOps != null) {
if (logger.isTraceEnabled()) {
logger.trace("Adding cacheable method '" + method.getName() + "' with operations: " + cacheOps);
}
this.operationCache.put(cacheKey, cacheOps);
}
else if (cacheNull) {
this.operationCache.put(cacheKey, NULL_CACHING_MARKER);
}
return cacheOps;
}
}
/**
* Determine a cache key for the given method and target class.
* <p>Must not produce same key for overloaded methods.
* Must produce same key for different instances of the same method.
* @param method the method (never {@code null})
* @param targetClass the target class (may be {@code null})
* @return the cache key (never {@code null})
*/
protected Object getCacheKey(Method method, @Nullable Class<?> targetClass) {
return new MethodClassKey(method, targetClass);
}
private @Nullable Collection<CacheOperation> computeCacheOperations(Method method, @Nullable Class<?> targetClass) {
// Don't allow non-public methods, as configured.
if (allowPublicMethodsOnly() && !Modifier.isPublic(method.getModifiers())) {
return null;
}
// Skip setBeanFactory method on BeanFactoryAware.
if (method.getDeclaringClass() == BeanFactoryAware.class) {
return null;
}
// The method may be on an interface, but we need metadata from the target class.
// If the target | for |
java | google__dagger | javatests/dagger/internal/codegen/DependencyCycleValidationTest.java | {
"start": 24108,
"end": 25359
} | interface ____ {",
" Object unqualified();",
"}");
CompilerTests.daggerCompiler(qualifier, module, component)
.withProcessingOptions(compilerMode.processorOptions())
.compile(
subject -> {
subject.hasErrorCount(1);
subject.hasErrorContaining(
String.join(
"\n",
"Found a dependency cycle:",
" Object is injected at",
" [TestComponent] TestModule.bindQualified(unqualified)",
" @SomeQualifier Object is injected at",
" [TestComponent] TestModule.bindUnqualified(qualified)",
" Object is injected at",
" [TestComponent] TestModule.bindQualified(unqualified)",
" ...",
"",
"The cycle is requested via:",
" Object is requested at",
" [TestComponent] TestComponent.unqualified()"))
.onSource(component)
.onLineContaining(" | TestComponent |
java | apache__maven | its/core-it-suite/src/test/resources/mng-6127-plugin-execution-configuration-interference/plugin/src/main/java/org/apache/maven/its/mng6127/plugin/TestMojo.java | {
"start": 1997,
"end": 3090
} | class ____ extends AbstractMojo {
/**
* The Maven project.
*
* @parameter expression="${project}"
*/
private MavenProject project;
/**
* The name to write.
*
* @parameter
*/
private String name;
/**
* The second name to write.
*
* @parameter
*/
private String secondName;
public void execute() throws MojoExecutionException {
File file = new File(project.getBasedir(), "configuration.txt");
file.getParentFile().mkdirs();
Writer w = null;
try {
w = new OutputStreamWriter(new FileOutputStream(file, true), "UTF-8");
if (name != null) {
w.write("name=" + name + ", ");
}
w.write("secondName=" + secondName);
} catch (IOException e) {
throw new MojoExecutionException(e.getMessage(), e);
} finally {
if (w != null) {
try {
w.close();
} catch (IOException e) {
}
}
}
}
}
| TestMojo |
java | grpc__grpc-java | api/src/test/java/io/grpc/ContextsTest.java | {
"start": 7219,
"end": 9714
} | class ____ extends ForwardingScheduledExecutorService {
private ScheduledExecutorService delegate = TestingExecutors.noOpScheduledExecutor();
Runnable command;
@Override public ScheduledExecutorService delegate() {
return delegate;
}
@Override public ScheduledFuture<?> schedule(Runnable command, long delay, TimeUnit unit) {
if (delay > unit.convert(expectedDelay, expectedUnit)) {
fail("Delay larger than expected: " + delay + " " + unit);
}
this.command = command;
return super.schedule(command, delay, unit);
}
}
MockScheduledExecutorService executorService = new MockScheduledExecutorService();
Context.CancellableContext cancellableContext = Context.current()
.withDeadlineAfter(expectedDelay, expectedUnit, executorService);
executorService.command.run();
assertTrue(cancellableContext.isCancelled());
assertThat(cancellableContext.cancellationCause()).isInstanceOf(TimeoutException.class);
Status status = statusFromCancelled(cancellableContext);
assertNotNull(status);
assertEquals(Status.Code.DEADLINE_EXCEEDED, status.getCode());
assertEquals("context timed out", status.getDescription());
}
@Test
public void statusFromCancelled_returnCancelledIfCauseIsNull() {
Context.CancellableContext cancellableContext = Context.current().withCancellation();
cancellableContext.cancel(null);
assertTrue(cancellableContext.isCancelled());
Status status = statusFromCancelled(cancellableContext);
assertNotNull(status);
assertEquals(Status.Code.CANCELLED, status.getCode());
}
/** This is a whitebox test, to verify a special case of the implementation. */
@Test
public void statusFromCancelled_StatusUnknownShouldWork() {
Context.CancellableContext cancellableContext = Context.current().withCancellation();
Exception e = Status.UNKNOWN.asException();
cancellableContext.cancel(e);
assertTrue(cancellableContext.isCancelled());
Status status = statusFromCancelled(cancellableContext);
assertNotNull(status);
assertEquals(Status.Code.UNKNOWN, status.getCode());
assertSame(e, status.getCause());
}
@Test
public void statusFromCancelled_shouldThrowIfCtxIsNull() {
try {
statusFromCancelled(null);
fail("NPE expected");
} catch (NullPointerException npe) {
assertEquals("context must not be null", npe.getMessage());
}
}
}
| MockScheduledExecutorService |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/boot/database/qualfiedTableNaming/XmlDefinedNamespaceTests.java | {
"start": 3325,
"end": 4235
} | class ____ {
private Integer id;
private String name;
}
private String generateScript(
SchemaExport.Action action,
MetadataImplementor domainModel,
StandardServiceRegistry serviceRegistry) {
SchemaExport schemaExport = new SchemaExport();
schemaExport.setFormat( true );
schemaExport.setDelimiter( ";" );
StringWriter writer = new StringWriter();
schemaExport.doExecution(
action,
false,
domainModel,
serviceRegistry,
new TargetDescriptor() {
@Override
public EnumSet<TargetType> getTargetTypes() {
return EnumSet.of( TargetType.SCRIPT );
}
@Override
public ScriptTargetOutput getScriptTargetOutput() {
return new ScriptTargetOutputToWriter( writer ) {
@Override
public void accept(String command) {
super.accept( command );
}
};
}
}
);
return writer.toString();
}
}
| SimpleEntity |
java | grpc__grpc-java | api/src/main/java/io/grpc/InternalChannelz.java | {
"start": 8681,
"end": 9030
} | class ____ {
public final List<InternalInstrumented<ChannelStats>> channels;
public final boolean end;
/** Creates an instance. */
public RootChannelList(List<InternalInstrumented<ChannelStats>> channels, boolean end) {
this.channels = checkNotNull(channels);
this.end = end;
}
}
public static final | RootChannelList |
java | apache__flink | flink-core-api/src/main/java/org/apache/flink/api/common/functions/Function.java | {
"start": 915,
"end": 971
} | interface ____ all user-defined functions.
*
* <p>This | for |
java | apache__camel | components/camel-netty-http/src/test/java/org/apache/camel/component/netty/http/NettyHttpClientChunkedResponseTest.java | {
"start": 1055,
"end": 1928
} | class ____ extends BaseNettyTest {
@Test
public void testNettyHttpClientChunkedResponse() throws Exception {
getMockEndpoint("mock:input").expectedBodiesReceived("Hello World");
String out = template.requestBody("netty-http:http://localhost:{{port}}/foo", "Hello World", String.class);
assertEquals("Bye World", out);
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("netty-http:http://0.0.0.0:{{port}}/foo")
.to("mock:input")
.setHeader("Transfer-Encoding", constant("chunked"))
.transform().simple("Bye World");
}
};
}
}
| NettyHttpClientChunkedResponseTest |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/script/VectorScoreScriptUtils.java | {
"start": 21465,
"end": 21563
} | interface ____ {
double cosineSimilarity();
}
public static | CosineSimilarityInterface |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/io/AbstractFileResolvingResource.java | {
"start": 1191,
"end": 1508
} | class ____ resources which resolve URLs into File references,
* such as {@link UrlResource} or {@link ClassPathResource}.
*
* <p>Detects the "file" protocol as well as the JBoss "vfs" protocol in URLs,
* resolving file system references accordingly.
*
* @author Juergen Hoeller
* @since 3.0
*/
public abstract | for |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/submitted/multiple_discriminator/PersonMapper.java | {
"start": 718,
"end": 815
} | interface ____ {
Person get(Long id);
Person get2(Long id);
Person getLoop();
}
| PersonMapper |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/web/servlet/htmlunit/DelegatingWebConnection.java | {
"start": 2952,
"end": 3360
} | class ____ {
private final WebRequestMatcher matcher;
private final WebConnection delegate;
public DelegateWebConnection(WebRequestMatcher matcher, WebConnection delegate) {
this.matcher = matcher;
this.delegate = delegate;
}
private WebRequestMatcher getMatcher() {
return this.matcher;
}
private WebConnection getDelegate() {
return this.delegate;
}
}
}
| DelegateWebConnection |
java | apache__flink | flink-clients/src/test/java/org/apache/flink/client/program/rest/RestClusterClientTest.java | {
"start": 27652,
"end": 28548
} | class ____
extends TestHandler<
EmptyRequestBody,
TriggerResponse,
ClusterDataSetDeleteTriggerMessageParameters> {
private TestClusterDatasetDeleteTriggerHandler() {
super(ClusterDataSetDeleteTriggerHeaders.INSTANCE);
}
@Override
protected CompletableFuture<TriggerResponse> handleRequest(
HandlerRequest<EmptyRequestBody> request, DispatcherGateway gateway)
throws RestHandlerException {
assertThat(request.getPathParameter(ClusterDataSetIdPathParameter.class))
.isEqualTo(intermediateDataSetID);
return CompletableFuture.completedFuture(new TriggerResponse(triggerId));
}
}
private | TestClusterDatasetDeleteTriggerHandler |
java | google__auto | factory/src/test/resources/good/CheckerFrameworkNullable.java | {
"start": 873,
"end": 1270
} | class ____ {
CheckerFrameworkNullable(
@NullableDecl String nullableDecl,
@Provided @NullableDecl String providedNullableDecl,
@NullableType String nullableType,
@Provided @NullableType String providedNullableType,
Map.@NullableType Entry<?, ?> nestedNullableType,
@Provided Map.@NullableType Entry<?, ?> providedNestedNullableType) {}
}
| CheckerFrameworkNullable |
java | apache__dubbo | dubbo-remoting/dubbo-remoting-api/src/main/java/org/apache/dubbo/remoting/telnet/codec/TelnetCodec.java | {
"start": 1782,
"end": 11966
} | class ____ extends TransportCodec {
private static final ErrorTypeAwareLogger logger = LoggerFactory.getErrorTypeAwareLogger(TelnetCodec.class);
private static final String HISTORY_LIST_KEY = "telnet.history.list";
private static final String HISTORY_INDEX_KEY = "telnet.history.index";
private static final byte[] UP = new byte[] {27, 91, 65};
private static final byte[] DOWN = new byte[] {27, 91, 66};
private static final List<?> ENTER =
Arrays.asList(new byte[] {'\r', '\n'} /* Windows Enter */, new byte[] {'\n'} /* Linux Enter */);
private static final List<?> EXIT = Arrays.asList(
new byte[] {3} /* Windows Ctrl+C */,
new byte[] {-1, -12, -1, -3, 6} /* Linux Ctrl+C */,
new byte[] {-1, -19, -1, -3, 6} /* Linux Pause */);
private static Charset getCharset(Channel channel) {
if (channel != null) {
Object attribute = channel.getAttribute(CHARSET_KEY);
if (attribute instanceof String) {
try {
return Charset.forName((String) attribute);
} catch (Throwable t) {
logger.warn(TRANSPORT_UNSUPPORTED_CHARSET, "", "", t.getMessage(), t);
}
} else if (attribute instanceof Charset) {
return (Charset) attribute;
}
URL url = channel.getUrl();
if (url != null) {
String parameter = url.getParameter(CHARSET_KEY);
if (StringUtils.isNotEmpty(parameter)) {
try {
return Charset.forName(parameter);
} catch (Throwable t) {
logger.warn(TRANSPORT_UNSUPPORTED_CHARSET, "", "", t.getMessage(), t);
}
}
}
}
try {
return Charset.forName(DEFAULT_CHARSET);
} catch (Throwable t) {
logger.warn(TRANSPORT_UNSUPPORTED_CHARSET, "", "", t.getMessage(), t);
}
return Charset.defaultCharset();
}
private static String toString(byte[] message, Charset charset) throws UnsupportedEncodingException {
byte[] copy = new byte[message.length];
int index = 0;
for (int i = 0; i < message.length; i++) {
byte b = message[i];
if (b == '\b') { // backspace
if (index > 0) {
index--;
}
if (i > 2 && message[i - 2] < 0) { // double byte char
if (index > 0) {
index--;
}
}
} else if (b == 27) { // escape
if (i < message.length - 4 && message[i + 4] == 126) {
i = i + 4;
} else if (i < message.length - 3 && message[i + 3] == 126) {
i = i + 3;
} else if (i < message.length - 2) {
i = i + 2;
}
} else if (b == -1
&& i < message.length - 2
&& (message[i + 1] == -3 || message[i + 1] == -5)) { // handshake
i = i + 2;
} else {
copy[index++] = message[i];
}
}
if (index == 0) {
return "";
}
return new String(copy, 0, index, charset.name()).trim();
}
private static boolean isEquals(byte[] message, byte[] command) throws IOException {
return message.length == command.length && endsWith(message, command);
}
private static boolean endsWith(byte[] message, byte[] command) throws IOException {
if (message.length < command.length) {
return false;
}
int offset = message.length - command.length;
for (int i = command.length - 1; i >= 0; i--) {
if (message[offset + i] != command[i]) {
return false;
}
}
return true;
}
@Override
public void encode(Channel channel, ChannelBuffer buffer, Object message) throws IOException {
if (message instanceof String) {
if (isClientSide(channel)) {
message = message + "\r\n";
}
byte[] msgData = ((String) message).getBytes(getCharset(channel).name());
buffer.writeBytes(msgData);
} else {
super.encode(channel, buffer, message);
}
}
@Override
public Object decode(Channel channel, ChannelBuffer buffer) throws IOException {
int readable = buffer.readableBytes();
byte[] message = new byte[readable];
buffer.readBytes(message);
return decode(channel, buffer, readable, message);
}
@SuppressWarnings("unchecked")
protected Object decode(Channel channel, ChannelBuffer buffer, int readable, byte[] message) throws IOException {
if (isClientSide(channel)) {
return toString(message, getCharset(channel));
}
checkPayload(channel, readable);
if (message == null || message.length == 0) {
return DecodeResult.NEED_MORE_INPUT;
}
if (message[message.length - 1] == '\b') { // Windows backspace echo
try {
boolean isDoubleChar = message.length >= 3 && message[message.length - 3] < 0; // double byte char
channel.send(new String(
isDoubleChar ? new byte[] {32, 32, 8, 8} : new byte[] {32, 8},
getCharset(channel).name()));
} catch (RemotingException e) {
throw new IOException(StringUtils.toString(e));
}
return DecodeResult.NEED_MORE_INPUT;
}
for (Object command : EXIT) {
if (isEquals(message, (byte[]) command)) {
if (logger.isInfoEnabled()) {
logger.info(new Exception(
"Close channel " + channel + " on exit command: " + Arrays.toString((byte[]) command)));
}
channel.close();
return null;
}
}
boolean up = endsWith(message, UP);
boolean down = endsWith(message, DOWN);
if (up || down) {
LinkedList<String> history = (LinkedList<String>) channel.getAttribute(HISTORY_LIST_KEY);
if (CollectionUtils.isEmpty(history)) {
return DecodeResult.NEED_MORE_INPUT;
}
Integer index = (Integer) channel.getAttribute(HISTORY_INDEX_KEY);
Integer old = index;
if (index == null) {
index = history.size() - 1;
} else {
if (up) {
index = index - 1;
if (index < 0) {
index = history.size() - 1;
}
} else {
index = index + 1;
if (index > history.size() - 1) {
index = 0;
}
}
}
if (old == null || !old.equals(index)) {
channel.setAttribute(HISTORY_INDEX_KEY, index);
String value = history.get(index);
if (old != null && old >= 0 && old < history.size()) {
String ov = history.get(old);
StringBuilder buf = new StringBuilder();
for (int i = 0; i < ov.length(); i++) {
buf.append('\b');
}
for (int i = 0; i < ov.length(); i++) {
buf.append(' ');
}
for (int i = 0; i < ov.length(); i++) {
buf.append('\b');
}
value = buf + value;
}
try {
channel.send(value);
} catch (RemotingException e) {
throw new IOException(StringUtils.toString(e));
}
}
return DecodeResult.NEED_MORE_INPUT;
}
for (Object command : EXIT) {
if (isEquals(message, (byte[]) command)) {
if (logger.isInfoEnabled()) {
logger.info(new Exception("Close channel " + channel + " on exit command " + command));
}
channel.close();
return null;
}
}
byte[] enter = null;
for (Object command : ENTER) {
if (endsWith(message, (byte[]) command)) {
enter = (byte[]) command;
break;
}
}
if (enter == null) {
return DecodeResult.NEED_MORE_INPUT;
}
LinkedList<String> history = (LinkedList<String>) channel.getAttribute(HISTORY_LIST_KEY);
Integer index = (Integer) channel.getAttribute(HISTORY_INDEX_KEY);
channel.removeAttribute(HISTORY_INDEX_KEY);
if (CollectionUtils.isNotEmpty(history) && index != null && index >= 0 && index < history.size()) {
String value = history.get(index);
if (value != null) {
byte[] b1 = value.getBytes(StandardCharsets.UTF_8);
byte[] b2 = new byte[b1.length + message.length];
System.arraycopy(b1, 0, b2, 0, b1.length);
System.arraycopy(message, 0, b2, b1.length, message.length);
message = b2;
}
}
String result = toString(message, getCharset(channel));
if (result.trim().length() > 0) {
if (history == null) {
history = new LinkedList<>();
channel.setAttribute(HISTORY_LIST_KEY, history);
}
if (history.isEmpty()) {
history.addLast(result);
} else if (!result.equals(history.getLast())) {
history.remove(result);
history.addLast(result);
if (history.size() > 10) {
history.removeFirst();
}
}
}
return result;
}
}
| TelnetCodec |
java | netty__netty | transport-classes-epoll/src/main/java/io/netty/channel/epoll/NativeDatagramPacketArray.java | {
"start": 3927,
"end": 6008
} | class ____ implements MessageProcessor {
private boolean connected;
private int maxMessagesPerWrite;
@Override
public boolean processMessage(Object msg) {
final boolean added;
if (msg instanceof DatagramPacket) {
DatagramPacket packet = (DatagramPacket) msg;
ByteBuf buf = packet.content();
int segmentSize = 0;
if (packet instanceof io.netty.channel.unix.SegmentedDatagramPacket) {
int seg = ((io.netty.channel.unix.SegmentedDatagramPacket) packet).segmentSize();
// We only need to tell the kernel that we want to use UDP_SEGMENT if there are multiple
// segments in the packet.
if (buf.readableBytes() > seg) {
segmentSize = seg;
}
}
added = add0(buf, buf.readerIndex(), buf.readableBytes(), segmentSize, packet.recipient());
} else if (msg instanceof ByteBuf && connected) {
ByteBuf buf = (ByteBuf) msg;
added = add0(buf, buf.readerIndex(), buf.readableBytes(), 0, null);
} else {
added = false;
}
if (added) {
maxMessagesPerWrite--;
return maxMessagesPerWrite > 0;
}
return false;
}
}
private static InetSocketAddress newAddress(byte[] addr, int addrLen, int port, int scopeId, byte[] ipv4Bytes)
throws UnknownHostException {
final InetAddress address;
if (addrLen == ipv4Bytes.length) {
System.arraycopy(addr, 0, ipv4Bytes, 0, addrLen);
address = InetAddress.getByAddress(ipv4Bytes);
} else {
address = Inet6Address.getByAddress(null, addr, scopeId);
}
return new InetSocketAddress(address, port);
}
/**
* Used to pass needed data to JNI.
*/
@SuppressWarnings("unused")
@UnstableApi
public final | MyMessageProcessor |
java | quarkusio__quarkus | extensions/reactive-routes/deployment/src/main/java/io/quarkus/vertx/web/deployment/ReactiveRoutesProcessor.java | {
"start": 93377,
"end": 93484
} | interface ____<A, B, C> {
boolean test(A a, B b, C c);
}
@FunctionalInterface
| TriPredicate |
java | micronaut-projects__micronaut-core | test-suite/src/test/java/io/micronaut/context/router/RouteBuilderMediaTypeTest.java | {
"start": 3396,
"end": 4805
} | class ____ extends DefaultRouteBuilder {
CreateSaveRouteBuilder(ExecutionHandleLocator executionHandleLocator,
BeanContext beanContext,
List<ContactController> contactControllerList) {
super(executionHandleLocator);
for (ContactController controller : contactControllerList) {
beanContext.getBeanDefinition(ContactController.class);
BeanDefinition<ContactController> bd = beanContext.getBeanDefinition(ContactController.class);
bd.findMethod("create", HttpRequest.class).ifPresent(m -> {
MethodExecutionHandle<Object, Object> executionHandle = ExecutionHandle.of(controller, (ExecutableMethod) m);
buildRoute(HttpMethod.GET, "/contact/create", executionHandle);
});
bd.findMethod("save", HttpRequest.class, Contact.class).ifPresent(m -> {
MethodExecutionHandle<Object, Object> executionHandle = ExecutionHandle.of(controller, (ExecutableMethod) m);
buildRoute(HttpMethod.POST, "/contact/save", Collections.singletonList(MediaType.APPLICATION_FORM_URLENCODED_TYPE), executionHandle);
});
}
}
}
@Requires(property = "spec.name", value = "RouteBuilderMediaTypeSpec")
@Singleton
static | CreateSaveRouteBuilder |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/CosmosDbEndpointBuilderFactory.java | {
"start": 90167,
"end": 92919
} | interface ____ {
/**
* Azure CosmosDB (camel-azure-cosmosdb)
* To read and write records to the CosmosDB database on Azure cloud
* platform.
*
* Category: cloud,database
* Since: 3.10
* Maven coordinates: org.apache.camel:camel-azure-cosmosdb
*
* Syntax: <code>azure-cosmosdb:databaseName/containerName</code>
*
* Path parameter: databaseName
* The name of the Cosmos database that component should connect to. In
* case you are producing data and have createDatabaseIfNotExists=true,
* the component will automatically auto create a Cosmos database.
*
* Path parameter: containerName
* The name of the Cosmos container that component should connect to. In
* case you are producing data and have createContainerIfNotExists=true,
* the component will automatically auto create a Cosmos container.
*
* @param path databaseName/containerName
* @return the dsl builder
*/
default CosmosDbEndpointBuilder azureCosmosdb(String path) {
return CosmosDbEndpointBuilderFactory.endpointBuilder("azure-cosmosdb", path);
}
/**
* Azure CosmosDB (camel-azure-cosmosdb)
* To read and write records to the CosmosDB database on Azure cloud
* platform.
*
* Category: cloud,database
* Since: 3.10
* Maven coordinates: org.apache.camel:camel-azure-cosmosdb
*
* Syntax: <code>azure-cosmosdb:databaseName/containerName</code>
*
* Path parameter: databaseName
* The name of the Cosmos database that component should connect to. In
* case you are producing data and have createDatabaseIfNotExists=true,
* the component will automatically auto create a Cosmos database.
*
* Path parameter: containerName
* The name of the Cosmos container that component should connect to. In
* case you are producing data and have createContainerIfNotExists=true,
* the component will automatically auto create a Cosmos container.
*
* @param componentName to use a custom component name for the endpoint
* instead of the default name
* @param path databaseName/containerName
* @return the dsl builder
*/
default CosmosDbEndpointBuilder azureCosmosdb(String componentName, String path) {
return CosmosDbEndpointBuilderFactory.endpointBuilder(componentName, path);
}
}
static CosmosDbEndpointBuilder endpointBuilder(String componentName, String path) {
| CosmosDbBuilders |
java | alibaba__nacos | api/src/test/java/com/alibaba/nacos/api/naming/remote/request/BasedNamingRequestTest.java | {
"start": 1091,
"end": 2499
} | class ____ {
protected static final String SERVICE = "service";
protected static final String GROUP = "group";
protected static final String NAMESPACE = "namespace";
protected static ObjectMapper mapper;
@BeforeAll
public static void setUp() throws Exception {
mapper = new ObjectMapper();
mapper.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES);
mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
}
protected void injectNamingRequestBasedInfo(AbstractNamingRequest request) {
request.setServiceName(SERVICE);
request.setGroupName(GROUP);
request.setNamespace(NAMESPACE);
}
protected void checkNamingRequestBasedInfo(AbstractNamingRequest request) {
assertEquals(SERVICE, request.getServiceName());
assertEquals(GROUP, request.getGroupName());
assertEquals(NAMESPACE, request.getNamespace());
assertEquals(NAMING_MODULE, request.getModule());
}
protected void checkSerializeBasedInfo(String json) {
assertTrue(json.contains("\"serviceName\":\"" + SERVICE + "\""));
assertTrue(json.contains("\"groupName\":\"" + GROUP + "\""));
assertTrue(json.contains("\"namespace\":\"" + NAMESPACE + "\""));
assertTrue(json.contains("\"module\":\"" + NAMING_MODULE + "\""));
}
} | BasedNamingRequestTest |
java | apache__flink | flink-core/src/test/java/org/apache/flink/api/java/typeutils/CompositeTypeTest.java | {
"start": 1149,
"end": 11509
} | class ____ {
private final TupleTypeInfo<?> tupleTypeInfo =
new TupleTypeInfo<Tuple4<Integer, Integer, Integer, Integer>>(
BasicTypeInfo.INT_TYPE_INFO,
BasicTypeInfo.INT_TYPE_INFO,
BasicTypeInfo.INT_TYPE_INFO,
BasicTypeInfo.INT_TYPE_INFO);
private final TupleTypeInfo<Tuple3<Integer, String, Long>> inNestedTuple1 =
new TupleTypeInfo<Tuple3<Integer, String, Long>>(
BasicTypeInfo.INT_TYPE_INFO,
BasicTypeInfo.STRING_TYPE_INFO,
BasicTypeInfo.LONG_TYPE_INFO);
private final TupleTypeInfo<Tuple2<Double, Double>> inNestedTuple2 =
new TupleTypeInfo<Tuple2<Double, Double>>(
BasicTypeInfo.DOUBLE_TYPE_INFO, BasicTypeInfo.DOUBLE_TYPE_INFO);
private final TupleTypeInfo<?> nestedTypeInfo =
new TupleTypeInfo<
Tuple4<
Integer,
Tuple3<Integer, String, Long>,
Integer,
Tuple2<Double, Double>>>(
BasicTypeInfo.INT_TYPE_INFO,
inNestedTuple1,
BasicTypeInfo.INT_TYPE_INFO,
inNestedTuple2);
private final TupleTypeInfo<Tuple2<Integer, Tuple2<Integer, Integer>>> inNestedTuple3 =
new TupleTypeInfo<Tuple2<Integer, Tuple2<Integer, Integer>>>(
BasicTypeInfo.INT_TYPE_INFO,
new TupleTypeInfo<Tuple2<Integer, Integer>>(
BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO));
private final TupleTypeInfo<?> deepNestedTupleTypeInfo =
new TupleTypeInfo<Tuple3<Integer, Tuple2<Integer, Tuple2<Integer, Integer>>, Integer>>(
BasicTypeInfo.INT_TYPE_INFO, inNestedTuple3, BasicTypeInfo.INT_TYPE_INFO);
private final PojoTypeInfo<?> pojoTypeInfo =
((PojoTypeInfo<?>) TypeExtractor.getForClass(MyPojo.class));
private final TupleTypeInfo<?> pojoInTupleTypeInfo =
new TupleTypeInfo<Tuple2<Integer, MyPojo>>(BasicTypeInfo.INT_TYPE_INFO, pojoTypeInfo);
@Test
void testGetFlatFields() {
assertThat(tupleTypeInfo.getFlatFields("0").get(0).getPosition()).isZero();
assertThat(tupleTypeInfo.getFlatFields("1").get(0).getPosition()).isOne();
assertThat(tupleTypeInfo.getFlatFields("2").get(0).getPosition()).isEqualTo(2);
assertThat(tupleTypeInfo.getFlatFields("3").get(0).getPosition()).isEqualTo(3);
assertThat(tupleTypeInfo.getFlatFields("f0").get(0).getPosition()).isZero();
assertThat(tupleTypeInfo.getFlatFields("f1").get(0).getPosition()).isOne();
assertThat(tupleTypeInfo.getFlatFields("f2").get(0).getPosition()).isEqualTo(2);
assertThat(tupleTypeInfo.getFlatFields("f3").get(0).getPosition()).isEqualTo(3);
assertThat(nestedTypeInfo.getFlatFields("0").get(0).getPosition()).isZero();
assertThat(nestedTypeInfo.getFlatFields("1.0").get(0).getPosition()).isOne();
assertThat(nestedTypeInfo.getFlatFields("1.1").get(0).getPosition()).isEqualTo(2);
assertThat(nestedTypeInfo.getFlatFields("1.2").get(0).getPosition()).isEqualTo(3);
assertThat(nestedTypeInfo.getFlatFields("2").get(0).getPosition()).isEqualTo(4);
assertThat(nestedTypeInfo.getFlatFields("3.0").get(0).getPosition()).isEqualTo(5);
assertThat(nestedTypeInfo.getFlatFields("3.1").get(0).getPosition()).isEqualTo(6);
assertThat(nestedTypeInfo.getFlatFields("f2").get(0).getPosition()).isEqualTo(4);
assertThat(nestedTypeInfo.getFlatFields("f3.f0").get(0).getPosition()).isEqualTo(5);
assertThat(nestedTypeInfo.getFlatFields("1")).hasSize(3);
assertThat(nestedTypeInfo.getFlatFields("1").get(0).getPosition()).isOne();
assertThat(nestedTypeInfo.getFlatFields("1").get(1).getPosition()).isEqualTo(2);
assertThat(nestedTypeInfo.getFlatFields("1").get(2).getPosition()).isEqualTo(3);
assertThat(nestedTypeInfo.getFlatFields("1.*")).hasSize(3);
assertThat(nestedTypeInfo.getFlatFields("1.*").get(0).getPosition()).isOne();
assertThat(nestedTypeInfo.getFlatFields("1.*").get(1).getPosition()).isEqualTo(2);
assertThat(nestedTypeInfo.getFlatFields("1.*").get(2).getPosition()).isEqualTo(3);
assertThat(nestedTypeInfo.getFlatFields("3")).hasSize(2);
assertThat(nestedTypeInfo.getFlatFields("3").get(0).getPosition()).isEqualTo(5);
assertThat(nestedTypeInfo.getFlatFields("3").get(1).getPosition()).isEqualTo(6);
assertThat(nestedTypeInfo.getFlatFields("f1")).hasSize(3);
assertThat(nestedTypeInfo.getFlatFields("f1").get(0).getPosition()).isOne();
assertThat(nestedTypeInfo.getFlatFields("f1").get(1).getPosition()).isEqualTo(2);
assertThat(nestedTypeInfo.getFlatFields("f1").get(2).getPosition()).isEqualTo(3);
assertThat(nestedTypeInfo.getFlatFields("f3")).hasSize(2);
assertThat(nestedTypeInfo.getFlatFields("f3").get(0).getPosition()).isEqualTo(5);
assertThat(nestedTypeInfo.getFlatFields("f3").get(1).getPosition()).isEqualTo(6);
assertThat(nestedTypeInfo.getFlatFields("0").get(0).getType())
.isEqualTo(BasicTypeInfo.INT_TYPE_INFO);
assertThat(nestedTypeInfo.getFlatFields("1.1").get(0).getType())
.isEqualTo(BasicTypeInfo.STRING_TYPE_INFO);
assertThat(nestedTypeInfo.getFlatFields("1").get(2).getType())
.isEqualTo(BasicTypeInfo.LONG_TYPE_INFO);
assertThat(nestedTypeInfo.getFlatFields("3").get(1).getType())
.isEqualTo(BasicTypeInfo.DOUBLE_TYPE_INFO);
assertThat(deepNestedTupleTypeInfo.getFlatFields("1")).hasSize(3);
assertThat(deepNestedTupleTypeInfo.getFlatFields("1").get(0).getPosition()).isOne();
assertThat(deepNestedTupleTypeInfo.getFlatFields("1").get(1).getPosition()).isEqualTo(2);
assertThat(deepNestedTupleTypeInfo.getFlatFields("1").get(2).getPosition()).isEqualTo(3);
assertThat(deepNestedTupleTypeInfo.getFlatFields("*")).hasSize(5);
assertThat(deepNestedTupleTypeInfo.getFlatFields("*").get(0).getPosition()).isZero();
assertThat(deepNestedTupleTypeInfo.getFlatFields("*").get(1).getPosition()).isOne();
assertThat(deepNestedTupleTypeInfo.getFlatFields("*").get(2).getPosition()).isEqualTo(2);
assertThat(deepNestedTupleTypeInfo.getFlatFields("*").get(3).getPosition()).isEqualTo(3);
assertThat(deepNestedTupleTypeInfo.getFlatFields("*").get(4).getPosition()).isEqualTo(4);
assertThat(pojoTypeInfo.getFlatFields("a").get(0).getPosition()).isZero();
assertThat(pojoTypeInfo.getFlatFields("b").get(0).getPosition()).isOne();
assertThat(pojoTypeInfo.getFlatFields("*")).hasSize(2);
assertThat(pojoTypeInfo.getFlatFields("*").get(0).getPosition()).isZero();
assertThat(pojoTypeInfo.getFlatFields("*").get(1).getPosition()).isOne();
assertThat(pojoInTupleTypeInfo.getFlatFields("f1.a").get(0).getPosition()).isOne();
assertThat(pojoInTupleTypeInfo.getFlatFields("1.b").get(0).getPosition()).isEqualTo(2);
assertThat(pojoInTupleTypeInfo.getFlatFields("1")).hasSize(2);
assertThat(pojoInTupleTypeInfo.getFlatFields("1.*").get(0).getPosition()).isOne();
assertThat(pojoInTupleTypeInfo.getFlatFields("1").get(1).getPosition()).isEqualTo(2);
assertThat(pojoInTupleTypeInfo.getFlatFields("f1.*")).hasSize(2);
assertThat(pojoInTupleTypeInfo.getFlatFields("f1.*").get(0).getPosition()).isOne();
assertThat(pojoInTupleTypeInfo.getFlatFields("f1").get(1).getPosition()).isEqualTo(2);
assertThat(pojoInTupleTypeInfo.getFlatFields("*")).hasSize(3);
assertThat(pojoInTupleTypeInfo.getFlatFields("*").get(0).getPosition()).isZero();
assertThat(pojoInTupleTypeInfo.getFlatFields("*").get(1).getPosition()).isOne();
assertThat(pojoInTupleTypeInfo.getFlatFields("*").get(2).getPosition()).isEqualTo(2);
}
@Test
void testFieldAtStringRef() {
assertThat(tupleTypeInfo.getTypeAt("0")).isEqualTo(BasicTypeInfo.INT_TYPE_INFO);
assertThat(tupleTypeInfo.getTypeAt("2")).isEqualTo(BasicTypeInfo.INT_TYPE_INFO);
assertThat(tupleTypeInfo.getTypeAt("f1")).isEqualTo(BasicTypeInfo.INT_TYPE_INFO);
assertThat(tupleTypeInfo.getTypeAt("f3")).isEqualTo(BasicTypeInfo.INT_TYPE_INFO);
assertThat(nestedTypeInfo.getTypeAt("0")).isEqualTo(BasicTypeInfo.INT_TYPE_INFO);
assertThat(nestedTypeInfo.getTypeAt("1.0")).isEqualTo(BasicTypeInfo.INT_TYPE_INFO);
assertThat(nestedTypeInfo.getTypeAt("1.1")).isEqualTo(BasicTypeInfo.STRING_TYPE_INFO);
assertThat(nestedTypeInfo.getTypeAt("1.2")).isEqualTo(BasicTypeInfo.LONG_TYPE_INFO);
assertThat(nestedTypeInfo.getTypeAt("2")).isEqualTo(BasicTypeInfo.INT_TYPE_INFO);
assertThat(nestedTypeInfo.getTypeAt("3.0")).isEqualTo(BasicTypeInfo.DOUBLE_TYPE_INFO);
assertThat(nestedTypeInfo.getTypeAt("3.1")).isEqualTo(BasicTypeInfo.DOUBLE_TYPE_INFO);
assertThat(nestedTypeInfo.getTypeAt("f2")).isEqualTo(BasicTypeInfo.INT_TYPE_INFO);
assertThat(nestedTypeInfo.getTypeAt("f3.f0")).isEqualTo(BasicTypeInfo.DOUBLE_TYPE_INFO);
assertThat(nestedTypeInfo.getTypeAt("1")).isEqualTo(inNestedTuple1);
assertThat(nestedTypeInfo.getTypeAt("3")).isEqualTo(inNestedTuple2);
assertThat(nestedTypeInfo.getTypeAt("f1")).isEqualTo(inNestedTuple1);
assertThat(nestedTypeInfo.getTypeAt("f3")).isEqualTo(inNestedTuple2);
assertThat(deepNestedTupleTypeInfo.getTypeAt("1")).isEqualTo(inNestedTuple3);
assertThat(pojoTypeInfo.getTypeAt("a")).isEqualTo(BasicTypeInfo.STRING_TYPE_INFO);
assertThat(pojoTypeInfo.getTypeAt("b")).isEqualTo(BasicTypeInfo.INT_TYPE_INFO);
assertThat(pojoInTupleTypeInfo.getTypeAt("f1.a")).isEqualTo(BasicTypeInfo.STRING_TYPE_INFO);
assertThat(pojoInTupleTypeInfo.getTypeAt("1.b")).isEqualTo(BasicTypeInfo.INT_TYPE_INFO);
assertThat(pojoInTupleTypeInfo.getTypeAt("1")).isEqualTo(pojoTypeInfo);
assertThat(pojoInTupleTypeInfo.getTypeAt("f1")).isEqualTo(pojoTypeInfo);
}
public static | CompositeTypeTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/inheritance/ManyToOneInheritanceSubTypeTest.java | {
"start": 6390,
"end": 6505
} | class ____ {
@Id
@GeneratedValue
private Integer id;
}
@Entity( name = "JoinedA" )
public static | JoinedEntity |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/query/hql/EntityJoinTest.java | {
"start": 11157,
"end": 11969
} | class ____ {
private Integer id;
private Customer customer;
private String lastUpdateBy;
public FinancialRecord() {
}
public FinancialRecord(Integer id, Customer customer, String lastUpdateBy) {
this.id = id;
this.customer = customer;
this.lastUpdateBy = lastUpdateBy;
}
@Id
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
@ManyToOne
@JoinColumn
public Customer getCustomer() {
return customer;
}
public void setCustomer(Customer customer) {
this.customer = customer;
}
public String getLastUpdateBy() {
return lastUpdateBy;
}
public void setLastUpdateBy(String lastUpdateBy) {
this.lastUpdateBy = lastUpdateBy;
}
}
@Entity(name = "User")
@Table(name = "`a:user`")
public static | FinancialRecord |
java | spring-projects__spring-boot | buildSrc/src/main/java/org/springframework/boot/build/mavenplugin/MavenPluginPlugin.java | {
"start": 20795,
"end": 22582
} | class ____ extends DefaultTask {
private FileCollection resolvedBoms;
@InputFiles
@PathSensitive(PathSensitivity.RELATIVE)
public FileCollection getResolvedBoms() {
return this.resolvedBoms;
}
public void setResolvedBoms(FileCollection resolvedBoms) {
this.resolvedBoms = resolvedBoms;
}
@OutputFile
public abstract RegularFileProperty getDestination();
@TaskAction
public void extractVersionProperties() {
ResolvedBom resolvedBom = ResolvedBom.readFrom(this.resolvedBoms.getSingleFile());
Properties versions = extractVersionProperties(resolvedBom);
writeProperties(versions);
}
private void writeProperties(Properties versions) {
File outputFile = getDestination().getAsFile().get();
outputFile.getParentFile().mkdirs();
try (Writer writer = new FileWriter(outputFile)) {
versions.store(writer, null);
}
catch (IOException ex) {
throw new GradleException("Failed to write extracted version properties", ex);
}
}
private Properties extractVersionProperties(ResolvedBom resolvedBom) {
Properties versions = CollectionFactory.createSortedProperties(true);
versions.setProperty("project.version", resolvedBom.id().version());
Set<String> versionProperties = Set.of("log4j2.version", "maven-jar-plugin.version",
"maven-war-plugin.version", "build-helper-maven-plugin.version", "spring-framework.version",
"jakarta-servlet.version", "kotlin.version", "assertj.version", "junit-jupiter.version");
for (ResolvedLibrary library : resolvedBom.libraries()) {
if (library.versionProperty() != null && versionProperties.contains(library.versionProperty())) {
versions.setProperty(library.versionProperty(), library.version());
}
}
return versions;
}
}
}
| ExtractVersionProperties |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/output/KeyValueScoredValueOutput.java | {
"start": 429,
"end": 1416
} | class ____<K, V> extends CommandOutput<K, V, KeyValue<K, ScoredValue<V>>> {
private K key;
private boolean hasKey;
private V value;
private boolean hasValue;
public KeyValueScoredValueOutput(RedisCodec<K, V> codec) {
super(codec, null);
}
@Override
public void set(ByteBuffer bytes) {
if (bytes == null) {
return;
}
if (!hasKey) {
key = codec.decodeKey(bytes);
hasKey = true;
return;
}
if (!hasValue) {
value = codec.decodeValue(bytes);
hasValue = true;
return;
}
double score = LettuceStrings.toDouble(decodeString(bytes));
set(score);
}
@Override
public void set(double number) {
output = KeyValue.just(key, ScoredValue.just(number, value));
key = null;
hasKey = false;
value = null;
hasValue = false;
}
}
| KeyValueScoredValueOutput |
java | google__guava | android/guava-tests/test/com/google/common/util/concurrent/AtomicDoubleTest.java | {
"start": 611,
"end": 7279
} | class ____ extends JSR166TestCase {
private static final double[] VALUES = {
Double.NEGATIVE_INFINITY,
-Double.MAX_VALUE,
(double) Long.MIN_VALUE,
(double) Integer.MIN_VALUE,
-Math.PI,
-1.0,
-Double.MIN_VALUE,
-0.0,
+0.0,
Double.MIN_VALUE,
1.0,
Math.PI,
(double) Integer.MAX_VALUE,
(double) Long.MAX_VALUE,
Double.MAX_VALUE,
Double.POSITIVE_INFINITY,
Double.NaN,
Float.MAX_VALUE,
};
/** The notion of equality used by AtomicDouble */
static boolean bitEquals(double x, double y) {
return Double.doubleToRawLongBits(x) == Double.doubleToRawLongBits(y);
}
static void assertBitEquals(double x, double y) {
assertEquals(Double.doubleToRawLongBits(x), Double.doubleToRawLongBits(y));
}
/** constructor initializes to given value */
public void testConstructor() {
for (double x : VALUES) {
AtomicDouble a = new AtomicDouble(x);
assertBitEquals(x, a.get());
}
}
/** default constructed initializes to zero */
public void testConstructor2() {
AtomicDouble a = new AtomicDouble();
assertBitEquals(0.0, a.get());
}
/** get returns the last value set */
public void testGetSet() {
AtomicDouble at = new AtomicDouble(1.0);
assertBitEquals(1.0, at.get());
for (double x : VALUES) {
at.set(x);
assertBitEquals(x, at.get());
}
}
/** get returns the last value lazySet in same thread */
public void testGetLazySet() {
AtomicDouble at = new AtomicDouble(1.0);
assertBitEquals(1.0, at.get());
for (double x : VALUES) {
at.lazySet(x);
assertBitEquals(x, at.get());
}
}
/** compareAndSet succeeds in changing value if equal to expected else fails */
public void testCompareAndSet() {
double prev = Math.E;
double unused = Math.E + Math.PI;
AtomicDouble at = new AtomicDouble(prev);
for (double x : VALUES) {
assertBitEquals(prev, at.get());
assertFalse(at.compareAndSet(unused, x));
assertBitEquals(prev, at.get());
assertTrue(at.compareAndSet(prev, x));
assertBitEquals(x, at.get());
prev = x;
}
}
/** compareAndSet in one thread enables another waiting for value to succeed */
public void testCompareAndSetInMultipleThreads() throws Exception {
AtomicDouble at = new AtomicDouble(1.0);
Thread t =
newStartedThread(
new CheckedRunnable() {
@Override
@SuppressWarnings("ThreadPriorityCheck") // doing our best to test for races
public void realRun() {
while (!at.compareAndSet(2.0, 3.0)) {
Thread.yield();
}
}
});
assertTrue(at.compareAndSet(1.0, 2.0));
awaitTermination(t);
assertBitEquals(3.0, at.get());
}
/** repeated weakCompareAndSet succeeds in changing value when equal to expected */
public void testWeakCompareAndSet() {
double prev = Math.E;
double unused = Math.E + Math.PI;
AtomicDouble at = new AtomicDouble(prev);
for (double x : VALUES) {
assertBitEquals(prev, at.get());
assertFalse(at.weakCompareAndSet(unused, x));
assertBitEquals(prev, at.get());
while (!at.weakCompareAndSet(prev, x)) {
;
}
assertBitEquals(x, at.get());
prev = x;
}
}
/** getAndSet returns previous value and sets to given value */
public void testGetAndSet() {
double prev = Math.E;
AtomicDouble at = new AtomicDouble(prev);
for (double x : VALUES) {
assertBitEquals(prev, at.getAndSet(x));
prev = x;
}
}
/** getAndAdd returns previous value and adds given value */
public void testGetAndAdd() {
for (double x : VALUES) {
for (double y : VALUES) {
AtomicDouble a = new AtomicDouble(x);
double z = a.getAndAdd(y);
assertBitEquals(x, z);
assertBitEquals(x + y, a.get());
}
}
}
/** addAndGet adds given value to current, and returns current value */
public void testAddAndGet() {
for (double x : VALUES) {
for (double y : VALUES) {
AtomicDouble a = new AtomicDouble(x);
double z = a.addAndGet(y);
assertBitEquals(x + y, z);
assertBitEquals(x + y, a.get());
}
}
}
/** a deserialized serialized atomic holds same value */
public void testSerialization() throws Exception {
AtomicDouble a = new AtomicDouble();
AtomicDouble b = serialClone(a);
assertNotSame(a, b);
a.set(-22.0);
AtomicDouble c = serialClone(a);
assertNotSame(b, c);
assertBitEquals(-22.0, a.get());
assertBitEquals(0.0, b.get());
assertBitEquals(-22.0, c.get());
for (double x : VALUES) {
AtomicDouble d = new AtomicDouble(x);
assertBitEquals(serialClone(d).get(), d.get());
}
}
/** toString returns current value */
public void testToString() {
AtomicDouble at = new AtomicDouble();
assertEquals("0.0", at.toString());
for (double x : VALUES) {
at.set(x);
assertEquals(Double.toString(x), at.toString());
}
}
/** intValue returns current value. */
public void testIntValue() {
AtomicDouble at = new AtomicDouble();
assertEquals(0, at.intValue());
for (double x : VALUES) {
at.set(x);
assertEquals((int) x, at.intValue());
}
}
/** longValue returns current value. */
public void testLongValue() {
AtomicDouble at = new AtomicDouble();
assertEquals(0L, at.longValue());
for (double x : VALUES) {
at.set(x);
assertEquals((long) x, at.longValue());
}
}
/** floatValue returns current value. */
public void testFloatValue() {
AtomicDouble at = new AtomicDouble();
assertEquals(0.0f, at.floatValue());
for (double x : VALUES) {
at.set(x);
assertEquals((float) x, at.floatValue());
}
}
/** doubleValue returns current value. */
public void testDoubleValue() {
AtomicDouble at = new AtomicDouble();
assertThat(at.doubleValue()).isEqualTo(0.0d);
for (double x : VALUES) {
at.set(x);
assertBitEquals(x, at.doubleValue());
}
}
/** compareAndSet treats +0.0 and -0.0 as distinct values */
public void testDistinctZeros() {
AtomicDouble at = new AtomicDouble(+0.0);
assertFalse(at.compareAndSet(-0.0, 7.0));
assertFalse(at.weakCompareAndSet(-0.0, 7.0));
assertBitEquals(+0.0, at.get());
assertTrue(at.compareAndSet(+0.0, -0.0));
assertBitEquals(-0.0, at.get());
assertFalse(at.compareAndSet(+0.0, 7.0));
assertFalse(at.weakCompareAndSet(+0.0, 7.0));
assertBitEquals(-0.0, at.get());
}
}
| AtomicDoubleTest |
java | spring-projects__spring-security | core/src/test/java/org/springframework/security/authorization/method/PostAuthorizeAuthorizationManagerTests.java | {
"start": 10347,
"end": 10638
} | class ____ implements InterfaceAnnotationsThree {
@PostAuthorize("hasRole('ADMIN')")
public void securedAdmin() {
}
public void securedUser() {
}
@Override
@PostAuthorize("hasRole('ADMIN')")
public void inheritedAnnotations() {
}
}
public static | ClassLevelAnnotations |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/inference/TypeTransformations.java | {
"start": 2014,
"end": 3460
} | class ____ {@link java.sql.Timestamp}/{@link java.sql.Time}/{@link java.sql.Date} if the
* original data type is TIMESTAMP/TIME/DATE.
*/
public static TypeTransformation timeToSqlTypes() {
Map<LogicalTypeRoot, Class<?>> conversions = new HashMap<>();
conversions.put(LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE, Timestamp.class);
conversions.put(LogicalTypeRoot.TIME_WITHOUT_TIME_ZONE, Time.class);
conversions.put(LogicalTypeRoot.DATE, Date.class);
return new ConversionClassTransformation(conversions);
}
/**
* Returns a type transformation that transforms LEGACY('RAW', ...) type to the RAW(..., ?)
* type.
*/
public static TypeTransformation legacyRawToTypeInfoRaw() {
return LegacyRawTypeTransformation.INSTANCE;
}
/** Returns a type transformation that transforms LEGACY(...) type to a non-legacy type. */
public static TypeTransformation legacyToNonLegacy() {
return LegacyToNonLegacyTransformation.INSTANCE;
}
/**
* Returns a type transformation that transforms data type to nullable data type but keeps other
* information unchanged.
*/
public static TypeTransformation toNullable() {
return DataType::nullable;
}
// --------------------------------------------------------------------------------------------
private TypeTransformations() {
// no instantiation
}
}
| is |
java | elastic__elasticsearch | x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/jwt/JwtAuthenticatorAccessTokenTypeTests.java | {
"start": 541,
"end": 1966
} | class ____ extends JwtAuthenticatorTests {
@Override
protected JwtRealmSettings.TokenType getTokenType() {
return JwtRealmSettings.TokenType.ACCESS_TOKEN;
}
public void testSubjectIsRequired() throws ParseException {
final IllegalArgumentException e = doTestSubjectIsRequired(buildJwtAuthenticator());
if (fallbackSub != null) {
assertThat(e.getMessage(), containsString("missing required string claim [" + fallbackSub + " (fallback of sub)]"));
}
}
public void testAccessTokenTypeMandatesAllowedSubjects() {
allowedSubject = null;
allowedSubjectPattern = null;
final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> buildJwtAuthenticator());
assertThat(
e.getCause().getMessage(),
containsString(
"One of either ["
+ RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECTS)
+ "] or ["
+ RealmSettings.getFullSettingKey(realmName, JwtRealmSettings.ALLOWED_SUBJECT_PATTERNS)
+ "] must be specified and not be empty."
)
);
}
public void testInvalidIssuerIsCheckedBeforeAlgorithm() throws ParseException {
doTestInvalidIssuerIsCheckedBeforeAlgorithm(buildJwtAuthenticator());
}
}
| JwtAuthenticatorAccessTokenTypeTests |
java | apache__logging-log4j2 | log4j-core-test/src/main/java/org/apache/logging/log4j/core/test/junit/AbstractExternalFileCleaner.java | {
"start": 1430,
"end": 6303
} | class ____ extends ExternalResource {
protected static final String CLEANER_MARKER = "CLEANER";
private static final int SLEEP_RETRY_MILLIS = 200;
private final boolean cleanAfter;
private final boolean cleanBefore;
private final Set<Path> files;
private final int maxTries;
private final PrintStream printStream;
public AbstractExternalFileCleaner(
final boolean before,
final boolean after,
final int maxTries,
final PrintStream logger,
final File... files) {
this.cleanBefore = before;
this.cleanAfter = after;
this.maxTries = maxTries;
this.files = new HashSet<>(files.length);
this.printStream = logger;
for (final File file : files) {
this.files.add(file.toPath());
}
}
public AbstractExternalFileCleaner(
final boolean before,
final boolean after,
final int maxTries,
final PrintStream logger,
final Path... files) {
this.cleanBefore = before;
this.cleanAfter = after;
this.maxTries = maxTries;
this.printStream = logger;
this.files = new HashSet<>(Arrays.asList(files));
}
public AbstractExternalFileCleaner(
final boolean before,
final boolean after,
final int maxTries,
final PrintStream logger,
final String... fileNames) {
this.cleanBefore = before;
this.cleanAfter = after;
this.maxTries = maxTries;
this.printStream = logger;
this.files = new HashSet<>(fileNames.length);
for (final String fileName : fileNames) {
this.files.add(Paths.get(fileName));
}
}
@Override
protected void after() {
if (cleanAfter()) {
this.clean();
}
}
@Override
protected void before() {
if (cleanBefore()) {
this.clean();
}
}
protected void clean() {
final Map<Path, IOException> failures = new HashMap<>();
// Clean and gather failures
for (final Path path : getPaths()) {
if (Files.exists(path)) {
for (int i = 0; i < getMaxTries(); i++) {
try {
if (clean(path, i)) {
if (failures.containsKey(path)) {
failures.remove(path);
}
break;
}
} catch (final IOException e) {
println(CLEANER_MARKER + ": Caught exception cleaning: " + this);
printStackTrace(e);
// We will try again.
failures.put(path, e);
}
try {
Thread.sleep(SLEEP_RETRY_MILLIS);
} catch (final InterruptedException ignored) {
// ignore
}
}
}
}
// Fail on failures
if (failures.size() > 0) {
final StringBuilder sb = new StringBuilder();
boolean first = true;
for (final Map.Entry<Path, IOException> failure : failures.entrySet()) {
failure.getValue().printStackTrace();
if (!first) {
sb.append(", ");
}
sb.append(failure.getKey()).append(" failed with ").append(failure.getValue());
first = false;
}
Assert.fail(sb.toString());
}
}
protected abstract boolean clean(Path path, int tryIndex) throws IOException;
public boolean cleanAfter() {
return cleanAfter;
}
public boolean cleanBefore() {
return cleanBefore;
}
public int getMaxTries() {
return maxTries;
}
public Set<Path> getPaths() {
return files;
}
public PrintStream getPrintStream() {
return printStream;
}
protected void printf(final String format, final Object... args) {
if (printStream != null) {
printStream.printf(format, args);
}
}
protected void println(final String msg) {
if (printStream != null) {
printStream.println(msg);
}
}
@SuppressFBWarnings("INFORMATION_EXPOSURE_THROUGH_AN_ERROR_MESSAGE")
protected void printStackTrace(final Throwable t) {
if (printStream != null) {
t.printStackTrace(printStream);
}
}
@Override
public String toString() {
return getClass().getSimpleName() + " [files=" + files + ", cleanAfter=" + cleanAfter + ", cleanBefore="
+ cleanBefore + "]";
}
}
| AbstractExternalFileCleaner |
java | apache__spark | sql/core/src/test/java/test/org/apache/spark/sql/connector/catalog/functions/JavaStrLen.java | {
"start": 1370,
"end": 2102
} | class ____ implements UnboundFunction {
private final BoundFunction fn;
public JavaStrLen(BoundFunction fn) {
this.fn = fn;
}
@Override
public String name() {
return "strlen";
}
@Override
public BoundFunction bind(StructType inputType) {
if (inputType.fields().length != 1) {
throw new UnsupportedOperationException("Expect exactly one argument");
}
if (inputType.fields()[0].dataType() instanceof StringType) {
return fn;
}
throw new UnsupportedOperationException("Expect StringType");
}
@Override
public String description() {
return "strlen: returns the length of the input string\n" +
" strlen(string) -> int";
}
private abstract static | JavaStrLen |
java | google__dagger | javatests/artifacts/hilt-android/simple/app/src/sharedTest/java/dagger/hilt/android/simple/AliasOfMultipleScopesTest.java | {
"start": 3625,
"end": 3917
} | class ____ extends Hilt_AliasOfMultipleScopesTest_TestActivity {
@Inject Provider<UnscopedDep> unscopedDep;
@Inject Provider<ActivityScopedDep> activityScopedDep;
@Inject Provider<AliasScopedDep> aliasScopedDep;
}
@EntryPoint
@InstallIn(SingletonComponent.class)
| TestActivity |
java | quarkusio__quarkus | extensions/funqy/funqy-amazon-lambda/deployment/src/main/java/io/quarkus/funqy/deployment/bindings/FunqyLambdaBuildStep.java | {
"start": 1115,
"end": 1246
} | class ____ {
public static final String FUNQY_AMAZON_LAMBDA = "funqy-amazon-lambda";
public static final | FunqyLambdaBuildStep |
java | google__dagger | javatests/dagger/functional/producers/optional/OptionalBindingComponentsAbsentTest.java | {
"start": 1004,
"end": 2012
} | class ____ {
private AbsentOptionalBindingComponent absent;
@Before
public void setUp() {
absent = DaggerOptionalBindingComponents_AbsentOptionalBindingComponent.create();
}
@Test
public void optional() throws Exception {
assertThat(absent.optionalInstance().get()).isAbsent();
}
@Test
public void optionalProducer() throws Exception {
assertThat(absent.optionalProducer().get()).isAbsent();
}
@Test
public void optionalProduced() throws Exception {
assertThat(absent.optionalProduced().get()).isAbsent();
}
@Test
public void qualifiedOptional() throws Exception {
assertThat(absent.qualifiedOptionalInstance().get()).isAbsent();
}
@Test
public void qualifiedOptionalProducer() throws Exception {
assertThat(absent.qualifiedOptionalProducer().get()).isAbsent();
}
@Test
public void qualifiedOptionalProduced() throws Exception {
assertThat(absent.qualifiedOptionalProduced().get()).isAbsent();
}
}
| OptionalBindingComponentsAbsentTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/cdi/converters/delayed/DelayedCdiHostedConverterTest.java | {
"start": 1451,
"end": 2882
} | class ____ {
@AfterEach
void tearDown(SessionFactoryScope factoryScope) {
factoryScope.dropData();
}
@Test
@ExtendWith(MonitorBean.Resetter.class )
@CdiContainer(beanClasses = {MonitorBean.class, ConverterBean.class})
@ServiceRegistry(
settings = @Setting(name=DELAY_CDI_ACCESS, value = "true"),
resolvableSettings = @ServiceRegistry.ResolvableSetting(
settingName = CDI_BEAN_MANAGER,
resolver = CdiContainerLinker.StandardResolver.class
)
)
@DomainModel(annotatedClasses = TheEntity.class)
@SessionFactory
public void testIt(CdiContainerScope containerScope, SessionFactoryScope factoryScope) {
// The CDI bean should _not_ have been built immediately...
assertFalse( MonitorBean.wasInstantiated() );
assertEquals( 0, MonitorBean.currentFromDbCount() );
assertEquals( 0, MonitorBean.currentToDbCount() );
factoryScope.inTransaction( (session) -> {
session.persist( new TheEntity( 1, "me", 5 ) );
} );
// The CDI bean should have been built on first use
assertTrue( MonitorBean.wasInstantiated() );
assertEquals( 0, MonitorBean.currentFromDbCount() );
assertEquals( 1, MonitorBean.currentToDbCount() );
factoryScope.inTransaction( (session) -> {
TheEntity it = session.find( TheEntity.class, 1 );
assertNotNull( it );
} );
assertEquals( 1, MonitorBean.currentFromDbCount() );
assertEquals( 1, MonitorBean.currentToDbCount() );
}
}
| DelayedCdiHostedConverterTest |
java | playframework__playframework | documentation/manual/working/javaGuide/main/upload/code/JavaFileUpload.java | {
"start": 1572,
"end": 5336
} | class ____
extends BodyParser.DelegatingMultipartFormDataBodyParser<File> {
@Inject
public MultipartFormDataWithFileBodyParser(
Materializer materializer,
play.api.http.HttpConfiguration config,
HttpErrorHandler errorHandler) {
super(
materializer,
config.parser().maxMemoryBuffer(), // Small buffer used for parsing the body
config.parser().maxDiskBuffer(), // Maximum allowed length of the request body
config.parser().allowEmptyFiles(),
errorHandler);
}
/** Creates a file part handler that uses a custom accumulator. */
@Override
public Function<Multipart.FileInfo, Accumulator<ByteString, FilePart<File>>>
createFilePartHandler() {
return (Multipart.FileInfo fileInfo) -> {
final String filename = fileInfo.fileName();
final String partname = fileInfo.partName();
final String contentType = fileInfo.contentType().getOrElse(null);
final File file = generateTempFile();
final String dispositionType = fileInfo.dispositionType();
final Sink<ByteString, CompletionStage<IOResult>> sink = FileIO.toPath(file.toPath());
return Accumulator.fromSink(
sink.mapMaterializedValue(
completionStage ->
completionStage.thenApplyAsync(
results ->
new Http.MultipartFormData.FilePart<>(
partname,
filename,
contentType,
file,
results.getCount(),
dispositionType))));
};
}
/** Generates a temp file directly without going through TemporaryFile. */
private File generateTempFile() {
try {
final Path path = Files.createTempFile("multipartBody", "tempFile");
return path.toFile();
} catch (IOException e) {
throw new IllegalStateException(e);
}
}
}
// #customfileparthandler
@Test
public void testCustomMultipart() throws IOException {
play.libs.Files.TemporaryFileCreator tfc = play.libs.Files.singletonTemporaryFileCreator();
Path tmpFile = Files.createTempFile("temp", "txt");
Files.write(tmpFile, "foo".getBytes());
Source<ByteString, ?> source = FileIO.fromPath(tmpFile);
Http.MultipartFormData.FilePart<Source<ByteString, ?>> dp =
new Http.MultipartFormData.FilePart<>(
"name", "filename", "text/plain", source, Files.size(tmpFile));
assertThat(
contentAsString(
call(
new javaguide.testhelpers.MockJavaAction(
instanceOf(JavaHandlerComponents.class)) {
@BodyParser.Of(MultipartFormDataWithFileBodyParser.class)
public Result uploadCustomMultiPart(Http.Request request) throws Exception {
final Http.MultipartFormData<File> formData =
request.body().asMultipartFormData();
final Http.MultipartFormData.FilePart<File> filePart =
formData.getFile("name");
final File file = filePart.getRef();
final long size = filePart.getFileSize();
Files.deleteIfExists(file.toPath());
return ok("Got: file size = " + size + "");
}
},
fakeRequest("POST", "/").bodyRaw(Collections.singletonList(dp), tfc, mat),
mat)))
.isEqualTo("Got: file size = 3");
}
}
| MultipartFormDataWithFileBodyParser |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/sql/ast/tree/predicate/FilterPredicate.java | {
"start": 744,
"end": 2597
} | class ____ implements Predicate {
private final List<FilterFragmentPredicate> fragments = new ArrayList<>();
// private List<FilterJdbcParameter> parameters;
public FilterPredicate() {
}
public void applyFragment(FilterFragmentPredicate predicate) {
fragments.add( predicate );
}
public void applyFragment(String processedFragment, Filter filter, List<String> parameterNames) {
applyFragment( new FilterFragmentPredicate( processedFragment, filter, parameterNames ) );
}
// public void applyParameter(FilterJdbcParameter parameter) {
// if ( parameters == null ) {
// parameters = new ArrayList<>();
// }
// parameters.add( parameter );
// }
public List<FilterFragmentPredicate> getFragments() {
return fragments;
}
// public List<FilterJdbcParameter> getParameters() {
// return parameters;
// }
@Override
public boolean isEmpty() {
return fragments.isEmpty();
}
@Override
public void accept(SqlAstWalker sqlTreeWalker) {
sqlTreeWalker.visitFilterPredicate( this );
}
@Override
public JdbcMappingContainer getExpressionType() {
return null;
}
private static List<FilterFragmentParameter> fragmentParameters(Filter filter, List<String> parameterNames) {
if ( CollectionHelper.isEmpty( parameterNames ) ) {
return null;
}
else {
final int parameterCount = parameterNames.size();
final List<FilterFragmentParameter> parameters = arrayList( parameterCount );
for ( int i = 0; i < parameterCount; i++ ) {
final String paramName = parameterNames.get( i );
final Object paramValue = filter.getParameterValue( paramName );
final var jdbcMapping = filter.getFilterDefinition().getParameterJdbcMapping( paramName );
parameters.add( new FilterFragmentParameter( filter.getName(), paramName, jdbcMapping, paramValue ) );
}
return parameters;
}
}
public static | FilterPredicate |
java | google__guava | android/guava/src/com/google/common/math/PairedStats.java | {
"start": 1544,
"end": 12880
} | class ____ implements Serializable {
private final Stats xStats;
private final Stats yStats;
private final double sumOfProductsOfDeltas;
/**
* Internal constructor. Users should use {@link PairedStatsAccumulator#snapshot}.
*
* <p>To ensure that the created instance obeys its contract, the parameters should satisfy the
* following constraints. This is the callers responsibility and is not enforced here.
*
* <ul>
* <li>Both {@code xStats} and {@code yStats} must have the same {@code count}.
* <li>If that {@code count} is 1, {@code sumOfProductsOfDeltas} must be exactly 0.0.
* <li>If that {@code count} is more than 1, {@code sumOfProductsOfDeltas} must be finite.
* </ul>
*/
PairedStats(Stats xStats, Stats yStats, double sumOfProductsOfDeltas) {
this.xStats = xStats;
this.yStats = yStats;
this.sumOfProductsOfDeltas = sumOfProductsOfDeltas;
}
/** Returns the number of pairs in the dataset. */
public long count() {
return xStats.count();
}
/** Returns the statistics on the {@code x} values alone. */
public Stats xStats() {
return xStats;
}
/** Returns the statistics on the {@code y} values alone. */
public Stats yStats() {
return yStats;
}
/**
* Returns the population covariance of the values. The count must be non-zero.
*
* <p>This is guaranteed to return zero if the dataset contains a single pair of finite values. It
* is not guaranteed to return zero when the dataset consists of the same pair of values multiple
* times, due to numerical errors.
*
* <h3>Non-finite values</h3>
*
* <p>If the dataset contains any non-finite values ({@link Double#POSITIVE_INFINITY}, {@link
* Double#NEGATIVE_INFINITY}, or {@link Double#NaN}) then the result is {@link Double#NaN}.
*
* @throws IllegalStateException if the dataset is empty
*/
public double populationCovariance() {
checkState(count() != 0);
return sumOfProductsOfDeltas / count();
}
/**
* Returns the sample covariance of the values. The count must be greater than one.
*
* <p>This is not guaranteed to return zero when the dataset consists of the same pair of values
* multiple times, due to numerical errors.
*
* <h3>Non-finite values</h3>
*
* <p>If the dataset contains any non-finite values ({@link Double#POSITIVE_INFINITY}, {@link
* Double#NEGATIVE_INFINITY}, or {@link Double#NaN}) then the result is {@link Double#NaN}.
*
* @throws IllegalStateException if the dataset is empty or contains a single pair of values
*/
public double sampleCovariance() {
checkState(count() > 1);
return sumOfProductsOfDeltas / (count() - 1);
}
/**
* Returns the <a href="http://mathworld.wolfram.com/CorrelationCoefficient.html">Pearson's or
* product-moment correlation coefficient</a> of the values. The count must greater than one, and
* the {@code x} and {@code y} values must both have non-zero population variance (i.e. {@code
* xStats().populationVariance() > 0.0 && yStats().populationVariance() > 0.0}). The result is not
* guaranteed to be exactly +/-1 even when the data are perfectly (anti-)correlated, due to
* numerical errors. However, it is guaranteed to be in the inclusive range [-1, +1].
*
* <h3>Non-finite values</h3>
*
* <p>If the dataset contains any non-finite values ({@link Double#POSITIVE_INFINITY}, {@link
* Double#NEGATIVE_INFINITY}, or {@link Double#NaN}) then the result is {@link Double#NaN}.
*
* @throws IllegalStateException if the dataset is empty or contains a single pair of values, or
* either the {@code x} and {@code y} dataset has zero population variance
*/
public double pearsonsCorrelationCoefficient() {
checkState(count() > 1);
if (isNaN(sumOfProductsOfDeltas)) {
return NaN;
}
double xSumOfSquaresOfDeltas = xStats().sumOfSquaresOfDeltas();
double ySumOfSquaresOfDeltas = yStats().sumOfSquaresOfDeltas();
checkState(xSumOfSquaresOfDeltas > 0.0);
checkState(ySumOfSquaresOfDeltas > 0.0);
// The product of two positive numbers can be zero if the multiplication underflowed. We
// force a positive value by effectively rounding up to MIN_VALUE.
double productOfSumsOfSquaresOfDeltas =
ensurePositive(xSumOfSquaresOfDeltas * ySumOfSquaresOfDeltas);
return ensureInUnitRange(sumOfProductsOfDeltas / Math.sqrt(productOfSumsOfSquaresOfDeltas));
}
/**
* Returns a linear transformation giving the best fit to the data according to <a
* href="http://mathworld.wolfram.com/LeastSquaresFitting.html">Ordinary Least Squares linear
* regression</a> of {@code y} as a function of {@code x}. The count must be greater than one, and
* either the {@code x} or {@code y} data must have a non-zero population variance (i.e. {@code
* xStats().populationVariance() > 0.0 || yStats().populationVariance() > 0.0}). The result is
* guaranteed to be horizontal if there is variance in the {@code x} data but not the {@code y}
* data, and vertical if there is variance in the {@code y} data but not the {@code x} data.
*
* <p>This fit minimizes the root-mean-square error in {@code y} as a function of {@code x}. This
* error is defined as the square root of the mean of the squares of the differences between the
* actual {@code y} values of the data and the values predicted by the fit for the {@code x}
* values (i.e. it is the square root of the mean of the squares of the vertical distances between
* the data points and the best fit line). For this fit, this error is a fraction {@code sqrt(1 -
* R*R)} of the population standard deviation of {@code y}, where {@code R} is the Pearson's
* correlation coefficient (as given by {@link #pearsonsCorrelationCoefficient()}).
*
* <p>The corresponding root-mean-square error in {@code x} as a function of {@code y} is a
* fraction {@code sqrt(1/(R*R) - 1)} of the population standard deviation of {@code x}. This fit
* does not normally minimize that error: to do that, you should swap the roles of {@code x} and
* {@code y}.
*
* <h3>Non-finite values</h3>
*
* <p>If the dataset contains any non-finite values ({@link Double#POSITIVE_INFINITY}, {@link
* Double#NEGATIVE_INFINITY}, or {@link Double#NaN}) then the result is {@link
* LinearTransformation#forNaN()}.
*
* @throws IllegalStateException if the dataset is empty or contains a single pair of values, or
* both the {@code x} and {@code y} dataset must have zero population variance
*/
public LinearTransformation leastSquaresFit() {
checkState(count() > 1);
if (isNaN(sumOfProductsOfDeltas)) {
return LinearTransformation.forNaN();
}
double xSumOfSquaresOfDeltas = xStats.sumOfSquaresOfDeltas();
if (xSumOfSquaresOfDeltas > 0.0) {
if (yStats.sumOfSquaresOfDeltas() > 0.0) {
return LinearTransformation.mapping(xStats.mean(), yStats.mean())
.withSlope(sumOfProductsOfDeltas / xSumOfSquaresOfDeltas);
} else {
return LinearTransformation.horizontal(yStats.mean());
}
} else {
checkState(yStats.sumOfSquaresOfDeltas() > 0.0);
return LinearTransformation.vertical(xStats.mean());
}
}
/**
* {@inheritDoc}
*
* <p><b>Note:</b> This tests exact equality of the calculated statistics, including the floating
* point values. Two instances are guaranteed to be considered equal if one is copied from the
* other using {@code second = new PairedStatsAccumulator().addAll(first).snapshot()}, if both
* were obtained by calling {@code snapshot()} on the same {@link PairedStatsAccumulator} without
* adding any values in between the two calls, or if one is obtained from the other after
* round-tripping through java serialization. However, floating point rounding errors mean that it
* may be false for some instances where the statistics are mathematically equal, including
* instances constructed from the same values in a different order... or (in the general case)
* even in the same order. (It is guaranteed to return true for instances constructed from the
* same values in the same order if {@code strictfp} is in effect, or if the system architecture
* guarantees {@code strictfp}-like semantics.)
*/
@Override
public boolean equals(@Nullable Object obj) {
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
PairedStats other = (PairedStats) obj;
return xStats.equals(other.xStats)
&& yStats.equals(other.yStats)
&& doubleToLongBits(sumOfProductsOfDeltas) == doubleToLongBits(other.sumOfProductsOfDeltas);
}
/**
* {@inheritDoc}
*
* <p><b>Note:</b> This hash code is consistent with exact equality of the calculated statistics,
* including the floating point values. See the note on {@link #equals} for details.
*/
@Override
public int hashCode() {
return Objects.hash(xStats, yStats, sumOfProductsOfDeltas);
}
@Override
public String toString() {
if (count() > 0) {
return MoreObjects.toStringHelper(this)
.add("xStats", xStats)
.add("yStats", yStats)
.add("populationCovariance", populationCovariance())
.toString();
} else {
return MoreObjects.toStringHelper(this)
.add("xStats", xStats)
.add("yStats", yStats)
.toString();
}
}
double sumOfProductsOfDeltas() {
return sumOfProductsOfDeltas;
}
private static double ensurePositive(double value) {
if (value > 0.0) {
return value;
} else {
return Double.MIN_VALUE;
}
}
private static double ensureInUnitRange(double value) {
if (value >= 1.0) {
return 1.0;
}
if (value <= -1.0) {
return -1.0;
}
return value;
}
// Serialization helpers
/** The size of byte array representation in bytes. */
private static final int BYTES = Stats.BYTES * 2 + Double.SIZE / Byte.SIZE;
/**
* Gets a byte array representation of this instance.
*
* <p><b>Note:</b> No guarantees are made regarding stability of the representation between
* versions.
*/
public byte[] toByteArray() {
ByteBuffer buffer = ByteBuffer.allocate(BYTES).order(ByteOrder.LITTLE_ENDIAN);
xStats.writeTo(buffer);
yStats.writeTo(buffer);
buffer.putDouble(sumOfProductsOfDeltas);
return buffer.array();
}
/**
* Creates a {@link PairedStats} instance from the given byte representation which was obtained by
* {@link #toByteArray}.
*
* <p><b>Note:</b> No guarantees are made regarding stability of the representation between
* versions.
*/
public static PairedStats fromByteArray(byte[] byteArray) {
checkNotNull(byteArray);
checkArgument(
byteArray.length == BYTES,
"Expected PairedStats.BYTES = %s, got %s",
BYTES,
byteArray.length);
ByteBuffer buffer = ByteBuffer.wrap(byteArray).order(ByteOrder.LITTLE_ENDIAN);
Stats xStats = Stats.readFrom(buffer);
Stats yStats = Stats.readFrom(buffer);
double sumOfProductsOfDeltas = buffer.getDouble();
return new PairedStats(xStats, yStats, sumOfProductsOfDeltas);
}
private static final long serialVersionUID = 0;
}
| PairedStats |
java | apache__commons-lang | src/test/java/org/apache/commons/lang3/ValidateTest.java | {
"start": 29382,
"end": 29981
} | class ____ {
@Test
void shouldNotThrowExceptionWhenValueIsInstanceOfClass() {
Validate.isInstanceOf(String.class, "hi");
}
@Test
void shouldThrowIllegalArgumentExceptionWithDefaultMessageWhenValueIsNotInstanceOfClass() {
final IllegalArgumentException ex = assertIllegalArgumentException(() -> Validate.isInstanceOf(List.class, "hi"));
assertEquals("Expected type: java.util.List, actual: java.lang.String", ex.getMessage());
}
}
}
@Nested
final | WithoutMessage |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/foreignkeys/sorting/A.java | {
"start": 384,
"end": 538
} | class ____ {
@Id
@GeneratedValue
private int id;
@ManyToOne(cascade = CascadeType.PERSIST)
B b;
public A() {
}
public A(B b) {
this.b = b;
}
}
| A |
java | apache__camel | core/camel-util/src/main/java/org/apache/camel/util/CastUtils.java | {
"start": 1230,
"end": 3673
} | class ____ {
private CastUtils() {
//utility class, never constructed
}
public static <T, U> Map<T, U> cast(Map<?, ?> p) {
return (Map<T, U>) p;
}
public static <T, U> Map<T, U> cast(Map<?, ?> p, Class<T> t, Class<U> u) {
return (Map<T, U>) p;
}
public static <T> Collection<T> cast(Collection<?> p) {
return (Collection<T>) p;
}
public static <T> Collection<T> cast(Collection<?> p, Class<T> cls) {
return (Collection<T>) p;
}
public static <T> List<T> cast(List<?> p) {
return (List<T>) p;
}
public static <T> List<T> cast(List<?> p, Class<T> cls) {
return (List<T>) p;
}
public static <T> Iterator<T> cast(Iterator<?> p) {
return (Iterator<T>) p;
}
public static <T> Iterator<T> cast(Iterator<?> p, Class<T> cls) {
return (Iterator<T>) p;
}
public static <T> Set<T> cast(Set<?> p) {
return (Set<T>) p;
}
public static <T> Set<T> cast(Set<?> p, Class<T> cls) {
return (Set<T>) p;
}
public static <T> Queue<T> cast(Queue<?> p) {
return (Queue<T>) p;
}
public static <T> Queue<T> cast(Queue<?> p, Class<T> cls) {
return (Queue<T>) p;
}
public static <T> Deque<T> cast(Deque<?> p) {
return (Deque<T>) p;
}
public static <T> Deque<T> cast(Deque<?> p, Class<T> cls) {
return (Deque<T>) p;
}
public static <T, U> Hashtable<T, U> cast(Hashtable<?, ?> p) {
return (Hashtable<T, U>) p;
}
public static <T, U> Hashtable<T, U> cast(Hashtable<?, ?> p, Class<T> pc, Class<U> uc) {
return (Hashtable<T, U>) p;
}
public static <T, U> Map.Entry<T, U> cast(Map.Entry<?, ?> p) {
return (Map.Entry<T, U>) p;
}
public static <T, U> Map.Entry<T, U> cast(Map.Entry<?, ?> p, Class<T> pc, Class<U> uc) {
return (Map.Entry<T, U>) p;
}
public static <T> Enumeration<T> cast(Enumeration<?> p) {
return (Enumeration<T>) p;
}
public static <T> NamingEnumeration<T> cast(NamingEnumeration<?> p) {
return (NamingEnumeration<T>) p;
}
public static <T> Class<T> cast(Class<?> p) {
return (Class<T>) p;
}
public static <T> Class<T> cast(Class<?> p, Class<T> cls) {
return (Class<T>) p;
}
public static <T> Future<T> cast(Future<?> p) {
return (Future<T>) p;
}
}
| CastUtils |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/jdk/CollectionDeserTest.java | {
"start": 861,
"end": 921
} | class ____ extends LinkedList<String> { }
static | CustomList |
java | redisson__redisson | redisson/src/test/java/org/redisson/executor/RedissonScheduledExecutorServiceTest.java | {
"start": 701,
"end": 1265
} | class ____ extends RedisDockerTest {
private static RedissonNode node;
@BeforeEach
public void before() throws IOException, InterruptedException {
Config config = createConfig();
RedissonNodeConfig nodeConfig = new RedissonNodeConfig(config);
nodeConfig.setExecutorServiceWorkers(Collections.singletonMap("test", 5));
node = RedissonNode.create(nodeConfig);
node.start();
}
@AfterEach
public void after() {
node.shutdown();
}
public static | RedissonScheduledExecutorServiceTest |
java | apache__kafka | clients/clients-integration-tests/src/test/java/org/apache/kafka/clients/consumer/PlaintextConsumerPollTest.java | {
"start": 4555,
"end": 18865
} | class ____ {
public static final int BROKER_COUNT = 3;
public static final double EPSILON = 0.1;
public static final long GROUP_MAX_SESSION_TIMEOUT_MS = 60000L;
private final ClusterInstance cluster;
private final String topic = "topic";
private final TopicPartition tp = new TopicPartition(topic, 0);
private final TopicPartition tp2 = new TopicPartition(topic, 1);
public PlaintextConsumerPollTest(ClusterInstance cluster) {
this.cluster = cluster;
}
@BeforeEach
public void setup() throws InterruptedException {
cluster.createTopic(topic, 2, (short) BROKER_COUNT);
}
@ClusterTest
public void testClassicConsumerMaxPollRecords() throws InterruptedException {
testMaxPollRecords(GroupProtocol.CLASSIC);
}
@ClusterTest
public void testAsyncConsumerMaxPollRecords() throws InterruptedException {
testMaxPollRecords(GroupProtocol.CONSUMER);
}
private void testMaxPollRecords(GroupProtocol groupProtocol) throws InterruptedException {
var maxPollRecords = 100;
var numRecords = 5000;
Map<String, Object> config = Map.of(
MAX_POLL_RECORDS_CONFIG, maxPollRecords,
GROUP_PROTOCOL_CONFIG, groupProtocol.name().toLowerCase(Locale.ROOT)
);
var startingTimestamp = System.currentTimeMillis();
sendRecords(cluster, tp, numRecords, startingTimestamp);
try (Consumer<byte[], byte[]> consumer = cluster.consumer(config)) {
consumer.assign(List.of(tp));
consumeAndVerifyRecords(
consumer,
tp,
numRecords,
maxPollRecords,
0,
0,
startingTimestamp,
-1
);
}
}
@ClusterTest
public void testClassicConsumerMaxPollIntervalMs() throws InterruptedException {
testMaxPollIntervalMs(Map.of(
MAX_POLL_INTERVAL_MS_CONFIG, 1000,
GROUP_PROTOCOL_CONFIG, GroupProtocol.CLASSIC.name().toLowerCase(Locale.ROOT),
HEARTBEAT_INTERVAL_MS_CONFIG, 500,
SESSION_TIMEOUT_MS_CONFIG, 2000
));
}
@ClusterTest
public void testAsyncConsumerMaxPollIntervalMs() throws InterruptedException {
testMaxPollIntervalMs(Map.of(
MAX_POLL_INTERVAL_MS_CONFIG, 1000,
GROUP_PROTOCOL_CONFIG, GroupProtocol.CONSUMER.name().toLowerCase(Locale.ROOT)
));
}
private void testMaxPollIntervalMs(Map<String, Object> config) throws InterruptedException {
try (Consumer<byte[], byte[]> consumer = cluster.consumer(config)) {
var listener = new TestConsumerReassignmentListener();
consumer.subscribe(List.of(topic), listener);
// rebalance to get the initial assignment
awaitRebalance(consumer, listener);
assertEquals(1, listener.callsToAssigned);
assertEquals(0, listener.callsToRevoked);
// after we extend longer than max.poll a rebalance should be triggered
// NOTE we need to have a relatively much larger value than max.poll to let heartbeat expired for sure
TimeUnit.MILLISECONDS.sleep(3000);
awaitRebalance(consumer, listener);
assertEquals(2, listener.callsToAssigned);
assertEquals(1, listener.callsToRevoked);
}
}
@ClusterTest
public void testClassicConsumerMaxPollIntervalMsDelayInRevocation() throws InterruptedException {
testMaxPollIntervalMsDelayInRevocation(Map.of(
MAX_POLL_INTERVAL_MS_CONFIG, 5000,
GROUP_PROTOCOL_CONFIG, GroupProtocol.CLASSIC.name().toLowerCase(Locale.ROOT),
HEARTBEAT_INTERVAL_MS_CONFIG, 500,
SESSION_TIMEOUT_MS_CONFIG, 1000,
ENABLE_AUTO_COMMIT_CONFIG, false
));
}
@ClusterTest
public void testAsyncConsumerMaxPollIntervalMsDelayInRevocation() throws InterruptedException {
testMaxPollIntervalMsDelayInRevocation(Map.of(
MAX_POLL_INTERVAL_MS_CONFIG, 5000,
GROUP_PROTOCOL_CONFIG, GroupProtocol.CONSUMER.name().toLowerCase(Locale.ROOT),
ENABLE_AUTO_COMMIT_CONFIG, false
));
}
private void testMaxPollIntervalMsDelayInRevocation(Map<String, Object> config) throws InterruptedException {
var commitCompleted = new AtomicBoolean(false);
var committedPosition = new AtomicLong(-1);
try (Consumer<byte[], byte[]> consumer = cluster.consumer(config)) {
var listener = new TestConsumerReassignmentListener() {
@Override
public void onPartitionsLost(Collection<TopicPartition> partitions) {
// no op
}
@Override
public void onPartitionsRevoked(Collection<TopicPartition> partitions) {
if (!partitions.isEmpty() && partitions.contains(tp)) {
// on the second rebalance (after we have joined the group initially), sleep longer
// than session timeout and then try a commit. We should still be in the group,
// so the commit should succeed
Utils.sleep(1500);
committedPosition.set(consumer.position(tp));
var offsets = Map.of(tp, new OffsetAndMetadata(committedPosition.get()));
consumer.commitSync(offsets);
commitCompleted.set(true);
}
super.onPartitionsRevoked(partitions);
}
};
consumer.subscribe(List.of(topic), listener);
// rebalance to get the initial assignment
awaitRebalance(consumer, listener);
// force a rebalance to trigger an invocation of the revocation callback while in the group
consumer.subscribe(List.of("otherTopic"), listener);
awaitRebalance(consumer, listener);
assertEquals(0, committedPosition.get());
assertTrue(commitCompleted.get());
}
}
@ClusterTest
public void testClassicConsumerMaxPollIntervalMsDelayInAssignment() throws InterruptedException {
testMaxPollIntervalMsDelayInAssignment(Map.of(
MAX_POLL_INTERVAL_MS_CONFIG, 5000,
GROUP_PROTOCOL_CONFIG, GroupProtocol.CLASSIC.name().toLowerCase(Locale.ROOT),
HEARTBEAT_INTERVAL_MS_CONFIG, 500,
SESSION_TIMEOUT_MS_CONFIG, 1000,
ENABLE_AUTO_COMMIT_CONFIG, false
));
}
@ClusterTest
public void testAsyncConsumerMaxPollIntervalMsDelayInAssignment() throws InterruptedException {
testMaxPollIntervalMsDelayInAssignment(Map.of(
MAX_POLL_INTERVAL_MS_CONFIG, 5000,
GROUP_PROTOCOL_CONFIG, GroupProtocol.CONSUMER.name().toLowerCase(Locale.ROOT),
ENABLE_AUTO_COMMIT_CONFIG, false
));
}
private void testMaxPollIntervalMsDelayInAssignment(Map<String, Object> config) throws InterruptedException {
try (Consumer<byte[], byte[]> consumer = cluster.consumer(config)) {
var listener = new TestConsumerReassignmentListener() {
@Override
public void onPartitionsAssigned(Collection<TopicPartition> partitions) {
// sleep longer than the session timeout, we should still be in the group after invocation
Utils.sleep(1500);
super.onPartitionsAssigned(partitions);
}
};
consumer.subscribe(List.of(topic), listener);
// rebalance to get the initial assignment
awaitRebalance(consumer, listener);
// We should still be in the group after this invocation
ensureNoRebalance(consumer, listener);
}
}
@ClusterTest
public void testClassicConsumerMaxPollIntervalMsShorterThanPollTimeout() throws InterruptedException {
testMaxPollIntervalMsShorterThanPollTimeout(Map.of(
MAX_POLL_INTERVAL_MS_CONFIG, 1000,
GROUP_PROTOCOL_CONFIG, GroupProtocol.CLASSIC.name().toLowerCase(Locale.ROOT),
HEARTBEAT_INTERVAL_MS_CONFIG, 500
));
}
@ClusterTest
public void testAsyncConsumerMaxPollIntervalMsShorterThanPollTimeout() throws InterruptedException {
testMaxPollIntervalMsShorterThanPollTimeout(Map.of(
MAX_POLL_INTERVAL_MS_CONFIG, 1000,
GROUP_PROTOCOL_CONFIG, GroupProtocol.CONSUMER.name().toLowerCase(Locale.ROOT)
));
}
private void testMaxPollIntervalMsShorterThanPollTimeout(Map<String, Object> config) throws InterruptedException {
try (Consumer<byte[], byte[]> consumer = cluster.consumer(config)) {
var listener = new TestConsumerReassignmentListener();
consumer.subscribe(List.of(topic), listener);
// rebalance to get the initial assignment
awaitRebalance(consumer, listener);
var callsToAssignedAfterFirstRebalance = listener.callsToAssigned;
consumer.poll(Duration.ofMillis(2000));
// If the poll above times out, it would trigger a rebalance.
// Leave some time for the rebalance to happen and check for the rebalance event.
consumer.poll(Duration.ofMillis(500));
consumer.poll(Duration.ofMillis(500));
assertEquals(callsToAssignedAfterFirstRebalance, listener.callsToAssigned);
}
}
@ClusterTest
public void testClassicConsumerPerPartitionLeadWithMaxPollRecords() throws InterruptedException {
testPerPartitionLeadWithMaxPollRecords(GroupProtocol.CLASSIC);
}
@ClusterTest
public void testAsyncConsumerPerPartitionLeadWithMaxPollRecords() throws InterruptedException {
testPerPartitionLeadWithMaxPollRecords(GroupProtocol.CONSUMER);
}
private void testPerPartitionLeadWithMaxPollRecords(GroupProtocol groupProtocol) throws InterruptedException {
int numMessages = 1000;
int maxPollRecords = 10;
Map<String, Object> config = Map.of(
GROUP_PROTOCOL_CONFIG, groupProtocol.name().toLowerCase(Locale.ROOT),
GROUP_ID_CONFIG, "testPerPartitionLeadWithMaxPollRecords",
CLIENT_ID_CONFIG, "testPerPartitionLeadWithMaxPollRecords",
MAX_POLL_RECORDS_CONFIG, maxPollRecords
);
sendRecords(cluster, tp, numMessages);
try (Consumer<byte[], byte[]> consumer = cluster.consumer(config)) {
consumer.assign(List.of(tp));
awaitNonEmptyRecords(consumer, tp, 100);
var tags = Map.of(
"client-id", "testPerPartitionLeadWithMaxPollRecords",
"topic", tp.topic(),
"partition", String.valueOf(tp.partition())
);
var lead = consumer.metrics()
.get(new MetricName("records-lead", "consumer-fetch-manager-metrics", "", tags));
assertEquals(maxPollRecords, (Double) lead.metricValue(), "The lead should be " + maxPollRecords);
}
}
@ClusterTest
public void testClassicConsumerPerPartitionLagWithMaxPollRecords() throws InterruptedException {
testPerPartitionLagWithMaxPollRecords(GroupProtocol.CLASSIC);
}
@ClusterTest
public void testAsyncConsumerPerPartitionLagWithMaxPollRecords() throws InterruptedException {
testPerPartitionLagWithMaxPollRecords(GroupProtocol.CONSUMER);
}
private void testPerPartitionLagWithMaxPollRecords(GroupProtocol groupProtocol) throws InterruptedException {
int numMessages = 1000;
int maxPollRecords = 10;
Map<String, Object> config = Map.of(
GROUP_PROTOCOL_CONFIG, groupProtocol.name().toLowerCase(Locale.ROOT),
GROUP_ID_CONFIG, "testPerPartitionLagWithMaxPollRecords",
CLIENT_ID_CONFIG, "testPerPartitionLagWithMaxPollRecords",
MAX_POLL_RECORDS_CONFIG, maxPollRecords
);
sendRecords(cluster, tp, numMessages);
try (Consumer<byte[], byte[]> consumer = cluster.consumer(config)) {
consumer.assign(List.of(tp));
var records = awaitNonEmptyRecords(consumer, tp, 100);
var tags = Map.of(
"client-id", "testPerPartitionLagWithMaxPollRecords",
"topic", tp.topic(),
"partition", String.valueOf(tp.partition())
);
var lag = consumer.metrics()
.get(new MetricName("records-lag", "consumer-fetch-manager-metrics", "", tags));
// Count the number of records received
var recordCount = records.count();
assertEquals(
numMessages - recordCount,
(Double) lag.metricValue(),
EPSILON,
"The lag should be " + (numMessages - recordCount)
);
}
}
@ClusterTest
public void runCloseClassicConsumerMultiConsumerSessionTimeoutTest() throws InterruptedException {
runMultiConsumerSessionTimeoutTest(GroupProtocol.CLASSIC, true);
}
@ClusterTest
public void runClassicConsumerMultiConsumerSessionTimeoutTest() throws InterruptedException {
runMultiConsumerSessionTimeoutTest(GroupProtocol.CLASSIC, false);
}
@ClusterTest
public void runCloseAsyncConsumerMultiConsumerSessionTimeoutTest() throws InterruptedException {
runMultiConsumerSessionTimeoutTest(GroupProtocol.CONSUMER, true);
}
@ClusterTest
public void runAsyncConsumerMultiConsumerSessionTimeoutTest() throws InterruptedException {
runMultiConsumerSessionTimeoutTest(GroupProtocol.CONSUMER, false);
}
private void runMultiConsumerSessionTimeoutTest(GroupProtocol groupProtocol, boolean closeConsumer) throws InterruptedException {
String topic1 = "topic1";
int partitions = 6;
Map<String, Object> config = Map.of(
GROUP_PROTOCOL_CONFIG, groupProtocol.name().toLowerCase(Locale.ROOT),
GROUP_ID_CONFIG, "test-group",
MAX_POLL_INTERVAL_MS_CONFIG, 100
);
// use consumers defined in this | PlaintextConsumerPollTest |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/model/internal/AnnotatedClassType.java | {
"start": 214,
"end": 281
} | class ____ give its type
*
* @author Emmanuel Bernard
*/
public | will |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/search/SearchServiceSingleNodeTests.java | {
"start": 9579,
"end": 10152
} | class ____ extends ESSingleNodeTestCase {
private static final int SEARCH_POOL_SIZE = 10;
@Override
protected boolean resetNodeAfterTest() {
return true;
}
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return pluginList(
FailOnRewriteQueryPlugin.class,
CustomScriptPlugin.class,
ReaderWrapperCountPlugin.class,
InternalOrPrivateSettingsPlugin.class,
MockSearchService.TestPlugin.class
);
}
public static | SearchServiceSingleNodeTests |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/dynamic/ReactiveTypeAdapters.java | {
"start": 28898,
"end": 29358
} | enum ____
implements Function<io.reactivex.rxjava3.core.Observable<?>, io.reactivex.rxjava3.core.Maybe<?>> {
INSTANCE;
@Override
public io.reactivex.rxjava3.core.Maybe<?> apply(io.reactivex.rxjava3.core.Observable<?> source) {
return source.singleElement();
}
}
/**
* An adapter {@link Function} to adopt a {@link Single} to {@link Single}.
*/
public | RxJava3ObservableToMaybeAdapter |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/beans/factory/xml/XmlBeanFactoryTestTypes.java | {
"start": 6703,
"end": 6936
} | class ____ {
private Object obj;
public MapAndSet(Map<?, ?> map) {
this.obj = map;
}
public MapAndSet(Set<?> set) {
this.obj = set;
}
public Object getObject() {
return obj;
}
}
/**
* @author Rod Johnson
*/
| MapAndSet |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java | {
"start": 10737,
"end": 16505
} | class ____ extends LuceneSourceOperator.LimitingCollector {
private Scorable scorable;
@Override
public void setScorer(Scorable scorer) {
this.scorable = scorer;
}
@Override
public void collect(int doc) throws IOException {
super.collect(doc);
scoreBuilder.appendDouble(scorable.score());
}
}
@Override
public boolean isFinished() {
return doneCollecting || limiter.remaining() == 0;
}
@Override
public void finish() {
doneCollecting = true;
}
@Override
public Page getCheckedOutput() throws IOException {
if (isFinished()) {
assert currentPagePos == 0 : currentPagePos;
return null;
}
long start = System.nanoTime();
try {
final LuceneScorer scorer = getCurrentOrLoadNextScorer();
if (scorer == null) {
return null;
}
final int remainingDocsStart = remainingDocs = limiter.remaining();
try {
scorer.scoreNextRange(
leafCollector,
scorer.leafReaderContext().reader().getLiveDocs(),
// Note: if (maxPageSize - currentPagePos) is a small "remaining" interval, this could lead to slow collection with a
// highly selective filter. Having a large "enough" difference between max- and minPageSize (and thus currentPagePos)
// alleviates this issue.
maxPageSize - currentPagePos
);
} catch (CollectionTerminatedException ex) {
// The leaf collector terminated the execution
doneCollecting = true;
scorer.markAsDone();
}
final int collectedDocs = remainingDocsStart - remainingDocs;
final int discardedDocs = collectedDocs - limiter.tryAccumulateHits(collectedDocs);
Page page = null;
if (currentPagePos >= minPageSize || scorer.isDone() || (remainingDocs = limiter.remaining()) == 0) {
IntVector shard = null;
IntVector leaf = null;
IntVector docs = null;
int metadataBlocks = numMetadataBlocks();
Block[] blocks = new Block[1 + metadataBlocks + scorer.tags().size()];
currentPagePos -= discardedDocs;
try {
int shardId = scorer.shardContext().index();
shard = blockFactory.newConstantIntVector(shardId, currentPagePos);
leaf = blockFactory.newConstantIntVector(scorer.leafReaderContext().ord, currentPagePos);
docs = buildDocsVector(currentPagePos);
docsBuilder = blockFactory.newIntVectorBuilder(Math.min(remainingDocs, maxPageSize));
int b = 0;
blocks[b++] = new DocVector(refCounteds, shard, leaf, docs, true).asBlock();
shard = null;
leaf = null;
docs = null;
buildMetadataBlocks(blocks, b, currentPagePos);
b += metadataBlocks;
for (Object e : scorer.tags()) {
blocks[b++] = BlockUtils.constantBlock(blockFactory, e, currentPagePos);
}
page = new Page(currentPagePos, blocks);
} finally {
if (page == null) {
Releasables.closeExpectNoException(shard, leaf, docs, Releasables.wrap(blocks));
}
}
currentPagePos = 0;
}
return page;
} finally {
processingNanos += System.nanoTime() - start;
}
}
private IntVector buildDocsVector(int upToPositions) {
final IntVector docs = docsBuilder.build();
assert docs.getPositionCount() >= upToPositions : docs.getPositionCount() + " < " + upToPositions;
if (docs.getPositionCount() == upToPositions) {
return docs;
}
try (docs) {
try (var slice = blockFactory.newIntVectorFixedBuilder(upToPositions)) {
for (int i = 0; i < upToPositions; i++) {
slice.appendInt(docs.getInt(i));
}
return slice.build();
}
}
}
private DoubleVector buildScoresVector(int upToPositions) {
final DoubleVector scores = scoreBuilder.build();
assert scores.getPositionCount() >= upToPositions : scores.getPositionCount() + " < " + upToPositions;
if (scores.getPositionCount() == upToPositions) {
return scores;
}
try (scores) {
try (var slice = blockFactory.newDoubleVectorBuilder(upToPositions)) {
for (int i = 0; i < upToPositions; i++) {
slice.appendDouble(scores.getDouble(i));
}
return slice.build();
}
}
}
protected int numMetadataBlocks() {
return scoreBuilder != null ? 1 : 0;
}
protected void buildMetadataBlocks(Block[] blocks, int offset, int currentPagePos) {
if (scoreBuilder != null) {
blocks[offset] = buildScoresVector(currentPagePos).asBlock();
scoreBuilder = blockFactory.newDoubleVectorBuilder(Math.min(remainingDocs, maxPageSize));
}
}
@Override
public void additionalClose() {
Releasables.close(docsBuilder, scoreBuilder);
}
@Override
protected void describe(StringBuilder sb) {
sb.append(", remainingDocs = ").append(remainingDocs);
}
}
| ScoringCollector |
java | apache__camel | components/camel-ldap/src/generated/java/org/apache/camel/component/ldap/LdapComponentConfigurer.java | {
"start": 731,
"end": 2287
} | class ____ extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
LdapComponent target = (LdapComponent) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "autowiredenabled":
case "autowiredEnabled": target.setAutowiredEnabled(property(camelContext, boolean.class, value)); return true;
case "lazystartproducer":
case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
default: return false;
}
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "autowiredenabled":
case "autowiredEnabled": return boolean.class;
case "lazystartproducer":
case "lazyStartProducer": return boolean.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
LdapComponent target = (LdapComponent) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "autowiredenabled":
case "autowiredEnabled": return target.isAutowiredEnabled();
case "lazystartproducer":
case "lazyStartProducer": return target.isLazyStartProducer();
default: return null;
}
}
}
| LdapComponentConfigurer |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ImpossibleNullComparisonTest.java | {
"start": 21300,
"end": 21831
} | class ____ {
public void o(int i, Integer boxed) {
// BUG: Diagnostic contains:
assertThat(i).isNotNull();
assertThat(boxed).isNotNull();
}
}
""")
.doTest();
}
@Test
public void valueOf() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import static com.google.common.truth.Truth.assertThat;
import java.util.concurrent.TimeUnit;
public | Test |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/style/SimpleValueStylerTests.java | {
"start": 2974,
"end": 4439
} | class ____ {
private final SimpleValueStyler styler = new SimpleValueStyler();
@Test
void styleClass() {
assertThat(styler.style(String.class)).isEqualTo("java.lang.String");
assertThat(styler.style(getClass())).isEqualTo(getClass().getCanonicalName());
assertThat(styler.style(String[].class)).isEqualTo("java.lang.String[]");
assertThat(styler.style(int[][].class)).isEqualTo("int[][]");
}
@Test
void styleMethod() throws NoSuchMethodException {
assertThat(styler.style(String.class.getMethod("toString"))).isEqualTo("toString()");
assertThat(styler.style(String.class.getMethod("getBytes", Charset.class))).isEqualTo("getBytes(Charset)");
}
@Test
void styleClassMap() {
Map<String, Class<?>> map = new LinkedHashMap<>() {{
put("key1", Integer.class);
put("key2", DefaultClassAndMethodStylers.class);
}};
assertThat(styler.style(map)).isEqualTo(
"{\"key1\" -> java.lang.Integer, \"key2\" -> %s}",
DefaultClassAndMethodStylers.class.getCanonicalName());
}
@Test
void styleClassList() {
assertThat(styler.style(List.of(Integer.class, String.class)))
.isEqualTo("[java.lang.Integer, java.lang.String]");
}
@Test
void styleClassArray() {
Class<?>[] array = new Class<?>[] { Integer.class, getClass() };
assertThat(styler.style(array))
.isEqualTo("[%s, %s]", Integer.class.getCanonicalName(), getClass().getCanonicalName());
}
}
@Nested
| DefaultClassAndMethodStylers |
java | eclipse-vertx__vert.x | vertx-core/src/test/java/io/vertx/tests/resource/ResourceManagerTest.java | {
"start": 1052,
"end": 1438
} | class ____ extends VertxTestBase {
private static final Object TEST_KEY = new Object();
public Future<Resource> getResource(ContextInternal ctx, ResourceManager<Object, TestResource> mgr, Function<Object, TestResource> provider, Object key) {
return mgr.withResourceAsync(key, provider, (endpoint, created) -> endpoint.acquire(ctx, 0L));
}
static abstract | ResourceManagerTest |
java | google__guava | android/guava-testlib/src/com/google/common/testing/EquivalenceTester.java | {
"start": 1882,
"end": 2037
} | class ____ having
* to explicitly add another equivalence group.
*
* @author Gregory Kick
* @since 10.0
*/
@GwtCompatible
@NullMarked
public final | without |
java | spring-projects__spring-framework | spring-jdbc/src/main/java/org/springframework/jdbc/core/ResultSetExtractor.java | {
"start": 1985,
"end": 2689
} | interface ____<T extends @Nullable Object> {
/**
* Implementations must implement this method to process the entire ResultSet.
* @param rs the ResultSet to extract data from. Implementations should
* not close this: it will be closed by the calling JdbcTemplate.
* @return an arbitrary result object, or {@code null} if none
* (the extractor will typically be stateful in the latter case).
* @throws SQLException if an SQLException is encountered getting column
* values or navigating (that is, there's no need to catch SQLException)
* @throws DataAccessException in case of custom exceptions
*/
T extractData(ResultSet rs) throws SQLException, DataAccessException;
}
| ResultSetExtractor |
java | netty__netty | codec-http3/src/main/java/io/netty/handler/codec/http3/Http3Settings.java | {
"start": 1148,
"end": 1596
} | class ____ type-safe accessors for standard HTTP/3 settings such as:
* <ul>
* <li>{@code QPACK_MAX_TABLE_CAPACITY} (0x1)</li>
* <li>{@code MAX_FIELD_SECTION_SIZE} (0x6)</li>
* <li>{@code QPACK_BLOCKED_STREAMS} (0x7)</li>
* <li>{@code ENABLE_CONNECT_PROTOCOL} (0x8)</li>
* </ul>
*
* Non-standard settings are permitted as long as they use positive values.
* Reserved HTTP/2 setting identifiers are rejected.
*
*/
public final | provides |
java | quarkusio__quarkus | extensions/websockets/server/deployment/src/test/java/io/quarkus/websockets/test/EchoWebSocket.java | {
"start": 184,
"end": 341
} | class ____ {
@Inject
EchoService echoService;
@OnMessage
String echo(String msg) {
return echoService.echo(msg);
}
}
| EchoWebSocket |
java | spring-projects__spring-boot | core/spring-boot-test/src/test/java/org/springframework/boot/test/context/SpringBootContextLoaderTests.java | {
"start": 13736,
"end": 13868
} | class ____ {
}
@SpringBootTest(properties = { "key=myValue", "otherKey=otherValue" }, classes = Config.class)
static | OverrideConfig |
java | alibaba__nacos | client/src/test/java/com/alibaba/nacos/client/utils/ParamUtilTest.java | {
"start": 1085,
"end": 5009
} | class ____ {
private int defaultConnectTimeout;
private int defaultReadTimeout;
private double defaultPerTaskConfigSize;
@BeforeEach
void before() {
defaultConnectTimeout = 1000;
defaultReadTimeout = 3000;
defaultPerTaskConfigSize = 3000.0;
}
@AfterEach
void after() {
ParamUtil.setConnectTimeout(defaultConnectTimeout);
ParamUtil.setReadTimeout(defaultReadTimeout);
ParamUtil.setPerTaskConfigSize(defaultPerTaskConfigSize);
System.clearProperty("NACOS.CONNECT.TIMEOUT");
System.clearProperty("NACOS_READ_TIMEOUT");
System.clearProperty("PER_TASK_CONFIG_SIZE");
System.clearProperty(PropertyKeyConst.SystemEnv.ALIBABA_ALIWARE_ENDPOINT_URL);
}
@Test
void testSetConnectTimeout() {
int defaultVal = ParamUtil.getConnectTimeout();
assertEquals(defaultConnectTimeout, defaultVal);
int expect = 50;
ParamUtil.setConnectTimeout(expect);
assertEquals(expect, ParamUtil.getConnectTimeout());
}
@Test
void testSetReadTimeout() {
int defaultVal = ParamUtil.getReadTimeout();
assertEquals(defaultReadTimeout, defaultVal);
int expect = 3000;
ParamUtil.setReadTimeout(expect);
assertEquals(expect, ParamUtil.getReadTimeout());
}
@Test
void testGetPerTaskConfigSize() {
double defaultVal = ParamUtil.getPerTaskConfigSize();
assertEquals(defaultPerTaskConfigSize, defaultVal, 0.01);
double expect = 50.0;
ParamUtil.setPerTaskConfigSize(expect);
assertEquals(expect, ParamUtil.getPerTaskConfigSize(), 0.01);
}
@Test
void testInitConnectionTimeoutWithException() throws Throwable {
assertThrows(IllegalArgumentException.class, () -> {
Method method = ParamUtil.class.getDeclaredMethod("initConnectionTimeout");
method.setAccessible(true);
System.setProperty("NACOS.CONNECT.TIMEOUT", "test");
try {
method.invoke(null);
} catch (InvocationTargetException e) {
throw e.getCause();
}
});
}
@Test
void testInitReadTimeoutWithException() throws Throwable {
assertThrows(IllegalArgumentException.class, () -> {
Method method = ParamUtil.class.getDeclaredMethod("initReadTimeout");
method.setAccessible(true);
System.setProperty("NACOS.READ.TIMEOUT", "test");
try {
method.invoke(null);
} catch (InvocationTargetException e) {
throw e.getCause();
}
});
}
@Test
void testInitPerTaskConfigSizeWithException() throws Throwable {
assertThrows(IllegalArgumentException.class, () -> {
Method method = ParamUtil.class.getDeclaredMethod("initPerTaskConfigSize");
method.setAccessible(true);
System.setProperty("PER_TASK_CONFIG_SIZE", "test");
try {
method.invoke(null);
} catch (InvocationTargetException e) {
throw e.getCause();
}
});
}
@Test
void testSimplyEnvNameIfOverLimit() {
StringBuilder envNameOverLimitBuilder = new StringBuilder("test");
for (int i = 0; i < 50; i++) {
envNameOverLimitBuilder.append(i);
}
String envName = envNameOverLimitBuilder.toString();
String actual = ParamUtil.simplyEnvNameIfOverLimit(envName);
String expect = envName.substring(0, 50) + MD5Utils.md5Hex(envName, "UTF-8");
assertEquals(expect, actual);
}
@Test
void testSimplyEnvNameNotOverLimit() {
String expect = "test";
assertEquals(expect, ParamUtil.simplyEnvNameIfOverLimit(expect));
}
} | ParamUtilTest |
java | apache__flink | flink-metrics/flink-metrics-core/src/main/java/org/apache/flink/events/Event.java | {
"start": 1063,
"end": 1881
} | interface ____ {
static EventBuilder builder(Class<?> classScope, String name) {
return new EventBuilder(classScope, name);
}
/** The timestamp for when the event happened or was observed, in milliseconds. */
long getObservedTsMillis();
/** The name of the event. */
String getName();
/** The scope of the event, typically the fully qualified name of the emitting class. */
String getClassScope();
/** Textual description of the event. */
String getBody();
/** Severity of the event, e.g. DEBUG, INFO, ... */
String getSeverity();
/**
* Additional attributes for the event. Currently, returned values can be of type String, Long
* or Double, however more types can be added in the future.
*/
Map<String, Object> getAttributes();
}
| Event |
java | apache__maven | compat/maven-plugin-api/src/main/java/org/apache/maven/plugin/AbstractMojoExecutionException.java | {
"start": 888,
"end": 1810
} | class ____ extends Exception {
protected Object source;
protected String longMessage;
public AbstractMojoExecutionException(String message) {
super(message);
}
public AbstractMojoExecutionException(String message, Throwable cause) {
super(message, cause);
}
/**
* Constructs a new {@code AbstractMojoExecutionException} exception wrapping an underlying {@code Throwable}.
*
* @param cause the cause which is saved for later retrieval by the {@link #getCause()} method.
* A {@code null} value is permitted, and indicates that the cause is nonexistent or unknown.
* @since 3.8.3
*/
public AbstractMojoExecutionException(Throwable cause) {
super(cause);
}
public String getLongMessage() {
return longMessage;
}
public Object getSource() {
return source;
}
}
| AbstractMojoExecutionException |
java | quarkusio__quarkus | integration-tests/istio/maven-invoker-way/src/it/xds-grpc/src/main/java/org/acme/HelloEndpoint.java | {
"start": 383,
"end": 1007
} | class ____ {
private static final Logger log = LoggerFactory.getLogger(HelloEndpoint.class);
@GrpcClient
GreeterGrpc.GreeterBlockingStub stub;
@GET
@Produces(MediaType.TEXT_PLAIN)
public String hello() {
HelloRequest request = HelloRequest.newBuilder().setName("XDS gRPC").build();
HelloReply response;
try {
response = stub.sayHello(request);
} catch (StatusRuntimeException e) {
String msg = "RPC failed: " + e.getStatus();
log.warn(msg);
return msg;
}
return response.getMessage();
}
}
| HelloEndpoint |
java | elastic__elasticsearch | libs/plugin-scanner/src/test/java/org/elasticsearch/plugin/scanner/ClassReadersTests.java | {
"start": 971,
"end": 2195
} | class ____ extends ESTestCase {
public void testModuleInfoIsNotReturnedAsAClassFromJar() throws IOException {
final Path tmp = createTempDir(getTestName());
final Path dirWithJar = tmp.resolve("jars-dir");
Files.createDirectories(dirWithJar);
Path jar = dirWithJar.resolve("api.jar");
JarUtils.createJarWithEntries(jar, Map.of("module-info.class", InMemoryJavaCompiler.compile("module-info", """
module p {}
""")));
List<ClassReader> classReaders = ClassReaders.ofPaths(Stream.of(jar));
org.hamcrest.MatcherAssert.assertThat(classReaders, Matchers.empty());
// aggressively delete the jar dir, so that any leaked filed handles fail this specific test on windows
IOUtils.rm(tmp);
}
public void testTwoClassesInAStreamFromJar() throws IOException {
final Path tmp = createTempDir(getTestName());
final Path dirWithJar = tmp.resolve("jars-dir");
Files.createDirectories(dirWithJar);
Path jar = dirWithJar.resolve("api.jar");
JarUtils.createJarWithEntries(jar, Map.of("p/A.class", InMemoryJavaCompiler.compile("p.A", """
package p;
public | ClassReadersTests |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/producer/async/AsyncProducerTest.java | {
"start": 1560,
"end": 1926
} | class ____ {
private static CompletableFuture<Long> future;
static void complete(long code) {
future.complete(code);
}
static void reset() {
future = new CompletableFuture<>();
}
@Produces
CompletionStage<Long> produceLong() {
return future;
}
}
}
| LongProducer |
java | hibernate__hibernate-orm | hibernate-testing/src/main/java/org/hibernate/testing/bytecode/enhancement/ClassEnhancementSelectors.java | {
"start": 465,
"end": 542
} | interface ____ {
ClassEnhancementSelector[] value();
}
| ClassEnhancementSelectors |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/jsr250/Jsr250LifecycleTests.java | {
"start": 1547,
"end": 1798
} | class ____ not really contain actual <em>tests</em> per se. Rather it
* can be used to empirically verify the expected log output (see below). In
* order to see the log output, one would naturally need to ensure that the
* logger category for this | does |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-client/deployment/src/test/java/io/quarkus/rest/client/reactive/BufferEntityInExceptionMapperTest.java | {
"start": 2178,
"end": 2376
} | interface ____ {
@Path("/hello")
@GET
Uni<String> uniHello();
@Path("/hello")
@GET
String hello();
}
@Path("resource")
public static | Client |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/ast/statement/SQLPurgeTemporaryOutputStatement.java | {
"start": 848,
"end": 1499
} | class ____ extends SQLStatementImpl implements SQLDropStatement {
private SQLName name;
public SQLPurgeTemporaryOutputStatement() {
}
public SQLPurgeTemporaryOutputStatement(DbType dbType) {
super(dbType);
}
@Override
protected void accept0(SQLASTVisitor visitor) {
if (visitor.visit(this)) {
acceptChild(visitor, name);
}
visitor.endVisit(this);
}
public SQLName getName() {
return name;
}
public void setName(SQLName x) {
if (x != null) {
x.setParent(this);
}
this.name = x;
}
}
| SQLPurgeTemporaryOutputStatement |
java | google__guice | core/test/com/google/inject/BinderTest.java | {
"start": 2753,
"end": 5122
} | class ____ {}
public void testMissingBindings() {
try {
Guice.createInjector(
// We put each binding in a separate module so the order of the error messages doesn't
// depend on line numbers
new AbstractModule() {
@Override
public void configure() {
getProvider(Runnable.class);
}
},
new AbstractModule() {
@Override
public void configure() {
bind(Comparator.class);
}
},
new AbstractModule() {
@Override
public void configure() {
requireBinding(Key.get(new TypeLiteral<Callable<String>>() {}));
}
},
new AbstractModule() {
@Override
public void configure() {
bind(Date.class).annotatedWith(Names.named("date"));
}
});
fail("Expected CreationException");
} catch (CreationException e) {
assertEquals(4, e.getErrorMessages().size());
String segment1 = "No implementation for Runnable was bound.";
String segment2 = "No implementation for Comparator was bound.";
String segment3 = "No implementation for Callable<String> was bound.";
String segment4 =
String.format(
"No implementation for Date annotated with @Named(%s) was bound.",
Annotations.memberValueString("value", "date"));
String sourceFileName = getDeclaringSourcePart(getClass());
assertContains(
e.getMessage(),
segment1,
sourceFileName,
segment2,
sourceFileName,
segment3,
sourceFileName,
segment4,
sourceFileName);
}
}
public void testMissingDependency() {
try {
Guice.createInjector(
new AbstractModule() {
@Override
public void configure() {
bind(NeedsRunnable.class);
}
});
fail("Expected CreationException");
} catch (CreationException e) {
assertEquals(1, e.getErrorMessages().size());
assertContains(
e.getMessage(),
"No implementation for Runnable was bound.",
".runnable",
"for field runnable",
"at BinderTest$7.configure");
}
}
static | Foo |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.