language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/erroneous/propertymapping/ErroneousMapper3.java
|
{
"start": 315,
"end": 480
}
|
interface ____ {
@BeanMapping( ignoreByDefault = true )
@Mapping( target = "constant", constant = "constant" )
Target map(Source source);
}
|
ErroneousMapper3
|
java
|
apache__commons-lang
|
src/main/java/org/apache/commons/lang3/CharRange.java
|
{
"start": 1215,
"end": 1385
}
|
class ____ implements Iterable<Character>, Serializable {
/**
* Character {@link Iterator}.
* <p>#NotThreadSafe#</p>
*/
private static final
|
CharRange
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/conditional/basic/ConditionalMethodForSourceParameterAndPropertyMapper.java
|
{
"start": 387,
"end": 904
}
|
interface ____ {
ConditionalMethodForSourceParameterAndPropertyMapper INSTANCE = Mappers.getMapper(
ConditionalMethodForSourceParameterAndPropertyMapper.class );
Employee map(EmployeeDto employee);
@Condition(appliesTo = {
ConditionStrategy.SOURCE_PARAMETERS,
ConditionStrategy.PROPERTIES
})
default boolean canMapEmployeeDto(EmployeeDto employee) {
return employee != null && employee.getId() != null;
}
|
ConditionalMethodForSourceParameterAndPropertyMapper
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/AbstractTemporalAssertBaseTest.java
|
{
"start": 744,
"end": 1126
}
|
class ____ extends TemporalAssertBaseTest<ConcreteTemporalAssert, ZonedDateTime> {
@Override
protected ConcreteTemporalAssert create_assertions() {
return new ConcreteTemporalAssert(ZonedDateTime.now());
}
@Override
protected Comparables getComparables(ConcreteTemporalAssert someAssertions) {
return someAssertions.comparables;
}
}
|
AbstractTemporalAssertBaseTest
|
java
|
spring-projects__spring-boot
|
module/spring-boot-jdbc-test/src/main/java/org/springframework/boot/jdbc/test/autoconfigure/JdbcTest.java
|
{
"start": 3240,
"end": 4476
}
|
interface ____ {
/**
* Properties in form {@literal key=value} that should be added to the Spring
* {@link Environment} before the test runs.
* @return the properties to add
*/
String[] properties() default {};
/**
* Determines if default filtering should be used with
* {@link SpringBootApplication @SpringBootApplication}. By default no beans are
* included.
* @see #includeFilters()
* @see #excludeFilters()
* @return if default filters should be used
*/
boolean useDefaultFilters() default true;
/**
* A set of include filters which can be used to add otherwise filtered beans to the
* application context.
* @return include filters to apply
*/
ComponentScan.Filter[] includeFilters() default {};
/**
* A set of exclude filters which can be used to filter beans that would otherwise be
* added to the application context.
* @return exclude filters to apply
*/
ComponentScan.Filter[] excludeFilters() default {};
/**
* Auto-configuration exclusions that should be applied for this test.
* @return auto-configuration exclusions to apply
*/
@AliasFor(annotation = ImportAutoConfiguration.class, attribute = "exclude")
Class<?>[] excludeAutoConfiguration() default {};
}
|
JdbcTest
|
java
|
grpc__grpc-java
|
examples/example-orca/src/main/java/io/grpc/examples/orca/CustomBackendMetricsLoadBalancerProvider.java
|
{
"start": 1367,
"end": 1904
}
|
class ____ extends LoadBalancerProvider {
static final String EXAMPLE_LOAD_BALANCER = "example_backend_metrics_load_balancer";
@Override
public LoadBalancer newLoadBalancer(LoadBalancer.Helper helper) {
return new CustomBackendMetricsLoadBalancer(helper);
}
@Override
public boolean isAvailable() {
return true;
}
@Override
public int getPriority() {
return 5;
}
@Override
public String getPolicyName() {
return EXAMPLE_LOAD_BALANCER;
}
private final
|
CustomBackendMetricsLoadBalancerProvider
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/MoveToErrorStepUpdateTaskTests.java
|
{
"start": 1751,
"end": 7336
}
|
class ____ extends ESTestCase {
String policy;
ProjectState state;
Index index;
@Before
public void setupClusterState() {
policy = randomAlphaOfLength(10);
LifecyclePolicy lifecyclePolicy = LifecyclePolicyTests.randomTestLifecyclePolicy(policy);
IndexMetadata indexMetadata = IndexMetadata.builder(randomAlphaOfLength(5))
.settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, policy))
.numberOfShards(randomIntBetween(1, 5))
.numberOfReplicas(randomIntBetween(0, 5))
.build();
index = indexMetadata.getIndex();
IndexLifecycleMetadata ilmMeta = new IndexLifecycleMetadata(
Map.of(policy, new LifecyclePolicyMetadata(lifecyclePolicy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong())),
OperationMode.RUNNING
);
ProjectMetadata project = ProjectMetadata.builder(randomProjectIdOrDefault())
.put(IndexMetadata.builder(indexMetadata))
.putCustom(IndexLifecycleMetadata.TYPE, ilmMeta)
.build();
state = ClusterState.builder(ClusterName.DEFAULT).putProjectMetadata(project).build().projectState(project.id());
}
public void testExecuteSuccessfullyMoved() throws Exception {
StepKey currentStepKey = new StepKey("current-phase", "current-action", "current-name");
StepKey nextStepKey = new StepKey("next-phase", "next-action", "next-step-name");
long now = randomNonNegativeLong();
Exception cause = new ElasticsearchException("THIS IS AN EXPECTED CAUSE");
setStateToKey(currentStepKey);
MoveToErrorStepUpdateTask task = new MoveToErrorStepUpdateTask(
state.projectId(),
index,
policy,
currentStepKey,
cause,
() -> now,
(idxMeta, stepKey) -> new MockStep(stepKey, nextStepKey),
state -> {}
);
ClusterState newState = task.execute(state);
LifecycleExecutionState lifecycleState = newState.metadata()
.getProject(state.projectId())
.index(index)
.getLifecycleExecutionState();
StepKey actualKey = Step.getCurrentStepKey(lifecycleState);
assertThat(actualKey, equalTo(new StepKey(currentStepKey.phase(), currentStepKey.action(), ErrorStep.NAME)));
assertThat(lifecycleState.failedStep(), equalTo(currentStepKey.name()));
assertThat(lifecycleState.phaseTime(), nullValue());
assertThat(lifecycleState.actionTime(), nullValue());
assertThat(lifecycleState.stepTime(), equalTo(now));
assertThat(lifecycleState.stepInfo(), containsString("""
{"type":"exception","reason":"THIS IS AN EXPECTED CAUSE\""""));
}
public void testExecuteNoopDifferentStep() throws Exception {
StepKey currentStepKey = new StepKey("current-phase", "current-action", "current-name");
StepKey notCurrentStepKey = new StepKey("not-current", "not-current", "not-current");
long now = randomNonNegativeLong();
Exception cause = new ElasticsearchException("THIS IS AN EXPECTED CAUSE");
setStateToKey(notCurrentStepKey);
MoveToErrorStepUpdateTask task = new MoveToErrorStepUpdateTask(
state.projectId(),
index,
policy,
currentStepKey,
cause,
() -> now,
(idxMeta, stepKey) -> new MockStep(stepKey, new StepKey("next-phase", "action", "step")),
state -> {}
);
ClusterState newState = task.doExecute(state);
assertThat(newState, sameInstance(state.cluster()));
}
public void testExecuteNoopDifferentPolicy() throws Exception {
StepKey currentStepKey = new StepKey("current-phase", "current-action", "current-name");
long now = randomNonNegativeLong();
Exception cause = new ElasticsearchException("THIS IS AN EXPECTED CAUSE");
setStateToKey(currentStepKey);
setStatePolicy("not-" + policy);
MoveToErrorStepUpdateTask task = new MoveToErrorStepUpdateTask(
state.projectId(),
index,
policy,
currentStepKey,
cause,
() -> now,
(idxMeta, stepKey) -> new MockStep(stepKey, new StepKey("next-phase", "action", "step")),
state -> {}
);
ClusterState newState = task.doExecute(state);
assertThat(newState, sameInstance(state.cluster()));
}
private void setStatePolicy(String policyValue) {
state = state.updateProject(
ProjectMetadata.builder(state.metadata())
.updateSettings(Settings.builder().put(LifecycleSettings.LIFECYCLE_NAME, policyValue).build(), index.getName())
.build()
);
}
private void setStateToKey(StepKey stepKey) {
LifecycleExecutionState.Builder lifecycleState = LifecycleExecutionState.builder(
state.metadata().index(index).getLifecycleExecutionState()
);
lifecycleState.setPhase(stepKey.phase());
lifecycleState.setAction(stepKey.action());
lifecycleState.setStep(stepKey.name());
state = state.updateProject(
ProjectMetadata.builder(state.metadata())
.put(
IndexMetadata.builder(state.metadata().index(index)).putCustom(ILM_CUSTOM_METADATA_KEY, lifecycleState.build().asMap())
)
.build()
);
}
}
|
MoveToErrorStepUpdateTaskTests
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/performance/ITestS3ADeleteCost.java
|
{
"start": 2163,
"end": 8220
}
|
class ____ extends AbstractS3ACostTest {
private static final Logger LOG =
LoggerFactory.getLogger(ITestS3ADeleteCost.class);
@Override
public Configuration createConfiguration() {
return setPerformanceFlags(
super.createConfiguration(),
"");
}
@AfterEach
@Override
public void teardown() throws Exception {
// do this ourselves to avoid audits teardown failing
// when surplus markers are found
deleteTestDirInTeardown();
super.teardown();
}
/**
* This creates a directory with a child and then deletes it.
* The parent dir must be found and declared as empty.
* <p>When deleting markers, that forces the recreation of a new marker.</p>
*/
@Test
public void testDeleteSingleFileInDir() throws Throwable {
describe("delete a file");
S3AFileSystem fs = getFileSystem();
// creates the marker
Path dir = dir(methodPath());
Path simpleFile = file(new Path(dir, "simple.txt"));
boolean bulkDelete = isBulkDelete();
verifyMetrics(() -> {
fs.delete(simpleFile, false);
return "after fs.delete(simpleFile) " + getMetricSummary();
},
probe(OBJECT_METADATA_REQUESTS,
FILESTATUS_FILE_PROBE_H),
with(OBJECT_LIST_REQUEST,
FILESTATUS_FILE_PROBE_L + FILESTATUS_DIR_PROBE_L),
with(DIRECTORIES_DELETED, 0),
with(FILES_DELETED, 1),
// a single DELETE call is made to delete the object
with(OBJECT_DELETE_REQUEST, DELETE_OBJECT_REQUEST),
// create no parent dirs or delete parents
with(DIRECTORIES_CREATED, 0),
// even when bulk delete is enabled, there is no use of this.
with(OBJECT_BULK_DELETE_REQUEST, 0)
);
// there is an empty dir for a parent
S3AFileStatus status = verifyInnerGetFileStatus(dir, true,
StatusProbeEnum.ALL, GET_FILE_STATUS_ON_DIR);
assertEmptyDirStatus(status, Tristate.TRUE);
}
/**
* This creates a directory with a two files and then deletes one of the
* files.
*/
@Test
public void testDeleteFileInDir() throws Throwable {
describe("delete a file in a directory with multiple files");
S3AFileSystem fs = getFileSystem();
// creates the marker
Path dir = dir(methodPath());
// file creation may have deleted that marker, but it may
// still be there
Path file1 = file(new Path(dir, "file1.txt"));
Path file2 = file(new Path(dir, "file2.txt"));
verifyMetrics(() -> {
fs.delete(file1, false);
return "after fs.delete(file1) " + getMetricSummary();
},
// delete file.
probe(OBJECT_METADATA_REQUESTS,
FILESTATUS_FILE_PROBE_H),
with(OBJECT_LIST_REQUEST,
FILESTATUS_FILE_PROBE_L + FILESTATUS_DIR_PROBE_L),
with(DIRECTORIES_DELETED, 0),
with(FILES_DELETED, 1),
// no need to create a parent
with(DIRECTORIES_CREATED, 0),
// create no parent dirs or delete parents
with(OBJECT_DELETE_REQUEST, DELETE_OBJECT_REQUEST));
}
@Test
public void testDirMarkersSubdir() throws Throwable {
describe("verify cost of deep subdir creation");
Path methodPath = methodPath();
Path parent = new Path(methodPath, "parent");
Path subDir = new Path(parent, "1/2/3/4/5/6");
S3AFileSystem fs = getFileSystem();
// this creates a peer of the parent dir, so ensures
// that when parent dir is deleted, no markers need to
// be recreated...that complicates all the metrics which
// are measured
Path sibling = new Path(methodPath, "sibling");
ContractTestUtils.touch(fs, sibling);
int dirsCreated = 2;
fs.delete(parent, true);
LOG.info("creating parent dir {}", parent);
fs.mkdirs(parent);
LOG.info("creating sub directory {}", subDir);
// one dir created, possibly a parent removed
final int fakeDirectoriesToDelete = directoriesInPath(subDir) - 1;
final Statistic stat = getDeleteMarkerStatistic();
verifyMetrics(() -> {
mkdirs(subDir);
return "after mkdir(subDir) " + getMetricSummary();
},
with(DIRECTORIES_CREATED, 1),
with(DIRECTORIES_DELETED, 0),
with(stat, 0),
with(FAKE_DIRECTORIES_DELETED, 0));
LOG.info("About to delete {}", parent);
// now delete the deep tree.
verifyMetrics(() -> {
fs.delete(parent, true);
return "deleting parent dir " + parent + " " + getMetricSummary();
},
// the parent dir marker needs deletion alongside
// the subdir one.
with(OBJECT_DELETE_OBJECTS, dirsCreated));
// followup with list calls to make sure all is clear.
verifyNoListing(parent);
verifyNoListing(subDir);
// now reinstate the directory, which in HADOOP-17244 hitting problems
fs.mkdirs(parent);
FileStatus[] children = fs.listStatus(parent);
Assertions.assertThat(children)
.describedAs("Children of %s", parent)
.isEmpty();
}
/**
* List a path, verify that there are no direct child entries.
* @param path path to scan
*/
protected void verifyNoListing(final Path path) throws Exception {
intercept(FileNotFoundException.class, () -> {
FileStatus[] statuses = getFileSystem().listStatus(path);
return Arrays.deepToString(statuses);
});
}
@Test
public void testDirMarkersFileCreation() throws Throwable {
describe("verify cost of file creation");
Path srcBaseDir = dir(methodPath());
Path srcDir = dir(new Path(srcBaseDir, "1/2/3/4/5/6"));
final Statistic stat = getDeleteMarkerStatistic();
verifyMetrics(() -> {
final Path srcPath = new Path(srcDir, "source.txt");
file(srcPath);
LOG.info("Metrics: {}\n{}", getMetricSummary(), getFileSystem());
return "after touch(fs, " + srcPath + ")" + getMetricSummary();
},
with(DIRECTORIES_CREATED, 0),
with(DIRECTORIES_DELETED, 0),
// no delete operations.
with(stat, 0),
with(FAKE_DIRECTORIES_DELETED, 0));
}
}
|
ITestS3ADeleteCost
|
java
|
apache__camel
|
components/camel-test/camel-test-main-junit5/src/test/java/org/apache/camel/test/main/junit5/annotation/SupportParameterizedTest.java
|
{
"start": 2068,
"end": 2312
}
|
class ____ {
@ParameterizedTest
@ValueSource(strings = { "hello", "nested", "test" })
void shouldSupportNestedTest(String value) throws Exception {
shouldSupportMultipleCalls(value);
}
}
}
|
NestedTest
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSaveNamespace.java
|
{
"start": 3658,
"end": 3866
}
|
class ____ {
static {
GenericTestUtils.setLogLevel(FSImage.LOG, Level.TRACE);
}
private static final Logger LOG = LoggerFactory.getLogger(TestSaveNamespace.class);
private static
|
TestSaveNamespace
|
java
|
apache__camel
|
dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/BraintreeComponentBuilderFactory.java
|
{
"start": 4527,
"end": 5487
}
|
class ____
extends AbstractComponentBuilder<BraintreeComponent>
implements BraintreeComponentBuilder {
@Override
protected BraintreeComponent buildConcreteComponent() {
return new BraintreeComponent();
}
@Override
protected boolean setPropertyOnComponent(
Component component,
String name,
Object value) {
switch (name) {
case "lazyStartProducer": ((BraintreeComponent) component).setLazyStartProducer((boolean) value); return true;
case "autowiredEnabled": ((BraintreeComponent) component).setAutowiredEnabled((boolean) value); return true;
case "configuration": ((BraintreeComponent) component).setConfiguration((org.apache.camel.component.braintree.BraintreeConfiguration) value); return true;
default: return false;
}
}
}
}
|
BraintreeComponentBuilderImpl
|
java
|
apache__camel
|
core/camel-base/src/main/java/org/apache/camel/impl/event/RouteStartedEvent.java
|
{
"start": 944,
"end": 1300
}
|
class ____ extends AbstractRouteEvent implements CamelEvent.RouteStartedEvent {
private static final @Serial long serialVersionUID = 1330257282431407329L;
public RouteStartedEvent(Route source) {
super(source);
}
@Override
public String toString() {
return "Route started: " + getRoute().getId();
}
}
|
RouteStartedEvent
|
java
|
apache__camel
|
components/camel-pdf/src/main/java/org/apache/camel/component/pdf/text/LineBuilderStrategy.java
|
{
"start": 956,
"end": 1077
}
|
interface ____ {
Collection<String> buildLines(Collection<String> splittedText) throws IOException;
}
|
LineBuilderStrategy
|
java
|
elastic__elasticsearch
|
x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportInvalidateTokenAction.java
|
{
"start": 1198,
"end": 2786
}
|
class ____ extends HandledTransportAction<InvalidateTokenRequest, InvalidateTokenResponse> {
private final TokenService tokenService;
@Inject
public TransportInvalidateTokenAction(TransportService transportService, ActionFilters actionFilters, TokenService tokenService) {
super(
InvalidateTokenAction.NAME,
transportService,
actionFilters,
InvalidateTokenRequest::new,
EsExecutors.DIRECT_EXECUTOR_SERVICE
);
this.tokenService = tokenService;
}
@Override
protected void doExecute(Task task, InvalidateTokenRequest request, ActionListener<InvalidateTokenResponse> listener) {
final ActionListener<TokensInvalidationResult> invalidateListener = ActionListener.wrap(
tokensInvalidationResult -> listener.onResponse(new InvalidateTokenResponse(tokensInvalidationResult)),
listener::onFailure
);
if (Strings.hasText(request.getUserName()) || Strings.hasText(request.getRealmName())) {
tokenService.invalidateActiveTokens(request.getRealmName(), request.getUserName(), null, invalidateListener);
} else if (request.getTokenType() == InvalidateTokenRequest.Type.ACCESS_TOKEN) {
tokenService.invalidateAccessToken(request.getTokenString(), invalidateListener);
} else {
assert request.getTokenType() == InvalidateTokenRequest.Type.REFRESH_TOKEN;
tokenService.invalidateRefreshToken(request.getTokenString(), invalidateListener);
}
}
}
|
TransportInvalidateTokenAction
|
java
|
apache__camel
|
components/camel-jms/src/test/java/org/apache/camel/component/jms/SimpleJmsRequestReplyExclusiveReplyToTest.java
|
{
"start": 1496,
"end": 4182
}
|
class ____ extends AbstractJMSTest {
@Order(2)
@RegisterExtension
public static CamelContextExtension camelContextExtension = new DefaultCamelContextExtension();
protected CamelContext context;
protected ProducerTemplate template;
protected ConsumerTemplate consumer;
@Test
public void testJmsRequestReplyExclusiveFixedReplyTo() {
assertEquals("Hello A",
template.requestBody(
"activemq:queue:SimpleJmsRequestReplyExclusiveReplyToTest.foo?replyTo=queue:SimpleJmsRequestReplyExclusiveReplyToTest.bar&replyToType=Exclusive&replyToConsumerType=Simple",
"A"));
assertEquals("Hello B",
template.requestBody(
"activemq:queue:SimpleJmsRequestReplyExclusiveReplyToTest.foo?replyTo=queue:SimpleJmsRequestReplyExclusiveReplyToTest.bar&replyToType=Exclusive&replyToConsumerType=Simple",
"B"));
assertEquals("Hello C",
template.requestBody(
"activemq:queue:SimpleJmsRequestReplyExclusiveReplyToTest.foo?replyTo=queue:SimpleJmsRequestReplyExclusiveReplyToTest.bar&replyToType=Exclusive&replyToConsumerType=Simple",
"C"));
assertEquals("Hello D",
template.requestBody(
"activemq:queue:SimpleJmsRequestReplyExclusiveReplyToTest.foo?replyTo=queue:SimpleJmsRequestReplyExclusiveReplyToTest.bar&replyToType=Exclusive&replyToConsumerType=Simple",
"D"));
assertEquals("Hello E",
template.requestBody(
"activemq:queue:SimpleJmsRequestReplyExclusiveReplyToTest.foo?replyTo=queue:SimpleJmsRequestReplyExclusiveReplyToTest.bar&replyToType=Exclusive&replyToConsumerType=Simple",
"E"));
}
@Override
protected String getComponentName() {
return "activemq";
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("activemq:queue:SimpleJmsRequestReplyExclusiveReplyToTest.foo")
.transform(body().prepend("Hello "));
}
};
}
@Override
public CamelContextExtension getCamelContextExtension() {
return camelContextExtension;
}
@BeforeEach
void setUpRequirements() {
context = camelContextExtension.getContext();
template = camelContextExtension.getProducerTemplate();
consumer = camelContextExtension.getConsumerTemplate();
}
}
|
SimpleJmsRequestReplyExclusiveReplyToTest
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/cache/annotation/ReactiveCachingTests.java
|
{
"start": 2054,
"end": 9947
}
|
class ____ {
@ParameterizedTest
@ValueSource(classes = {EarlyCacheHitDeterminationConfig.class,
EarlyCacheHitDeterminationWithoutNullValuesConfig.class,
LateCacheHitDeterminationConfig.class,
LateCacheHitDeterminationWithValueWrapperConfig.class})
void cacheHitDetermination(Class<?> configClass) {
AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(
configClass, ReactiveCacheableService.class);
ReactiveCacheableService service = ctx.getBean(ReactiveCacheableService.class);
Object key = new Object();
Long r1 = service.cacheFuture(key).join();
Long r2 = service.cacheFuture(key).join();
Long r3 = service.cacheFuture(key).join();
assertThat(r1).isNotNull();
assertThat(r1).as("cacheFuture").isSameAs(r2).isSameAs(r3);
key = new Object();
r1 = service.cacheMono(key).block();
r2 = service.cacheMono(key).block();
r3 = service.cacheMono(key).block();
assertThat(r1).isNotNull();
assertThat(r1).as("cacheMono").isSameAs(r2).isSameAs(r3);
key = new Object();
r1 = service.cacheFlux(key).blockFirst();
r2 = service.cacheFlux(key).blockFirst();
r3 = service.cacheFlux(key).blockFirst();
assertThat(r1).isNotNull();
assertThat(r1).as("cacheFlux blockFirst").isSameAs(r2).isSameAs(r3);
key = new Object();
List<Long> l1 = service.cacheFlux(key).collectList().block();
List<Long> l2 = service.cacheFlux(key).collectList().block();
List<Long> l3 = service.cacheFlux(key).collectList().block();
assertThat(l1).isNotNull();
assertThat(l1).as("cacheFlux collectList").isEqualTo(l2).isEqualTo(l3);
key = new Object();
r1 = service.cacheMono(key).block();
r2 = service.cacheMono(key).block();
r3 = service.cacheMono(key).block();
assertThat(r1).isNotNull();
assertThat(r1).as("cacheMono common key").isSameAs(r2).isSameAs(r3);
// Same key as for Mono, reusing its cached value
r1 = service.cacheFlux(key).blockFirst();
r2 = service.cacheFlux(key).blockFirst();
r3 = service.cacheFlux(key).blockFirst();
assertThat(r1).isNotNull();
assertThat(r1).as("cacheFlux blockFirst common key").isSameAs(r2).isSameAs(r3);
ctx.close();
}
@ParameterizedTest
@ValueSource(classes = {EarlyCacheHitDeterminationConfig.class,
EarlyCacheHitDeterminationWithoutNullValuesConfig.class,
LateCacheHitDeterminationConfig.class,
LateCacheHitDeterminationWithValueWrapperConfig.class})
void fluxCacheDoesntDependOnFirstRequest(Class<?> configClass) {
AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(
configClass, ReactiveCacheableService.class);
ReactiveCacheableService service = ctx.getBean(ReactiveCacheableService.class);
Object key = new Object();
List<Long> l1 = service.cacheFlux(key).take(1L, true).collectList().block();
List<Long> l2 = service.cacheFlux(key).take(3L, true).collectList().block();
List<Long> l3 = service.cacheFlux(key).collectList().block();
Long first = l1.get(0);
assertThat(l1).as("l1").containsExactly(first);
assertThat(l2).as("l2").containsExactly(first, 0L, -1L);
assertThat(l3).as("l3").containsExactly(first, 0L, -1L, -2L, -3L);
ctx.close();
}
@Test
void cacheErrorHandlerWithSimpleCacheErrorHandler() {
AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(
ExceptionCacheManager.class, ReactiveCacheableService.class);
ReactiveCacheableService service = ctx.getBean(ReactiveCacheableService.class);
assertThatExceptionOfType(CompletionException.class)
.isThrownBy(() -> service.cacheFuture(new Object()).join())
.withCauseInstanceOf(UnsupportedOperationException.class);
assertThatExceptionOfType(UnsupportedOperationException.class)
.isThrownBy(() -> service.cacheMono(new Object()).block());
assertThatExceptionOfType(UnsupportedOperationException.class)
.isThrownBy(() -> service.cacheFlux(new Object()).blockFirst());
}
@Test
void cacheErrorHandlerWithSimpleCacheErrorHandlerAndSync() {
AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(
ExceptionCacheManager.class, ReactiveSyncCacheableService.class);
ReactiveSyncCacheableService service = ctx.getBean(ReactiveSyncCacheableService.class);
assertThatExceptionOfType(CompletionException.class)
.isThrownBy(() -> service.cacheFuture(new Object()).join())
.withCauseInstanceOf(UnsupportedOperationException.class);
assertThatExceptionOfType(UnsupportedOperationException.class)
.isThrownBy(() -> service.cacheMono(new Object()).block());
assertThatExceptionOfType(UnsupportedOperationException.class)
.isThrownBy(() -> service.cacheFlux(new Object()).blockFirst());
}
@Test
void cacheErrorHandlerWithLoggingCacheErrorHandler() {
AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(
ExceptionCacheManager.class, ReactiveCacheableService.class, ErrorHandlerCachingConfiguration.class);
ReactiveCacheableService service = ctx.getBean(ReactiveCacheableService.class);
Long r1 = service.cacheFuture(new Object()).join();
assertThat(r1).isNotNull();
assertThat(r1).as("cacheFuture").isEqualTo(0L);
r1 = service.cacheMono(new Object()).block();
assertThat(r1).isNotNull();
assertThat(r1).as("cacheMono").isEqualTo(1L);
r1 = service.cacheFlux(new Object()).blockFirst();
assertThat(r1).isNotNull();
assertThat(r1).as("cacheFlux blockFirst").isEqualTo(2L);
}
@Test
void cacheErrorHandlerWithLoggingCacheErrorHandlerAndSync() {
AnnotationConfigApplicationContext ctx =
new AnnotationConfigApplicationContext(ExceptionCacheManager.class, ReactiveSyncCacheableService.class, ErrorHandlerCachingConfiguration.class);
ReactiveSyncCacheableService service = ctx.getBean(ReactiveSyncCacheableService.class);
Long r1 = service.cacheFuture(new Object()).join();
assertThat(r1).isNotNull();
assertThat(r1).as("cacheFuture").isEqualTo(0L);
r1 = service.cacheMono(new Object()).block();
assertThat(r1).isNotNull();
assertThat(r1).as("cacheMono").isEqualTo(1L);
r1 = service.cacheFlux(new Object()).blockFirst();
assertThat(r1).isNotNull();
assertThat(r1).as("cacheFlux blockFirst").isEqualTo(2L);
}
@Test
void cacheErrorHandlerWithLoggingCacheErrorHandlerAndOperationException() {
AnnotationConfigApplicationContext ctx =
new AnnotationConfigApplicationContext(EarlyCacheHitDeterminationConfig.class, ReactiveFailureCacheableService.class, ErrorHandlerCachingConfiguration.class);
ReactiveFailureCacheableService service = ctx.getBean(ReactiveFailureCacheableService.class);
assertThatExceptionOfType(CompletionException.class).isThrownBy(() -> service.cacheFuture(new Object()).join())
.withMessage(IllegalStateException.class.getName() + ": future service error");
StepVerifier.create(service.cacheMono(new Object()))
.expectErrorMessage("mono service error")
.verify();
StepVerifier.create(service.cacheFlux(new Object()))
.expectErrorMessage("flux service error")
.verify();
}
@Test
void cacheErrorHandlerWithLoggingCacheErrorHandlerAndOperationExceptionAndSync() {
AnnotationConfigApplicationContext ctx =
new AnnotationConfigApplicationContext(EarlyCacheHitDeterminationConfig.class, ReactiveSyncFailureCacheableService.class, ErrorHandlerCachingConfiguration.class);
ReactiveSyncFailureCacheableService service = ctx.getBean(ReactiveSyncFailureCacheableService.class);
assertThatExceptionOfType(CompletionException.class).isThrownBy(() -> service.cacheFuture(new Object()).join())
.withMessage(IllegalStateException.class.getName() + ": future service error");
StepVerifier.create(service.cacheMono(new Object()))
.expectErrorMessage("mono service error")
.verify();
StepVerifier.create(service.cacheFlux(new Object()))
.expectErrorMessage("flux service error")
.verify();
}
@CacheConfig("first")
static
|
ReactiveCachingTests
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/procedure/internal/ProcedureCallImpl.java
|
{
"start": 4409,
"end": 25697
}
|
class ____<R>
extends AbstractQuery<R>
implements ProcedureCallImplementor<R>, ResultContext {
private final String procedureName;
private FunctionReturnImpl<R> functionReturn;
private final ProcedureParameterMetadataImpl parameterMetadata;
private final ProcedureParamBindings parameterBindings;
private final ResultSetMapping resultSetMapping;
private Set<String> synchronizedQuerySpaces;
private final QueryOptionsImpl queryOptions = new QueryOptionsImpl();
private ProcedureOutputsImpl outputs;
private static String mappingId(String procedureName, Class<?>[] resultClasses) {
return procedureName + ":" + join( ",", resultClasses );
}
private static String mappingId(String procedureName, String[] resultSetMappingNames) {
return procedureName + ":" + join( ",", resultSetMappingNames );
}
/**
* The no-returns form.
*
* @param session The session
* @param procedureName The name of the procedure to call
*/
public ProcedureCallImpl(SharedSessionContractImplementor session, String procedureName) {
super( session );
this.procedureName = procedureName;
parameterMetadata = new ProcedureParameterMetadataImpl();
parameterBindings = new ProcedureParamBindings( parameterMetadata, getSessionFactory() );
resultSetMapping = resolveResultSetMapping( procedureName, true, session.getSessionFactory() );
synchronizedQuerySpaces = null;
}
/**
* The result Class(es) return form
*
* @param session The session
* @param procedureName The name of the procedure to call
* @param resultClasses The classes making up the result
*/
public ProcedureCallImpl(SharedSessionContractImplementor session, String procedureName, Class<?>... resultClasses) {
super( session );
assert resultClasses != null && resultClasses.length > 0;
this.procedureName = procedureName;
final var factory = session.getSessionFactory();
parameterMetadata = new ProcedureParameterMetadataImpl();
parameterBindings = new ProcedureParamBindings( parameterMetadata, factory );
synchronizedQuerySpaces = new HashSet<>();
resultSetMapping = resolveResultSetMapping( mappingId( procedureName, resultClasses ), factory );
resolveResultSetMappingClasses(
resultClasses,
resultSetMapping,
synchronizedQuerySpaces::add,
this::getSessionFactory
);
}
/**
 * The result-set-mapping(s) return form
 *
 * @param session The session
 * @param procedureName The name of the procedure to call
 * @param resultSetMappingNames The names of the result set mappings making up the result
 */
public ProcedureCallImpl(
		final SharedSessionContractImplementor session,
		String procedureName,
		String... resultSetMappingNames) {
	super( session );
	assert resultSetMappingNames != null && resultSetMappingNames.length > 0;
	this.procedureName = procedureName;
	final var factory = session.getSessionFactory();
	parameterMetadata = new ProcedureParameterMetadataImpl();
	parameterBindings = new ProcedureParamBindings( parameterMetadata, factory );
	// filled in by the resolution below with the query spaces of the named mappings
	synchronizedQuerySpaces = new HashSet<>();
	resultSetMapping = resolveResultSetMapping( mappingId( procedureName, resultSetMappingNames ), factory );
	Util.resolveResultSetMappingNames(
			resultSetMappingNames,
			resultSetMapping,
			synchronizedQuerySpaces::add,
			this::getSessionFactory
	);
}
/**
 * The named/stored copy constructor
 *
 * @param session The session
 * @param memento The named/stored memento
 */
ProcedureCallImpl(SharedSessionContractImplementor session, NamedCallableQueryMemento memento) {
	super( session );
	procedureName = memento.getCallableName();
	final var factory = session.getSessionFactory();
	parameterMetadata = new ProcedureParameterMetadataImpl( memento, session );
	parameterBindings = new ProcedureParamBindings( parameterMetadata, factory );
	// copy -- the memento is shared named-query state and must not be mutated
	synchronizedQuerySpaces = makeCopy( memento.getQuerySpaces() );
	resultSetMapping = resolveResultSetMapping( memento.getRegistrationName(), factory );
	resolveResultSetMappings(
			memento.getResultSetMappingNames(),
			memento.getResultSetMappingClasses(),
			resultSetMapping,
			synchronizedQuerySpaces::add,
			this::getSessionFactory
	);
	// apply hints/options stored on the named query definition
	applyOptions( memento );
}
/**
 * The named/stored copy constructor, with explicit result classes
 * taking the place of any result mappings stored on the memento.
 *
 * @param session The session
 * @param memento The named/stored memento
 * @param resultTypes The classes making up the result
 */
ProcedureCallImpl(
		SharedSessionContractImplementor session,
		NamedCallableQueryMemento memento,
		Class<?>... resultTypes) {
	super( session );
	procedureName = memento.getCallableName();
	final var factory = session.getSessionFactory();
	parameterMetadata = new ProcedureParameterMetadataImpl( memento, session );
	parameterBindings = new ProcedureParamBindings( parameterMetadata, factory );
	// copy -- the memento is shared named-query state and must not be mutated
	synchronizedQuerySpaces = makeCopy( memento.getQuerySpaces() );
	resultSetMapping = resolveResultSetMapping( mappingId( procedureName, resultTypes ), factory );
	resolveResultSetMappings(
			null,
			resultTypes,
			resultSetMapping,
			synchronizedQuerySpaces::add,
			this::getSessionFactory
	);
	applyOptions( memento );
}
/**
 * The named/stored copy constructor, with explicit result-set-mapping
 * names taking the place of any result mappings stored on the memento.
 *
 * @param session The session
 * @param memento The named/stored memento
 * @param resultSetMappingNames The names of the result set mappings making up the result
 */
public ProcedureCallImpl(
		SharedSessionContractImplementor session,
		NamedCallableQueryMementoImpl memento,
		String... resultSetMappingNames) {
	super( session );
	procedureName = memento.getCallableName();
	final var factory = session.getSessionFactory();
	parameterMetadata = new ProcedureParameterMetadataImpl( memento, session );
	parameterBindings = new ProcedureParamBindings( parameterMetadata, factory );
	// copy -- the memento is shared named-query state and must not be mutated
	synchronizedQuerySpaces = makeCopy( memento.getQuerySpaces() );
	resultSetMapping = resolveResultSetMapping( mappingId( procedureName, resultSetMappingNames ), factory );
	resolveResultSetMappings(
			resultSetMappingNames,
			null,
			resultSetMapping,
			synchronizedQuerySpaces::add,
			this::getSessionFactory
	);
	applyOptions( memento );
}
/**
 * Marks this call as a function call based on the registered result builders:
 * exactly one result type that resolves to a basic type is registered as that
 * type's function return; anything else falls back to a REF_CURSOR return.
 */
private void applyCallableFunctionHint() {
	final List<Class<?>> javaTypes = new ArrayList<>();
	resultSetMapping.visitResultBuilders( (position, builder) -> javaTypes.add( builder.getJavaType() ) );
	// only a single result type can map to a scalar function return
	final var basicType = javaTypes.size() == 1
			? getTypeConfiguration().getBasicTypeForJavaType( javaTypes.get( 0 ) )
			: null;
	if ( basicType == null ) {
		markAsFunctionCallRefRefCursor();
	}
	else {
		markAsFunctionCall( basicType );
	}
}
@Override
public String getProcedureName() {
	return procedureName;
}

// Mutable query options applied when the call is eventually executed.
@Override
public MutableQueryOptions getQueryOptions() {
	return queryOptions;
}

@Override
public ProcedureParameterMetadataImpl getParameterMetadata() {
	return parameterMetadata;
}

@Override
public QueryParameterBindings getQueryParameterBindings() {
	return parameterBindings;
}

// Whether parameters were registered by name or by position (delegated to the metadata).
public ParameterStrategy getParameterStrategy() {
	return getParameterMetadata().getParameterStrategy();
}

// A "function call" here means a function return has been registered for this call.
@Override
public boolean isFunctionCall() {
	return functionReturn;
}

@Override
public FunctionReturnImplementor<R> getFunctionReturn() {
	return functionReturn;
}
/**
 * Marks this call as a function call whose return is registered under the
 * given JDBC type code (see {@link java.sql.Types}).
 */
@Override
public ProcedureCallImplementor<R> markAsFunctionCall(int sqlType) {
	functionReturn = new FunctionReturnImpl<>( this, sqlType );
	return this;
}

// Fallback used when no single basic return type can be determined:
// register the function return as a REF_CURSOR.
private void markAsFunctionCallRefRefCursor() {
	functionReturn = new FunctionReturnImpl<>( this, Types.REF_CURSOR );
}
/**
 * Marks this call as a function call returning the given Java type, which
 * must resolve to a Hibernate basic type.
 *
 * @throws IllegalArgumentException if no basic type can be resolved for {@code resultType}
 */
@Override
public ProcedureCallImpl<R> markAsFunctionCall(Class<?> resultType) {
	final var basicType = getTypeConfiguration().getBasicTypeForJavaType( resultType );
	if ( basicType != null ) {
		markAsFunctionCall( basicType );
		return this;
	}
	throw new IllegalArgumentException( "Could not resolve a BasicType for the java type: " + resultType.getName() );
}
/**
 * Marks this call as a function call returning the given type reference,
 * which must be an {@code OutputableType} so it can be read back from the
 * callable statement.
 *
 * @throws IllegalArgumentException if the type is not an {@code OutputableType}
 */
@Override
public ProcedureCall markAsFunctionCall(Type<?> typeReference) {
	if ( !(typeReference instanceof OutputableType<?> outputableType) ) {
		throw new IllegalArgumentException( "Given type is not an OutputableType: " + typeReference );
	}
	if ( resultSetMapping.getNumberOfResultBuilders() == 0 ) {
		final SqmExpressible<?> expressible = resolveExpressible( typeReference );
		// Function returns might not be represented as callable parameters,
		// but we still want to convert the result to the requested java type if possible
		resultSetMapping.addResultBuilder( new ScalarDomainResultBuilder<>( expressible.getExpressibleJavaType() ) );
	}
	// unchecked: caller-supplied type is trusted to be compatible with R
	//noinspection unchecked
	functionReturn = new FunctionReturnImpl<>( this, (OutputableType<R>) outputableType );
	return this;
}

// Shared tail of the Class/hint-based markAsFunctionCall paths.
private void markAsFunctionCall(BasicType<?> basicType) {
	if ( resultSetMapping.getNumberOfResultBuilders() == 0 ) {
		// Function returns might not be represented as callable parameters,
		// but we still want to convert the result to the requested java type if possible
		resultSetMapping.addResultBuilder( new ScalarDomainResultBuilder<>( basicType.getExpressibleJavaType() ) );
	}
	// unchecked: resolved basic type is trusted to be compatible with R
	//noinspection unchecked
	functionReturn = new FunctionReturnImpl<>( this, (OutputableType<R>) basicType );
}

@Override
public QueryParameterBindings getParameterBindings() {
	return parameterBindings;
}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Parameter registrations
/**
 * JPA-style positional parameter registration. Hibernate exceptions are
 * converted per JPA expectations; any other runtime failure marks the
 * transaction rollback-only before propagating.
 */
@Override
public ProcedureCallImplementor<R> registerStoredProcedureParameter(int position, Class<?> type, ParameterMode mode) {
	getSession().checkOpen( true );
	try {
		registerParameter( position, type, mode );
		return this;
	}
	catch (HibernateException he) {
		throw getExceptionConverter().convert( he );
	}
	catch (RuntimeException e) {
		getSession().markForRollbackOnly();
		throw e;
	}
}
/**
 * JPA-style named parameter registration. Hibernate exceptions are converted
 * per JPA expectations; any other runtime failure marks the transaction
 * rollback-only before propagating.
 */
@Override
public ProcedureCallImplementor<R> registerStoredProcedureParameter(
		String parameterName,
		Class<?> type,
		ParameterMode mode) {
	getSession().checkOpen( true );
	try {
		registerParameter( parameterName, type, mode );
	}
	catch (HibernateException he) {
		throw getExceptionConverter().convert( he );
	}
	catch (RuntimeException e) {
		getSession().markForRollbackOnly();
		throw e;
	}
	return this;
}

/**
 * JPA-style positional parameter registration using a Hibernate type
 * reference instead of a plain Java class. Same exception handling as the
 * Class-based overload.
 */
@Override
public ProcedureCallImplementor<R> registerStoredProcedureParameter(
		int position,
		Type<?> type,
		ParameterMode mode) {
	getSession().checkOpen( true );
	try {
		registerParameter( position, type, mode );
	}
	catch (HibernateException he) {
		throw getExceptionConverter().convert( he );
	}
	catch (RuntimeException e) {
		getSession().markForRollbackOnly();
		throw e;
	}
	return this;
}

/**
 * JPA-style named parameter registration using a Hibernate type reference
 * instead of a plain Java class. Same exception handling as the Class-based
 * overload.
 */
@Override
public ProcedureCallImplementor<R> registerStoredProcedureParameter(
		String parameterName,
		Type<?> type,
		ParameterMode mode) {
	getSession().checkOpen( true );
	try {
		registerParameter( parameterName, type, mode );
	}
	catch (HibernateException he) {
		throw getExceptionConverter().convert( he );
	}
	catch (RuntimeException e) {
		getSession().markForRollbackOnly();
		throw e;
	}
	return this;
}
/**
 * Registers a positional parameter, resolving the bind type from the plain
 * Java class via the mapping metamodel.
 *
 * @return the registered parameter, for further configuration
 */
@Override
public <T> ProcedureParameter<T> registerParameter(int position, Class<T> javaType, ParameterMode mode) {
	final var parameterType = getMappingMetamodel().resolveParameterBindType( javaType );
	final var procedureParameter =
			new ProcedureParameterImpl<>( position, mode, getExpressibleJavaType( parameterType ), parameterType );
	registerParameter( procedureParameter );
	return procedureParameter;
}

/**
 * Registers a positional parameter from an explicit Hibernate type
 * reference, resolving its SQM expressible form for binding.
 *
 * @return the registered parameter, for further configuration
 */
@Override
public <T> ProcedureParameter<T> registerParameter(
		int position,
		Type<T> typeReference,
		ParameterMode mode) {
	final var expressible = resolveExpressible( typeReference );
	final var procedureParameter =
			new ProcedureParameterImpl<>( position, mode, typeReference.getJavaType(), expressible );
	registerParameter( procedureParameter );
	return procedureParameter;
}

// Resolves a user-facing type reference to its SQM bindable form.
private <T> SqmBindableType<T> resolveExpressible(Type<T> typeReference) {
	return getSessionFactory().getRuntimeMetamodels().resolveExpressible( typeReference );
}

// All registration overloads funnel through here into the parameter metadata.
private void registerParameter(ProcedureParameterImplementor<?> parameter) {
	getParameterMetadata().registerParameter( parameter );
}
// Looks up a previously registered positional parameter.
@Override
public ProcedureParameterImplementor<?> getParameterRegistration(int position) {
	return getParameterMetadata().getQueryParameter( position );
}

/**
 * Registers a named parameter, resolving the bind type from the plain Java
 * class via the mapping metamodel.
 *
 * @return the registered parameter, for further configuration
 */
@Override
public <T> ProcedureParameterImplementor<T> registerParameter(String name, Class<T> javaType, ParameterMode mode) {
	final var parameterType = getMappingMetamodel().resolveParameterBindType( javaType );
	final var parameter =
			new ProcedureParameterImpl<>( name, mode, getExpressibleJavaType( parameterType ), parameterType );
	registerParameter( parameter );
	return parameter;
}
/**
 * Resolves the Java class represented by the given type reference via the
 * node builder, or {@code null} when no type reference was supplied.
 */
private <T> Class<T> getExpressibleJavaType(Type<T> parameterType) {
	if ( parameterType == null ) {
		return null;
	}
	final var expressible = getNodeBuilder().resolveExpressible( parameterType );
	assert expressible != null;
	return expressible.getExpressibleJavaType().getJavaTypeClass();
}
// The criteria builder doubles as the NodeBuilder used for type resolution.
private NodeBuilder getNodeBuilder() {
	return getSessionFactory().getQueryEngine().getCriteriaBuilder();
}
/**
 * Registers a named parameter from an explicit Hibernate type reference,
 * resolving its SQM expressible form for binding.
 *
 * @return the registered parameter, for further configuration
 */
@Override
public <T> ProcedureParameterImplementor<T> registerParameter(
		String name,
		Type<T> typeReference,
		ParameterMode mode) {
	final var expressible = resolveExpressible( typeReference );
	final var parameter =
			new ProcedureParameterImpl<>( name, mode, typeReference.getJavaType(), expressible );
	registerParameter( parameter );
	return parameter;
}

// Looks up a previously registered named parameter.
@Override
public ProcedureParameterImplementor<?> getParameterRegistration(String name) {
	return getParameterMetadata().getQueryParameter( name );
}

// Read-only snapshot of everything registered so far.
@Override
public List<ProcedureParameter<?>> getRegisteredParameters() {
	return unmodifiableList( getParameterMetadata().getRegistrationsAsList() );
}
@Override
public ProcedureOutputs getOutputs() {
	// lazily built on first access; NOTE(review): no synchronization --
	// presumably relies on single-threaded session usage; confirm
	if ( outputs == null ) {
		outputs = buildOutputs();
	}
	return outputs;
}
/**
 * Interprets the call via the dialect's {@code CallableStatementSupport},
 * prepares the {@code CallableStatement}, registers and binds all parameters,
 * and wraps the statement in a {@code ProcedureOutputsImpl} for execution.
 * On a binding failure the statement is released and the SQLException is
 * converted before rethrowing.
 */
private ProcedureOutputsImpl buildOutputs() {
	// todo : going to need a very specialized Loader for this.
	// or, might be a good time to look at splitting Loader up into:
	//		1) building statement objects
	//		2) executing statement objects
	//		3) processing result sets

	// for now assume there are no resultClasses nor mappings defined
	// TOTAL PROOF-OF-CONCEPT!!!!!!

	// todo : how to identify calls which should be in the form `{? = call procName...}` ??? (note leading param marker)
	// 		more than likely this will need to be a method on the native API.  I can see this as a trigger to
	//		both: (1) add the `? = ` part and also (2) register a REFCURSOR parameter for DBs (Oracle, PGSQL) that
	//		need it.

	final var jdbcServices = getSession().getJdbcServices();
	final var callableStatementSupport =
			jdbcServices.getJdbcEnvironment().getDialect()
					.getCallableStatementSupport();
	final var call = callableStatementSupport.interpretCall( this );
	final var parameterRegistrations = collectParameterRegistrations( call );
	final var refCursorExtractors = collectRefCursorExtractors( call );
	final var jdbcCoordinator = getSession().getJdbcCoordinator();
	final String sqlString = call.getSqlString();
	final var statement =
			jdbcCoordinator.getStatementPreparer()
					.prepareCallableStatement( sqlString );
	try {
		// Register the parameter mode and type
		callableStatementSupport.registerParameters(
				procedureName,
				call,
				statement,
				parameterMetadata,
				getSession()
		);
		final var jdbcParameterBindings = parameterBindings( parameterRegistrations );
		final var executionContext = new OutputsExecutionContext( getSession() );
		// Note that this should actually happen in an executor
		// JDBC positions are 1-based; a function return occupies position 1,
		// pushing the first regular parameter to position 2
		int paramBindingPosition = call.getFunctionReturn() == null ? 1 : 2;
		for ( var parameterBinder : call.getParameterBinders() ) {
			parameterBinder.bindParameterValue(
					statement,
					paramBindingPosition,
					jdbcParameterBindings,
					executionContext
			);
			paramBindingPosition++;
		}
	}
	catch (SQLException e) {
		// release the statement so the logical connection does not leak it
		jdbcCoordinator.getLogicalConnection().getResourceRegistry().release( statement );
		jdbcCoordinator.afterStatementExecution();
		throw jdbcServices.getSqlExceptionHelper().convert(
				e,
				"Error registering CallableStatement parameters",
				procedureName
		);
	}
	return new ProcedureOutputsImpl(
			this,
			parameterRegistrations,
			refCursorExtractors.toArray( new JdbcCallRefCursorExtractor[0] ),
			statement,
			sqlString
	);
}
/**
 * Pairs each registered procedure parameter (plus the function return, when
 * present) with its interpreted JDBC-level registration. Uses identity keys
 * since parameter instances are the canonical handles.
 */
private Map<ProcedureParameter<?>, JdbcCallParameterRegistration> collectParameterRegistrations(JdbcOperationQueryCall call) {
	final Map<ProcedureParameter<?>, JdbcCallParameterRegistration> result = new IdentityHashMap<>();
	final var functionReturnRegistration = call.getFunctionReturn();
	if ( functionReturnRegistration != null ) {
		result.put( functionReturn, functionReturnRegistration );
	}
	// the two lists are positionally aligned
	final var procedureParameters = getParameterMetadata().getRegistrationsAsList();
	final var jdbcRegistrations = call.getParameterRegistrations();
	for ( int position = 0; position < procedureParameters.size(); position++ ) {
		result.put( procedureParameters.get( position ), jdbcRegistrations.get( position ) );
	}
	return result;
}
/**
 * Gathers the ref-cursor extractors from the function return (when present)
 * and from each parameter's JDBC registration, skipping registrations that
 * have none.
 */
private List<JdbcCallRefCursorExtractor> collectRefCursorExtractors(JdbcOperationQueryCall call) {
	final List<JdbcCallRefCursorExtractor> extractors = new ArrayList<>();
	final var functionReturnRegistration = call.getFunctionReturn();
	if ( functionReturnRegistration != null ) {
		final var returnExtractor = functionReturnRegistration.getRefCursorExtractor();
		if ( returnExtractor != null ) {
			extractors.add( returnExtractor );
		}
	}
	// iterate only as many JDBC registrations as there are registered parameters
	final var procedureParameters = getParameterMetadata().getRegistrationsAsList();
	final var jdbcRegistrations = call.getParameterRegistrations();
	for ( int position = 0; position < procedureParameters.size(); position++ ) {
		final var extractor = jdbcRegistrations.get( position ).getRefCursorExtractor();
		if ( extractor != null ) {
			extractors.add( extractor );
		}
	}
	return extractors;
}
/**
 * Translates the user-level parameter bindings into JDBC-level bindings for
 * every registration that has a parameter binder, converting each bound value
 * to its relational form.
 *
 * @throws IllegalArgumentException if a bindable parameter was never set
 */
private JdbcParameterBindings parameterBindings(
		Map<ProcedureParameter<?>, JdbcCallParameterRegistration> parameterRegistrations) {
	final JdbcParameterBindings jdbcParameterBindings =
			new JdbcParameterBindingsImpl( parameterRegistrations.size() );
	for ( var entry : parameterRegistrations.entrySet() ) {
		final var registration = entry.getValue();
		final var parameterBinder = registration.getParameterBinder();
		// registrations without a binder (e.g. pure OUT) need no value binding
		if ( parameterBinder != null ) {
			final var parameter = entry.getKey();
			final var binding = getParameterBindings().getBinding( parameter );
			if ( !binding.isBound() ) {
				// NOTE(review): both messages interpolate the parameter itself;
				// the positional branch likely intended the position -- confirm
				if ( parameter.getPosition() == null ) {
					throw new IllegalArgumentException( "The parameter named [" + parameter + "] was not set! You need to call the setParameter method." );
				}
				else {
					throw new IllegalArgumentException( "The parameter at position [" + parameter + "] was not set! You need to call the setParameter method." );
				}
			}
			final var parameterType = (JdbcMapping) registration.getParameterType();
			jdbcParameterBindings.addBinding(
					(JdbcParameter) parameterBinder,
					new JdbcParameterBindingImpl(
							parameterType,
							parameterType.convertToRelationalValue( binding.getBindValue() )
					)
			);
		}
	}
	return jdbcParameterBindings;
}
@Override
public String getQueryString() {
	// a procedure call has no HQL/JPQL query string
	return null;
}
/**
 * Use this form instead of {@link #getSynchronizedQuerySpaces()} when you want to make sure the
 * underlying Set is instantiated (aka, on add)
 *
 * @return The spaces
 */
protected Set<String> synchronizedQuerySpaces() {
	// lazily created since the no-result constructor leaves it null
	if ( synchronizedQuerySpaces == null ) {
		synchronizedQuerySpaces = new HashSet<>();
	}
	return synchronizedQuerySpaces;
}
// Read-only view; never instantiates the underlying set.
@Override
public Set<String> getSynchronizedQuerySpaces() {
	return synchronizedQuerySpaces == null ? emptySet() : unmodifiableSet( synchronizedQuerySpaces );
}

@Override
public ProcedureCallImplementor<R> addSynchronizedQuerySpace(String querySpace) {
	synchronizedQuerySpaces().add( querySpace );
	return this;
}

// Adds all query spaces (tables) of the named entity.
@Override
public ProcedureCallImplementor<R> addSynchronizedEntityName(String entityName) {
	final var entityDescriptor = getMappingMetamodel().getEntityDescriptor( entityName );
	addSynchronizedQuerySpaces( entityDescriptor );
	return this;
}

protected void addSynchronizedQuerySpaces(EntityPersister persister) {
	synchronizedQuerySpaces().addAll( asList( (String[]) persister.getQuerySpaces() ) );
}

// Adds all query spaces (tables) of the entity mapped by the given class.
@Override
public ProcedureCallImplementor<R> addSynchronizedEntityClass(@SuppressWarnings("rawtypes") Class entityClass) {
	final var entityDescriptor = getMappingMetamodel().getEntityDescriptor( entityClass );
	addSynchronizedQuerySpaces( entityDescriptor );
	return this;
}
@Override
public NamedCallableQueryMemento toMemento(String name) {
return new NamedCallableQueryMementoImpl(
name,
procedureName,
getParameterStrategy(),
toParameterMementos( parameterMetadata ),
// todo (6.0) : result-set-mapping names
null,
// todo (6.0) : result-set-mapping
|
ProcedureCallImpl
|
java
|
spring-projects__spring-data-jpa
|
spring-data-jpa/src/main/java/org/springframework/data/jpa/domain/JpaSort.java
|
{
"start": 1595,
"end": 7919
}
|
class ____ extends Sort {
@Serial private static final long serialVersionUID = 1L;
private JpaSort(Direction direction, List<Path<?, ?>> paths) {
this(Collections.<Order> emptyList(), direction, paths);
}
private JpaSort(List<Order> orders, @Nullable Direction direction, List<Path<?, ?>> paths) {
super(combine(orders, direction, paths));
}
private JpaSort(List<Order> orders) {
super(orders);
}
/**
* Creates a new {@link JpaSort} for the given attributes with the default sort direction.
*
* @param attributes must not be {@literal null} or empty.
*/
public static JpaSort of(Attribute<?, ?>... attributes) {
return new JpaSort(DEFAULT_DIRECTION, Arrays.asList(paths(attributes)));
}
/**
* Creates a new {@link JpaSort} instance with the given {@link Path}s.
*
* @param paths must not be {@literal null} or empty.
*/
public static JpaSort of(JpaSort.Path<?, ?>... paths) {
return new JpaSort(DEFAULT_DIRECTION, Arrays.asList(paths));
}
/**
* Creates a new {@link JpaSort} for the given direction and attributes.
*
* @param direction the sorting direction.
* @param attributes must not be {@literal null} or empty.
*/
public static JpaSort of(Direction direction, Attribute<?, ?>... attributes) {
return new JpaSort(direction, Arrays.asList(paths(attributes)));
}
/**
* Creates a new {@link JpaSort} for the given direction and {@link Path}s.
*
* @param direction the sorting direction.
* @param paths must not be {@literal null} or empty.
*/
public static JpaSort of(Direction direction, Path<?, ?>... paths) {
return new JpaSort(direction, Arrays.asList(paths));
}
/**
* Returns a new {@link JpaSort} with the given sorting criteria added to the current one.
*
* @param direction can be {@literal null}.
* @param attributes must not be {@literal null}.
* @return
*/
@Contract("_, _ -> new")
@CheckReturnValue
public JpaSort and(@Nullable Direction direction, Attribute<?, ?>... attributes) {
Assert.notNull(attributes, "Attributes must not be null");
return and(direction, paths(attributes));
}
/**
* Returns a new {@link JpaSort} with the given sorting criteria added to the current one.
*
* @param direction can be {@literal null}.
* @param paths must not be {@literal null}.
* @return
*/
@Contract("_, _ -> new")
@CheckReturnValue
public JpaSort and(@Nullable Direction direction, Path<?, ?>... paths) {
Assert.notNull(paths, "Paths must not be null");
List<Order> existing = new ArrayList<>();
for (Order order : this) {
existing.add(order);
}
return new JpaSort(existing, direction, Arrays.asList(paths));
}
/**
* Returns a new {@link JpaSort} with the given sorting criteria added to the current one.
*
* @param direction can be {@literal null}.
* @param properties must not be {@literal null} or empty.
* @return
*/
@Contract("_, _ -> new")
@CheckReturnValue
public JpaSort andUnsafe(@Nullable Direction direction, String... properties) {
Assert.notEmpty(properties, "Properties must not be empty");
List<Order> orders = new ArrayList<>();
for (Order order : this) {
orders.add(order);
}
for (String property : properties) {
orders.add(new JpaOrder(direction, property));
}
return new JpaSort(orders, direction, Collections.<Path<?, ?>> emptyList());
}
/**
* Turns the given {@link Attribute}s into {@link Path}s.
*
* @param attributes must not be {@literal null} or empty.
* @return
*/
private static Path<?, ?>[] paths(Attribute<?, ?>[] attributes) {
Assert.notNull(attributes, "Attributes must not be null");
Assert.notEmpty(attributes, "Attributes must not be empty");
Path<?, ?>[] paths = new Path[attributes.length];
for (int i = 0; i < attributes.length; i++) {
paths[i] = path(attributes[i]);
}
return paths;
}
private static List<Order> combine(List<Order> orders, @Nullable Direction direction, List<Path<?, ?>> paths) {
List<Order> result = new ArrayList<>(orders);
for (Path<?, ?> path : paths) {
result.add(new Order(direction, path.toString()));
}
return result;
}
/**
* Creates a new {@link Path} for the given {@link Attribute}.
*
* @param attribute must not be {@literal null}.
* @return
*/
public static <A extends Attribute<T, S>, T, S> Path<T, S> path(A attribute) {
Assert.notNull(attribute, "Attribute must not be null");
return new Path<>(Collections.singletonList(attribute));
}
/**
* Creates a new {@link Path} for the given {@link PluralAttribute}.
*
* @param attribute must not be {@literal null}.
* @return
*/
public static <P extends PluralAttribute<T, ?, S>, T, S> Path<T, S> path(P attribute) {
Assert.notNull(attribute, "Attribute must not be null");
return new Path<>(Collections.singletonList(attribute));
}
/**
* Creates new unsafe {@link JpaSort} based on given properties.
*
* @param properties must not be {@literal null} or empty.
* @return
*/
public static JpaSort unsafe(String... properties) {
return unsafe(Sort.DEFAULT_DIRECTION, properties);
}
/**
* Creates new unsafe {@link JpaSort} based on given {@link Direction} and properties.
*
* @param direction must not be {@literal null}.
* @param properties must not be {@literal null} or empty.
* @return
*/
public static JpaSort unsafe(Direction direction, String... properties) {
Assert.notNull(direction, "Direction must not be null");
Assert.notEmpty(properties, "Properties must not be empty");
Assert.noNullElements(properties, "Properties must not contain null values");
return unsafe(direction, Arrays.asList(properties));
}
/**
* Creates new unsafe {@link JpaSort} based on given {@link Direction} and properties.
*
* @param direction must not be {@literal null}.
* @param properties must not be {@literal null} or empty.
* @return
*/
public static JpaSort unsafe(Direction direction, List<String> properties) {
Assert.notEmpty(properties, "Properties must not be empty");
List<Order> orders = new ArrayList<>(properties.size());
for (String property : properties) {
orders.add(new JpaOrder(direction, property));
}
return new JpaSort(orders);
}
/**
* Value object to abstract a collection of {@link Attribute}s.
*
* @author Oliver Gierke
*/
public static
|
JpaSort
|
java
|
micronaut-projects__micronaut-core
|
test-suite/src/test/java/io/micronaut/test/lombok/BarCommand.java
|
{
"start": 305,
"end": 401
}
|
class ____ {
private String foo;
private String bar;
private String baz;
}
|
BarCommand
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/association/IncidentStatus.java
|
{
"start": 298,
"end": 698
}
|
class ____ {
@Id
String id;
@OneToOne(mappedBy = "incidentStatus")
Incident incident;
public IncidentStatus() {
}
public IncidentStatus(String id) {
this.id = id;
}
public Incident getIncident() {
return incident;
}
public void setIncident(Incident incident) {
this.incident = incident;
}
@Override
public String toString() {
return "IncidentStatus " + id;
}
}
|
IncidentStatus
|
java
|
quarkusio__quarkus
|
extensions/panache/hibernate-orm-rest-data-panache/deployment/src/test/java/io/quarkus/hibernate/orm/rest/data/panache/deployment/security/SecurityAnnotationCombinationsPanacheRepositoryResourceTest.java
|
{
"start": 1478,
"end": 1693
}
|
interface ____ extends PanacheRepositoryResource<PiecesRepository, Piece, Long> {
@RolesAllowed("admin")
boolean delete(Long id);
}
@ApplicationScoped
public static
|
PiecesRepositoryResource
|
java
|
quarkusio__quarkus
|
integration-tests/mongodb-panache/src/main/java/io/quarkus/it/mongodb/panache/book/BookEntity.java
|
{
"start": 550,
"end": 2058
}
|
class ____ extends PanacheMongoEntity {
@BsonProperty("bookTitle")
private String title;
private String author;
@BsonIgnore
private String transientDescription;
@JsonFormat(shape = Shape.STRING, pattern = "yyyy-MM-dd")
private LocalDate creationDate;
private List<String> categories = new ArrayList<>();
private BookDetail details;
public String getTitle() {
return title;
}
public BookEntity setTitle(String title) {
this.title = title;
return this;
}
public String getAuthor() {
return author;
}
public BookEntity setAuthor(String author) {
this.author = author;
return this;
}
public List<String> getCategories() {
return categories;
}
public BookEntity setCategories(List<String> categories) {
this.categories = categories;
return this;
}
public BookDetail getDetails() {
return details;
}
public BookEntity setDetails(BookDetail details) {
this.details = details;
return this;
}
public String getTransientDescription() {
return transientDescription;
}
public void setTransientDescription(String transientDescription) {
this.transientDescription = transientDescription;
}
public LocalDate getCreationDate() {
return creationDate;
}
public void setCreationDate(LocalDate creationDate) {
this.creationDate = creationDate;
}
}
|
BookEntity
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/converted/converter/map/MapKeyConversionTest.java
|
{
"start": 1456,
"end": 1799
}
|
class ____ {
@Id
private Integer id;
@ElementCollection(fetch = FetchType.EAGER)
@CollectionTable( name = "cust_color", joinColumns = @JoinColumn( name = "cust_fk" ) )
private Map<ColorType, String> colors = new HashMap<ColorType, String>();
public Customer() {
}
public Customer(Integer id) {
this.id = id;
}
}
}
|
Customer
|
java
|
dropwizard__dropwizard
|
dropwizard-logging/src/main/java/io/dropwizard/logging/common/socket/DropwizardSocketAppender.java
|
{
"start": 408,
"end": 1486
}
|
class ____<E extends DeferredProcessingAware> extends OutputStreamAppender<E> {
private final String host;
private final int port;
private final int connectionTimeoutMs;
private final int sendBufferSize;
private final SocketFactory socketFactory;
public DropwizardSocketAppender(String host, int port, int connectionTimeoutMs, int sendBufferSize,
SocketFactory socketFactory) {
this.host = host;
this.port = port;
this.connectionTimeoutMs = connectionTimeoutMs;
this.sendBufferSize = sendBufferSize;
this.socketFactory = socketFactory;
}
@Override
public void start() {
setOutputStream(socketOutputStream());
super.start();
}
protected OutputStream socketOutputStream() {
final ResilientSocketOutputStream outputStream = new ResilientSocketOutputStream(host, port,
connectionTimeoutMs, sendBufferSize, socketFactory);
outputStream.setContext(context);
return outputStream;
}
}
|
DropwizardSocketAppender
|
java
|
apache__dubbo
|
dubbo-compatible/src/main/java/com/alibaba/dubbo/config/MethodConfig.java
|
{
"start": 856,
"end": 1240
}
|
class ____ extends org.apache.dubbo.config.MethodConfig {
public void addArgument(com.alibaba.dubbo.config.ArgumentConfig argumentConfig) {
super.addArgument(argumentConfig);
}
public void setMock(Boolean mock) {
if (mock == null) {
setMock((String) null);
} else {
setMock(String.valueOf(mock));
}
}
}
|
MethodConfig
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/streaming/api/operators/collect/CollectStreamSink.java
|
{
"start": 1315,
"end": 1633
}
|
class ____<T> extends DataStreamSink<T> {
public CollectStreamSink(DataStream<T> inputStream, CollectSinkOperatorFactory<T> factory) {
super(
new LegacySinkTransformation<T>(
inputStream.getTransformation(), "Collect Stream Sink", factory, 1));
}
}
|
CollectStreamSink
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/formula/ManyToManyNotIgnoreLazyFetchingTest.java
|
{
"start": 1235,
"end": 3216
}
|
class ____ {
@BeforeAll
public void setUp(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
StockCode code = new StockCode();
code.setId( 1L );
code.setCopeType( CodeType.TYPE_A );
code.setRefNumber( "ABC" );
entityManager.persist( code );
Stock stock1 = new Stock();
stock1.setId( 1L );
stock1.getCodes().add( code );
entityManager.persist( stock1 );
Stock stock2 = new Stock();
stock2.setId( 2L );
entityManager.persist( stock2 );
entityManager.flush();
entityManager.remove( code );
stock1.getCodes().remove( code );
} );
}
@Test
public void testLazyLoading(EntityManagerFactoryScope scope) {
List<Stock> stocks = scope.fromTransaction(
entityManager -> {
List<Stock> list = entityManager.createQuery( "select s from Stock s order by id", Stock.class )
.getResultList();
for ( Stock s : list ) {
assertThat( Hibernate.isInitialized( s.getCodes() ) ).isFalse();
Hibernate.initialize( s.getCodes() );
}
return list;
} );
assertThat( stocks ).hasSize( 2 );
final Stock firstStock = stocks.get( 0 );
final Stock secondStock = stocks.get( 1 );
assertThat( firstStock.getCodes() ).hasSize( 0 );
assertThat( secondStock.getCodes() ).hasSize( 0 );
}
@Test
public void testEagerLoading(EntityManagerFactoryScope scope) {
List<Stock> stocks = scope.fromTransaction(
entityManager -> entityManager
.createQuery( "select s from Stock s left join fetch s.codes order by s.id", Stock.class )
.getResultList()
);
assertThat( stocks ).hasSize( 2 );
for ( Stock s : stocks ) {
assertThat( Hibernate.isInitialized( s.getCodes() ) ).isTrue();
}
final Stock firstStock = stocks.get( 0 );
final Stock secondStock = stocks.get( 1 );
assertThat( firstStock.getCodes() ).hasSize( 0 );
assertThat( secondStock.getCodes() ).hasSize( 0 );
}
@Entity(name = "Stock")
public static
|
ManyToManyNotIgnoreLazyFetchingTest
|
java
|
apache__logging-log4j2
|
log4j-api/src/main/java/org/apache/logging/log4j/util/Constants.java
|
{
"start": 4812,
"end": 5353
}
|
class ____ be found or {@code false} otherwise.
*/
private static boolean isClassAvailable(final String className) {
try {
return LoaderUtil.loadClass(className) != null;
} catch (final Throwable e) {
return false;
}
}
/**
* The empty array.
* @since 2.15.0
*/
public static final Object[] EMPTY_OBJECT_ARRAY = {};
/**
* The empty array.
* @since 2.15.0
*/
public static final byte[] EMPTY_BYTE_ARRAY = {};
/**
* Prevent
|
could
|
java
|
elastic__elasticsearch
|
libs/geo/src/main/java/org/elasticsearch/geometry/Circle.java
|
{
"start": 742,
"end": 3415
}
|
class ____ implements Geometry {
public static final Circle EMPTY = new Circle();
private final double y;
private final double x;
private final double z;
private final double radiusMeters;
private Circle() {
y = 0;
x = 0;
z = Double.NaN;
radiusMeters = -1;
}
public Circle(final double x, final double y, final double radiusMeters) {
this(x, y, Double.NaN, radiusMeters);
}
public Circle(final double x, final double y, final double z, final double radiusMeters) {
this.y = y;
this.x = x;
this.radiusMeters = radiusMeters;
this.z = z;
if (radiusMeters < 0) {
throw new IllegalArgumentException("Circle radius [" + radiusMeters + "] cannot be negative");
}
}
@Override
public ShapeType type() {
return ShapeType.CIRCLE;
}
public double getY() {
return y;
}
public double getX() {
return x;
}
public double getRadiusMeters() {
return radiusMeters;
}
public double getZ() {
return z;
}
public double getLat() {
return y;
}
public double getLon() {
return x;
}
public double getAlt() {
return z;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Circle circle = (Circle) o;
if (Double.compare(circle.y, y) != 0) return false;
if (Double.compare(circle.x, x) != 0) return false;
if (Double.compare(circle.radiusMeters, radiusMeters) != 0) return false;
return (Double.compare(circle.z, z) == 0);
}
@Override
public int hashCode() {
int result;
long temp;
temp = Double.doubleToLongBits(y);
result = (int) (temp ^ (temp >>> 32));
temp = Double.doubleToLongBits(x);
result = 31 * result + (int) (temp ^ (temp >>> 32));
temp = Double.doubleToLongBits(radiusMeters);
result = 31 * result + (int) (temp ^ (temp >>> 32));
temp = Double.doubleToLongBits(z);
result = 31 * result + (int) (temp ^ (temp >>> 32));
return result;
}
@Override
public <T, E extends Exception> T visit(GeometryVisitor<T, E> visitor) throws E {
return visitor.visit(this);
}
@Override
public boolean isEmpty() {
return radiusMeters < 0;
}
@Override
public String toString() {
return WellKnownText.toWKT(this);
}
@Override
public boolean hasZ() {
return Double.isNaN(z) == false;
}
}
|
Circle
|
java
|
spring-cloud__spring-cloud-gateway
|
spring-cloud-gateway-server-webflux/src/test/java/org/springframework/cloud/gateway/actuate/GatewayControllerEndpointRedisRefreshTest.java
|
{
"start": 2418,
"end": 5241
}
|
/**
 * Integration test: a route definition with CORS metadata is pushed through the
 * gateway actuator endpoints (backed by Redis) and, after a refresh, the CORS
 * configuration — including a changed allowed origin — must be reflected both
 * in the route metadata and in actual preflight responses.
 *
 * <p>Fix: restored the class name in place of the {@code ____} placeholder.</p>
 */
class GatewayControllerEndpointRedisRefreshTest {

    @Container
    public static GenericContainer redis = new GenericContainer<>("redis:5.0.14-alpine").withExposedPorts(6379);

    @BeforeAll
    public static void startRedisContainer() {
        redis.start();
    }

    /** Points spring.data.redis at the Testcontainers-managed Redis instance. */
    @DynamicPropertySource
    static void containerProperties(DynamicPropertyRegistry registry) {
        registry.add("spring.data.redis.host", redis::getHost);
        registry.add("spring.data.redis.port", redis::getFirstMappedPort);
    }

    @Autowired
    WebTestClient testClient;

    @LocalServerPort
    int port;

    @Test
    public void testCorsConfigurationAfterReload() {
        Map<String, Object> cors = new HashMap<>();
        cors.put("allowCredentials", false);
        cors.put("allowedOrigins", "*");
        cors.put("allowedMethods", "GET");

        createOrUpdateRouteWithCors(cors);
        // The refresh is applied asynchronously; poll until it takes effect.
        Awaitility.await().atMost(Duration.ofSeconds(3)).untilAsserted(() -> assertRouteHasCorsConfig(cors));
        Awaitility.await().atMost(Duration.ofSeconds(3)).untilAsserted(() -> assertPreflightAllowOrigin("*"));

        // Narrow the allowed origin and verify the change propagates too.
        cors.put("allowedOrigins", "http://example.org");
        createOrUpdateRouteWithCors(cors);
        Awaitility.await().atMost(Duration.ofSeconds(3)).untilAsserted(() -> assertRouteHasCorsConfig(cors));
        Awaitility.await()
            .atMost(Duration.ofSeconds(3))
            .untilAsserted(() -> assertPreflightAllowOrigin("http://example.org"));
    }

    /** Creates/updates the test route carrying the CORS metadata, then triggers a gateway refresh. */
    void createOrUpdateRouteWithCors(Map<String, Object> cors) {
        RouteDefinition testRouteDefinition = new RouteDefinition();
        testRouteDefinition.setUri(URI.create("http://example.org"));
        PredicateDefinition methodRoutePredicateDefinition = new PredicateDefinition("Method=GET");
        testRouteDefinition.setPredicates(List.of(methodRoutePredicateDefinition));
        testRouteDefinition.setMetadata(Map.of("cors", cors));
        testClient.post()
            .uri("http://localhost:" + port + "/actuator/gateway/routes/cors-test-route")
            .accept(MediaType.APPLICATION_JSON)
            .body(BodyInserters.fromValue(testRouteDefinition))
            .exchange()
            .expectStatus()
            .isCreated();
        testClient.post()
            .uri("http://localhost:" + port + "/actuator/gateway/refresh")
            .exchange()
            .expectStatus()
            .isOk();
    }

    /** Asserts the stored route's metadata contains exactly the given CORS map. */
    void assertRouteHasCorsConfig(Map<String, Object> cors) {
        testClient.get()
            .uri("http://localhost:" + port + "/actuator/gateway/routes/cors-test-route")
            .exchange()
            .expectStatus()
            .isOk()
            .expectBody()
            .jsonPath("$.metadata")
            .value(map -> assertThat((Map<String, Object>) map).hasSize(1).containsEntry("cors", cors));
    }

    /** Issues a CORS preflight and asserts the allowed origin echoed back. */
    void assertPreflightAllowOrigin(String origin) {
        testClient.options()
            .uri("http://localhost:" + port + "/")
            .header("Origin", "http://example.org")
            .header("Access-Control-Request-Method", "GET")
            .exchange()
            .expectStatus()
            .isOk()
            .expectHeader()
            .valueEquals("Access-Control-Allow-Origin", origin);
    }
}
|
GatewayControllerEndpointRedisRefreshTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/tool/CommandLineHttpClientTests.java
|
{
"start": 1507,
"end": 1658
}
|
class ____ {@link CommandLineHttpClient} For extensive tests related to
* ssl settings can be found {@link SSLConfigurationSettingsTests}
*/
public
|
tests
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/hql/CollectionMapWithComponentValueTest.java
|
{
"start": 9644,
"end": 9805
}
|
class ____ {
@Column(name = "val")
Integer value;
EmbeddableValue() {
}
EmbeddableValue(Integer value) {
this.value = value;
}
}
}
|
EmbeddableValue
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/component/seda/SedaInOnlyTest.java
|
{
"start": 978,
"end": 1571
}
|
/**
 * Verifies in-only (fire-and-forget) messaging through a seda endpoint:
 * a message sent to direct:start is bridged via seda:foo to mock:result.
 *
 * <p>Fix: restored the class name in place of the {@code ____} placeholder.</p>
 */
class SedaInOnlyTest extends ContextTestSupport {

    @Test
    public void testInOnly() throws Exception {
        // The body must arrive unchanged at the far end of the seda queue.
        getMockEndpoint("mock:result").expectedBodiesReceived("Hello World");

        template.sendBody("direct:start", "Hello World");

        assertMockEndpointsSatisfied();
    }

    @Override
    protected RouteBuilder createRouteBuilder() {
        return new RouteBuilder() {
            @Override
            public void configure() {
                // Producer route feeds the seda queue; a second route consumes it.
                from("direct:start").to("seda:foo");

                from("seda:foo").to("mock:result");
            }
        };
    }
}
|
SedaInOnlyTest
|
java
|
spring-projects__spring-framework
|
spring-jms/src/main/java/org/springframework/jms/listener/endpoint/JmsActivationSpecFactory.java
|
{
"start": 1389,
"end": 1883
}
|
/**
 * Strategy interface for converting a provider-neutral
 * {@link JmsActivationSpecConfig} into a provider-specific JCA 1.5
 * {@code ActivationSpec} for a given {@code ResourceAdapter}.
 *
 * <p>Fix: restored the interface name in place of the {@code ____} placeholder.</p>
 */
interface JmsActivationSpecFactory {

    /**
     * Create a JCA 1.5 ActivationSpec object based on the given
     * {@link JmsActivationSpecConfig} object.
     * @param adapter the ResourceAdapter to create an ActivationSpec object for
     * @param config the configured object holding common JMS settings
     * @return the provider-specific JCA ActivationSpec object,
     * representing the same settings
     */
    ActivationSpec createActivationSpec(ResourceAdapter adapter, JmsActivationSpecConfig config);
}
|
JmsActivationSpecFactory
|
java
|
eclipse-vertx__vert.x
|
vertx-core/src/main/java/io/vertx/core/datagram/impl/DatagramSocketImpl.java
|
{
"start": 12253,
"end": 13856
}
|
class ____ extends VertxConnection {
public Connection(ContextInternal context, ChannelHandlerContext channel) {
super(context, channel);
}
@Override
public NetworkMetrics metrics() {
return metrics;
}
@Override
protected boolean handleException(Throwable t) {
super.handleException(t);
Handler<Throwable> handler;
synchronized (DatagramSocketImpl.this) {
handler = exceptionHandler;
}
if (handler != null) {
handler.handle(t);
}
return true;
}
@Override
protected void handleClosed() {
super.handleClosed();
DatagramSocketMetrics metrics;
synchronized (DatagramSocketImpl.this) {
metrics = DatagramSocketImpl.this.metrics;
}
if (metrics != null) {
metrics.close();
}
}
public void handleMessage(Object msg) {
if (msg instanceof DatagramPacket) {
DatagramPacket packet = (DatagramPacket) msg;
ByteBuf content = packet.content();
Buffer buffer = BufferInternal.safeBuffer(content);
handlePacket(new DatagramPacketImpl(packet.sender(), buffer));
}
}
void handlePacket(io.vertx.core.datagram.DatagramPacket packet) {
Handler<io.vertx.core.datagram.DatagramPacket> handler;
synchronized (DatagramSocketImpl.this) {
if (metrics != null) {
metrics.bytesRead(null, packet.sender(), packet.data().length());
}
handler = packetHandler;
}
if (handler != null) {
context.emit(packet, handler);
}
}
}
}
|
Connection
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java
|
{
"start": 2602,
"end": 19474
}
|
/**
 * Tests for the DFS query phase of distributed search: after DFS term
 * statistics are gathered from each shard, the query phase must run per shard,
 * tolerate single-shard failures, fail the whole phase on unexpected
 * exceptions, and rewrite shard requests for kNN/rank queries.
 *
 * <p>Fix: restored the class name in place of the {@code ____} placeholder.</p>
 */
class DfsQueryPhaseTests extends ESTestCase {

    /** Builds a DFS result for the given shard index/context/target with a minimal shard request. */
    private static DfsSearchResult newSearchResult(int shardIndex, ShardSearchContextId contextId, SearchShardTarget target) {
        DfsSearchResult result = new DfsSearchResult(contextId, target, null);
        result.setShardIndex(shardIndex);
        result.setShardSearchRequest(new ShardSearchRequest(target.getShardId(), System.currentTimeMillis(), AliasFilter.EMPTY));
        return result;
    }

    /** Happy path: both shards answer and both query results are collected. */
    public void testDfsWith2Shards() throws IOException {
        AtomicArray<DfsSearchResult> results = new AtomicArray<>(2);
        AtomicReference<AtomicArray<SearchPhaseResult>> responseRef = new AtomicReference<>();
        results.set(
            0,
            newSearchResult(0, new ShardSearchContextId("", 1), new SearchShardTarget("node1", new ShardId("test", "na", 0), null))
        );
        results.set(
            1,
            newSearchResult(1, new ShardSearchContextId("", 2), new SearchShardTarget("node2", new ShardId("test", "na", 0), null))
        );
        results.get(0).termsStatistics(new Term[0], new TermStatistics[0]);
        results.get(1).termsStatistics(new Term[0], new TermStatistics[0]);
        // Stubbed transport: answers per context id with a canned top-docs result.
        SearchTransportService searchTransportService = new SearchTransportService(null, null, null) {
            @Override
            public void sendExecuteQuery(
                Transport.Connection connection,
                QuerySearchRequest request,
                SearchTask task,
                ActionListener<QuerySearchResult> listener
            ) {
                if (request.contextId().getId() == 1) {
                    QuerySearchResult queryResult = new QuerySearchResult(
                        new ShardSearchContextId("", 123),
                        new SearchShardTarget("node1", new ShardId("test", "na", 0), null),
                        null
                    );
                    try {
                        queryResult.topDocs(
                            new TopDocsAndMaxScore(
                                new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] { new ScoreDoc(42, 1.0F) }),
                                2.0F
                            ),
                            new DocValueFormat[0]
                        );
                        queryResult.size(2); // the size of the result set
                        listener.onResponse(queryResult);
                    } finally {
                        queryResult.decRef();
                    }
                } else if (request.contextId().getId() == 2) {
                    QuerySearchResult queryResult = new QuerySearchResult(
                        new ShardSearchContextId("", 123),
                        new SearchShardTarget("node2", new ShardId("test", "na", 0), null),
                        null
                    );
                    try {
                        queryResult.topDocs(
                            new TopDocsAndMaxScore(
                                new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] { new ScoreDoc(84, 2.0F) }),
                                2.0F
                            ),
                            new DocValueFormat[0]
                        );
                        queryResult.size(2); // the size of the result set
                        listener.onResponse(queryResult);
                    } finally {
                        queryResult.decRef();
                    }
                } else {
                    fail("no such request ID: " + request.contextId());
                }
            }
        };
        SearchPhaseController searchPhaseController = searchPhaseController();
        MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2);
        mockSearchPhaseContext.searchTransport = searchTransportService;
        try (
            SearchPhaseResults<SearchPhaseResult> consumer = searchPhaseController.newSearchPhaseResults(
                EsExecutors.DIRECT_EXECUTOR_SERVICE,
                new NoopCircuitBreaker(CircuitBreaker.REQUEST),
                () -> false,
                SearchProgressListener.NOOP,
                mockSearchPhaseContext.getRequest(),
                results.length(),
                exc -> {}
            )
        ) {
            DfsQueryPhase phase = makeDfsPhase(results, consumer, mockSearchPhaseContext, responseRef);
            assertEquals("dfs_query", phase.getName());
            phase.run();
            mockSearchPhaseContext.assertNoFailure();
            assertNotNull(responseRef.get());
            assertNotNull(responseRef.get().get(0));
            assertNull(responseRef.get().get(0).fetchResult());
            assertEquals(1, responseRef.get().get(0).queryResult().topDocs().topDocs.totalHits.value());
            assertEquals(42, responseRef.get().get(0).queryResult().topDocs().topDocs.scoreDocs[0].doc);
            assertNotNull(responseRef.get().get(1));
            assertNull(responseRef.get().get(1).fetchResult());
            assertEquals(1, responseRef.get().get(1).queryResult().topDocs().topDocs.totalHits.value());
            assertEquals(84, responseRef.get().get(1).queryResult().topDocs().topDocs.scoreDocs[0].doc);
            assertTrue(mockSearchPhaseContext.releasedSearchContexts.isEmpty());
            assertEquals(2, mockSearchPhaseContext.numSuccess.get());
            mockSearchPhaseContext.results.close();
        }
    }

    /** One shard fails with an IOException: the phase continues, records the failure and frees the failed context. */
    public void testDfsWith1ShardFailed() throws IOException {
        AtomicArray<DfsSearchResult> results = new AtomicArray<>(2);
        AtomicReference<AtomicArray<SearchPhaseResult>> responseRef = new AtomicReference<>();
        results.set(
            0,
            newSearchResult(0, new ShardSearchContextId("", 1), new SearchShardTarget("node1", new ShardId("test", "na", 0), null))
        );
        results.set(
            1,
            newSearchResult(1, new ShardSearchContextId("", 2), new SearchShardTarget("node2", new ShardId("test", "na", 0), null))
        );
        results.get(0).termsStatistics(new Term[0], new TermStatistics[0]);
        results.get(1).termsStatistics(new Term[0], new TermStatistics[0]);
        SearchTransportService searchTransportService = new SearchTransportService(null, null, null) {
            @Override
            public void sendExecuteQuery(
                Transport.Connection connection,
                QuerySearchRequest request,
                SearchTask task,
                ActionListener<QuerySearchResult> listener
            ) {
                if (request.contextId().getId() == 1) {
                    QuerySearchResult queryResult = new QuerySearchResult(
                        new ShardSearchContextId("", 123),
                        new SearchShardTarget("node1", new ShardId("test", "na", 0), null),
                        null
                    );
                    try {
                        queryResult.topDocs(
                            new TopDocsAndMaxScore(
                                new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] { new ScoreDoc(42, 1.0F) }),
                                2.0F
                            ),
                            new DocValueFormat[0]
                        );
                        queryResult.size(2); // the size of the result set
                        listener.onResponse(queryResult);
                    } finally {
                        queryResult.decRef();
                    }
                } else if (request.contextId().getId() == 2) {
                    // Second shard fails with a "recoverable" IO error.
                    listener.onFailure(new MockDirectoryWrapper.FakeIOException());
                } else {
                    fail("no such request ID: " + request.contextId());
                }
            }
        };
        SearchPhaseController searchPhaseController = searchPhaseController();
        MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2);
        mockSearchPhaseContext.searchTransport = searchTransportService;
        try (
            SearchPhaseResults<SearchPhaseResult> consumer = searchPhaseController.newSearchPhaseResults(
                EsExecutors.DIRECT_EXECUTOR_SERVICE,
                new NoopCircuitBreaker(CircuitBreaker.REQUEST),
                () -> false,
                SearchProgressListener.NOOP,
                mockSearchPhaseContext.getRequest(),
                results.length(),
                exc -> {}
            )
        ) {
            DfsQueryPhase phase = makeDfsPhase(results, consumer, mockSearchPhaseContext, responseRef);
            assertEquals("dfs_query", phase.getName());
            phase.run();
            mockSearchPhaseContext.assertNoFailure();
            assertNotNull(responseRef.get());
            assertNotNull(responseRef.get().get(0));
            assertNull(responseRef.get().get(0).fetchResult());
            assertEquals(1, responseRef.get().get(0).queryResult().topDocs().topDocs.totalHits.value());
            assertEquals(42, responseRef.get().get(0).queryResult().topDocs().topDocs.scoreDocs[0].doc);
            assertNull(responseRef.get().get(1));
            assertEquals(1, mockSearchPhaseContext.numSuccess.get());
            assertEquals(1, mockSearchPhaseContext.failures.size());
            assertTrue(mockSearchPhaseContext.failures.get(0).getCause() instanceof MockDirectoryWrapper.FakeIOException);
            // The failed shard's search context must be released exactly once.
            assertEquals(1, mockSearchPhaseContext.releasedSearchContexts.size());
            assertTrue(mockSearchPhaseContext.releasedSearchContexts.contains(new ShardSearchContextId("", 2L)));
            assertNull(responseRef.get().get(1));
            mockSearchPhaseContext.results.close();
        }
    }

    /** An unexpected runtime exception from a shard fails the whole phase. */
    public void testFailPhaseOnException() throws IOException {
        AtomicArray<DfsSearchResult> results = new AtomicArray<>(2);
        AtomicReference<AtomicArray<SearchPhaseResult>> responseRef = new AtomicReference<>();
        results.set(
            0,
            newSearchResult(0, new ShardSearchContextId("", 1), new SearchShardTarget("node1", new ShardId("test", "na", 0), null))
        );
        results.set(
            1,
            newSearchResult(1, new ShardSearchContextId("", 2), new SearchShardTarget("node2", new ShardId("test", "na", 0), null))
        );
        results.get(0).termsStatistics(new Term[0], new TermStatistics[0]);
        results.get(1).termsStatistics(new Term[0], new TermStatistics[0]);
        SearchTransportService searchTransportService = new SearchTransportService(null, null, null) {
            @Override
            public void sendExecuteQuery(
                Transport.Connection connection,
                QuerySearchRequest request,
                SearchTask task,
                ActionListener<QuerySearchResult> listener
            ) {
                if (request.contextId().getId() == 1) {
                    QuerySearchResult queryResult = new QuerySearchResult(
                        new ShardSearchContextId("", 123),
                        new SearchShardTarget("node1", new ShardId("test", "na", 0), null),
                        null
                    );
                    try {
                        queryResult.topDocs(
                            new TopDocsAndMaxScore(
                                new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] { new ScoreDoc(42, 1.0F) }),
                                2.0F
                            ),
                            new DocValueFormat[0]
                        );
                        queryResult.size(2); // the size of the result set
                        listener.onResponse(queryResult);
                    } finally {
                        queryResult.decRef();
                    }
                } else if (request.contextId().getId() == 2) {
                    // Unchecked wrapper — treated as an unexpected error, not a shard failure.
                    listener.onFailure(new UncheckedIOException(new MockDirectoryWrapper.FakeIOException()));
                } else {
                    fail("no such request ID: " + request.contextId());
                }
            }
        };
        SearchPhaseController searchPhaseController = searchPhaseController();
        MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2);
        mockSearchPhaseContext.searchTransport = searchTransportService;
        try (
            SearchPhaseResults<SearchPhaseResult> consumer = searchPhaseController.newSearchPhaseResults(
                EsExecutors.DIRECT_EXECUTOR_SERVICE,
                new NoopCircuitBreaker(CircuitBreaker.REQUEST),
                () -> false,
                SearchProgressListener.NOOP,
                mockSearchPhaseContext.getRequest(),
                results.length(),
                exc -> {}
            )
        ) {
            DfsQueryPhase phase = makeDfsPhase(results, consumer, mockSearchPhaseContext, responseRef);
            assertEquals("dfs_query", phase.getName());
            phase.run();
            assertThat(mockSearchPhaseContext.failures, hasSize(1));
            assertThat(mockSearchPhaseContext.failures.get(0).getCause(), instanceOf(UncheckedIOException.class));
            assertThat(mockSearchPhaseContext.releasedSearchContexts, hasSize(1)); // phase execution will clean up on the contexts
            mockSearchPhaseContext.results.close();
        }
    }

    /** Wires a DfsQueryPhase whose next phase just captures the consumer's results. */
    private static DfsQueryPhase makeDfsPhase(
        AtomicArray<DfsSearchResult> results,
        SearchPhaseResults<SearchPhaseResult> consumer,
        MockSearchPhaseContext mockSearchPhaseContext,
        AtomicReference<AtomicArray<SearchPhaseResult>> responseRef
    ) {
        int shards = mockSearchPhaseContext.numShards;
        for (int i = 0; i < shards; i++) {
            mockSearchPhaseContext.results.getAtomicArray().set(i, results.get(i));
        }
        return new DfsQueryPhase(consumer, null, mockSearchPhaseContext) {
            @Override
            protected SearchPhase nextPhase(AggregatedDfs dfs) {
                return new SearchPhase("test") {
                    @Override
                    public void run() {
                        responseRef.set(((QueryPhaseResultConsumer) consumer).results);
                    }
                };
            }
        };
    }

    /** kNN + rank: DFS results must be rewritten into per-field KnnScoreDocQueryBuilders for this shard. */
    public void testRewriteShardSearchRequestWithRank() {
        List<DfsKnnResults> dkrs = List.of(
            new DfsKnnResults(null, new ScoreDoc[] { new ScoreDoc(1, 3.0f, 1), new ScoreDoc(4, 1.5f, 1), new ScoreDoc(7, 0.1f, 2) }),
            new DfsKnnResults(
                null,
                new ScoreDoc[] { new ScoreDoc(2, 1.75f, 2), new ScoreDoc(1, 2.0f, 1), new ScoreDoc(3, 0.25f, 2), new ScoreDoc(6, 2.5f, 2) }
            )
        );
        MockSearchPhaseContext mspc = new MockSearchPhaseContext(2);
        mspc.searchTransport = new SearchTransportService(null, null, null);
        DfsQueryPhase dqp = new DfsQueryPhase(mock(QueryPhaseResultConsumer.class), null, mspc);
        QueryBuilder bm25 = new TermQueryBuilder("field", "term");
        SearchSourceBuilder ssb = new SearchSourceBuilder().query(bm25)
            .knnSearch(
                List.of(
                    new KnnSearchBuilder("vector", new float[] { 0.0f }, 10, 100, 10f, null, null),
                    new KnnSearchBuilder("vector2", new float[] { 0.0f }, 10, 100, 10f, null, null)
                )
            )
            .rankBuilder(new TestRankBuilder(100));
        SearchRequest sr = new SearchRequest().allowPartialSearchResults(true).source(ssb);
        ShardSearchRequest ssr = new ShardSearchRequest(null, sr, new ShardId("test", "testuuid", 1), 1, 1, null, 1.0f, 0, null);
        dqp.rewriteShardSearchRequest(dkrs, ssr);
        // Only score docs belonging to this shard (shard index 1) survive the rewrite.
        KnnScoreDocQueryBuilder ksdqb0 = new KnnScoreDocQueryBuilder(
            new ScoreDoc[] { new ScoreDoc(1, 3.0f, 1), new ScoreDoc(4, 1.5f, 1) },
            "vector",
            VectorData.fromFloats(new float[] { 0.0f }),
            null,
            List.of()
        );
        KnnScoreDocQueryBuilder ksdqb1 = new KnnScoreDocQueryBuilder(
            new ScoreDoc[] { new ScoreDoc(1, 2.0f, 1) },
            "vector2",
            VectorData.fromFloats(new float[] { 0.0f }),
            null,
            List.of()
        );
        assertEquals(
            List.of(bm25, ksdqb0, ksdqb1),
            List.of(
                ssr.source().subSearches().get(0).getQueryBuilder(),
                ssr.source().subSearches().get(1).getQueryBuilder(),
                ssr.source().subSearches().get(2).getQueryBuilder()
            )
        );
        mspc.results.close();
    }

    /** A SearchPhaseController with an empty aggregation reduce context. */
    private SearchPhaseController searchPhaseController() {
        return new SearchPhaseController((task, request) -> InternalAggregationTestCase.emptyReduceContextBuilder());
    }
}
|
DfsQueryPhaseTests
|
java
|
netty__netty
|
common/src/main/java/io/netty/util/ResourceLeakException.java
|
{
"start": 779,
"end": 1974
}
|
class ____ extends RuntimeException {
private static final long serialVersionUID = 7186453858343358280L;
private final StackTraceElement[] cachedStackTrace;
public ResourceLeakException() {
cachedStackTrace = getStackTrace();
}
public ResourceLeakException(String message) {
super(message);
cachedStackTrace = getStackTrace();
}
public ResourceLeakException(String message, Throwable cause) {
super(message, cause);
cachedStackTrace = getStackTrace();
}
public ResourceLeakException(Throwable cause) {
super(cause);
cachedStackTrace = getStackTrace();
}
@Override
public int hashCode() {
int hashCode = 0;
for (StackTraceElement e: cachedStackTrace) {
hashCode = hashCode * 31 + e.hashCode();
}
return hashCode;
}
@Override
public boolean equals(Object o) {
if (!(o instanceof ResourceLeakException)) {
return false;
}
if (o == this) {
return true;
}
return Arrays.equals(cachedStackTrace, ((ResourceLeakException) o).cachedStackTrace);
}
}
|
ResourceLeakException
|
java
|
google__guava
|
android/guava-tests/test/com/google/common/collect/TreeRangeSetTest.java
|
{
"start": 1224,
"end": 24975
}
|
class ____ extends AbstractRangeSetTest {
// TODO(cpovirk): test all of these with the ranges added in the reverse order
private static final ImmutableList<Range<Integer>> QUERY_RANGES;
private static final int MIN_BOUND = -1;
private static final int MAX_BOUND = 1;
static {
ImmutableList.Builder<Range<Integer>> queryBuilder = ImmutableList.builder();
queryBuilder.add(Range.<Integer>all());
for (int i = MIN_BOUND; i <= MAX_BOUND; i++) {
for (BoundType boundType : BoundType.values()) {
queryBuilder.add(Range.upTo(i, boundType));
queryBuilder.add(Range.downTo(i, boundType));
}
queryBuilder.add(Range.singleton(i));
queryBuilder.add(Range.openClosed(i, i));
queryBuilder.add(Range.closedOpen(i, i));
for (BoundType lowerBoundType : BoundType.values()) {
for (int j = i + 1; j <= MAX_BOUND; j++) {
for (BoundType upperBoundType : BoundType.values()) {
queryBuilder.add(Range.range(i, lowerBoundType, j, upperBoundType));
}
}
}
}
QUERY_RANGES = queryBuilder.build();
}
void testViewAgainstExpected(RangeSet<Integer> expected, RangeSet<Integer> view) {
assertEquals(expected, view);
assertEquals(expected.asRanges(), view.asRanges());
assertEquals(expected.isEmpty(), view.isEmpty());
if (!expected.isEmpty()) {
assertEquals(expected.span(), view.span());
}
for (int i = MIN_BOUND - 1; i <= MAX_BOUND + 1; i++) {
assertEquals(expected.contains(i), view.contains(i));
assertEquals(expected.rangeContaining(i), view.rangeContaining(i));
}
testEnclosing(view);
if (view instanceof TreeRangeSet) {
testRangesByLowerBounds((TreeRangeSet<Integer>) view, expected.asRanges());
}
}
private static final ImmutableList<Cut<Integer>> CUTS_TO_TEST;
static {
List<Cut<Integer>> cutsToTest = new ArrayList<>();
for (int i = MIN_BOUND - 1; i <= MAX_BOUND + 1; i++) {
cutsToTest.add(Cut.belowValue(i));
cutsToTest.add(Cut.aboveValue(i));
}
cutsToTest.add(Cut.<Integer>aboveAll());
cutsToTest.add(Cut.<Integer>belowAll());
CUTS_TO_TEST = ImmutableList.copyOf(cutsToTest);
}
private void testRangesByLowerBounds(
TreeRangeSet<Integer> rangeSet, Iterable<Range<Integer>> expectedRanges) {
NavigableMap<Cut<Integer>, Range<Integer>> expectedRangesByLowerBound = Maps.newTreeMap();
for (Range<Integer> range : expectedRanges) {
expectedRangesByLowerBound.put(range.lowerBound, range);
}
NavigableMap<Cut<Integer>, Range<Integer>> rangesByLowerBound = rangeSet.rangesByLowerBound;
testNavigationAgainstExpected(expectedRangesByLowerBound, rangesByLowerBound, CUTS_TO_TEST);
}
<K, V> void testNavigationAgainstExpected(
NavigableMap<K, V> expected, NavigableMap<K, V> navigableMap, Iterable<K> keysToTest) {
for (K key : keysToTest) {
assertEquals(expected.lowerEntry(key), navigableMap.lowerEntry(key));
assertEquals(expected.floorEntry(key), navigableMap.floorEntry(key));
assertEquals(expected.ceilingEntry(key), navigableMap.ceilingEntry(key));
assertEquals(expected.higherEntry(key), navigableMap.higherEntry(key));
for (boolean inclusive : new boolean[] {false, true}) {
assertThat(navigableMap.headMap(key, inclusive).entrySet())
.containsExactlyElementsIn(expected.headMap(key, inclusive).entrySet())
.inOrder();
assertThat(navigableMap.tailMap(key, inclusive).entrySet())
.containsExactlyElementsIn(expected.tailMap(key, inclusive).entrySet())
.inOrder();
assertThat(navigableMap.headMap(key, inclusive).descendingMap().entrySet())
.containsExactlyElementsIn(expected.headMap(key, inclusive).descendingMap().entrySet())
.inOrder();
assertThat(navigableMap.tailMap(key, inclusive).descendingMap().entrySet())
.containsExactlyElementsIn(expected.tailMap(key, inclusive).descendingMap().entrySet())
.inOrder();
}
}
}
public void testIntersects(RangeSet<Integer> rangeSet) {
for (Range<Integer> query : QUERY_RANGES) {
boolean expectIntersect = false;
for (Range<Integer> expectedRange : rangeSet.asRanges()) {
if (expectedRange.isConnected(query) && !expectedRange.intersection(query).isEmpty()) {
expectIntersect = true;
break;
}
}
assertEquals(
rangeSet + " was incorrect on intersects(" + query + ")",
expectIntersect,
rangeSet.intersects(query));
}
}
public void testEnclosing(RangeSet<Integer> rangeSet) {
assertTrue(rangeSet.enclosesAll(ImmutableList.<Range<Integer>>of()));
for (Range<Integer> query : QUERY_RANGES) {
boolean expectEnclose = false;
for (Range<Integer> expectedRange : rangeSet.asRanges()) {
if (expectedRange.encloses(query)) {
expectEnclose = true;
break;
}
}
assertEquals(
rangeSet + " was incorrect on encloses(" + query + ")",
expectEnclose,
rangeSet.encloses(query));
assertEquals(
rangeSet + " was incorrect on enclosesAll([" + query + "])",
expectEnclose,
rangeSet.enclosesAll(ImmutableList.of(query)));
}
}
public void testAllSingleRangesComplementAgainstRemove() {
for (Range<Integer> range : QUERY_RANGES) {
TreeRangeSet<Integer> rangeSet = TreeRangeSet.create();
rangeSet.add(range);
TreeRangeSet<Integer> complement = TreeRangeSet.create();
complement.add(Range.<Integer>all());
complement.remove(range);
assertEquals(complement, rangeSet.complement());
assertThat(rangeSet.complement().asRanges())
.containsExactlyElementsIn(complement.asRanges())
.inOrder();
}
}
public void testInvariantsEmpty() {
testInvariants(TreeRangeSet.create());
}
public void testEmptyIntersecting() {
testIntersects(TreeRangeSet.<Integer>create());
testIntersects(TreeRangeSet.<Integer>create().complement());
}
public void testAllSingleRangesIntersecting() {
for (Range<Integer> range : QUERY_RANGES) {
TreeRangeSet<Integer> rangeSet = TreeRangeSet.create();
rangeSet.add(range);
testIntersects(rangeSet);
testIntersects(rangeSet.complement());
}
}
public void testAllTwoRangesIntersecting() {
for (Range<Integer> range1 : QUERY_RANGES) {
for (Range<Integer> range2 : QUERY_RANGES) {
TreeRangeSet<Integer> rangeSet = TreeRangeSet.create();
rangeSet.add(range1);
rangeSet.add(range2);
testIntersects(rangeSet);
testIntersects(rangeSet.complement());
}
}
}
public void testEmptyEnclosing() {
testEnclosing(TreeRangeSet.<Integer>create());
testEnclosing(TreeRangeSet.<Integer>create().complement());
}
public void testAllSingleRangesEnclosing() {
for (Range<Integer> range : QUERY_RANGES) {
TreeRangeSet<Integer> rangeSet = TreeRangeSet.create();
rangeSet.add(range);
testEnclosing(rangeSet);
testEnclosing(rangeSet.complement());
}
}
public void testAllTwoRangesEnclosing() {
for (Range<Integer> range1 : QUERY_RANGES) {
for (Range<Integer> range2 : QUERY_RANGES) {
TreeRangeSet<Integer> rangeSet = TreeRangeSet.create();
rangeSet.add(range1);
rangeSet.add(range2);
testEnclosing(rangeSet);
testEnclosing(rangeSet.complement());
}
}
}
public void testCreateCopy() {
for (Range<Integer> range1 : QUERY_RANGES) {
for (Range<Integer> range2 : QUERY_RANGES) {
TreeRangeSet<Integer> rangeSet = TreeRangeSet.create();
rangeSet.add(range1);
rangeSet.add(range2);
assertEquals(rangeSet, TreeRangeSet.create(rangeSet));
}
}
}
private RangeSet<Integer> expectedSubRangeSet(
RangeSet<Integer> rangeSet, Range<Integer> subRange) {
RangeSet<Integer> expected = TreeRangeSet.create();
for (Range<Integer> range : rangeSet.asRanges()) {
if (range.isConnected(subRange)) {
expected.add(range.intersection(subRange));
}
}
return expected;
}
private RangeSet<Integer> expectedComplement(RangeSet<Integer> rangeSet) {
RangeSet<Integer> expected = TreeRangeSet.create();
expected.add(Range.<Integer>all());
expected.removeAll(rangeSet);
return expected;
}
public void testSubRangeSet() {
for (Range<Integer> range1 : QUERY_RANGES) {
for (Range<Integer> range2 : QUERY_RANGES) {
TreeRangeSet<Integer> rangeSet = TreeRangeSet.create();
rangeSet.add(range1);
rangeSet.add(range2);
for (Range<Integer> subRange : QUERY_RANGES) {
testViewAgainstExpected(
expectedSubRangeSet(rangeSet, subRange), rangeSet.subRangeSet(subRange));
}
}
}
}
public void testSubRangeSetAdd() {
TreeRangeSet<Integer> set = TreeRangeSet.create();
Range<Integer> range = Range.closedOpen(0, 5);
set.subRangeSet(range).add(range);
}
public void testSubRangeSetReplaceAdd() {
TreeRangeSet<Integer> set = TreeRangeSet.create();
Range<Integer> range = Range.closedOpen(0, 5);
set.add(range);
set.subRangeSet(range).add(range);
}
public void testComplement() {
for (Range<Integer> range1 : QUERY_RANGES) {
for (Range<Integer> range2 : QUERY_RANGES) {
TreeRangeSet<Integer> rangeSet = TreeRangeSet.create();
rangeSet.add(range1);
rangeSet.add(range2);
testViewAgainstExpected(expectedComplement(rangeSet), rangeSet.complement());
}
}
}
public void testSubRangeSetOfComplement() {
for (Range<Integer> range1 : QUERY_RANGES) {
for (Range<Integer> range2 : QUERY_RANGES) {
TreeRangeSet<Integer> rangeSet = TreeRangeSet.create();
rangeSet.add(range1);
rangeSet.add(range2);
for (Range<Integer> subRange : QUERY_RANGES) {
testViewAgainstExpected(
expectedSubRangeSet(expectedComplement(rangeSet), subRange),
rangeSet.complement().subRangeSet(subRange));
}
}
}
}
public void testComplementOfSubRangeSet() {
for (Range<Integer> range1 : QUERY_RANGES) {
for (Range<Integer> range2 : QUERY_RANGES) {
TreeRangeSet<Integer> rangeSet = TreeRangeSet.create();
rangeSet.add(range1);
rangeSet.add(range2);
for (Range<Integer> subRange : QUERY_RANGES) {
testViewAgainstExpected(
expectedComplement(expectedSubRangeSet(rangeSet, subRange)),
rangeSet.subRangeSet(subRange).complement());
}
}
}
}
public void testRangesByUpperBound() {
for (Range<Integer> range1 : QUERY_RANGES) {
for (Range<Integer> range2 : QUERY_RANGES) {
TreeRangeSet<Integer> rangeSet = TreeRangeSet.create();
rangeSet.add(range1);
rangeSet.add(range2);
NavigableMap<Cut<Integer>, Range<Integer>> expectedRangesByUpperBound = Maps.newTreeMap();
for (Range<Integer> range : rangeSet.asRanges()) {
expectedRangesByUpperBound.put(range.upperBound, range);
}
testNavigationAgainstExpected(
expectedRangesByUpperBound,
new TreeRangeSet.RangesByUpperBound<Integer>(rangeSet.rangesByLowerBound),
CUTS_TO_TEST);
}
}
}
public void testMergesConnectedWithOverlap() {
TreeRangeSet<Integer> rangeSet = TreeRangeSet.create();
rangeSet.add(Range.closed(1, 4));
rangeSet.add(Range.open(2, 6));
testInvariants(rangeSet);
assertThat(rangeSet.asRanges()).contains(Range.closedOpen(1, 6));
assertThat(rangeSet.complement().asRanges())
.containsExactly(Range.lessThan(1), Range.atLeast(6))
.inOrder();
}
public void testMergesConnectedDisjoint() {
TreeRangeSet<Integer> rangeSet = TreeRangeSet.create();
rangeSet.add(Range.closed(1, 4));
rangeSet.add(Range.open(4, 6));
testInvariants(rangeSet);
assertThat(rangeSet.asRanges()).contains(Range.closedOpen(1, 6));
assertThat(rangeSet.complement().asRanges())
.containsExactly(Range.lessThan(1), Range.atLeast(6))
.inOrder();
}
/**
 * Adding (2,4), strictly inside the existing [1,6], must leave the set unchanged.
 * Strengthened {@code contains} to {@code containsExactly}: [1,6] is the only range.
 */
public void testIgnoresSmallerSharingNoBound() {
    TreeRangeSet<Integer> rangeSet = TreeRangeSet.create();
    rangeSet.add(Range.closed(1, 6));
    rangeSet.add(Range.open(2, 4));
    testInvariants(rangeSet);
    assertThat(rangeSet.asRanges()).containsExactly(Range.closed(1, 6));
    assertThat(rangeSet.complement().asRanges())
        .containsExactly(Range.lessThan(1), Range.greaterThan(6))
        .inOrder();
}
/**
 * Adding [1,4], which shares the lower bound of the existing [1,6], must be a no-op.
 * Strengthened {@code contains} to {@code containsExactly}: [1,6] is the only range.
 */
public void testIgnoresSmallerSharingLowerBound() {
    TreeRangeSet<Integer> rangeSet = TreeRangeSet.create();
    rangeSet.add(Range.closed(1, 6));
    rangeSet.add(Range.closed(1, 4));
    testInvariants(rangeSet);
    assertThat(rangeSet.asRanges()).containsExactly(Range.closed(1, 6));
    assertThat(rangeSet.complement().asRanges())
        .containsExactly(Range.lessThan(1), Range.greaterThan(6))
        .inOrder();
}
/**
 * Adding [3,6], which shares the upper bound of the existing [1,6], must be a no-op.
 * Strengthened {@code contains} to {@code containsExactly}: [1,6] is the only range.
 */
public void testIgnoresSmallerSharingUpperBound() {
    TreeRangeSet<Integer> rangeSet = TreeRangeSet.create();
    rangeSet.add(Range.closed(1, 6));
    rangeSet.add(Range.closed(3, 6));
    testInvariants(rangeSet);
    assertThat(rangeSet.asRanges()).containsExactly(Range.closed(1, 6));
    assertThat(rangeSet.complement().asRanges())
        .containsExactly(Range.lessThan(1), Range.greaterThan(6))
        .inOrder();
}
/**
 * Re-adding an identical range must be a no-op.
 * Strengthened {@code contains} to {@code containsExactly}: [1,6] is the only range.
 */
public void testIgnoresEqual() {
    TreeRangeSet<Integer> rangeSet = TreeRangeSet.create();
    rangeSet.add(Range.closed(1, 6));
    rangeSet.add(Range.closed(1, 6));
    testInvariants(rangeSet);
    assertThat(rangeSet.asRanges()).containsExactly(Range.closed(1, 6));
    assertThat(rangeSet.complement().asRanges())
        .containsExactly(Range.lessThan(1), Range.greaterThan(6))
        .inOrder();
}
/**
 * Adding [1,6] over the existing [1,4] (same lower bound) must extend the range upward.
 * Strengthened {@code contains} to {@code containsExactly}: [1,6] is the only range.
 */
public void testExtendSameLowerBound() {
    TreeRangeSet<Integer> rangeSet = TreeRangeSet.create();
    rangeSet.add(Range.closed(1, 4));
    rangeSet.add(Range.closed(1, 6));
    testInvariants(rangeSet);
    assertThat(rangeSet.asRanges()).containsExactly(Range.closed(1, 6));
    assertThat(rangeSet.complement().asRanges())
        .containsExactly(Range.lessThan(1), Range.greaterThan(6))
        .inOrder();
}
/**
 * Adding [1,6] over the existing [3,6] (same upper bound) must extend the range downward.
 * Strengthened {@code contains} to {@code containsExactly}: [1,6] is the only range.
 */
public void testExtendSameUpperBound() {
    TreeRangeSet<Integer> rangeSet = TreeRangeSet.create();
    rangeSet.add(Range.closed(3, 6));
    rangeSet.add(Range.closed(1, 6));
    testInvariants(rangeSet);
    assertThat(rangeSet.asRanges()).containsExactly(Range.closed(1, 6));
    assertThat(rangeSet.complement().asRanges())
        .containsExactly(Range.lessThan(1), Range.greaterThan(6))
        .inOrder();
}
/**
 * Adding [1,6] over the enclosed [3,4] must replace it, extending in both directions.
 * Strengthened {@code contains} to {@code containsExactly}: [1,6] is the only range.
 */
public void testExtendBothDirections() {
    TreeRangeSet<Integer> rangeSet = TreeRangeSet.create();
    rangeSet.add(Range.closed(3, 4));
    rangeSet.add(Range.closed(1, 6));
    testInvariants(rangeSet);
    assertThat(rangeSet.asRanges()).containsExactly(Range.closed(1, 6));
    assertThat(rangeSet.complement().asRanges())
        .containsExactly(Range.lessThan(1), Range.greaterThan(6))
        .inOrder();
}
/** Adding an empty range is a no-op: the set stays empty and its complement is everything. */
public void testAddEmpty() {
    TreeRangeSet<Integer> set = TreeRangeSet.create();
    set.add(Range.closedOpen(3, 3));
    testInvariants(set);
    assertThat(set.asRanges()).isEmpty();
    assertThat(set.complement().asRanges()).containsExactly(Range.<Integer>all());
}
/**
 * Adding [3,4) exactly fills the gap between [1,3) and [4,6), merging all three into [1,6).
 * Strengthened {@code contains} to {@code containsExactly}: only the merged range remains.
 */
public void testFillHoleExactly() {
    TreeRangeSet<Integer> rangeSet = TreeRangeSet.create();
    rangeSet.add(Range.closedOpen(1, 3));
    rangeSet.add(Range.closedOpen(4, 6));
    rangeSet.add(Range.closedOpen(3, 4));
    testInvariants(rangeSet);
    assertThat(rangeSet.asRanges()).containsExactly(Range.closedOpen(1, 6));
    assertThat(rangeSet.complement().asRanges())
        .containsExactly(Range.lessThan(1), Range.atLeast(6))
        .inOrder();
}
/**
 * Adding [2,5), overlapping both [1,3) and [4,6), bridges the gap and merges into [1,6).
 * Strengthened {@code contains} to {@code containsExactly}: only the merged range remains.
 */
public void testFillHoleWithOverlap() {
    TreeRangeSet<Integer> rangeSet = TreeRangeSet.create();
    rangeSet.add(Range.closedOpen(1, 3));
    rangeSet.add(Range.closedOpen(4, 6));
    rangeSet.add(Range.closedOpen(2, 5));
    testInvariants(rangeSet);
    assertThat(rangeSet.asRanges()).containsExactly(Range.closedOpen(1, 6));
    assertThat(rangeSet.complement().asRanges())
        .containsExactly(Range.lessThan(1), Range.atLeast(6))
        .inOrder();
}
/**
 * Exhaustively runs {@link #doPairTest} over every pair of valid ranges with endpoints in
 * [0,6) and all bound-type combinations, skipping invalid combinations (reversed endpoints,
 * or equal endpoints with both bounds open).
 */
public void testAddManyPairs() {
    for (int lowA = 0; lowA < 6; lowA++) {
        for (int highA = 0; highA < 6; highA++) {
            for (BoundType lowTypeA : BoundType.values()) {
                for (BoundType highTypeA : BoundType.values()) {
                    boolean aInvalid =
                        (lowA == highA && lowTypeA == OPEN && highTypeA == OPEN) || lowA > highA;
                    if (aInvalid) {
                        continue;
                    }
                    for (int lowB = 0; lowB < 6; lowB++) {
                        for (int highB = 0; highB < 6; highB++) {
                            for (BoundType lowTypeB : BoundType.values()) {
                                for (BoundType highTypeB : BoundType.values()) {
                                    boolean bInvalid =
                                        (lowB == highB && lowTypeB == OPEN && highTypeB == OPEN)
                                            || lowB > highB;
                                    if (bInvalid) {
                                        continue;
                                    }
                                    doPairTest(
                                        range(lowA, lowTypeA, highA, highTypeA),
                                        range(lowB, lowTypeB, highB, highTypeB));
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}
/**
 * Adds the pair {a, b} to a fresh set and asserts the exact resulting contents:
 * nothing when both are empty; the single non-empty range when one is empty; their span when
 * connected; otherwise both ranges in endpoint order. Strengthened the single-range branches
 * from {@code contains} to {@code containsExactly} to rule out spurious extra ranges.
 */
private static void doPairTest(Range<Integer> a, Range<Integer> b) {
    TreeRangeSet<Integer> rangeSet = TreeRangeSet.create();
    rangeSet.add(a);
    rangeSet.add(b);
    if (a.isEmpty() && b.isEmpty()) {
        assertThat(rangeSet.asRanges()).isEmpty();
    } else if (a.isEmpty()) {
        assertThat(rangeSet.asRanges()).containsExactly(b);
    } else if (b.isEmpty()) {
        assertThat(rangeSet.asRanges()).containsExactly(a);
    } else if (a.isConnected(b)) {
        // Connected ranges always coalesce into their span.
        assertThat(rangeSet.asRanges()).containsExactly(a.span(b));
    } else {
        if (a.lowerEndpoint() < b.lowerEndpoint()) {
            assertThat(rangeSet.asRanges()).containsExactly(a, b).inOrder();
        } else {
            assertThat(rangeSet.asRanges()).containsExactly(b, a).inOrder();
        }
    }
}
/**
 * Removing an empty range is a no-op.
 * Strengthened {@code contains} to {@code containsExactly}: [1,6] is the only range.
 */
public void testRemoveEmpty() {
    TreeRangeSet<Integer> rangeSet = TreeRangeSet.create();
    rangeSet.add(Range.closed(1, 6));
    rangeSet.remove(Range.closedOpen(3, 3));
    testInvariants(rangeSet);
    assertThat(rangeSet.asRanges()).containsExactly(Range.closed(1, 6));
    assertThat(rangeSet.complement().asRanges())
        .containsExactly(Range.lessThan(1), Range.greaterThan(6))
        .inOrder();
}
/**
 * Removing [3,5) from [3,5] (shared lower bound) leaves only the singleton {5}.
 * Strengthened {@code contains} to {@code containsExactly}: the singleton is the only range.
 */
public void testRemovePartSharingLowerBound() {
    TreeRangeSet<Integer> rangeSet = TreeRangeSet.create();
    rangeSet.add(Range.closed(3, 5));
    rangeSet.remove(Range.closedOpen(3, 5));
    testInvariants(rangeSet);
    assertThat(rangeSet.asRanges()).containsExactly(Range.singleton(5));
    assertThat(rangeSet.complement().asRanges())
        .containsExactly(Range.lessThan(5), Range.greaterThan(5))
        .inOrder();
}
/**
 * Removing (3,5] from [3,5] (shared upper bound) leaves only the singleton {3}.
 * Strengthened {@code contains} to {@code containsExactly}: the singleton is the only range.
 */
public void testRemovePartSharingUpperBound() {
    TreeRangeSet<Integer> rangeSet = TreeRangeSet.create();
    rangeSet.add(Range.closed(3, 5));
    rangeSet.remove(Range.openClosed(3, 5));
    testInvariants(rangeSet);
    assertThat(rangeSet.asRanges()).containsExactly(Range.singleton(3));
    assertThat(rangeSet.complement().asRanges())
        .containsExactly(Range.lessThan(3), Range.greaterThan(3))
        .inOrder();
}
/** Removing [3,4) from the middle of (-inf,6] splits it into (-inf,3) and [4,6]. */
public void testRemoveMiddle() {
    TreeRangeSet<Integer> set = TreeRangeSet.create();
    set.add(Range.atMost(6));
    set.remove(Range.closedOpen(3, 4));
    testInvariants(set);
    assertThat(set.asRanges())
        .containsExactly(Range.lessThan(3), Range.closed(4, 6))
        .inOrder();
    // The complement is exactly the removed gap plus everything above the original range.
    assertThat(set.complement().asRanges())
        .containsExactly(Range.closedOpen(3, 4), Range.greaterThan(6))
        .inOrder();
}
/** Removing a range that does not intersect the set leaves the set unchanged. */
public void testRemoveNoOverlap() {
    TreeRangeSet<Integer> set = TreeRangeSet.create();
    set.add(Range.closed(3, 6));
    set.remove(Range.closedOpen(1, 3));
    testInvariants(set);
    assertThat(set.asRanges()).containsExactly(Range.closed(3, 6));
}
/** Removing [1,3] clips the closed lower bound of [3,6], leaving (3,6]. */
public void testRemovePartFromBelowLowerBound() {
    TreeRangeSet<Integer> set = TreeRangeSet.create();
    set.add(Range.closed(3, 6));
    set.remove(Range.closed(1, 3));
    testInvariants(set);
    assertThat(set.asRanges()).containsExactly(Range.openClosed(3, 6));
}
/** Removing [6,9] clips the closed upper bound of [3,6], leaving [3,6). */
public void testRemovePartFromAboveUpperBound() {
    TreeRangeSet<Integer> set = TreeRangeSet.create();
    set.add(Range.closed(3, 6));
    set.remove(Range.closed(6, 9));
    testInvariants(set);
    assertThat(set.asRanges()).containsExactly(Range.closedOpen(3, 6));
}
/** Removing the exact range that was added empties the set. */
public void testRemoveExact() {
    TreeRangeSet<Integer> set = TreeRangeSet.create();
    set.add(Range.closed(3, 6));
    set.remove(Range.closed(3, 6));
    testInvariants(set);
    assertThat(set.asRanges()).isEmpty();
}
/** Removing [2,6], which covers [3,6] and extends below it, empties the set. */
public void testRemoveAllFromBelowLowerBound() {
    TreeRangeSet<Integer> set = TreeRangeSet.create();
    set.add(Range.closed(3, 6));
    set.remove(Range.closed(2, 6));
    testInvariants(set);
    assertThat(set.asRanges()).isEmpty();
}
/** Removing [3,7], which covers [3,6] and extends above it, empties the set. */
public void testRemoveAllFromAboveUpperBound() {
    TreeRangeSet<Integer> set = TreeRangeSet.create();
    set.add(Range.closed(3, 6));
    set.remove(Range.closed(3, 7));
    testInvariants(set);
    assertThat(set.asRanges()).isEmpty();
}
/** Removing [2,7], which strictly encloses [3,6], empties the set. */
public void testRemoveAllExtendingBothDirections() {
    TreeRangeSet<Integer> set = TreeRangeSet.create();
    set.add(Range.closed(3, 6));
    set.remove(Range.closed(2, 7));
    testInvariants(set);
    assertThat(set.asRanges()).isEmpty();
}
/** rangeContaining returns the enclosing range for a member value and null for a non-member. */
public void testRangeContaining1() {
    RangeSet<Integer> set = TreeRangeSet.create();
    set.add(Range.closed(3, 10));
    assertEquals(Range.closed(3, 10), set.rangeContaining(5));
    assertTrue(set.contains(5));
    assertThat(set.rangeContaining(1)).isNull();
    assertFalse(set.contains(1));
}
/**
 * After punching the hole (5,7) in [3,10], rangeContaining resolves values to the correct
 * remaining piece, and values inside the hole resolve to null.
 */
public void testRangeContaining2() {
    RangeSet<Integer> set = TreeRangeSet.create();
    set.add(Range.closed(3, 10));
    set.remove(Range.open(5, 7));
    assertEquals(Range.closed(3, 5), set.rangeContaining(5));
    assertTrue(set.contains(5));
    assertEquals(Range.closed(7, 10), set.rangeContaining(8));
    assertTrue(set.contains(8));
    assertThat(set.rangeContaining(6)).isNull();
    assertFalse(set.contains(6));
}
/** addAll of connected ranges coalesces everything into a single range (1,11]. */
public void testAddAll() {
    RangeSet<Integer> set = TreeRangeSet.create();
    set.add(Range.closed(3, 10));
    set.addAll(asList(Range.open(1, 3), Range.closed(5, 8), Range.closed(9, 11)));
    assertThat(set.asRanges()).containsExactly(Range.openClosed(1, 11)).inOrder();
}
/** removeAll carves multiple holes out of [3,10], leaving exactly [3,5) and (8,9). */
public void testRemoveAll() {
    RangeSet<Integer> set = TreeRangeSet.create();
    set.add(Range.closed(3, 10));
    set.removeAll(asList(Range.open(1, 3), Range.closed(5, 8), Range.closed(9, 11)));
    assertThat(set.asRanges())
        .containsExactly(Range.closedOpen(3, 5), Range.open(8, 9))
        .inOrder();
}
/**
 * Round-trips a set containing a hole ([3,10] minus (5,7)) through Java serialization and
 * asserts the deserialized copy equals the original (via SerializableTester).
 */
@GwtIncompatible // SerializableTester
public void testSerialization() {
RangeSet<Integer> rangeSet = TreeRangeSet.create();
rangeSet.add(Range.closed(3, 10));
rangeSet.remove(Range.open(5, 7));
SerializableTester.reserializeAndAssert(rangeSet);
}
}
|
TreeRangeSetTest
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java
|
{
"start": 26411,
"end": 27724
}
|
class ____ extends NodeManager {
protected NodeStatus nodeStatus;
public void setNodeStatus(NodeStatus status) {
this.nodeStatus = status;
}
/**
* Hook to allow modification/replacement of NodeStatus
* @param currentStatus Current status.
* @return New node status.
*/
protected NodeStatus getSimulatedNodeStatus(NodeStatus currentStatus) {
if(nodeStatus == null) {
return currentStatus;
} else {
// Use the same responseId for the custom node status
nodeStatus.setResponseId(currentStatus.getResponseId());
return nodeStatus;
}
}
@Override
protected void doSecureLogin() throws IOException {
// Don't try to login using keytab in the testcase.
}
@Override
protected NodeStatusUpdater createNodeStatusUpdater(Context context,
Dispatcher dispatcher, NodeHealthCheckerService healthChecker) {
return new NodeStatusUpdaterImpl(context,
dispatcher,
healthChecker,
metrics) {
// Allow simulation of nodestatus
@Override
protected NodeStatus getNodeStatus(int responseId) throws IOException {
return getSimulatedNodeStatus(super.getNodeStatus(responseId));
}
};
}
}
private
|
CustomNodeManager
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/sql/dialect/postgresql/ast/expr/PGExprImpl.java
|
{
"start": 900,
"end": 1284
}
|
class ____ extends SQLExprImpl implements PGExpr {
@Override
public abstract void accept0(PGASTVisitor visitor);
@Override
protected void accept0(SQLASTVisitor visitor) {
if (visitor instanceof PGASTVisitor) {
accept0((PGASTVisitor) visitor);
}
}
public String toString() {
return SQLUtils.toPGString(this);
}
}
|
PGExprImpl
|
java
|
apache__dubbo
|
dubbo-metrics/dubbo-metrics-api/src/main/java/org/apache/dubbo/metrics/data/BaseStatComposite.java
|
{
"start": 1802,
"end": 6357
}
|
class ____ implements MetricsExport {
private ApplicationStatComposite applicationStatComposite;
private ServiceStatComposite serviceStatComposite;
private MethodStatComposite methodStatComposite;
private RtStatComposite rtStatComposite;
public BaseStatComposite(ApplicationModel applicationModel) {
init(new ApplicationStatComposite(applicationModel));
init(new ServiceStatComposite(applicationModel));
init(new MethodStatComposite(applicationModel));
init(new RtStatComposite(applicationModel));
}
protected void init(ApplicationStatComposite applicationStatComposite) {
this.applicationStatComposite = applicationStatComposite;
}
protected void init(ServiceStatComposite serviceStatComposite) {
this.serviceStatComposite = serviceStatComposite;
}
protected void init(MethodStatComposite methodStatComposite) {
this.methodStatComposite = methodStatComposite;
}
protected void init(RtStatComposite rtStatComposite) {
this.rtStatComposite = rtStatComposite;
}
public void calcApplicationRt(String registryOpType, Long responseTime) {
rtStatComposite.calcServiceKeyRt(
registryOpType, responseTime, new ApplicationMetric(rtStatComposite.getApplicationModel()));
}
public void calcServiceKeyRt(String serviceKey, String registryOpType, Long responseTime) {
rtStatComposite.calcServiceKeyRt(
registryOpType, responseTime, new ServiceKeyMetric(rtStatComposite.getApplicationModel(), serviceKey));
}
public void calcServiceKeyRt(Invocation invocation, String registryOpType, Long responseTime) {
rtStatComposite.calcServiceKeyRt(invocation, registryOpType, responseTime);
}
public void calcMethodKeyRt(Invocation invocation, String registryOpType, Long responseTime) {
rtStatComposite.calcMethodKeyRt(invocation, registryOpType, responseTime);
}
public void setServiceKey(MetricsKeyWrapper metricsKey, String serviceKey, int num) {
serviceStatComposite.setServiceKey(metricsKey, serviceKey, num);
}
public void setServiceKey(MetricsKeyWrapper metricsKey, String serviceKey, int num, Map<String, String> extra) {
serviceStatComposite.setExtraServiceKey(metricsKey, serviceKey, num, extra);
}
public void incrementApp(MetricsKey metricsKey, int size) {
applicationStatComposite.incrementSize(metricsKey, size);
}
public void incrementServiceKey(MetricsKeyWrapper metricsKeyWrapper, String attServiceKey, int size) {
serviceStatComposite.incrementServiceKey(metricsKeyWrapper, attServiceKey, size);
}
public void incrementServiceKey(
MetricsKeyWrapper metricsKeyWrapper, String attServiceKey, Map<String, String> extra, int size) {
serviceStatComposite.incrementExtraServiceKey(metricsKeyWrapper, attServiceKey, extra, size);
}
public void incrementMethodKey(MetricsKeyWrapper metricsKeyWrapper, MethodMetric methodMetric, int size) {
methodStatComposite.incrementMethodKey(metricsKeyWrapper, methodMetric, size);
}
public void initMethodKey(MetricsKeyWrapper metricsKeyWrapper, Invocation invocation) {
methodStatComposite.initMethodKey(metricsKeyWrapper, invocation);
}
@Override
public List<MetricSample> export(MetricsCategory category) {
List<MetricSample> list = new ArrayList<>();
list.addAll(applicationStatComposite.export(category));
list.addAll(rtStatComposite.export(category));
list.addAll(serviceStatComposite.export(category));
list.addAll(methodStatComposite.export(category));
return list;
}
public ApplicationStatComposite getApplicationStatComposite() {
return applicationStatComposite;
}
public RtStatComposite getRtStatComposite() {
return rtStatComposite;
}
public void setAppKey(MetricsKey metricsKey, Long num) {
applicationStatComposite.setAppKey(metricsKey, num);
}
@Override
public boolean calSamplesChanged() {
// Should ensure that all the composite's samplesChanged have been compareAndSet, and cannot flip the `or` logic
boolean changed = applicationStatComposite.calSamplesChanged();
changed = rtStatComposite.calSamplesChanged() || changed;
changed = serviceStatComposite.calSamplesChanged() || changed;
changed = methodStatComposite.calSamplesChanged() || changed;
return changed;
}
}
|
BaseStatComposite
|
java
|
micronaut-projects__micronaut-core
|
router/src/main/java/io/micronaut/web/router/uri/PercentEncoder.java
|
{
"start": 863,
"end": 5527
}
|
class ____ {
static final PercentEncoder C0 = new PercentEncoder(new BitSet());
static {
for (char c = 0x20; c <= 0x7e; c++) {
C0.keepSet.set(c);
}
}
// whatwg sets
static final PercentEncoder FRAGMENT = C0.addEncode(' ', '"', '<', '>', '`');
static final PercentEncoder QUERY = C0.addEncode(' ', '"', '<', '>', '#');
static final PercentEncoder SPECIAL_QUERY = QUERY.addEncode('\'');
static final PercentEncoder PATH = QUERY.addEncode('?', '`', '{', '}');
static final PercentEncoder USERINFO = PATH.addEncode('/', ':', ';', '=', '@', '|').addEncodeRange('[', '^');
static final PercentEncoder COMPONENT = USERINFO.addEncode('+', ',').addEncodeRange('$', '&');
static final PercentEncoder FORM = COMPONENT.addEncode('!', '~').addEncodeRange('\'', ')');
// RFC 3986 (URI) sets
static final PercentEncoder RFC3986_UNRESERVED = new PercentEncoder(new BitSet())
.removeEncodeRange('a', 'z')
.removeEncodeRange('A', 'Z')
.removeEncodeRange('0', '9')
.removeEncode('-', '.', '_', '~');
static final PercentEncoder RFC3986_PCHAR = RFC3986_UNRESERVED.removeEncode('%', '!', '$', '&', '\'', '(', ')', '*', '+', ',', ';', '=', '@'); // ':' is allowed but makes java.net.URI hiccup
static final PercentEncoder RFC3986_QUERY_CHAR = RFC3986_PCHAR.removeEncode('/', '?');
private final BitSet keepSet;
private PercentEncoder(BitSet keepSet) {
this.keepSet = keepSet;
}
public void encodeByte(StringBuilder target, byte b) {
if (keep(b)) {
target.append((char) (b & 0xff));
} else {
target.ensureCapacity(target.length() + 3);
appendEncodedByte(target, b);
}
}
public boolean keep(byte b) {
return keepSet.get(b & 0xff);
}
public void encodeUtf8(StringBuilder target, int codePoint) {
if (codePoint < 0x80) {
if (keepSet.get(codePoint)) {
target.append((char) codePoint);
} else {
target.ensureCapacity(target.length() + 3);
appendEncodedByte(target, (byte) codePoint);
}
} else if (codePoint < 0x800) {
target.ensureCapacity(target.length() + 6);
appendEncodedByte(target, (byte) (0b11000000 | (codePoint >> 6)));
appendEncodedByte(target, (byte) (0b10000000 | (codePoint & 0b111111)));
} else if (codePoint < 0x10000) {
target.ensureCapacity(target.length() + 9);
appendEncodedByte(target, (byte) (0b11100000 | (codePoint >> 12)));
appendEncodedByte(target, (byte) (0b10000000 | ((codePoint >> 6) & 0b111111)));
appendEncodedByte(target, (byte) (0b10000000 | (codePoint & 0b111111)));
} else if (codePoint < 0x110000) {
target.ensureCapacity(target.length() + 12);
appendEncodedByte(target, (byte) (0b11110000 | (codePoint >> 18)));
appendEncodedByte(target, (byte) (0b10000000 | ((codePoint >> 12) & 0b111111)));
appendEncodedByte(target, (byte) (0b10000000 | ((codePoint >> 6) & 0b111111)));
appendEncodedByte(target, (byte) (0b10000000 | (codePoint & 0b111111)));
} else {
throw new IllegalArgumentException("Code point out of range: " + codePoint);
}
}
static void appendEncodedByte(StringBuilder target, byte b) {
target.append('%');
if ((b & 0xff) < 0x10) {
target.append('0');
}
target.append(Integer.toHexString(b & 0xFF).toUpperCase(Locale.ROOT));
}
private PercentEncoder addEncode(char... removed) {
BitSet result = (BitSet) keepSet.clone();
for (char c : removed) {
result.clear(c);
}
return new PercentEncoder(result);
}
private PercentEncoder addEncodeRange(char fromInclusive, char toExclusive) {
BitSet result = (BitSet) keepSet.clone();
for (char c = fromInclusive; c <= toExclusive; c++) {
result.clear(c);
}
return new PercentEncoder(result);
}
private PercentEncoder removeEncode(char... removed) {
BitSet result = (BitSet) keepSet.clone();
for (char c : removed) {
result.set(c);
}
return new PercentEncoder(result);
}
private PercentEncoder removeEncodeRange(char fromInclusive, char toExclusive) {
BitSet result = (BitSet) keepSet.clone();
for (char c = fromInclusive; c <= toExclusive; c++) {
result.set(c);
}
return new PercentEncoder(result);
}
}
|
PercentEncoder
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregationBuilder.java
|
{
"start": 1510,
"end": 5380
}
|
class ____ extends ValuesSourceAggregationBuilder.LeafOnly<GeoBoundsAggregationBuilder> {
public static final String NAME = "geo_bounds";
public static final ValuesSourceRegistry.RegistryKey<GeoBoundsAggregatorSupplier> REGISTRY_KEY = new ValuesSourceRegistry.RegistryKey<>(
NAME,
GeoBoundsAggregatorSupplier.class
);
public static final ObjectParser<GeoBoundsAggregationBuilder, String> PARSER = ObjectParser.fromBuilder(
NAME,
GeoBoundsAggregationBuilder::new
);
static {
ValuesSourceAggregationBuilder.declareFields(PARSER, false, false, false);
PARSER.declareBoolean(GeoBoundsAggregationBuilder::wrapLongitude, GeoBoundsAggregator.WRAP_LONGITUDE_FIELD);
}
public static void registerAggregators(ValuesSourceRegistry.Builder builder) {
GeoBoundsAggregatorFactory.registerAggregators(builder);
}
private boolean wrapLongitude = true;
public GeoBoundsAggregationBuilder(String name) {
super(name);
}
protected GeoBoundsAggregationBuilder(
GeoBoundsAggregationBuilder clone,
AggregatorFactories.Builder factoriesBuilder,
Map<String, Object> metadata
) {
super(clone, factoriesBuilder, metadata);
this.wrapLongitude = clone.wrapLongitude;
}
@Override
protected AggregationBuilder shallowCopy(AggregatorFactories.Builder factoriesBuilder, Map<String, Object> metadata) {
return new GeoBoundsAggregationBuilder(this, factoriesBuilder, metadata);
}
@Override
public boolean supportsSampling() {
return true;
}
/**
* Read from a stream.
*/
public GeoBoundsAggregationBuilder(StreamInput in) throws IOException {
super(in);
wrapLongitude = in.readBoolean();
}
@Override
protected void innerWriteTo(StreamOutput out) throws IOException {
out.writeBoolean(wrapLongitude);
}
@Override
protected ValuesSourceType defaultValueSourceType() {
return CoreValuesSourceType.GEOPOINT;
}
/**
* Set whether to wrap longitudes. Defaults to true.
*/
public GeoBoundsAggregationBuilder wrapLongitude(boolean wrapLongitude) {
this.wrapLongitude = wrapLongitude;
return this;
}
@Override
protected GeoBoundsAggregatorFactory innerBuild(
AggregationContext context,
ValuesSourceConfig config,
AggregatorFactory parent,
AggregatorFactories.Builder subFactoriesBuilder
) throws IOException {
GeoBoundsAggregatorSupplier aggregatorSupplier = context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config);
return new GeoBoundsAggregatorFactory(
name,
config,
wrapLongitude,
context,
parent,
subFactoriesBuilder,
metadata,
aggregatorSupplier
);
}
@Override
public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
builder.field(GeoBoundsAggregator.WRAP_LONGITUDE_FIELD.getPreferredName(), wrapLongitude);
return builder;
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), wrapLongitude);
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null || getClass() != obj.getClass()) return false;
if (super.equals(obj) == false) return false;
GeoBoundsAggregationBuilder other = (GeoBoundsAggregationBuilder) obj;
return Objects.equals(wrapLongitude, other.wrapLongitude);
}
@Override
public String getType() {
return NAME;
}
@Override
public TransportVersion getMinimalSupportedVersion() {
return TransportVersion.zero();
}
}
|
GeoBoundsAggregationBuilder
|
java
|
FasterXML__jackson-core
|
src/main/java/tools/jackson/core/exc/StreamWriteException.java
|
{
"start": 88,
"end": 192
}
|
class ____ all write-side streaming processing problems,
* mostly content generation issues.
*/
public
|
for
|
java
|
apache__camel
|
components/camel-netty-http/src/test/java/org/apache/camel/component/netty/http/NettyHttpRawQueryTest.java
|
{
"start": 1164,
"end": 2052
}
|
class ____ extends BaseNettyTest {
@EndpointInject("mock:test")
MockEndpoint mockEndpoint;
@Test
public void shouldAccessRawQuery() throws Exception {
String query = "param=x1%26y%3D2";
mockEndpoint.expectedMessageCount(1);
mockEndpoint.message(0).header(HTTP_QUERY).isEqualTo("param=x1&y=2");
mockEndpoint.message(0).header(HTTP_RAW_QUERY).isEqualTo(query);
new URL("http://localhost:" + getPort() + "/?" + query).openConnection().getInputStream().close();
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("netty-http:http://0.0.0.0:{{port}}/")
.to(mockEndpoint);
}
};
}
}
|
NettyHttpRawQueryTest
|
java
|
apache__camel
|
dsl/camel-jbang/camel-jbang-plugin-kubernetes/src/main/java/org/apache/camel/dsl/jbang/core/commands/kubernetes/KubernetesHelper.java
|
{
"start": 2259,
"end": 5265
}
|
class ____ {
private static KubernetesClient kubernetesClient;
/** Clients with custom config */
private static final Map<String, KubernetesClient> clients = new HashMap<>();
private static final ObjectMapper OBJECT_MAPPER;
static {
OBJECT_MAPPER = JsonMapper.builder()
.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES)
.enable(DeserializationFeature.READ_ENUMS_USING_TO_STRING)
.enable(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY)
.enable(SerializationFeature.WRITE_ENUMS_USING_TO_STRING)
.disable(JsonParser.Feature.AUTO_CLOSE_SOURCE)
.enable(MapperFeature.BLOCK_UNSAFE_POLYMORPHIC_BASE_TYPES)
.enable(MapperFeature.ACCEPT_CASE_INSENSITIVE_PROPERTIES)
.build()
.setDefaultPropertyInclusion(
JsonInclude.Value.construct(JsonInclude.Include.NON_EMPTY, JsonInclude.Include.NON_EMPTY));
}
private KubernetesHelper() {
//prevent instantiation of utility class.
}
/**
* Gets the default Kubernetes client.
*/
public static KubernetesClient getKubernetesClient() {
if (kubernetesClient == null) {
kubernetesClient = new KubernetesClientBuilder().build();
}
setKubernetesClientProperties();
return kubernetesClient;
}
/**
* Create or get Kubernetes client with given config.
*/
public static KubernetesClient getKubernetesClient(String config) {
if (clients.containsKey(config)) {
return clients.get(config);
}
setKubernetesClientProperties();
var client = new KubernetesClientBuilder().withConfig(config).build();
return clients.put(config, client);
}
// set short timeouts to fail fast in case it's not connected to a cluster and don't waste time
// the user can override these values by setting the property in the cli
private static void setKubernetesClientProperties() {
if (System.getProperty("kubernetes.connection.timeout") == null) {
System.setProperty("kubernetes.connection.timeout", "2000");
}
if (System.getProperty("kubernetes.request.timeout") == null) {
System.setProperty("kubernetes.request.timeout", "2000");
}
if (System.getProperty("kubernetes.request.retry.backoffLimit") == null) {
System.setProperty("kubernetes.request.retry.backoffLimit", "1");
}
}
/**
* Creates new Yaml instance. The implementation provided by Snakeyaml is not thread-safe. It is better to create a
* fresh instance for every YAML stream.
*/
public static Yaml yaml() {
return YamlHelper.yaml();
}
/**
* Creates new Yaml instance. The implementation provided by Snakeyaml is not thread-safe. It is better to create a
* fresh instance for every YAML stream. Uses the given
|
KubernetesHelper
|
java
|
elastic__elasticsearch
|
x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/GeoLineBucketedSort.java
|
{
"start": 6805,
"end": 8843
}
|
class ____ implements BucketedSort.ExtraData, Releasable {
private final BigArrays bigArrays;
private final GeoLineMultiValuesSource valuesSources;
LongArray values;
final MissingHelper empty;
Extra(BigArrays bigArrays, GeoLineMultiValuesSource valuesSources) {
this.bigArrays = bigArrays;
this.valuesSources = valuesSources;
this.values = bigArrays.newLongArray(1, false);
this.empty = new MissingHelper(bigArrays);
}
@Override
public void swap(long lhs, long rhs) {
long tmp = values.get(lhs);
values.set(lhs, values.get(rhs));
values.set(rhs, tmp);
empty.swap(lhs, rhs);
}
@Override
public Loader loader(LeafReaderContext ctx) {
final MultiGeoPointValues docGeoPointValues = valuesSources.getGeoPointField(
GeoLineAggregationBuilder.POINT_FIELD.getPreferredName(),
ctx
);
return (index, doc) -> {
if (false == docGeoPointValues.advanceExact(doc)) {
empty.markMissing(index);
return;
}
if (docGeoPointValues.docValueCount() > 1) {
throw AggregationErrors.unsupportedMultivalue();
}
if (index >= values.size()) {
values = bigArrays.grow(values, index + 1);
}
final GeoPoint point = docGeoPointValues.nextValue();
int encodedLat = GeoEncodingUtils.encodeLatitude(point.lat());
int encodedLon = GeoEncodingUtils.encodeLongitude(point.lon());
long lonLat = (((long) encodedLon) << 32) | encodedLat & 0xffffffffL;
values.set(index, lonLat);
empty.markNotMissing(index);
};
}
@Override
public void close() {
Releasables.close(values, empty);
}
}
}
|
Extra
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/discriminator/SingleTableRelationsTest.java
|
{
"start": 3700,
"end": 4166
}
|
class ____ extends PostTable {
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn
protected Category category;
@OneToMany(fetch = FetchType.LAZY, mappedBy = "category")
protected List<Category> children;
public Category() {
}
public Category(Integer id) {
super( id );
}
public Category(Integer id, Category category) {
super( id );
this.category = category;
}
}
@Entity(name = "Post")
@DiscriminatorValue("2")
public static
|
Category
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/ops/Animal.java
|
{
"start": 304,
"end": 578
}
|
class ____ {
private String name;
private Long id;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
@Id
@GeneratedValue
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
}
|
Animal
|
java
|
bumptech__glide
|
annotation/compiler/test/src/test/java/com/bumptech/glide/annotation/compiler/InvalidAppGlideModuleWithExcludesTest.java
|
{
"start": 666,
"end": 1655
}
|
class ____ {
@Test
public void compilation_withMissingExcludedModuleClass_throws() {
assertThrows(
RuntimeException.class,
new ThrowingRunnable() {
@Override
public void run() throws Throwable {
javac()
.withProcessors(new GlideAnnotationProcessor())
.compile(
JavaFileObjects.forSourceLines(
"AppModuleWithExcludes",
"package com.bumptech.glide.test;",
"import com.bumptech.glide.annotation.Excludes;",
"import com.bumptech.glide.annotation.GlideModule;",
"import com.bumptech.glide.module.AppGlideModule;",
"import com.bumptech.glide.test.EmptyLibraryModule;",
"@GlideModule",
"@Excludes(EmptyLibraryModule.class)",
"public final
|
InvalidAppGlideModuleWithExcludesTest
|
java
|
apache__kafka
|
streams/src/test/java/org/apache/kafka/streams/state/internals/AbstractSessionBytesStoreTest.java
|
{
"start": 3833,
"end": 47306
}
|
enum ____ {
RocksDBSessionStore,
RocksDBTimeOrderedSessionStoreWithIndex,
RocksDBTimeOrderedSessionStoreWithoutIndex,
InMemoryStore
}
SessionStore<String, Long> sessionStore;
private MockRecordCollector recordCollector;
InternalMockProcessorContext<?, ?> context;
<K, V> SessionStore<K, V> buildSessionStore(final long retentionPeriod,
final Serde<K> keySerde,
final Serde<V> valueSerde) {
switch (storeType()) {
case RocksDBSessionStore: {
return Stores.sessionStoreBuilder(
Stores.persistentSessionStore(
ROCK_DB_STORE_NAME,
ofMillis(retentionPeriod)),
keySerde,
valueSerde).build();
}
case RocksDBTimeOrderedSessionStoreWithIndex: {
return Stores.sessionStoreBuilder(
new RocksDbTimeOrderedSessionBytesStoreSupplier(
ROCK_DB_STORE_NAME,
retentionPeriod,
true
),
keySerde,
valueSerde
).build();
}
case RocksDBTimeOrderedSessionStoreWithoutIndex: {
return Stores.sessionStoreBuilder(
new RocksDbTimeOrderedSessionBytesStoreSupplier(
ROCK_DB_STORE_NAME,
retentionPeriod,
false
),
keySerde,
valueSerde
).build();
}
case InMemoryStore: {
return Stores.sessionStoreBuilder(
Stores.inMemorySessionStore(
IN_MEMORY_STORE_NAME,
ofMillis(retentionPeriod)),
keySerde,
valueSerde).build();
}
default:
throw new IllegalStateException("Unknown StoreType: " + storeType());
}
}
abstract StoreType storeType();
    @BeforeEach
    public void setUp() {
        // Default store under test: RETENTION_PERIOD retention, String keys, Long values.
        sessionStore = buildSessionStore(RETENTION_PERIOD, Serdes.String(), Serdes.Long());
        recordCollector = new MockRecordCollector();
        // Cache size 0 so writes pass straight through (no caching layer between test and store).
        context = new InternalMockProcessorContext<>(
            TestUtils.tempDirectory(),
            Serdes.String(),
            Serdes.Long(),
            recordCollector,
            new ThreadCache(
                new LogContext("testCache"),
                0,
                new MockStreamsMetrics(new Metrics())));
        context.setTime(1L);
        // Init must happen after the context is fully wired.
        sessionStore.init(context, sessionStore);
    }
    @AfterEach
    public void after() {
        // Release any native (RocksDB) resources held by the store.
        sessionStore.close();
    }
@Test
public void shouldPutAndFindSessionsInRange() {
final String key = "a";
final Windowed<String> a1 = new Windowed<>(key, new SessionWindow(10, 10L));
final Windowed<String> a2 = new Windowed<>(key, new SessionWindow(500L, 1000L));
sessionStore.put(a1, 1L);
sessionStore.put(a2, 2L);
sessionStore.put(new Windowed<>(key, new SessionWindow(1500L, 2000L)), 1L);
sessionStore.put(new Windowed<>(key, new SessionWindow(2500L, 3000L)), 2L);
final List<KeyValue<Windowed<String>, Long>> expected =
Arrays.asList(KeyValue.pair(a1, 1L), KeyValue.pair(a2, 2L));
try (final KeyValueIterator<Windowed<String>, Long> values = sessionStore.findSessions(key, 0, 1000L)
) {
assertEquals(expected, toList(values));
}
final List<KeyValue<Windowed<String>, Long>> expected2 =
Collections.singletonList(KeyValue.pair(a2, 2L));
try (final KeyValueIterator<Windowed<String>, Long> values2 = sessionStore.findSessions(key, 400L, 600L)
) {
assertEquals(expected2, toList(values2));
}
}
@Test
public void shouldPutAndBackwardFindSessionsInRange() {
final String key = "a";
final Windowed<String> a1 = new Windowed<>(key, new SessionWindow(10, 10L));
final Windowed<String> a2 = new Windowed<>(key, new SessionWindow(500L, 1000L));
sessionStore.put(a1, 1L);
sessionStore.put(a2, 2L);
sessionStore.put(new Windowed<>(key, new SessionWindow(1500L, 2000L)), 1L);
sessionStore.put(new Windowed<>(key, new SessionWindow(2500L, 3000L)), 2L);
final LinkedList<KeyValue<Windowed<String>, Long>> expected = new LinkedList<>();
expected.add(KeyValue.pair(a1, 1L));
expected.add(KeyValue.pair(a2, 2L));
try (final KeyValueIterator<Windowed<String>, Long> values = sessionStore.backwardFindSessions(key, 0, 1000L)) {
assertEquals(toList(expected.descendingIterator()), toList(values));
}
final List<KeyValue<Windowed<String>, Long>> expected2 =
Collections.singletonList(KeyValue.pair(a2, 2L));
try (final KeyValueIterator<Windowed<String>, Long> values2 = sessionStore.backwardFindSessions(key, 400L, 600L)) {
assertEquals(expected2, toList(values2));
}
}
@Test
public void shouldFetchAllSessionsWithSameRecordKey() {
final LinkedList<KeyValue<Windowed<String>, Long>> expected = new LinkedList<>();
expected.add(KeyValue.pair(new Windowed<>("a", new SessionWindow(0, 0)), 1L));
expected.add(KeyValue.pair(new Windowed<>("a", new SessionWindow(10, 10)), 2L));
expected.add(KeyValue.pair(new Windowed<>("a", new SessionWindow(100, 100)), 3L));
expected.add(KeyValue.pair(new Windowed<>("a", new SessionWindow(1000, 1000)), 4L));
for (final KeyValue<Windowed<String>, Long> kv : expected) {
sessionStore.put(kv.key, kv.value);
}
// add one that shouldn't appear in the results
sessionStore.put(new Windowed<>("aa", new SessionWindow(0, 0)), 5L);
try (final KeyValueIterator<Windowed<String>, Long> values = sessionStore.fetch("a")) {
assertEquals(expected, toList(values));
}
}
    /**
     * Key-less {@code findSessions(earliestSessionEndTime, latestSessionStartTime)}:
     * exercises point, boundary and partial-overlap queries. The plain RocksDB session
     * store does not support this API and must throw.
     */
    @SuppressWarnings("resource")
    @Test
    public void shouldFindSessionsForTimeRange() {
        sessionStore.put(new Windowed<>("a", new SessionWindow(0, 0)), 5L);

        if (storeType() == StoreType.RocksDBSessionStore) {
            assertThrows(
                UnsupportedOperationException.class,
                () -> sessionStore.findSessions(0, 0),
                "This API is not supported by this implementation of SessionStore."
            );
            // Nothing further to verify for this store type.
            return;
        }

        // Find point
        try (final KeyValueIterator<Windowed<String>, Long> values = sessionStore.findSessions(0, 0)) {
            final List<KeyValue<Windowed<String>, Long>> expected = Collections.singletonList(
                KeyValue.pair(new Windowed<>("a", new SessionWindow(0, 0)), 5L)
            );
            assertEquals(expected, toList(values));
        }

        sessionStore.put(new Windowed<>("b", new SessionWindow(10, 20)), 10L);
        sessionStore.put(new Windowed<>("c", new SessionWindow(30, 40)), 20L);

        // Find boundary (range end exactly equals b's session end)
        try (final KeyValueIterator<Windowed<String>, Long> values = sessionStore.findSessions(0, 20)) {
            final List<KeyValue<Windowed<String>, Long>> expected = asList(
                KeyValue.pair(new Windowed<>("a", new SessionWindow(0, 0)), 5L),
                KeyValue.pair(new Windowed<>("b", new SessionWindow(10, 20)), 10L)
            );
            assertEquals(expected, toList(values));
        }

        // Find left boundary (range end one short of b's start+duration; excludes b)
        try (final KeyValueIterator<Windowed<String>, Long> values = sessionStore.findSessions(0, 19)) {
            final List<KeyValue<Windowed<String>, Long>> expected = Collections.singletonList(
                KeyValue.pair(new Windowed<>("a", new SessionWindow(0, 0)), 5L)
            );
            assertEquals(expected, toList(values));
        }

        // Find right boundary (range start just past a's end; excludes a)
        try (final KeyValueIterator<Windowed<String>, Long> values = sessionStore.findSessions(1, 20)) {
            final List<KeyValue<Windowed<String>, Long>> expected = Collections.singletonList(
                KeyValue.pair(new Windowed<>("b", new SessionWindow(10, 20)), 10L)
            );
            assertEquals(expected, toList(values));
        }

        // Find partial off by 1 (excludes a, includes b and c)
        try (final KeyValueIterator<Windowed<String>, Long> values = sessionStore.findSessions(19, 41)) {
            final List<KeyValue<Windowed<String>, Long>> expected = asList(
                KeyValue.pair(new Windowed<>("b", new SessionWindow(10, 20)), 10L),
                KeyValue.pair(new Windowed<>("c", new SessionWindow(30, 40)), 20L)
            );
            assertEquals(expected, toList(values));
        }

        // Find all boundary
        try (final KeyValueIterator<Windowed<String>, Long> values = sessionStore.findSessions(0, 40)) {
            final List<KeyValue<Windowed<String>, Long>> expected = asList(
                KeyValue.pair(new Windowed<>("a", new SessionWindow(0, 0)), 5L),
                KeyValue.pair(new Windowed<>("b", new SessionWindow(10, 20)), 10L),
                KeyValue.pair(new Windowed<>("c", new SessionWindow(30, 40)), 20L)
            );
            assertEquals(expected, toList(values));
        }
    }
@Test
public void shouldBackwardFetchAllSessionsWithSameRecordKey() {
final LinkedList<KeyValue<Windowed<String>, Long>> expected = new LinkedList<>();
expected.add(KeyValue.pair(new Windowed<>("a", new SessionWindow(0, 0)), 1L));
expected.add(KeyValue.pair(new Windowed<>("a", new SessionWindow(10, 10)), 2L));
expected.add(KeyValue.pair(new Windowed<>("a", new SessionWindow(100, 100)), 3L));
expected.add(KeyValue.pair(new Windowed<>("a", new SessionWindow(1000, 1000)), 4L));
for (final KeyValue<Windowed<String>, Long> kv : expected) {
sessionStore.put(kv.key, kv.value);
}
// add one that shouldn't appear in the results
sessionStore.put(new Windowed<>("aa", new SessionWindow(0, 0)), 5L);
try (final KeyValueIterator<Windowed<String>, Long> values = sessionStore.backwardFetch("a")) {
assertEquals(toList(expected.descendingIterator()), toList(values));
}
}
    /**
     * Key-range fetch/findSessions with bounded, half-open (null bound) and fully open
     * ranges. The {@code expected} list is mutated between cases to match each range.
     */
    @Test
    public void shouldFetchAllSessionsWithinKeyRange() {
        // Sessions whose keys fall inside ["aa", "bb"].
        final List<KeyValue<Windowed<String>, Long>> expected = new LinkedList<>();
        expected.add(KeyValue.pair(new Windowed<>("aa", new SessionWindow(10, 10)), 2L));
        expected.add(KeyValue.pair(new Windowed<>("aaa", new SessionWindow(100, 100)), 3L));
        expected.add(KeyValue.pair(new Windowed<>("aaaa", new SessionWindow(100, 100)), 6L));
        expected.add(KeyValue.pair(new Windowed<>("b", new SessionWindow(1000, 1000)), 4L));
        expected.add(KeyValue.pair(new Windowed<>("bb", new SessionWindow(1500, 2000)), 5L));
        for (final KeyValue<Windowed<String>, Long> kv : expected) {
            sessionStore.put(kv.key, kv.value);
        }

        // add some that should only be fetched in infinite fetch
        sessionStore.put(new Windowed<>("a", new SessionWindow(0, 0)), 1L);
        sessionStore.put(new Windowed<>("bbb", new SessionWindow(2500, 3000)), 6L);

        // Bounded range ["aa", "bb"]: "a" and "bbb" excluded.
        try (final KeyValueIterator<Windowed<String>, Long> values = sessionStore.fetch("aa", "bb")) {
            assertEquals(expected, toList(values));
        }
        try (final KeyValueIterator<Windowed<String>, Long> values = sessionStore.findSessions("aa", "bb", 0L, Long.MAX_VALUE)) {
            assertEquals(expected, toList(values));
        }

        // infinite keyFrom fetch case: "a" now included at the front
        expected.add(0, KeyValue.pair(new Windowed<>("a", new SessionWindow(0, 0)), 1L));
        try (final KeyValueIterator<Windowed<String>, Long> values = sessionStore.fetch(null, "bb")) {
            assertEquals(expected, toList(values));
        }

        // remove the one added for unlimited start fetch case
        expected.remove(0);

        // infinite keyTo fetch case: "bbb" now included at the end
        expected.add(KeyValue.pair(new Windowed<>("bbb", new SessionWindow(2500, 3000)), 6L));
        try (final KeyValueIterator<Windowed<String>, Long> values = sessionStore.fetch("aa", null)) {
            assertEquals(expected, toList(values));
        }

        // fetch all case: both extra keys included
        expected.add(0, KeyValue.pair(new Windowed<>("a", new SessionWindow(0, 0)), 1L));
        try (final KeyValueIterator<Windowed<String>, Long> values = sessionStore.fetch(null, null)) {
            assertEquals(expected, toList(values));
        }
    }
    /**
     * Backward counterpart of the key-range fetch test: each case must return the same
     * sessions in reverse order, hence the comparisons against {@code descendingIterator()}.
     */
    @Test
    public void shouldBackwardFetchAllSessionsWithinKeyRange() {
        // Sessions whose keys fall inside ["aa", "bb"], in forward order.
        final LinkedList<KeyValue<Windowed<String>, Long>> expected = new LinkedList<>();
        expected.add(KeyValue.pair(new Windowed<>("aa", new SessionWindow(10, 10)), 2L));
        expected.add(KeyValue.pair(new Windowed<>("aaa", new SessionWindow(100, 100)), 3L));
        expected.add(KeyValue.pair(new Windowed<>("aaaa", new SessionWindow(100, 100)), 6L));
        expected.add(KeyValue.pair(new Windowed<>("b", new SessionWindow(1000, 1000)), 4L));
        expected.add(KeyValue.pair(new Windowed<>("bb", new SessionWindow(1500, 2000)), 5L));
        for (final KeyValue<Windowed<String>, Long> kv : expected) {
            sessionStore.put(kv.key, kv.value);
        }

        // add some that should only be fetched in infinite fetch
        sessionStore.put(new Windowed<>("a", new SessionWindow(0, 0)), 1L);
        sessionStore.put(new Windowed<>("bbb", new SessionWindow(2500, 3000)), 6L);

        try (final KeyValueIterator<Windowed<String>, Long> values = sessionStore.backwardFetch("aa", "bb")) {
            assertEquals(toList(expected.descendingIterator()), toList(values));
        }
        try (final KeyValueIterator<Windowed<String>, Long> values = sessionStore.backwardFindSessions("aa", "bb", 0L, Long.MAX_VALUE)) {
            assertEquals(toList(expected.descendingIterator()), toList(values));
        }

        // infinite keyFrom fetch case
        expected.add(0, KeyValue.pair(new Windowed<>("a", new SessionWindow(0, 0)), 1L));
        try (final KeyValueIterator<Windowed<String>, Long> values = sessionStore.backwardFetch(null, "bb")) {
            assertEquals(toList(expected.descendingIterator()), toList(values));
        }

        // remove the one added for unlimited start fetch case
        expected.remove(0);

        // infinite keyTo fetch case
        expected.add(KeyValue.pair(new Windowed<>("bbb", new SessionWindow(2500, 3000)), 6L));
        try (final KeyValueIterator<Windowed<String>, Long> values = sessionStore.backwardFetch("aa", null)) {
            assertEquals(toList(expected.descendingIterator()), toList(values));
        }

        // fetch all case
        expected.add(0, KeyValue.pair(new Windowed<>("a", new SessionWindow(0, 0)), 1L));
        try (final KeyValueIterator<Windowed<String>, Long> values = sessionStore.backwardFetch(null, null)) {
            assertEquals(toList(expected.descendingIterator()), toList(values));
        }
    }
@Test
public void shouldFetchExactSession() {
sessionStore.put(new Windowed<>("a", new SessionWindow(0, 4)), 1L);
sessionStore.put(new Windowed<>("aa", new SessionWindow(0, 3)), 2L);
sessionStore.put(new Windowed<>("aa", new SessionWindow(0, 4)), 3L);
sessionStore.put(new Windowed<>("aa", new SessionWindow(1, 4)), 4L);
sessionStore.put(new Windowed<>("aaa", new SessionWindow(0, 4)), 5L);
final long result = sessionStore.fetchSession("aa", 0, 4);
assertEquals(3L, result);
}
    @Test
    public void shouldReturnNullOnSessionNotFound() {
        // fetchSession returns null (rather than throwing) for an unknown session.
        assertNull(sessionStore.fetchSession("any key", 0L, 5L));
    }
@Test
public void shouldFindValuesWithinMergingSessionWindowRange() {
final String key = "a";
sessionStore.put(new Windowed<>(key, new SessionWindow(0L, 0L)), 1L);
sessionStore.put(new Windowed<>(key, new SessionWindow(1000L, 1000L)), 2L);
final List<KeyValue<Windowed<String>, Long>> expected = Arrays.asList(
KeyValue.pair(new Windowed<>(key, new SessionWindow(0L, 0L)), 1L),
KeyValue.pair(new Windowed<>(key, new SessionWindow(1000L, 1000L)), 2L));
try (final KeyValueIterator<Windowed<String>, Long> results = sessionStore.findSessions(key, -1, 1000L)) {
assertEquals(expected, toList(results));
}
}
    @Test
    public void shouldBackwardFindValuesWithinMergingSessionWindowRange() {
        final String key = "a";
        sessionStore.put(new Windowed<>(key, new SessionWindow(0L, 0L)), 1L);
        sessionStore.put(new Windowed<>(key, new SessionWindow(1000L, 1000L)), 2L);

        // Both sessions overlap the merge range [-1, 1000]; backward iteration
        // must return them latest-first, hence the descendingIterator comparison.
        final LinkedList<KeyValue<Windowed<String>, Long>> expected = new LinkedList<>();
        expected.add(KeyValue.pair(new Windowed<>(key, new SessionWindow(0L, 0L)), 1L));
        expected.add(KeyValue.pair(new Windowed<>(key, new SessionWindow(1000L, 1000L)), 2L));

        try (final KeyValueIterator<Windowed<String>, Long> results = sessionStore.backwardFindSessions(key, -1, 1000L)) {
            assertEquals(toList(expected.descendingIterator()), toList(results));
        }
    }
@Test
public void shouldRemove() {
sessionStore.put(new Windowed<>("a", new SessionWindow(0, 1000)), 1L);
sessionStore.put(new Windowed<>("a", new SessionWindow(1500, 2500)), 2L);
sessionStore.remove(new Windowed<>("a", new SessionWindow(0, 1000)));
try (final KeyValueIterator<Windowed<String>, Long> results = sessionStore.findSessions("a", 0L, 1000L)) {
assertFalse(results.hasNext());
}
try (final KeyValueIterator<Windowed<String>, Long> results = sessionStore.findSessions("a", 1500L, 2500L)) {
assertTrue(results.hasNext());
}
}
@Test
public void shouldRemoveOnNullAggValue() {
sessionStore.put(new Windowed<>("a", new SessionWindow(0, 1000)), 1L);
sessionStore.put(new Windowed<>("a", new SessionWindow(1500, 2500)), 2L);
sessionStore.put(new Windowed<>("a", new SessionWindow(0, 1000)), null);
try (final KeyValueIterator<Windowed<String>, Long> results = sessionStore.findSessions("a", 0L, 1000L)) {
assertFalse(results.hasNext());
}
try (final KeyValueIterator<Windowed<String>, Long> results = sessionStore.findSessions("a", 1500L, 2500L)) {
assertTrue(results.hasNext());
}
}
@Test
public void shouldFindSessionsToMerge() {
final Windowed<String> session1 = new Windowed<>("a", new SessionWindow(0, 100));
final Windowed<String> session2 = new Windowed<>("a", new SessionWindow(101, 200));
final Windowed<String> session3 = new Windowed<>("a", new SessionWindow(201, 300));
final Windowed<String> session4 = new Windowed<>("a", new SessionWindow(301, 400));
final Windowed<String> session5 = new Windowed<>("a", new SessionWindow(401, 500));
sessionStore.put(session1, 1L);
sessionStore.put(session2, 2L);
sessionStore.put(session3, 3L);
sessionStore.put(session4, 4L);
sessionStore.put(session5, 5L);
final List<KeyValue<Windowed<String>, Long>> expected =
Arrays.asList(KeyValue.pair(session2, 2L), KeyValue.pair(session3, 3L));
try (final KeyValueIterator<Windowed<String>, Long> results = sessionStore.findSessions("a", 150, 300)) {
assertEquals(expected, toList(results));
}
}
@Test
public void shouldBackwardFindSessionsToMerge() {
final Windowed<String> session1 = new Windowed<>("a", new SessionWindow(0, 100));
final Windowed<String> session2 = new Windowed<>("a", new SessionWindow(101, 200));
final Windowed<String> session3 = new Windowed<>("a", new SessionWindow(201, 300));
final Windowed<String> session4 = new Windowed<>("a", new SessionWindow(301, 400));
final Windowed<String> session5 = new Windowed<>("a", new SessionWindow(401, 500));
sessionStore.put(session1, 1L);
sessionStore.put(session2, 2L);
sessionStore.put(session3, 3L);
sessionStore.put(session4, 4L);
sessionStore.put(session5, 5L);
final List<KeyValue<Windowed<String>, Long>> expected =
asList(KeyValue.pair(session3, 3L), KeyValue.pair(session2, 2L));
try (final KeyValueIterator<Windowed<String>, Long> results = sessionStore.backwardFindSessions("a", 150, 300)) {
assertEquals(expected, toList(results));
}
}
@Test
public void shouldFetchExactKeys() {
sessionStore.close();
sessionStore = buildSessionStore(0x7a00000000000000L, Serdes.String(), Serdes.Long());
sessionStore.init(context, sessionStore);
sessionStore.put(new Windowed<>("a", new SessionWindow(0, 0)), 1L);
sessionStore.put(new Windowed<>("aa", new SessionWindow(0, 10)), 2L);
sessionStore.put(new Windowed<>("a", new SessionWindow(10, 20)), 3L);
sessionStore.put(new Windowed<>("aa", new SessionWindow(10, 20)), 4L);
sessionStore.put(new Windowed<>("a",
new SessionWindow(0x7a00000000000000L - 2, 0x7a00000000000000L - 1)), 5L);
try (final KeyValueIterator<Windowed<String>, Long> iterator =
sessionStore.findSessions("a", 0, Long.MAX_VALUE)
) {
assertThat(valuesToSet(iterator), equalTo(Set.of(1L, 3L, 5L)));
}
try (final KeyValueIterator<Windowed<String>, Long> iterator =
sessionStore.findSessions("aa", 0, Long.MAX_VALUE)
) {
assertThat(valuesToSet(iterator), equalTo(Set.of(2L, 4L)));
}
try (final KeyValueIterator<Windowed<String>, Long> iterator =
sessionStore.findSessions("a", "aa", 0, Long.MAX_VALUE)
) {
assertThat(valuesToSet(iterator), equalTo(Set.of(1L, 2L, 3L, 4L, 5L)));
}
try (final KeyValueIterator<Windowed<String>, Long> iterator =
sessionStore.findSessions("a", "aa", 10, 0)
) {
assertThat(valuesToSet(iterator), equalTo(Set.of(2L)));
}
try (final KeyValueIterator<Windowed<String>, Long> iterator =
sessionStore.findSessions(null, "aa", 0, Long.MAX_VALUE)
) {
assertThat(valuesToSet(iterator), equalTo(Set.of(1L, 2L, 3L, 4L, 5L)));
}
try (final KeyValueIterator<Windowed<String>, Long> iterator =
sessionStore.findSessions("a", null, 0, Long.MAX_VALUE)
) {
assertThat(valuesToSet(iterator), equalTo(Set.of(1L, 2L, 3L, 4L, 5L)));
}
try (final KeyValueIterator<Windowed<String>, Long> iterator =
sessionStore.findSessions(null, null, 0, Long.MAX_VALUE)
) {
assertThat(valuesToSet(iterator), equalTo(Set.of(1L, 2L, 3L, 4L, 5L)));
}
}
@Test
public void shouldBackwardFetchExactKeys() {
sessionStore.close();
sessionStore = buildSessionStore(0x7a00000000000000L, Serdes.String(), Serdes.Long());
sessionStore.init(context, sessionStore);
sessionStore.put(new Windowed<>("a", new SessionWindow(0, 0)), 1L);
sessionStore.put(new Windowed<>("aa", new SessionWindow(0, 10)), 2L);
sessionStore.put(new Windowed<>("a", new SessionWindow(10, 20)), 3L);
sessionStore.put(new Windowed<>("aa", new SessionWindow(10, 20)), 4L);
sessionStore.put(new Windowed<>("a",
new SessionWindow(0x7a00000000000000L - 2, 0x7a00000000000000L - 1)), 5L);
try (final KeyValueIterator<Windowed<String>, Long> iterator =
sessionStore.backwardFindSessions("a", 0, Long.MAX_VALUE)
) {
assertThat(valuesToSet(iterator), equalTo(Set.of(1L, 3L, 5L)));
}
try (final KeyValueIterator<Windowed<String>, Long> iterator =
sessionStore.backwardFindSessions("aa", 0, Long.MAX_VALUE)
) {
assertThat(valuesToSet(iterator), equalTo(Set.of(2L, 4L)));
}
try (final KeyValueIterator<Windowed<String>, Long> iterator =
sessionStore.backwardFindSessions("a", "aa", 0, Long.MAX_VALUE)
) {
assertThat(valuesToSet(iterator), equalTo(Set.of(1L, 2L, 3L, 4L, 5L)));
}
try (final KeyValueIterator<Windowed<String>, Long> iterator =
sessionStore.backwardFindSessions("a", "aa", 10, 0)
) {
assertThat(valuesToSet(iterator), equalTo(Set.of(2L)));
}
try (final KeyValueIterator<Windowed<String>, Long> iterator =
sessionStore.backwardFindSessions(null, "aa", 0, Long.MAX_VALUE)
) {
assertThat(valuesToSet(iterator), equalTo(Set.of(1L, 2L, 3L, 4L, 5L)));
}
try (final KeyValueIterator<Windowed<String>, Long> iterator =
sessionStore.backwardFindSessions("a", null, 0, Long.MAX_VALUE)
) {
assertThat(valuesToSet(iterator), equalTo(Set.of(1L, 2L, 3L, 4L, 5L)));
}
try (final KeyValueIterator<Windowed<String>, Long> iterator =
sessionStore.backwardFindSessions(null, null, 0, Long.MAX_VALUE)
) {
assertThat(valuesToSet(iterator), equalTo(Set.of(1L, 2L, 3L, 4L, 5L)));
}
}
@Test
public void shouldFetchAndIterateOverExactBinaryKeys() {
final SessionStore<Bytes, String> sessionStore =
buildSessionStore(RETENTION_PERIOD, Serdes.Bytes(), Serdes.String());
sessionStore.init(context, sessionStore);
final Bytes key1 = Bytes.wrap(new byte[] {0});
final Bytes key2 = Bytes.wrap(new byte[] {0, 0});
final Bytes key3 = Bytes.wrap(new byte[] {0, 0, 0});
sessionStore.put(new Windowed<>(key1, new SessionWindow(1, 100)), "1");
sessionStore.put(new Windowed<>(key2, new SessionWindow(2, 100)), "2");
sessionStore.put(new Windowed<>(key3, new SessionWindow(3, 100)), "3");
sessionStore.put(new Windowed<>(key1, new SessionWindow(4, 100)), "4");
sessionStore.put(new Windowed<>(key2, new SessionWindow(5, 100)), "5");
sessionStore.put(new Windowed<>(key3, new SessionWindow(6, 100)), "6");
sessionStore.put(new Windowed<>(key1, new SessionWindow(7, 100)), "7");
sessionStore.put(new Windowed<>(key2, new SessionWindow(8, 100)), "8");
sessionStore.put(new Windowed<>(key3, new SessionWindow(9, 100)), "9");
final List<String> expectedKey1 = asList("1", "4", "7");
try (KeyValueIterator<Windowed<Bytes>, String> iterator = sessionStore.findSessions(key1, 0L, Long.MAX_VALUE)) {
assertThat(valuesToSet(iterator), equalTo(new HashSet<>(expectedKey1)));
}
final List<String> expectedKey2 = asList("2", "5", "8");
try (KeyValueIterator<Windowed<Bytes>, String> iterator = sessionStore.findSessions(key2, 0L, Long.MAX_VALUE)) {
assertThat(valuesToSet(iterator), equalTo(new HashSet<>(expectedKey2)));
}
final List<String> expectedKey3 = asList("3", "6", "9");
try (KeyValueIterator<Windowed<Bytes>, String> iterator = sessionStore.findSessions(key3, 0L, Long.MAX_VALUE)) {
assertThat(valuesToSet(iterator), equalTo(new HashSet<>(expectedKey3)));
}
sessionStore.close();
}
@Test
public void shouldBackwardFetchAndIterateOverExactBinaryKeys() {
final SessionStore<Bytes, String> sessionStore =
buildSessionStore(RETENTION_PERIOD, Serdes.Bytes(), Serdes.String());
sessionStore.init(context, sessionStore);
final Bytes key1 = Bytes.wrap(new byte[] {0});
final Bytes key2 = Bytes.wrap(new byte[] {0, 0});
final Bytes key3 = Bytes.wrap(new byte[] {0, 0, 0});
sessionStore.put(new Windowed<>(key1, new SessionWindow(1, 100)), "1");
sessionStore.put(new Windowed<>(key2, new SessionWindow(2, 100)), "2");
sessionStore.put(new Windowed<>(key3, new SessionWindow(3, 100)), "3");
sessionStore.put(new Windowed<>(key1, new SessionWindow(4, 100)), "4");
sessionStore.put(new Windowed<>(key2, new SessionWindow(5, 100)), "5");
sessionStore.put(new Windowed<>(key3, new SessionWindow(6, 100)), "6");
sessionStore.put(new Windowed<>(key1, new SessionWindow(7, 100)), "7");
sessionStore.put(new Windowed<>(key2, new SessionWindow(8, 100)), "8");
sessionStore.put(new Windowed<>(key3, new SessionWindow(9, 100)), "9");
final List<String> expectedKey1 = asList("7", "4", "1");
try (KeyValueIterator<Windowed<Bytes>, String> iterator = sessionStore.backwardFindSessions(key1, 0L, Long.MAX_VALUE)) {
assertThat(valuesToSet(iterator), equalTo(new HashSet<>(expectedKey1)));
}
final List<String> expectedKey2 = asList("8", "5", "2");
try (KeyValueIterator<Windowed<Bytes>, String> iterator = sessionStore.backwardFindSessions(key2, 0L, Long.MAX_VALUE)) {
assertThat(valuesToSet(iterator), equalTo(new HashSet<>(expectedKey2)));
}
final List<String> expectedKey3 = asList("9", "6", "3");
try (KeyValueIterator<Windowed<Bytes>, String> iterator = sessionStore.backwardFindSessions(key3, 0L, Long.MAX_VALUE)) {
assertThat(valuesToSet(iterator), equalTo(new HashSet<>(expectedKey3)));
}
sessionStore.close();
}
@Test
public void testIteratorPeek() {
sessionStore.put(new Windowed<>("a", new SessionWindow(0, 0)), 1L);
sessionStore.put(new Windowed<>("aa", new SessionWindow(0, 10)), 2L);
sessionStore.put(new Windowed<>("a", new SessionWindow(10, 20)), 3L);
sessionStore.put(new Windowed<>("aa", new SessionWindow(10, 20)), 4L);
try (final KeyValueIterator<Windowed<String>, Long> iterator = sessionStore.findSessions("a", 0L, 20)) {
assertEquals(new Windowed<>("a", new SessionWindow(0L, 0L)), iterator.peekNextKey());
final Windowed<String> k1 = iterator.peekNextKey();
assertEquals(iterator.next().key, k1);
final Windowed<String> k2 = iterator.peekNextKey();
assertEquals(iterator.next().key, k2);
assertFalse(iterator.hasNext());
}
}
@Test
public void testIteratorPeekBackward() {
sessionStore.put(new Windowed<>("a", new SessionWindow(0, 0)), 1L);
sessionStore.put(new Windowed<>("aa", new SessionWindow(0, 10)), 2L);
sessionStore.put(new Windowed<>("a", new SessionWindow(10, 20)), 3L);
sessionStore.put(new Windowed<>("aa", new SessionWindow(10, 20)), 4L);
try (final KeyValueIterator<Windowed<String>, Long> iterator = sessionStore.backwardFindSessions("a", 0L, 20)) {
assertEquals(new Windowed<>("a", new SessionWindow(10L, 20L)), iterator.peekNextKey());
final Windowed<String> k1 = iterator.peekNextKey();
assertEquals(iterator.next().key, k1);
final Windowed<String> k2 = iterator.peekNextKey();
assertEquals(iterator.next().key, k2);
assertFalse(iterator.hasNext());
}
}
@Test
public void shouldRestore() {
final List<KeyValue<Windowed<String>, Long>> expected = Arrays.asList(
KeyValue.pair(new Windowed<>("a", new SessionWindow(0, 0)), 1L),
KeyValue.pair(new Windowed<>("a", new SessionWindow(10, 10)), 2L),
KeyValue.pair(new Windowed<>("a", new SessionWindow(100, 100)), 3L),
KeyValue.pair(new Windowed<>("a", new SessionWindow(1000, 1000)), 4L));
for (final KeyValue<Windowed<String>, Long> kv : expected) {
sessionStore.put(kv.key, kv.value);
}
try (final KeyValueIterator<Windowed<String>, Long> values = sessionStore.fetch("a")) {
assertEquals(expected, toList(values));
}
sessionStore.close();
try (final KeyValueIterator<Windowed<String>, Long> values = sessionStore.fetch("a")) {
assertEquals(Collections.emptyList(), toList(values));
}
final List<KeyValue<byte[], byte[]>> changeLog = new ArrayList<>();
for (final ProducerRecord<Object, Object> record : recordCollector.collected()) {
changeLog.add(new KeyValue<>(((Bytes) record.key()).get(), (byte[]) record.value()));
}
context.restore(sessionStore.name(), changeLog);
try (final KeyValueIterator<Windowed<String>, Long> values = sessionStore.fetch("a")) {
assertEquals(expected, toList(values));
}
}
@Test
public void shouldCloseOpenIteratorsWhenStoreIsClosedAndNotThrowInvalidStateStoreExceptionOnHasNext() {
sessionStore.put(new Windowed<>("a", new SessionWindow(0, 0)), 1L);
sessionStore.put(new Windowed<>("b", new SessionWindow(10, 50)), 2L);
sessionStore.put(new Windowed<>("c", new SessionWindow(100, 500)), 3L);
try (final KeyValueIterator<Windowed<String>, Long> iterator = sessionStore.fetch("a")) {
assertTrue(iterator.hasNext());
sessionStore.close();
assertFalse(iterator.hasNext());
}
}
@Test
public void shouldReturnSameResultsForSingleKeyFindSessionsAndEqualKeyRangeFindSessions() {
sessionStore.put(new Windowed<>("a", new SessionWindow(0, 1)), 0L);
sessionStore.put(new Windowed<>("aa", new SessionWindow(2, 3)), 1L);
sessionStore.put(new Windowed<>("aa", new SessionWindow(4, 5)), 2L);
sessionStore.put(new Windowed<>("aaa", new SessionWindow(6, 7)), 3L);
try (final KeyValueIterator<Windowed<String>, Long> singleKeyIterator = sessionStore.findSessions("aa", 0L, 10L);
final KeyValueIterator<Windowed<String>, Long> rangeIterator = sessionStore.findSessions("aa", "aa", 0L, 10L)) {
assertEquals(singleKeyIterator.next(), rangeIterator.next());
assertEquals(singleKeyIterator.next(), rangeIterator.next());
assertFalse(singleKeyIterator.hasNext());
assertFalse(rangeIterator.hasNext());
}
}
@Test
public void shouldMeasureExpiredRecords() {
final Properties streamsConfig = StreamsTestUtils.getStreamsConfig();
final SessionStore<String, Long> sessionStore = buildSessionStore(RETENTION_PERIOD, Serdes.String(), Serdes.Long());
final InternalMockProcessorContext<?, ?> context = new InternalMockProcessorContext<>(
TestUtils.tempDirectory(),
new StreamsConfig(streamsConfig),
recordCollector
);
final Time time = Time.SYSTEM;
context.setTime(1L);
context.setSystemTimeMs(time.milliseconds());
sessionStore.init(context, sessionStore);
// Advance stream time by inserting record with large enough timestamp that records with timestamp 0 are expired
// Note that rocksdb will only expire segments at a time (where segment interval = 60,000 for this retention period)
sessionStore.put(new Windowed<>("initial record", new SessionWindow(0, 2 * SEGMENT_INTERVAL)), 0L);
// Try inserting a record with timestamp 0 -- should be dropped
sessionStore.put(new Windowed<>("late record", new SessionWindow(0, 0)), 0L);
sessionStore.put(new Windowed<>("another on-time record", new SessionWindow(0, 2 * SEGMENT_INTERVAL)), 0L);
final Map<MetricName, ? extends Metric> metrics = context.metrics().metrics();
final String threadId = Thread.currentThread().getName();
final Metric dropTotal;
final Metric dropRate;
dropTotal = metrics.get(new MetricName(
"dropped-records-total",
"stream-task-metrics",
"",
mkMap(
mkEntry("thread-id", threadId),
mkEntry("task-id", "0_0")
)
));
dropRate = metrics.get(new MetricName(
"dropped-records-rate",
"stream-task-metrics",
"",
mkMap(
mkEntry("thread-id", threadId),
mkEntry("task-id", "0_0")
)
));
assertEquals(1.0, dropTotal.metricValue());
assertNotEquals(0.0, dropRate.metricValue());
sessionStore.close();
}
@Test
public void shouldNotThrowExceptionRemovingNonexistentKey() {
sessionStore.remove(new Windowed<>("a", new SessionWindow(0, 1)));
}
@SuppressWarnings("resource")
@Test
public void shouldThrowNullPointerExceptionOnFindSessionsNullKey() {
assertThrows(NullPointerException.class, () -> sessionStore.findSessions(null, 1L, 2L));
}
@SuppressWarnings("resource")
@Test
public void shouldThrowNullPointerExceptionOnFetchNullKey() {
assertThrows(NullPointerException.class, () -> sessionStore.fetch(null));
}
@Test
public void shouldThrowNullPointerExceptionOnRemoveNullKey() {
assertThrows(NullPointerException.class, () -> sessionStore.remove(null));
}
@Test
public void shouldThrowNullPointerExceptionOnPutNullKey() {
assertThrows(NullPointerException.class, () -> sessionStore.put(null, 1L));
}
@SuppressWarnings("resource")
@Test
public void shouldNotThrowInvalidRangeExceptionWithNegativeFromKey() {
final String keyFrom = new StringDeserializer().deserialize("", new IntegerSerializer().serialize("", -1));
final String keyTo = new StringDeserializer().deserialize("", new IntegerSerializer().serialize("", 1));
try (final LogCaptureAppender appender = LogCaptureAppender.createAndRegister();
final KeyValueIterator<Windowed<String>, Long> iterator = sessionStore.findSessions(keyFrom, keyTo, 0L, 10L)) {
assertFalse(iterator.hasNext());
final List<String> messages = appender.getMessages();
assertThat(
messages,
hasItem("Returning empty iterator for fetch with invalid key range: from > to." +
" This may be due to range arguments set in the wrong order, " +
"or serdes that don't preserve ordering when lexicographically comparing the serialized bytes." +
" Note that the built-in numerical serdes do not follow this for negative numbers")
);
}
}
@Test
public void shouldRemoveExpired() {
sessionStore.put(new Windowed<>("a", new SessionWindow(0, 0)), 1L);
if (storeType() == StoreType.InMemoryStore) {
sessionStore.put(new Windowed<>("aa", new SessionWindow(0, 10)), 2L);
sessionStore.put(new Windowed<>("a", new SessionWindow(10, 20)), 3L);
// Advance stream time to expire the first record
sessionStore.put(new Windowed<>("aa", new SessionWindow(10, RETENTION_PERIOD)), 4L);
} else {
sessionStore.put(new Windowed<>("aa", new SessionWindow(0, SEGMENT_INTERVAL)), 2L);
sessionStore.put(new Windowed<>("a", new SessionWindow(10, SEGMENT_INTERVAL)), 3L);
// Advance stream time to expire the first record
sessionStore.put(new Windowed<>("aa", new SessionWindow(10, 2 * SEGMENT_INTERVAL)), 4L);
}
try (final KeyValueIterator<Windowed<String>, Long> iterator =
sessionStore.findSessions("a", "b", 0L, Long.MAX_VALUE)
) {
if (storeType() == StoreType.InMemoryStore) {
assertEquals(valuesToSet(iterator), Set.of(2L, 3L, 4L));
} else {
// The 2 records with values 2L and 3L are considered expired as
// their end times < observed stream time - retentionPeriod + 1.
assertEquals(valuesToSet(iterator), Set.of(4L));
}
}
}
@Test
public void shouldMatchPositionAfterPut() {
    // Each put carries a distinct, increasing offset (1, 2, 3) for partition 0
    // via the record context; the store position should end at the last offset.
    context.setRecordContext(new ProcessorRecordContext(0, 1, 0, "", new RecordHeaders()));
    sessionStore.put(new Windowed<>("a", new SessionWindow(0, 0)), 1L);
    context.setRecordContext(new ProcessorRecordContext(0, 2, 0, "", new RecordHeaders()));
    sessionStore.put(new Windowed<>("aa", new SessionWindow(0, 10)), 2L);
    context.setRecordContext(new ProcessorRecordContext(0, 3, 0, "", new RecordHeaders()));
    sessionStore.put(new Windowed<>("a", new SessionWindow(10, 20)), 3L);
    final Position expected = Position.fromMap(mkMap(mkEntry("", mkMap(mkEntry(0, 3L)))));
    final Position actual = sessionStore.getPosition();
    // Fixed argument order: Hamcrest's assertThat takes (actual, matcher), so a
    // failure message reports expected and actual values in the right roles.
    assertThat(actual, is(expected));
}
@Test
public void shouldNotFetchExpiredSessions() {
    // Sessions are positioned relative to wall-clock time so that the first
    // "p" session falls entirely outside the retention period.
    final long systemTime = Time.SYSTEM.milliseconds();
    sessionStore.put(new Windowed<>("p", new SessionWindow(systemTime - 3 * RETENTION_PERIOD, systemTime - 2 * RETENTION_PERIOD)), 1L);
    sessionStore.put(new Windowed<>("q", new SessionWindow(systemTime - 2 * RETENTION_PERIOD, systemTime - RETENTION_PERIOD)), 4L);
    sessionStore.put(new Windowed<>("r", new SessionWindow(systemTime - RETENTION_PERIOD, systemTime - RETENTION_PERIOD / 2)), 3L);
    sessionStore.put(new Windowed<>("p", new SessionWindow(systemTime - RETENTION_PERIOD, systemTime - RETENTION_PERIOD / 2)), 2L);
    try (final KeyValueIterator<Windowed<String>, Long> iterator =
        sessionStore.findSessions("p", systemTime - 2 * RETENTION_PERIOD, systemTime - RETENTION_PERIOD)
    ) {
        assertEquals(Set.of(2L), valuesToSet(iterator));
    }
    try (final KeyValueIterator<Windowed<String>, Long> iterator =
        sessionStore.backwardFindSessions("p", systemTime - 5 * RETENTION_PERIOD, systemTime - 4 * RETENTION_PERIOD)
    ) {
        // Query range predates retention entirely, so nothing may be returned.
        assertFalse(iterator.hasNext());
    }
    try (final KeyValueIterator<Windowed<String>, Long> iterator =
        sessionStore.findSessions("p", "r", systemTime - 5 * RETENTION_PERIOD, systemTime - 4 * RETENTION_PERIOD)
    ) {
        assertFalse(iterator.hasNext());
    }
    try (final KeyValueIterator<Windowed<String>, Long> iterator =
        sessionStore.findSessions("p", "r", systemTime - RETENTION_PERIOD, systemTime - RETENTION_PERIOD / 2)
    ) {
        // Fixed argument order: JUnit's assertEquals takes (expected, actual),
        // matching the first assertion in this test.
        assertEquals(Set.of(2L, 3L, 4L), valuesToSet(iterator));
    }
    try (final KeyValueIterator<Windowed<String>, Long> iterator =
        sessionStore.findSessions("p", "r", systemTime - 2 * RETENTION_PERIOD, systemTime - RETENTION_PERIOD)
    ) {
        assertEquals(Set.of(2L, 3L, 4L), valuesToSet(iterator));
    }
    try (final KeyValueIterator<Windowed<String>, Long> iterator =
        sessionStore.backwardFindSessions("p", "r", systemTime - 2 * RETENTION_PERIOD, systemTime - RETENTION_PERIOD)
    ) {
        assertEquals(Set.of(2L, 3L, 4L), valuesToSet(iterator));
    }
}
}
|
StoreType
|
java
|
apache__camel
|
components/camel-cxf/camel-cxf-soap/src/test/java/org/apache/camel/component/cxf/converter/CachedCxfPayloadTest.java
|
{
"start": 1642,
"end": 4769
}
|
/**
 * Verifies that a {@code CachedCxfPayload} built from various XML {@code Source}
 * representations reproduces the original payload text after being written out,
 * reset, and copied.
 */
class ____ extends ExchangeTestSupport {
    // XML payload containing a CDATA section.
    private static final String PAYLOAD = "<foo>bar<![CDATA[ & a cdata section ]]></foo>";
    // Same content after a transformation that expands the CDATA section and
    // escapes the ampersand (produced by the SAX -> DOM conversion below).
    private static final String PAYLOAD_AMPED = "<foo>bar & a cdata section </foo>";

    @Test
    public void testCachedCxfPayloadSAXSource()
            throws TypeConversionException, NoTypeConversionAvailableException, IOException {
        SAXSource source = context.getTypeConverter().mandatoryConvertTo(SAXSource.class, PAYLOAD);
        // this conversion uses org.apache.camel.converter.jaxp.XmlConverter.toDOMNodeFromSAX which uses Transformer
        // to convert SAXSource to DOM. This conversion preserves the content but loses its original representation.
        doTest(source, PAYLOAD_AMPED);
    }

    @Test
    public void testCachedCxfPayloadStAXSource()
            throws TypeConversionException, NoTypeConversionAvailableException, IOException {
        StAXSource source = context.getTypeConverter().mandatoryConvertTo(StAXSource.class, PAYLOAD);
        doTest(source, PAYLOAD);
    }

    @Test
    public void testCachedCxfPayloadStaxSource()
            throws TypeConversionException, NoTypeConversionAvailableException, IOException {
        // Build a CXF StaxSource directly from a stream reader rather than via the converter.
        XMLStreamReader streamReader = StaxUtils.createXMLStreamReader(new StreamSource(new StringReader(PAYLOAD)));
        StaxSource source = new StaxSource(streamReader);
        doTest(source, PAYLOAD);
    }

    @Test
    public void testCachedCxfPayloadDOMSource()
            throws TypeConversionException, NoTypeConversionAvailableException, IOException {
        DOMSource source = context.getTypeConverter().mandatoryConvertTo(DOMSource.class, PAYLOAD);
        doTest(source, PAYLOAD);
    }

    @Test
    public void testCachedCxfPayloadStreamSource()
            throws TypeConversionException, NoTypeConversionAvailableException, IOException {
        StreamSource source = context.getTypeConverter().mandatoryConvertTo(StreamSource.class, PAYLOAD);
        doTest(source, PAYLOAD);
    }

    /**
     * Converts {@code source} to a {@code CxfPayload}, wraps it in a
     * {@code CachedCxfPayload}, and asserts the payload text survives
     * writeTo, reset, copy, and String-conversion cycles.
     */
    @SuppressWarnings({ "unchecked", "rawtypes" })
    private void doTest(Object source, String payload) throws IOException {
        CxfPayload<?> originalPayload = context.getTypeConverter().convertTo(CxfPayload.class, source);
        CachedCxfPayload<?> cache = new CachedCxfPayload(originalPayload, exchange);
        // This payload is expected to be cached in memory (not spooled to disk).
        assertTrue(cache.inMemory());
        // First read: write the cache out and compare against the expected text.
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        cache.writeTo(bos);
        String s = context.getTypeConverter().convertTo(String.class, bos);
        assertEquals(payload, s);
        // Rewind before copying so the clone sees the full content.
        cache.reset();
        CachedCxfPayload clone = (CachedCxfPayload) cache.copy(exchange);
        bos = new ByteArrayOutputStream();
        clone.writeTo(bos);
        s = context.getTypeConverter().convertTo(String.class, bos);
        assertEquals(payload, s);
        // Rewind both again and verify direct String conversion also matches.
        cache.reset();
        clone.reset();
        s = context.getTypeConverter().convertTo(String.class, cache);
        assertEquals(payload, s);
        s = context.getTypeConverter().convertTo(String.class, clone);
        assertEquals(payload, s);
    }
}
|
CachedCxfPayloadTest
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/nullness/EqualsMissingNullableTest.java
|
{
"start": 3068,
"end": 3422
}
|
class ____ {
public abstract boolean equals(final @Nullable Object o);
}
""")
.doTest(TEXT_MATCH);
}
@Test
public void negativeAlreadyAnnotated() {
aggressiveHelper
.addSourceLines(
"Foo.java",
"""
import javax.annotation.Nullable;
abstract
|
Foo
|
java
|
quarkusio__quarkus
|
integration-tests/maven/src/test/resources-filtered/projects/quarkus-index-dependencies-groupid/runner/src/main/java/org/acme/HelloResource.java
|
{
"start": 257,
"end": 621
}
|
/**
 * Simple REST resource exposing a static hello message and a greeting
 * resolved from configuration.
 */
class ____ {

    /** Greeting message bound to the {@code greeting} configuration property. */
    @Inject
    @ConfigProperty(name = "greeting")
    String greeting;

    /** Injected collaborator; not used by the endpoints below. */
    @Inject
    SomeBean bean;

    /** Returns the fixed hello message as plain text. */
    @GET
    @Produces(MediaType.TEXT_PLAIN)
    public String hello() {
        final String message = "hello";
        return message;
    }

    /** Returns the configured greeting message as plain text. */
    @GET
    @Path("/greeting")
    @Produces(MediaType.TEXT_PLAIN)
    public String greeting() {
        final String configured = greeting;
        return configured;
    }
}
|
HelloResource
|
java
|
dropwizard__dropwizard
|
dropwizard-jdbi3/src/test/java/io/dropwizard/jdbi3/NamePrependingTemplateEngineTest.java
|
{
"start": 423,
"end": 631
}
|
class ____ {
private static final String TEMPLATE = UUID.randomUUID().toString();
private static final String ORIGINAL_RENDERED = UUID.randomUUID().toString();
public
|
NamePrependingTemplateEngineTest
|
java
|
spring-projects__spring-framework
|
spring-beans/src/main/java/org/springframework/beans/factory/support/BeanDefinitionReader.java
|
{
"start": 903,
"end": 1301
}
|
interface ____ bean definition readers that specifies load methods with
* {@link Resource} and {@link String} location parameters.
*
* <p>Concrete bean definition readers can of course add additional
* load and register methods for bean definitions, specific to
* their bean definition format.
*
* @author Juergen Hoeller
* @since 1.1
* @see org.springframework.core.io.Resource
*/
public
|
for
|
java
|
apache__camel
|
dsl/camel-jbang/camel-jbang-core/src/test/java/org/apache/camel/dsl/jbang/core/common/PluginHelperTest.java
|
{
"start": 1250,
"end": 2631
}
|
/**
 * Tests for {@code PluginHelper} covering embedded-plugin detection and the
 * fallback to the JSON plugin configuration stored in the user's home directory.
 */
class ____ {

    // Per-test home directory so plugin config is read/written in isolation.
    @TempDir
    Path tempDir;

    @BeforeEach
    public void setup() {
        // Point the CLI helper at the temp dir so PLUGIN_CONFIG resolves there.
        CommandLineHelper.useHomeDir(tempDir.toString());
    }

    @Test
    public void testEmbeddedPluginDetectionDoesNotThrowException() {
        // Smoke test only: detection must be safe to invoke in any environment.
        assertDoesNotThrow(PluginHelper::hasEmbeddedPlugins);
    }

    @Test
    public void testFallbackToJsonConfig() throws Exception {
        // Create a user plugin config file in home directory
        Path userConfig = CommandLineHelper.getHomeDir().resolve(PluginHelper.PLUGIN_CONFIG);
        String userConfigContent = """
                {
                  "plugins": {
                    "user-plugin": {
                      "name": "user-plugin",
                      "command": "user",
                      "description": "User plugin",
                      "firstVersion": "2.0.0"
                    }
                  }
                }
                """;
        Files.writeString(userConfig, userConfigContent, StandardOpenOption.CREATE);
        // Test that user config is loaded
        JsonObject config = PluginHelper.getPluginConfig();
        assertNotNull(config);
        JsonObject plugins = config.getMap("plugins");
        assertNotNull(plugins);
        // Should have user plugin
        JsonObject userPlugin = plugins.getMap("user-plugin");
        assertNotNull(userPlugin);
    }
}
|
PluginHelperTest
|
java
|
quarkusio__quarkus
|
extensions/redis-client/runtime/src/main/java/io/quarkus/redis/datasource/keys/ReactiveTransactionalKeyCommands.java
|
{
"start": 207,
"end": 19019
}
|
interface ____<K> extends ReactiveTransactionalRedisCommands {
    /**
     * Execute the command <a href="https://redis.io/commands/copy">COPY</a>.
     * Summary: Copy a key
     * Group: generic
     * Requires Redis 6.2.0
     *
     * @param source the key
     * @param destination the key
     * @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
     *         otherwise. In the case of failure, the transaction is discarded.
     */
    Uni<Void> copy(K source, K destination);

    /**
     * Execute the command <a href="https://redis.io/commands/copy">COPY</a>.
     * Summary: Copy a key
     * Group: generic
     * Requires Redis 6.2.0
     *
     * @param source the key
     * @param destination the key
     * @param copyArgs the additional arguments
     * @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
     *         otherwise. In the case of failure, the transaction is discarded.
     */
    Uni<Void> copy(K source, K destination, CopyArgs copyArgs);

    /**
     * Execute the command <a href="https://redis.io/commands/del">DEL</a>.
     * Summary: Delete one or multiple keys
     * Group: generic
     * Requires Redis 1.0.0
     *
     * @param keys the keys.
     * @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
     *         otherwise. In the case of failure, the transaction is discarded.
     */
    Uni<Void> del(K... keys);

    /**
     * Execute the command <a href="https://redis.io/commands/dump">DUMP</a>.
     * Summary: Return a serialized version of the value stored at the specified key.
     * Group: generic
     * Requires Redis 2.6.0
     *
     * @param key the key
     * @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
     *         otherwise. In the case of failure, the transaction is discarded.
     */
    Uni<Void> dump(K key);

    /**
     * Execute the command <a href="https://redis.io/commands/exists">EXISTS</a>.
     * Summary: Determine if a key exists
     * Group: generic
     * Requires Redis 1.0.0
     *
     * @param key the key to check
     * @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
     *         otherwise. In the case of failure, the transaction is discarded.
     */
    Uni<Void> exists(K key);

    /**
     * Execute the command <a href="https://redis.io/commands/exists">EXISTS</a>.
     * Summary: Determine if a key exists
     * Group: generic
     * Requires Redis 1.0.0
     *
     * @param keys the keys to check
     * @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
     *         otherwise. In the case of failure, the transaction is discarded.
     */
    Uni<Void> exists(K... keys);

    /**
     * Execute the command <a href="https://redis.io/commands/expire">EXPIRE</a>.
     * Summary: Set a key's time to live in seconds
     * Group: generic
     * Requires Redis 1.0.0
     *
     * @param key the key
     * @param seconds the new TTL
     * @param expireArgs the {@code EXPIRE} command extra-arguments
     * @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
     *         otherwise. In the case of failure, the transaction is discarded.
     */
    Uni<Void> expire(K key, long seconds, ExpireArgs expireArgs);

    /**
     * Execute the command <a href="https://redis.io/commands/expire">EXPIRE</a>.
     * Summary: Set a key's time to live in seconds
     * Group: generic
     * Requires Redis 1.0.0
     *
     * @param key the key
     * @param duration the new TTL
     * @param expireArgs the {@code EXPIRE} command extra-arguments
     * @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
     *         otherwise. In the case of failure, the transaction is discarded.
     */
    Uni<Void> expire(K key, Duration duration, ExpireArgs expireArgs);

    /**
     * Execute the command <a href="https://redis.io/commands/expire">EXPIRE</a>.
     * Summary: Set a key's time to live in seconds
     * Group: generic
     * Requires Redis 1.0.0
     *
     * @param key the key
     * @param seconds the new TTL
     * @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
     *         otherwise. In the case of failure, the transaction is discarded.
     */
    Uni<Void> expire(K key, long seconds);

    /**
     * Execute the command <a href="https://redis.io/commands/expire">EXPIRE</a>.
     * Summary: Set a key's time to live in seconds
     * Group: generic
     * Requires Redis 1.0.0
     *
     * @param key the key
     * @param duration the new TTL
     * @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
     *         otherwise. In the case of failure, the transaction is discarded.
     */
    Uni<Void> expire(K key, Duration duration);

    /**
     * Execute the command <a href="https://redis.io/commands/expireat">EXPIREAT</a>.
     * Summary: Set the expiration for a key as a UNIX timestamp
     * Group: generic
     * Requires Redis 1.2.0
     *
     * @param key the key
     * @param timestamp the timestamp
     * @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
     *         otherwise. In the case of failure, the transaction is discarded.
     */
    Uni<Void> expireat(K key, long timestamp);

    /**
     * Execute the command <a href="https://redis.io/commands/expireat">EXPIREAT</a>.
     * Summary: Set the expiration for a key as a UNIX timestamp
     * Group: generic
     * Requires Redis 1.2.0
     *
     * @param key the key
     * @param timestamp the timestamp
     * @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
     *         otherwise. In the case of failure, the transaction is discarded.
     */
    Uni<Void> expireat(K key, Instant timestamp);

    /**
     * Execute the command <a href="https://redis.io/commands/expireat">EXPIREAT</a>.
     * Summary: Set the expiration for a key as a UNIX timestamp
     * Group: generic
     * Requires Redis 1.2.0
     *
     * @param key the key
     * @param timestamp the timestamp
     * @param expireArgs the {@code EXPIREAT} command extra-arguments
     * @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
     *         otherwise. In the case of failure, the transaction is discarded.
     */
    Uni<Void> expireat(K key, long timestamp, ExpireArgs expireArgs);

    /**
     * Execute the command <a href="https://redis.io/commands/expireat">EXPIREAT</a>.
     * Summary: Set the expiration for a key as a UNIX timestamp
     * Group: generic
     * Requires Redis 1.2.0
     *
     * @param key the key
     * @param timestamp the timestamp
     * @param expireArgs the {@code EXPIREAT} command extra-arguments
     * @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
     *         otherwise. In the case of failure, the transaction is discarded.
     */
    Uni<Void> expireat(K key, Instant timestamp, ExpireArgs expireArgs);

    /**
     * Execute the command <a href="https://redis.io/commands/expiretime">EXPIRETIME</a>.
     * Summary: Get the expiration Unix timestamp for a key
     * Group: generic
     * Requires Redis 7.0.0
     *
     * @param key the key
     * @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
     *         otherwise. In the case of failure, the transaction is discarded.
     * @throws RedisKeyNotFoundException if the key does not exist
     */
    Uni<Void> expiretime(K key);

    /**
     * Execute the command <a href="https://redis.io/commands/keys">KEYS</a>.
     * Summary: Find all keys matching the given pattern
     * Group: generic
     * Requires Redis 1.0.0
     *
     * @param pattern the glob-style pattern
     * @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
     *         otherwise. In the case of failure, the transaction is discarded.
     */
    Uni<Void> keys(String pattern);

    /**
     * Execute the command <a href="https://redis.io/commands/move">MOVE</a>.
     * Summary: Move a key to another database
     * Group: generic
     * Requires Redis 1.0.0
     *
     * @param key the key
     * @param db the destination database index
     * @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
     *         otherwise. In the case of failure, the transaction is discarded.
     */
    Uni<Void> move(K key, long db);

    /**
     * Execute the command <a href="https://redis.io/commands/persist">PERSIST</a>.
     * Summary: Remove the expiration from a key
     * Group: generic
     * Requires Redis 2.2.0
     *
     * @param key the key
     * @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
     *         otherwise. In the case of failure, the transaction is discarded.
     */
    Uni<Void> persist(K key);

    /**
     * Execute the command <a href="https://redis.io/commands/pexpire">PEXPIRE</a>.
     * Summary: Set a key's time to live in milliseconds
     * Group: generic
     * Requires Redis 2.6.0
     *
     * @param key the key
     * @param duration the new TTL
     * @param expireArgs the {@code PEXPIRE} command extra-arguments
     * @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
     *         otherwise. In the case of failure, the transaction is discarded.
     */
    Uni<Void> pexpire(K key, Duration duration, ExpireArgs expireArgs);

    /**
     * Execute the command <a href="https://redis.io/commands/pexpire">PEXPIRE</a>.
     * Summary: Set a key's time to live in milliseconds
     * Group: generic
     * Requires Redis 2.6.0
     *
     * @param key the key
     * @param ms the new TTL
     * @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
     *         otherwise. In the case of failure, the transaction is discarded.
     */
    Uni<Void> pexpire(K key, long ms);

    /**
     * Execute the command <a href="https://redis.io/commands/pexpire">PEXPIRE</a>.
     * Summary: Set a key's time to live in milliseconds
     * Group: generic
     * Requires Redis 2.6.0
     *
     * @param key the key
     * @param duration the new TTL
     * @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
     *         otherwise. In the case of failure, the transaction is discarded.
     */
    Uni<Void> pexpire(K key, Duration duration);

    /**
     * Execute the command <a href="https://redis.io/commands/pexpire">PEXPIRE</a>.
     * Summary: Set a key's time to live in milliseconds
     * Group: generic
     * Requires Redis 2.6.0
     *
     * @param key the key
     * @param milliseconds the new TTL
     * @param expireArgs the {@code PEXPIRE} command extra-arguments
     * @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
     *         otherwise. In the case of failure, the transaction is discarded.
     */
    Uni<Void> pexpire(K key, long milliseconds, ExpireArgs expireArgs);

    /**
     * Execute the command <a href="https://redis.io/commands/pexpireat">PEXPIREAT</a>.
     * Summary: Set the expiration for a key as a UNIX timestamp
     * Group: generic
     * Requires Redis 2.6.0
     *
     * @param key the key
     * @param timestamp the timestamp
     * @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
     *         otherwise. In the case of failure, the transaction is discarded.
     */
    Uni<Void> pexpireat(K key, long timestamp);

    /**
     * Execute the command <a href="https://redis.io/commands/pexpireat">PEXPIREAT</a>.
     * Summary: Set the expiration for a key as a UNIX timestamp
     * Group: generic
     * Requires Redis 2.6.0
     *
     * @param key the key
     * @param timestamp the timestamp
     * @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
     *         otherwise. In the case of failure, the transaction is discarded.
     */
    Uni<Void> pexpireat(K key, Instant timestamp);

    /**
     * Execute the command <a href="https://redis.io/commands/pexpireat">PEXPIREAT</a>.
     * Summary: Set the expiration for a key as a UNIX timestamp
     * Group: generic
     * Requires Redis 2.6.0
     *
     * @param key the key
     * @param timestamp the timestamp
     * @param expireArgs the {@code EXPIREAT} command extra-arguments
     * @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
     *         otherwise. In the case of failure, the transaction is discarded.
     */
    Uni<Void> pexpireat(K key, long timestamp, ExpireArgs expireArgs);

    /**
     * Execute the command <a href="https://redis.io/commands/pexpireat">PEXPIREAT</a>.
     * Summary: Set the expiration for a key as a UNIX timestamp
     * Group: generic
     * Requires Redis 2.6.0
     *
     * @param key the key
     * @param timestamp the timestamp
     * @param expireArgs the {@code EXPIREAT} command extra-arguments
     * @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
     *         otherwise. In the case of failure, the transaction is discarded.
     */
    Uni<Void> pexpireat(K key, Instant timestamp, ExpireArgs expireArgs);

    /**
     * Execute the command <a href="https://redis.io/commands/pexpiretime">PEXPIRETIME</a>.
     * Summary: Get the expiration Unix timestamp for a key
     * Group: generic
     * Requires Redis 2.6.0
     *
     * @param key the key
     * @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
     *         otherwise. In the case of failure, the transaction is discarded.
     * @throws RedisKeyNotFoundException if the key does not exist
     */
    Uni<Void> pexpiretime(K key);

    /**
     * Execute the command <a href="https://redis.io/commands/pttl">PTTL</a>.
     * Summary: Get the time to live for a key in milliseconds
     * Group: generic
     * Requires Redis 2.6.0
     *
     * @param key the key
     * @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
     *         otherwise. In the case of failure, the transaction is discarded.
     */
    Uni<Void> pttl(K key);

    /**
     * Execute the command <a href="https://redis.io/commands/randomkey">RANDOMKEY</a>.
     * Summary: Return a random key from the keyspace
     * Group: generic
     * Requires Redis 1.0.0
     *
     * @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
     *         otherwise. In the case of failure, the transaction is discarded.
     */
    Uni<Void> randomkey();

    /**
     * Execute the command <a href="https://redis.io/commands/rename">RENAME</a>.
     * Summary: Rename a key
     * Group: generic
     * Requires Redis 1.0.0
     *
     * @param key the key
     * @param newkey the new key
     * @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
     *         otherwise. In the case of failure, the transaction is discarded.
     */
    Uni<Void> rename(K key, K newkey);

    /**
     * Execute the command <a href="https://redis.io/commands/renamenx">RENAMENX</a>.
     * Summary: Rename a key, only if the new key does not exist
     * Group: generic
     * Requires Redis 1.0.0
     *
     * @param key the key
     * @param newkey the new key
     * @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
     *         otherwise. In the case of failure, the transaction is discarded.
     */
    Uni<Void> renamenx(K key, K newkey);

    /**
     * Execute the command <a href="https://redis.io/commands/touch">TOUCH</a>.
     * Summary: Alters the last access time of a key(s). Returns the number of existing keys specified.
     * Group: generic
     * Requires Redis 3.2.1
     *
     * @param keys the keys
     * @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
     *         otherwise. In the case of failure, the transaction is discarded.
     */
    Uni<Void> touch(K... keys);

    /**
     * Execute the command <a href="https://redis.io/commands/ttl">TTL</a>.
     * Summary: Get the time to live for a key in seconds
     * Group: generic
     * Requires Redis 1.0.0
     *
     * @param key the key
     * @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
     *         otherwise. In the case of failure, the transaction is discarded.
     * @throws RedisKeyNotFoundException if the key does not exist
     */
    Uni<Void> ttl(K key) throws RedisKeyNotFoundException;

    /**
     * Execute the command <a href="https://redis.io/commands/type">TYPE</a>.
     * Summary: Determine the type stored at key
     * Group: generic
     * Requires Redis 1.0.0
     *
     * @param key the key
     * @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
     *         otherwise. In the case of failure, the transaction is discarded.
     */
    Uni<Void> type(K key);

    /**
     * Execute the command <a href="https://redis.io/commands/unlink">UNLINK</a>.
     * Summary: Delete a key asynchronously in another thread. Otherwise, it is just as {@code DEL}, but non-blocking.
     * Group: generic
     * Requires Redis 4.0.0
     *
     * @param keys the keys
     * @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
     *         otherwise. In the case of failure, the transaction is discarded.
     */
    Uni<Void> unlink(K... keys);
}
|
ReactiveTransactionalKeyCommands
|
java
|
spring-projects__spring-boot
|
module/spring-boot-webmvc/src/test/java/org/springframework/boot/webmvc/autoconfigure/WebMvcAutoConfigurationTests.java
|
{
"start": 64869,
"end": 65203
}
|
/**
 * Minimal {@code AbstractFlashMapManager} stub: retrieves no flash maps and
 * deliberately ignores updates.
 */
class ____ extends AbstractFlashMapManager {

    @Override
    protected @Nullable List<FlashMap> retrieveFlashMaps(HttpServletRequest request) {
        // Stub: no flash maps are ever stored, so there is nothing to retrieve.
        return null;
    }

    @Override
    protected void updateFlashMaps(List<FlashMap> flashMaps, HttpServletRequest request,
            HttpServletResponse response) {
        // Stub: intentionally a no-op; this manager keeps no state.
    }

}
static
|
CustomFlashMapManager
|
java
|
apache__flink
|
flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/extraction/DataTypeExtractorTest.java
|
{
"start": 45154,
"end": 45504
}
|
class ____ {
    // Data type fixed explicitly by the hint rather than derived from the field type.
    public @DataTypeHint("MAP<STRING, INT>") Object mapField;
    // Nested POJO field; its data type is presumably derived from SimplePojo itself -- confirm against the test.
    public SimplePojo simplePojoField;
    // NOTE(review): untyped Object without a hint -- presumably exercises an error or RAW-type path; verify in the test.
    public Object someObject;
}
// --------------------------------------------------------------------------------------------
/** Generic Varargs in parameters. */
public static
|
ComplexPojoWithManyAnnotations
|
java
|
apache__camel
|
components/camel-consul/src/generated/java/org/apache/camel/component/consul/ConsulEndpointConfigurer.java
|
{
"start": 733,
"end": 10199
}
|
class ____ extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
ConsulEndpoint target = (ConsulEndpoint) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "acltoken":
case "aclToken": target.getConfiguration().setAclToken(property(camelContext, java.lang.String.class, value)); return true;
case "action": target.getConfiguration().setAction(property(camelContext, java.lang.String.class, value)); return true;
case "blockseconds":
case "blockSeconds": target.getConfiguration().setBlockSeconds(property(camelContext, java.lang.Integer.class, value)); return true;
case "bridgeerrorhandler":
case "bridgeErrorHandler": target.setBridgeErrorHandler(property(camelContext, boolean.class, value)); return true;
case "connecttimeout":
case "connectTimeout": target.getConfiguration().setConnectTimeout(property(camelContext, java.time.Duration.class, value)); return true;
case "consistencymode":
case "consistencyMode": target.getConfiguration().setConsistencyMode(property(camelContext, org.kiwiproject.consul.option.ConsistencyMode.class, value)); return true;
case "consulclient":
case "consulClient": target.getConfiguration().setConsulClient(property(camelContext, org.kiwiproject.consul.Consul.class, value)); return true;
case "datacenter": target.getConfiguration().setDatacenter(property(camelContext, java.lang.String.class, value)); return true;
case "exceptionhandler":
case "exceptionHandler": target.setExceptionHandler(property(camelContext, org.apache.camel.spi.ExceptionHandler.class, value)); return true;
case "exchangepattern":
case "exchangePattern": target.setExchangePattern(property(camelContext, org.apache.camel.ExchangePattern.class, value)); return true;
case "firstindex":
case "firstIndex": target.getConfiguration().setFirstIndex(property(camelContext, java.math.BigInteger.class, value)); return true;
case "key": target.getConfiguration().setKey(property(camelContext, java.lang.String.class, value)); return true;
case "lazystartproducer":
case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
case "nearnode":
case "nearNode": target.getConfiguration().setNearNode(property(camelContext, java.lang.String.class, value)); return true;
case "nodemeta":
case "nodeMeta": target.getConfiguration().setNodeMeta(property(camelContext, java.lang.String.class, value)); return true;
case "password": target.getConfiguration().setPassword(property(camelContext, java.lang.String.class, value)); return true;
case "pinginstance":
case "pingInstance": target.getConfiguration().setPingInstance(property(camelContext, boolean.class, value)); return true;
case "readtimeout":
case "readTimeout": target.getConfiguration().setReadTimeout(property(camelContext, java.time.Duration.class, value)); return true;
case "recursive": target.getConfiguration().setRecursive(property(camelContext, boolean.class, value)); return true;
case "sslcontextparameters":
case "sslContextParameters": target.getConfiguration().setSslContextParameters(property(camelContext, org.apache.camel.support.jsse.SSLContextParameters.class, value)); return true;
case "tags": target.getConfiguration().setTags(property(camelContext, java.lang.String.class, value)); return true;
case "url": target.getConfiguration().setUrl(property(camelContext, java.lang.String.class, value)); return true;
case "username":
case "userName": target.getConfiguration().setUserName(property(camelContext, java.lang.String.class, value)); return true;
case "valueasstring":
case "valueAsString": target.getConfiguration().setValueAsString(property(camelContext, boolean.class, value)); return true;
case "writetimeout":
case "writeTimeout": target.getConfiguration().setWriteTimeout(property(camelContext, java.time.Duration.class, value)); return true;
default: return false;
}
}
@Override
public String[] getAutowiredNames() {
return new String[]{"consulClient"};
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "acltoken":
case "aclToken": return java.lang.String.class;
case "action": return java.lang.String.class;
case "blockseconds":
case "blockSeconds": return java.lang.Integer.class;
case "bridgeerrorhandler":
case "bridgeErrorHandler": return boolean.class;
case "connecttimeout":
case "connectTimeout": return java.time.Duration.class;
case "consistencymode":
case "consistencyMode": return org.kiwiproject.consul.option.ConsistencyMode.class;
case "consulclient":
case "consulClient": return org.kiwiproject.consul.Consul.class;
case "datacenter": return java.lang.String.class;
case "exceptionhandler":
case "exceptionHandler": return org.apache.camel.spi.ExceptionHandler.class;
case "exchangepattern":
case "exchangePattern": return org.apache.camel.ExchangePattern.class;
case "firstindex":
case "firstIndex": return java.math.BigInteger.class;
case "key": return java.lang.String.class;
case "lazystartproducer":
case "lazyStartProducer": return boolean.class;
case "nearnode":
case "nearNode": return java.lang.String.class;
case "nodemeta":
case "nodeMeta": return java.lang.String.class;
case "password": return java.lang.String.class;
case "pinginstance":
case "pingInstance": return boolean.class;
case "readtimeout":
case "readTimeout": return java.time.Duration.class;
case "recursive": return boolean.class;
case "sslcontextparameters":
case "sslContextParameters": return org.apache.camel.support.jsse.SSLContextParameters.class;
case "tags": return java.lang.String.class;
case "url": return java.lang.String.class;
case "username":
case "userName": return java.lang.String.class;
case "valueasstring":
case "valueAsString": return boolean.class;
case "writetimeout":
case "writeTimeout": return java.time.Duration.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
ConsulEndpoint target = (ConsulEndpoint) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "acltoken":
case "aclToken": return target.getConfiguration().getAclToken();
case "action": return target.getConfiguration().getAction();
case "blockseconds":
case "blockSeconds": return target.getConfiguration().getBlockSeconds();
case "bridgeerrorhandler":
case "bridgeErrorHandler": return target.isBridgeErrorHandler();
case "connecttimeout":
case "connectTimeout": return target.getConfiguration().getConnectTimeout();
case "consistencymode":
case "consistencyMode": return target.getConfiguration().getConsistencyMode();
case "consulclient":
case "consulClient": return target.getConfiguration().getConsulClient();
case "datacenter": return target.getConfiguration().getDatacenter();
case "exceptionhandler":
case "exceptionHandler": return target.getExceptionHandler();
case "exchangepattern":
case "exchangePattern": return target.getExchangePattern();
case "firstindex":
case "firstIndex": return target.getConfiguration().getFirstIndex();
case "key": return target.getConfiguration().getKey();
case "lazystartproducer":
case "lazyStartProducer": return target.isLazyStartProducer();
case "nearnode":
case "nearNode": return target.getConfiguration().getNearNode();
case "nodemeta":
case "nodeMeta": return target.getConfiguration().getNodeMeta();
case "password": return target.getConfiguration().getPassword();
case "pinginstance":
case "pingInstance": return target.getConfiguration().isPingInstance();
case "readtimeout":
case "readTimeout": return target.getConfiguration().getReadTimeout();
case "recursive": return target.getConfiguration().isRecursive();
case "sslcontextparameters":
case "sslContextParameters": return target.getConfiguration().getSslContextParameters();
case "tags": return target.getConfiguration().getTags();
case "url": return target.getConfiguration().getUrl();
case "username":
case "userName": return target.getConfiguration().getUserName();
case "valueasstring":
case "valueAsString": return target.getConfiguration().isValueAsString();
case "writetimeout":
case "writeTimeout": return target.getConfiguration().getWriteTimeout();
default: return null;
}
}
}
|
ConsulEndpointConfigurer
|
java
|
elastic__elasticsearch
|
plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedPassageFormatter.java
|
{
"start": 1398,
"end": 1797
}
|
class ____ extends PassageFormatter {
public static final String SEARCH_HIT_TYPE = "_hit_term";
private final Encoder encoder;
AnnotatedText[] annotations;
public AnnotatedPassageFormatter(Encoder encoder) {
this.encoder = encoder;
}
void setAnnotations(AnnotatedText[] annotations) {
this.annotations = annotations;
}
static
|
AnnotatedPassageFormatter
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/AlreadyCheckedTest.java
|
{
"start": 13642,
"end": 14110
}
|
class ____ {
private final List<String> xs = null;
public boolean e(List<String> ys) {
if (xs.equals(ys)) {
return true;
}
return xs.equals(ys);
}
}
""")
.doTest();
}
@Test
public void i3914() {
helper
.addSourceLines(
"Test.java",
"""
import java.util.List;
|
Test
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/annotation/AnnotationBeanNameGeneratorTests.java
|
{
"start": 10541,
"end": 10952
}
|
interface ____ {
String attribute() default "";
}
/**
* Custom stereotype annotation which has a {@code String value} attribute that
* is explicitly declared as an alias for an attribute in a meta-annotation
* other than {@link Component @Component}.
*/
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
@Component
@MetaAnnotationWithStringAttribute
@
|
MetaAnnotationWithStringAttribute
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/ser/jdk/AtomicTypeSerializationTest.java
|
{
"start": 1205,
"end": 1525
}
|
class ____ {
@JsonSerialize(contentUsing=UpperCasingSerializer.class)
public AtomicReference<String> value;
public UCStringWrapper(String s) { value = new AtomicReference<String>(s); }
}
// [datatypes-java8#17]
@JsonPropertyOrder({ "date1", "date2", "date" })
static
|
UCStringWrapper
|
java
|
apache__maven
|
src/mdo/java/WrapperProperties.java
|
{
"start": 11064,
"end": 12482
}
|
class ____ extends AbstractSet<Map.Entry<Object, Object>> {
@Override
public Iterator<Map.Entry<Object, Object>> iterator() {
return new Iterator<Map.Entry<Object, Object>>() {
Iterator<Object> keyIterator = keyOrder.iterator();
@Override
public boolean hasNext() {
return keyIterator.hasNext();
}
@Override
public Map.Entry<Object, Object> next() {
Object key = keyIterator.next();
return new Map.Entry<>() {
@Override
public Object getKey() {
return key;
}
@Override
public Object getValue() {
return get(key);
}
@Override
public Object setValue(Object value) {
return WrapperProperties.this.put(key, value);
}
};
}
};
}
@Override
public int size() {
return keyOrder.size();
}
}
private
|
EntrySet
|
java
|
elastic__elasticsearch
|
modules/lang-painless/src/test/java/org/elasticsearch/painless/FunctionTests.java
|
{
"start": 611,
"end": 5088
}
|
class ____ extends ScriptTestCase {
public void testBasic() {
assertEquals(5, exec("int get() {5;} get()"));
}
public void testReference() {
assertEquals(5, exec("void get(int[] x) {x[0] = 5;} int[] y = new int[1]; y[0] = 1; get(y); y[0]"));
}
public void testConcat() {
assertEquals("xyxy", exec("String catcat(String single) {single + single;} catcat('xy')"));
}
public void testMultiArgs() {
assertEquals(5, exec("int add(int x, int y) {return x + y;} int x = 1, y = 2; add(add(x, x), add(x, y))"));
}
public void testMultiFuncs() {
assertEquals(1, exec("int add(int x, int y) {return x + y;} int sub(int x, int y) {return x - y;} add(2, sub(3, 4))"));
assertEquals(3, exec("int sub2(int x, int y) {sub(x, y) - y;} int sub(int x, int y) {return x - y;} sub2(5, 1)"));
}
public void testRecursion() {
assertEquals(55, exec("int fib(int n) {if (n <= 1) return n; else return fib(n-1) + fib(n-2);} fib(10)"));
}
public void testEmpty() {
Exception expected = expectScriptThrows(IllegalArgumentException.class, () -> { exec("void test(int x) {} test()"); });
assertThat(
expected.getMessage(),
containsString("invalid function definition: found no statements for function [test] with [1] parameters")
);
}
public void testReturnsAreUnboxedIfNeeded() {
assertEquals((byte) 5, exec("byte get() {Byte.valueOf(5)} get()"));
assertEquals((short) 5, exec("short get() {Byte.valueOf(5)} get()"));
assertEquals(5, exec("int get() {Byte.valueOf(5)} get()"));
assertEquals((short) 5, exec("short get() {Short.valueOf(5)} get()"));
assertEquals(5, exec("int get() {Integer.valueOf(5)} get()"));
assertEquals(5.0f, exec("float get() {Float.valueOf(5)} get()"));
assertEquals(5.0d, exec("double get() {Float.valueOf(5)} get()"));
assertEquals(5.0d, exec("double get() {Double.valueOf(5)} get()"));
assertEquals(true, exec("boolean get() {Boolean.TRUE} get()"));
}
public void testDuplicates() {
Exception expected = expectScriptThrows(IllegalArgumentException.class, () -> {
exec("void test(int x) {x = 2;} void test(def y) {y = 3;} test()");
});
assertThat(expected.getMessage(), containsString("found duplicate function"));
}
public void testBadCastFromMethod() {
Exception e = expectScriptThrows(ClassCastException.class, () -> exec("int get() {5L} get()"));
assertEquals("Cannot cast from [long] to [int].", e.getMessage());
e = expectScriptThrows(ClassCastException.class, () -> exec("int get() {5.1f} get()"));
assertEquals("Cannot cast from [float] to [int].", e.getMessage());
e = expectScriptThrows(ClassCastException.class, () -> exec("int get() {5.1d} get()"));
assertEquals("Cannot cast from [double] to [int].", e.getMessage());
}
public void testInfiniteLoop() {
var e = expectScriptThrows(ErrorCauseWrapper.class, () -> { exec("void test() {boolean x = true; while (x) {}} test()"); });
assertThat(e.realCause.getClass(), equalTo(PainlessError.class));
assertThat(e.getMessage(), containsString("The maximum number of statements that can be executed in a loop has been reached."));
}
public void testReturnVoid() {
assertEquals(null, exec("void test(StringBuilder b, int i) {b.setLength(i)} test(new StringBuilder(), 1)"));
Exception expected = expectScriptThrows(IllegalArgumentException.class, () -> {
exec("int test(StringBuilder b, int i) {b.setLength(i)} test(new StringBuilder(), 1)");
});
assertEquals(
"invalid function definition: " + "not all paths provide a return value for function [test] with [2] parameters",
expected.getMessage()
);
expected = expectScriptThrows(ClassCastException.class, () -> {
exec("int test(StringBuilder b, int i) {return b.setLength(i)} test(new StringBuilder(), 1)");
});
assertEquals("Cannot cast from [void] to [int].", expected.getMessage());
expected = expectScriptThrows(ClassCastException.class, () -> {
exec("def test(StringBuilder b, int i) {return b.setLength(i)} test(new StringBuilder(), 1)");
});
assertEquals("Cannot cast from [void] to [def].", expected.getMessage());
}
}
|
FunctionTests
|
java
|
resilience4j__resilience4j
|
resilience4j-metrics/src/main/java/io/github/resilience4j/metrics/Timer.java
|
{
"start": 531,
"end": 8389
}
|
interface ____ {
/**
* Creates a timer of a provided MetricRegistry
*
* @param name the name of the timer
* @param metricRegistry the MetricRegistry
* @return a Timer instance
*/
static Timer ofMetricRegistry(String name, MetricRegistry metricRegistry) {
return new TimerImpl(name, metricRegistry);
}
/**
* Creates a timer of a default MetricRegistry
*
* @param name the name of the timer
* @return a Timer instance
*/
static Timer of(String name) {
return new TimerImpl(name, new MetricRegistry());
}
/**
* Creates a timed checked supplier.
*
* @param timer the timer to use
* @param supplier the original supplier
* @return a timed supplier
*/
static <T> CheckedSupplier<T> decorateCheckedSupplier(Timer timer,
CheckedSupplier<T> supplier) {
return () -> {
final Timer.Context context = timer.context();
try {
T returnValue = supplier.get();
context.onSuccess();
return returnValue;
} catch (Throwable e) {
context.onError();
throw e;
}
};
}
/**
* Creates a timed runnable.
*
* @param timer the timer to use
* @param runnable the original runnable
* @return a timed runnable
*/
static CheckedRunnable decorateCheckedRunnable(Timer timer, CheckedRunnable runnable) {
return () -> {
final Timer.Context context = timer.context();
try {
runnable.run();
context.onSuccess();
} catch (Throwable e) {
context.onError();
throw e;
}
};
}
/**
* Creates a timed checked supplier.
*
* @param timer the timer to use
* @param supplier the original supplier
* @return a timed supplier
*/
static <T> Supplier<T> decorateSupplier(Timer timer, Supplier<T> supplier) {
return () -> {
final Timer.Context context = timer.context();
try {
T returnValue = supplier.get();
context.onSuccess();
return returnValue;
} catch (Throwable e) {
context.onError();
throw e;
}
};
}
/**
* Creates a timed Callable.
*
* @param timer the timer to use
* @param callable the original Callable
* @return a timed Callable
*/
static <T> Callable<T> decorateCallable(Timer timer, Callable<T> callable) {
return () -> {
final Timer.Context context = timer.context();
try {
T returnValue = callable.call();
context.onSuccess();
return returnValue;
} catch (Throwable e) {
context.onError();
throw e;
}
};
}
/**
* Creates a timed runnable.
*
* @param timer the timer to use
* @param runnable the original runnable
* @return a timed runnable
*/
static Runnable decorateRunnable(Timer timer, Runnable runnable) {
return () -> {
final Timer.Context context = timer.context();
try {
runnable.run();
context.onSuccess();
} catch (Throwable e) {
context.onError();
throw e;
}
};
}
/**
* Creates a timed function.
*
* @param timer the timer to use
* @param function the original function
* @return a timed function
*/
static <T, R> Function<T, R> decorateFunction(Timer timer, Function<T, R> function) {
return (T t) -> {
final Timer.Context context = timer.context();
try {
R returnValue = function.apply(t);
context.onSuccess();
return returnValue;
} catch (Throwable e) {
context.onError();
throw e;
}
};
}
/**
* Creates a timed function.
*
* @param timer the timer to use
* @param function the original function
* @return a timed function
*/
static <T, R> CheckedFunction<T, R> decorateCheckedFunction(Timer timer,
CheckedFunction<T, R> function) {
return (T t) -> {
final Timer.Context context = timer.context();
try {
R returnValue = function.apply(t);
context.onSuccess();
return returnValue;
} catch (Throwable e) {
context.onError();
throw e;
}
};
}
/**
* @param timer the timer to use
* @param stageSupplier the CompletionStage Supplier
* @return a decorated completion stage
*/
static <T> Supplier<CompletionStage<T>> decorateCompletionStageSupplier(Timer timer,
Supplier<CompletionStage<T>> stageSupplier) {
return () -> {
final Timer.Context context = timer.context();
try {
final CompletionStage<T> stage = stageSupplier.get();
stage.whenComplete((result, throwable) -> {
if (throwable != null) {
context.onError();
} else {
context.onSuccess();
}
});
return stage;
} catch (Throwable throwable) {
context.onError();
throw throwable;
}
};
}
/**
* Creates a Timer context and starts the timer
*
* @return the Timer context
*/
Timer.Context context();
/**
* Returns the name of this Timer.
*
* @return the name of this Timer
*/
String getName();
/**
* Returns the MetricRegistry of this Timer.
*
* @return the MetricRegistry of this Timer
*/
MetricRegistry getMetricRegistry();
/**
* Returns the Metrics of this Timer.
*
* @return the Metrics of this Timer
*/
Timer.Metrics getMetrics();
/**
* Decorates and executes the decorated Runnable.
*
* @param runnable the original Callable
*/
default void executeRunnable(Runnable runnable) {
decorateRunnable(this, runnable).run();
}
/**
* Decorates and executes the decorated Callable.
*
* @param callable the original Callable
* @param <T> the type of results supplied by this Callable
* @return the result of the decorated Callable.
*/
default <T> T executeCallable(Callable<T> callable) throws Exception {
return decorateCallable(this, callable).call();
}
/**
* Decorates and executes the decorated Supplier.
*
* @param supplier the original Supplier
* @param <T> the type of results supplied by this supplier
* @return the result of the decorated Supplier.
*/
default <T> T executeSupplier(Supplier<T> supplier) {
return decorateSupplier(this, supplier).get();
}
/**
* Decorates and executes the decorated CompletionStage Supplier.
*
* @param supplier the CompletionStage Supplier
* @param <T> the type of results supplied by this supplier
* @return the result of the decorated Supplier.
*/
default <T> CompletionStage<T> executeCompletionStageSupplier(
Supplier<CompletionStage<T>> supplier) {
return decorateCompletionStageSupplier(this, supplier).get();
}
|
Timer
|
java
|
apache__camel
|
core/camel-core-model/src/main/java/org/apache/camel/model/SagaDefinition.java
|
{
"start": 1654,
"end": 10334
}
|
class ____ extends OutputDefinition<SagaDefinition> {
@XmlTransient
private CamelSagaService sagaServiceBean;
@XmlAttribute
@Metadata(label = "advanced", javaType = "org.apache.camel.saga.CamelSagaService")
private String sagaService;
@XmlAttribute
@Metadata(label = "advanced", javaType = "org.apache.camel.model.SagaPropagation", defaultValue = "REQUIRED",
enums = "REQUIRED,REQUIRES_NEW,MANDATORY,SUPPORTS,NOT_SUPPORTED,NEVER")
private String propagation;
@XmlAttribute
@Metadata(label = "advanced", javaType = "org.apache.camel.model.SagaCompletionMode", defaultValue = "AUTO",
enums = "AUTO,MANUAL")
private String completionMode;
@XmlAttribute
@Metadata(javaType = "java.time.Duration")
private String timeout;
@XmlElement
private SagaActionUriDefinition compensation;
@XmlElement
private SagaActionUriDefinition completion;
@XmlElement(name = "option")
@Metadata(label = "advanced")
private List<PropertyExpressionDefinition> options;
public SagaDefinition() {
}
protected SagaDefinition(SagaDefinition source) {
super(source);
this.sagaServiceBean = source.sagaServiceBean;
this.sagaService = source.sagaService;
this.propagation = source.propagation;
this.completionMode = source.completionMode;
this.timeout = source.timeout;
this.compensation = source.compensation != null ? source.compensation.copyDefinition() : null;
this.completion = source.completion != null ? source.completion.copyDefinition() : null;
this.options = ProcessorDefinitionHelper.deepCopyDefinitions(source.options);
}
@Override
public SagaDefinition copyDefinition() {
return new SagaDefinition(this);
}
@Override
public List<ProcessorDefinition<?>> getOutputs() {
return outputs;
}
@XmlElementRef
@Override
public void setOutputs(List<ProcessorDefinition<?>> outputs) {
super.setOutputs(outputs);
}
@Override
public boolean isAbstract() {
return true;
}
@Override
public boolean isTopLevelOnly() {
return true;
}
@Override
public boolean isWrappingEntireOutput() {
return true;
}
@Override
public String getLabel() {
String desc = description();
if (ObjectHelper.isEmpty(desc)) {
return "saga";
} else {
return "saga[" + desc + "]";
}
}
@Override
public String toString() {
String desc = description();
if (ObjectHelper.isEmpty(desc)) {
return "Saga -> [" + outputs + "]";
} else {
return "Saga[" + desc + "] -> [" + outputs + "]";
}
}
// Properties
public CamelSagaService getSagaServiceBean() {
return sagaServiceBean;
}
public String getSagaService() {
return sagaService;
}
/**
* Refers to the id to lookup in the registry for the specific CamelSagaService to use.
*/
public void setSagaService(String sagaService) {
this.sagaService = sagaService;
}
public SagaActionUriDefinition getCompensation() {
return compensation;
}
/**
* The compensation endpoint URI that must be called to compensate all changes done in the route. The route
* corresponding to the compensation URI must perform compensation and complete without error. If errors occur
* during compensation, the saga service may call again the compensation URI to retry.
*/
public void setCompensation(SagaActionUriDefinition compensation) {
this.compensation = compensation;
}
public SagaActionUriDefinition getCompletion() {
return completion;
}
/**
* The completion endpoint URI that will be called when the Saga is completed successfully. The route corresponding
* to the completion URI must perform completion tasks and terminate without error. If errors occur during
* completion, the saga service may call again the completion URI to retry.
*/
public void setCompletion(SagaActionUriDefinition completion) {
this.completion = completion;
}
public String getPropagation() {
return propagation;
}
/**
* Set the Saga propagation mode (REQUIRED, REQUIRES_NEW, MANDATORY, SUPPORTS, NOT_SUPPORTED, NEVER).
*/
public void setPropagation(String propagation) {
this.propagation = propagation;
}
public String getCompletionMode() {
return completionMode;
}
/**
* Determine how the saga should be considered complete. When set to AUTO, the saga is completed when the exchange
* that initiates the saga is processed successfully, or compensated when it completes exceptionally. When set to
* MANUAL, the user must complete or compensate the saga using the "saga:complete" or "saga:compensate" endpoints.
*/
public void setCompletionMode(String completionMode) {
this.completionMode = completionMode;
}
public List<PropertyExpressionDefinition> getOptions() {
return options;
}
/**
* Allows to save properties of the current exchange in order to re-use them in a compensation/completion callback
* route. Options are usually helpful e.g. to store and retrieve identifiers of objects that should be deleted in
* compensating actions. Option values will be transformed into input headers of the compensation/completion
* exchange.
*/
public void setOptions(List<PropertyExpressionDefinition> options) {
this.options = options;
}
public String getTimeout() {
return timeout;
}
/**
* Set the maximum amount of time for the Saga. After the timeout is expired, the saga will be compensated
* automatically (unless a different decision has been taken in the meantime).
*/
public void setTimeout(String timeout) {
this.timeout = timeout;
}
private void addOption(String option, Expression expression) {
if (this.options == null) {
this.options = new ArrayList<>();
}
this.options.add(new PropertyExpressionDefinition(option, expression));
}
// Builders
public SagaDefinition compensation(String compensation) {
if (this.compensation != null) {
throw new IllegalStateException("Compensation has already been set");
}
this.compensation = new SagaActionUriDefinition(compensation);
return this;
}
public SagaDefinition completion(String completion) {
if (this.completion != null) {
throw new IllegalStateException("Completion has already been set");
}
this.completion = new SagaActionUriDefinition(completion);
return this;
}
public SagaDefinition propagation(SagaPropagation propagation) {
setPropagation(propagation.name());
return this;
}
public SagaDefinition sagaService(CamelSagaService sagaService) {
this.sagaServiceBean = sagaService;
return this;
}
public SagaDefinition sagaService(String sagaService) {
setSagaService(sagaService);
return this;
}
public SagaDefinition completionMode(SagaCompletionMode completionMode) {
return completionMode(completionMode.name());
}
public SagaDefinition completionMode(String completionMode) {
setCompletionMode(completionMode);
return this;
}
public SagaDefinition option(String option, Expression expression) {
addOption(option, expression);
return this;
}
public SagaDefinition timeout(Duration duration) {
return timeout(TimeUtils.printDuration(duration, true));
}
public SagaDefinition timeout(long timeout, TimeUnit unit) {
return timeout(Duration.ofMillis(unit.toMillis(timeout)));
}
public SagaDefinition timeout(String duration) {
setTimeout(duration);
return this;
}
// Utils
protected String description() {
StringBuilder desc = new StringBuilder(256);
addField(desc, "compensation", compensation);
addField(desc, "completion", completion);
addField(desc, "propagation", propagation);
return desc.toString();
}
private void addField(StringBuilder builder, String key, Object value) {
if (value == null) {
return;
}
if (!builder.isEmpty()) {
builder.append(',');
}
builder.append(key).append(':').append(value);
}
}
|
SagaDefinition
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest-jaxb/deployment/src/test/java/io/quarkus/resteasy/reactive/jaxb/deployment/test/SseResourceTest.java
|
{
"start": 4628,
"end": 4871
}
|
class ____ {
@XmlElement
public String name;
public Message(String name) {
this.name = name;
}
// for JAXB
public Message() {
}
}
@Path("sse")
public static
|
Message
|
java
|
redisson__redisson
|
redisson/src/main/java/org/redisson/RedissonBoundedBlockingQueue.java
|
{
"start": 1589,
"end": 17142
}
|
class ____<V> extends RedissonQueue<V> implements RBoundedBlockingQueue<V> {
private final RedissonBlockingQueue<V> blockingQueue;
private final RedissonQueueSemaphore semaphore;
private final String channelName;
private final String semaphoreName;
protected RedissonBoundedBlockingQueue(CommandAsyncExecutor commandExecutor, String name, RedissonClient redisson) {
super(commandExecutor, name, redisson);
blockingQueue = new RedissonBlockingQueue<>(commandExecutor, name, redisson);
semaphoreName = getSemaphoreName(getRawName());
semaphore = new RedissonQueueSemaphore(commandExecutor, semaphoreName, getServiceManager().getCfg().getCodec());
channelName = RedissonSemaphore.getChannelName(semaphore.getRawName());
}
protected RedissonBoundedBlockingQueue(Codec codec, CommandAsyncExecutor commandExecutor, String name, RedissonClient redisson) {
super(codec, commandExecutor, name, redisson);
blockingQueue = new RedissonBlockingQueue<>(commandExecutor, name, redisson);
semaphoreName = getSemaphoreName(getRawName());
semaphore = new RedissonQueueSemaphore(commandExecutor, semaphoreName, codec);
channelName = RedissonSemaphore.getChannelName(semaphore.getRawName());
}
private String getSemaphoreName(String name) {
return prefixName("redisson_bqs", name);
}
@Override
public RFuture<Boolean> addAsync(V e) {
RFuture<Boolean> future = offerAsync(e);
CompletionStage<Boolean> f = future.handle((res, ex) -> {
if (ex != null) {
throw new CompletionException(ex);
}
if (!res) {
throw new CompletionException(new IllegalStateException("Queue is full"));
}
return true;
});
return new CompletableFutureWrapper<>(f);
}
@Override
public RFuture<Void> putAsync(V e) {
RedissonQueueSemaphore semaphore = createSemaphore(e);
return semaphore.acquireAsync();
}
private RedissonQueueSemaphore createSemaphore(V e) {
RedissonQueueSemaphore semaphore = new RedissonQueueSemaphore(commandExecutor, semaphoreName, getCodec());
semaphore.setQueueName(getRawName());
semaphore.setValue(e);
return semaphore;
}
@Override
public void put(V e) throws InterruptedException {
RedissonQueueSemaphore semaphore = createSemaphore(e);
semaphore.acquire();
}
@Override
public RFuture<Boolean> offerAsync(V e) {
RedissonQueueSemaphore semaphore = createSemaphore(e);
return semaphore.tryAcquireAsync();
}
@Override
public boolean offer(V e, long timeout, TimeUnit unit) throws InterruptedException {
RedissonQueueSemaphore semaphore = createSemaphore(e);
return semaphore.tryAcquire(timeout, unit);
}
@Override
public RFuture<Boolean> offerAsync(V e, long timeout, TimeUnit unit) {
RedissonQueueSemaphore semaphore = createSemaphore(e);
return semaphore.tryAcquireAsync(timeout, unit);
}
@Override
public RFuture<V> takeAsync() {
RFuture<V> takeFuture = blockingQueue.takeAsync();
return wrapTakeFuture(takeFuture);
}
private <V> RFuture<V> wrapTakeFuture(RFuture<V> takeFuture) {
CompletionStage<V> f = takeFuture.thenCompose(res -> {
if (res == null) {
return CompletableFuture.completedFuture(null);
}
return createSemaphore(null).releaseAsync().handle((r, ex) -> res);
});
f.whenComplete((r, e) -> {
if (f.toCompletableFuture().isCancelled()) {
takeFuture.cancel(false);
}
});
return new CompletableFutureWrapper<>(f);
}
@Override
public RFuture<Boolean> removeAsync(Object o) {
return removeAllAsync(Collections.singleton(o));
}
@Override
public RFuture<Boolean> removeAllAsync(Collection<?> c) {
if (c.isEmpty()) {
return new CompletableFutureWrapper<>(false);
}
return commandExecutor.evalWriteAsync(getRawName(), codec, RedisCommands.EVAL_BOOLEAN,
"local count = 0; " +
"for i = 1, #ARGV, 1 do "
+ "if redis.call('lrem', KEYS[1], 0, ARGV[i]) == 1 then "
+ "count = count + 1; "
+ "end; "
+"end; "
+ "if count > 0 then "
+ "local value = redis.call('incrby', KEYS[2], count); "
+ "redis.call('publish', KEYS[3], value); "
+ "return 1;"
+ "end;"
+ "return 0 ",
Arrays.asList(getRawName(), semaphore.getRawName(), channelName), encode(c).toArray());
}
@Override
public RFuture<V> pollAsync() {
return commandExecutor.evalWriteNoRetryAsync(getRawName(), codec, RedisCommands.EVAL_OBJECT,
"local res = redis.call('lpop', KEYS[1]);"
+ "if res ~= false then " +
"local value = redis.call('incrby', KEYS[2], ARGV[1]); " +
"redis.call('publish', KEYS[3], value); "
+ "end;"
+ "return res;",
Arrays.asList(getRawName(), semaphore.getRawName(), channelName), 1);
}
/*
* (non-Javadoc)
* @see java.util.concurrent.BlockingQueue#take()
*/
@Override
public V take() throws InterruptedException {
return commandExecutor.getInterrupted(takeAsync());
}
@Override
public RFuture<V> pollAsync(long timeout, TimeUnit unit) {
RFuture<V> takeFuture = blockingQueue.pollAsync(timeout, unit);
return wrapTakeFuture(takeFuture);
}
/*
* (non-Javadoc)
* @see java.util.concurrent.BlockingQueue#poll(long, java.util.concurrent.TimeUnit)
*/
@Override
public V poll(long timeout, TimeUnit unit) throws InterruptedException {
return commandExecutor.getInterrupted(pollAsync(timeout, unit));
}
/*
* (non-Javadoc)
* @see org.redisson.core.RBlockingQueue#pollFromAny(long, java.util.concurrent.TimeUnit, java.lang.String[])
*/
@Override
public V pollFromAny(long timeout, TimeUnit unit, String... queueNames) throws InterruptedException {
return commandExecutor.getInterrupted(pollFromAnyAsync(timeout, unit, queueNames));
}
/*
* (non-Javadoc)
* @see org.redisson.core.RBlockingQueueAsync#pollFromAnyAsync(long, java.util.concurrent.TimeUnit, java.lang.String[])
*/
@Override
public RFuture<V> pollFromAnyAsync(long timeout, TimeUnit unit, String... queueNames) {
RFuture<V> takeFuture = blockingQueue.pollFromAnyAsync(timeout, unit, queueNames);
return wrapTakeFuture(takeFuture);
}
@Override
public Entry<String, V> pollFromAnyWithName(Duration timeout, String... queueNames) throws InterruptedException {
return commandExecutor.getInterrupted(pollFromAnyWithNameAsync(timeout, queueNames));
}
@Override
public RFuture<Entry<String, V>> pollFromAnyWithNameAsync(Duration timeout, String... queueNames) {
RFuture<Entry<String, V>> takeFuture = blockingQueue.pollFromAnyWithNameAsync(timeout, queueNames);
return wrapTakeFuture(takeFuture);
}
@Override
public Entry<String, V> pollLastFromAnyWithName(Duration timeout, String... queueNames) throws InterruptedException {
return commandExecutor.getInterrupted(pollLastFromAnyWithNameAsync(timeout, queueNames));
}
@Override
public RFuture<Entry<String, V>> pollLastFromAnyWithNameAsync(Duration timeout, String... queueNames) {
RFuture<Entry<String, V>> takeFuture = blockingQueue.pollLastFromAnyWithNameAsync(timeout, queueNames);
return wrapTakeFuture(takeFuture);
}
@Override
public Map<String, List<V>> pollFirstFromAny(Duration duration, int count, String... queueNames) {
return get(pollFirstFromAnyAsync(duration, count, queueNames));
}
@Override
public Map<String, List<V>> pollLastFromAny(Duration duration, int count, String... queueNames) {
return get(pollLastFromAnyAsync(duration, count, queueNames));
}
@Override
public RFuture<Map<String, List<V>>> pollFirstFromAnyAsync(Duration duration, int count, String... queueNames) {
RFuture<Map<String, List<V>>> future = blockingQueue.pollFirstFromAnyAsync(duration, count, queueNames);
return wrapTakeFuture(future);
}
@Override
public RFuture<Map<String, List<V>>> pollLastFromAnyAsync(Duration duration, int count, String... queueNames) {
RFuture<Map<String, List<V>>> future = blockingQueue.pollLastFromAnyAsync(duration, count, queueNames);
return wrapTakeFuture(future);
}
@Override
public V takeLastAndOfferFirstTo(String queueName) throws InterruptedException {
return commandExecutor.getInterrupted(takeLastAndOfferFirstToAsync(queueName));
}
@Override
public int subscribeOnElements(Consumer<V> consumer) {
return getServiceManager().getElementsSubscribeService()
.subscribeOnElements(this::takeAsync, consumer);
}
@Override
public int subscribeOnElements(Function<V, CompletionStage<Void>> consumer) {
return getServiceManager().getElementsSubscribeService()
.subscribeOnElements(this::takeAsync, consumer);
}
@Override
public void unsubscribe(int listenerId) {
getServiceManager().getElementsSubscribeService().unsubscribe(listenerId);
}
@Override
public RFuture<V> takeLastAndOfferFirstToAsync(String queueName) {
return pollLastAndOfferFirstToAsync(queueName, 0, TimeUnit.SECONDS);
}
@Override
public RFuture<V> pollLastAndOfferFirstToAsync(String queueName, long timeout, TimeUnit unit) {
RFuture<V> takeFuture = blockingQueue.pollLastAndOfferFirstToAsync(queueName, timeout, unit);
return wrapTakeFuture(takeFuture);
}
@Override
public V pollLastAndOfferFirstTo(String queueName, long timeout, TimeUnit unit) throws InterruptedException {
return commandExecutor.getInterrupted(pollLastAndOfferFirstToAsync(queueName, timeout, unit));
}
@Override
public int remainingCapacity() {
return createSemaphore(null).availablePermits();
}
@Override
public int drainTo(Collection<? super V> c) {
return get(drainToAsync(c));
}
@Override
public RFuture<Integer> drainToAsync(Collection<? super V> c) {
if (c == null) {
throw new NullPointerException();
}
return commandExecutor.evalWriteAsync(getRawName(), codec, new RedisCommand<Object>("EVAL", new ListDrainToDecoder(c)),
"local vals = redis.call('lrange', KEYS[1], 0, -1); " +
"redis.call('del', KEYS[1]); " +
"if #vals > 0 then "
+ "local value = redis.call('incrby', KEYS[2], #vals); " +
"redis.call('publish', KEYS[3], value); "
+ "end; " +
"return vals",
Arrays.asList(getRawName(), semaphore.getRawName(), channelName));
}
@Override
public int drainTo(Collection<? super V> c, int maxElements) {
return get(drainToAsync(c, maxElements));
}
    @Override
    public RFuture<Integer> drainToAsync(Collection<? super V> c, int maxElements) {
        if (c == null) {
            throw new NullPointerException();
        }
        if (maxElements <= 0) {
            // Nothing to drain: complete immediately with 0 transferred elements.
            return new CompletableFutureWrapper<>(0);
        }
        // Single Lua script, so the whole drain is atomic on the Redis side:
        //   1. read up to maxElements entries from the head of the list (KEYS[1]);
        //   2. trim those entries off the list;
        //   3. if anything was removed, credit the semaphore counter (KEYS[2]) by
        //      that amount and publish the new counter value on the channel
        //      (KEYS[3]) — same notification pattern as clear(), presumably so
        //      blocked producers re-check capacity (NOTE(review): confirm).
        // ListDrainToDecoder adds the returned values into 'c' and yields the count.
        return commandExecutor.evalWriteAsync(getRawName(), codec, new RedisCommand<Object>("EVAL", new ListDrainToDecoder(c)),
                "local elemNum = math.min(ARGV[1], redis.call('llen', KEYS[1])) - 1;" +
                "local vals = redis.call('lrange', KEYS[1], 0, elemNum); " +
                "redis.call('ltrim', KEYS[1], elemNum + 1, -1); " +
                "if #vals > 0 then "
                + "local value = redis.call('incrby', KEYS[2], #vals); " +
                "redis.call('publish', KEYS[3], value); "
                + "end; " +
                "return vals",
                Arrays.asList(getRawName(), semaphore.getRawName(), channelName), maxElements);
    }
@Override
public RFuture<Boolean> trySetCapacityAsync(int capacity) {
return commandExecutor.evalWriteAsync(getRawName(), LongCodec.INSTANCE, RedisCommands.EVAL_BOOLEAN,
"local value = redis.call('get', KEYS[1]); " +
"if (value == false) then "
+ "redis.call('set', KEYS[1], ARGV[1]); "
+ "redis.call('publish', KEYS[2], ARGV[1]); "
+ "return 1;"
+ "end;"
+ "return 0;",
Arrays.asList(semaphore.getRawName(), channelName), capacity);
}
@Override
public boolean trySetCapacity(int capacity) {
return get(trySetCapacityAsync(capacity));
}
    @Override
    public void clear() {
        // Atomic Lua script: delete the whole list (KEYS[1]) and, if it held any
        // elements, credit the semaphore counter (KEYS[2]) by the removed count
        // and publish the new value on the channel (KEYS[3]) — mirroring the
        // notification done by drainToAsync(). Blocks until the script completes.
        get(commandExecutor.evalWriteAsync(getRawName(), codec, RedisCommands.EVAL_BOOLEAN,
                "local len = redis.call('llen', KEYS[1]); " +
                "if len > 0 then "
                + "redis.call('del', KEYS[1]); "
                + "local value = redis.call('incrby', KEYS[2], len); " +
                "redis.call('publish', KEYS[3], value); "
                + "end; ",
                Arrays.asList(getRawName(), semaphore.getRawName(), channelName)));
    }
@Override
public RFuture<Boolean> deleteAsync() {
return deleteAsync(getRawName(), semaphoreName);
}
@Override
public RFuture<Boolean> copyAsync(List<Object> keys, int database, boolean replace) {
String newName = (String) keys.get(1);
List<Object> kks = Arrays.asList(getRawName(), semaphoreName,
newName, getSemaphoreName(newName));
return super.copyAsync(kks, database, replace);
}
@Override
public RFuture<Long> sizeInMemoryAsync() {
List<Object> keys = Arrays.<Object>asList(getRawName(), semaphoreName);
return super.sizeInMemoryAsync(keys);
}
@Override
public RFuture<Boolean> expireAsync(long timeToLive, TimeUnit timeUnit, String param, String... keys) {
return super.expireAsync(timeToLive, timeUnit, param, getRawName(), semaphoreName);
}
@Override
protected RFuture<Boolean> expireAtAsync(long timestamp, String param, String... keys) {
return super.expireAtAsync(timestamp, param, getRawName(), semaphoreName);
}
@Override
public RFuture<Boolean> clearExpireAsync() {
return clearExpireAsync(getRawName(), semaphoreName);
}
@Override
public RFuture<Boolean> addAllAsync(Collection<? extends V> c) {
if (c.isEmpty()) {
return new CompletableFutureWrapper<>(false);
}
RedissonQueueSemaphore semaphore = new RedissonQueueSemaphore(commandExecutor, semaphoreName, getCodec());
semaphore.setQueueName(getRawName());
semaphore.setValues(c);
return semaphore.tryAcquireAsync();
}
}
|
RedissonBoundedBlockingQueue
|
java
|
processing__processing4
|
java/test/processing/mode/java/preproc/MissingChevMessageSimplifierStrategyTest.java
|
{
"start": 272,
"end": 659
}
|
class ____ {
private PreprocessIssueMessageSimplifier.PreprocIssueMessageSimplifierStrategy strategy;
@Before
public void setup() {
strategy = PreprocessIssueMessageSimplifier.get().createUnbalancedChevStrategy();
}
@Test
public void testPresent() {
Optional<PdeIssueEmitter.IssueMessageSimplification> msg = strategy.simplify("
|
MissingChevMessageSimplifierStrategyTest
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/SwitchDefaultTest.java
|
{
"start": 2160,
"end": 2540
}
|
class ____ {
void f(int i) {
switch (i) {
case 2:
return;
case 1:
case 0:
default:
return;
}
}
}
""")
.addOutputLines(
"out/Test.java",
"""
|
Test
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/bugs/_3849/DeduplicateForCompileArgsMapper.java
|
{
"start": 463,
"end": 820
}
|
interface ____ {
DeduplicateForCompileArgsMapper INSTANCE = Mappers.getMapper( DeduplicateForCompileArgsMapper.class );
List<String> INVOKED_METHODS = new ArrayList<>();
ParentDto mapParent(Parent source, @Context MappingContext context);
ChildDto mapChild(Parent source, @Context MappingContext context);
|
DeduplicateForCompileArgsMapper
|
java
|
playframework__playframework
|
documentation/manual/working/javaGuide/main/xml/code/javaguide/xml/JavaXmlRequests.java
|
{
"start": 343,
"end": 1860
}
|
class ____ extends Controller {

  // NOTE(review): the paired "// #xml-..." comments below are snippet markers
  // that appear to be consumed by the documentation build, which extracts the
  // code between each pair verbatim. Keep the markers — and the deliberately
  // duplicated method bodies — intact so each published snippet stays
  // self-contained; do not "deduplicate" these methods.

  // #xml-hello
  public Result sayHello(Http.Request request) {
    Document dom = request.body().asXml();
    if (dom == null) {
      return badRequest("Expecting Xml data");
    } else {
      String name = XPath.selectText("//name", dom);
      if (name == null) {
        return badRequest("Missing parameter [name]");
      } else {
        return ok("Hello " + name);
      }
    }
  }
  // #xml-hello

  // #xml-hello-bodyparser
  @BodyParser.Of(BodyParser.Xml.class)
  public Result sayHelloBP(Http.Request request) {
    Document dom = request.body().asXml();
    if (dom == null) {
      return badRequest("Expecting Xml data");
    } else {
      String name = XPath.selectText("//name", dom);
      if (name == null) {
        return badRequest("Missing parameter [name]");
      } else {
        return ok("Hello " + name);
      }
    }
  }
  // #xml-hello-bodyparser

  // #xml-reply
  @BodyParser.Of(BodyParser.Xml.class)
  public Result replyHello(Http.Request request) {
    Document dom = request.body().asXml();
    if (dom == null) {
      return badRequest("Expecting Xml data");
    } else {
      String name = XPath.selectText("//name", dom);
      if (name == null) {
        return badRequest("<message \"status\"=\"KO\">Missing parameter [name]</message>")
            .as("application/xml");
      } else {
        return ok("<message \"status\"=\"OK\">Hello " + name + "</message>").as("application/xml");
      }
    }
  }
  // #xml-reply
}
|
JavaXmlRequests
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java
|
{
"start": 127919,
"end": 130226
}
|
class ____ extends ParserRuleContext {
public TerminalNode DISSECT() { return getToken(EsqlBaseParser.DISSECT, 0); }
public PrimaryExpressionContext primaryExpression() {
return getRuleContext(PrimaryExpressionContext.class,0);
}
public StringContext string() {
return getRuleContext(StringContext.class,0);
}
public DissectCommandOptionsContext dissectCommandOptions() {
return getRuleContext(DissectCommandOptionsContext.class,0);
}
@SuppressWarnings("this-escape")
public DissectCommandContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_dissectCommand; }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterDissectCommand(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitDissectCommand(this);
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor<? extends T>)visitor).visitDissectCommand(this);
else return visitor.visitChildren(this);
}
}
public final DissectCommandContext dissectCommand() throws RecognitionException {
DissectCommandContext _localctx = new DissectCommandContext(_ctx, getState());
enterRule(_localctx, 90, RULE_dissectCommand);
try {
enterOuterAlt(_localctx, 1);
{
setState(497);
match(DISSECT);
setState(498);
primaryExpression(0);
setState(499);
string();
setState(501);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,35,_ctx) ) {
case 1:
{
setState(500);
dissectCommandOptions();
}
break;
}
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
@SuppressWarnings("CheckReturnValue")
public static
|
DissectCommandContext
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/AlreadyCheckedTest.java
|
{
"start": 7718,
"end": 8136
}
|
class ____ {
public int test(boolean a) {
if (a) {
// BUG: Diagnostic contains: true
return a ? 1 : 2;
}
return 0;
}
}
""")
.doTest();
}
@Test
public void equalsCheckedTwice() {
helper
.addSourceLines(
"Test.java",
"""
|
Test
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/conversion/lossy/MapMapper.java
|
{
"start": 385,
"end": 461
}
|
interface ____ {

    // Maps both keys and values to narrower numeric types (Long -> Integer,
    // Double -> Float). NOTE(review): both conversions are lossy — presumably
    // intentional for this lossy-conversion test fixture; confirm against the
    // accompanying test expectations.
    Map<Integer, Float> map(Map<Long, Double> in);
}
|
MapMapper
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/streaming/api/operators/OperatorAttributesBuilder.java
|
{
"start": 981,
"end": 1845
}
|
class ____ {

    /** Recorded value for the output-only-after-end-of-stream attribute. */
    private boolean outputOnlyAfterEndOfStream;

    /** Recorded value for the internal-sorter-supported attribute. */
    private boolean internalSorterSupported;

    /**
     * Set to true if and only if the operator only emits records after all its
     * inputs have ended. When this setter is never invoked, the default value
     * {@code false} is used.
     *
     * @param outputOnlyAfterEndOfStream the value to record
     * @return this builder, for call chaining
     */
    public OperatorAttributesBuilder setOutputOnlyAfterEndOfStream(
            boolean outputOnlyAfterEndOfStream) {
        this.outputOnlyAfterEndOfStream = outputOnlyAfterEndOfStream;
        return this;
    }

    /**
     * Records the {@code internalSorterSupported} attribute. When this setter is
     * never invoked, the default value {@code false} is used.
     *
     * @param internalSorterSupported the value to record
     * @return this builder, for call chaining
     */
    public OperatorAttributesBuilder setInternalSorterSupported(boolean internalSorterSupported) {
        this.internalSorterSupported = internalSorterSupported;
        return this;
    }

    /** Creates an immutable {@link OperatorAttributes} snapshot of the recorded values. */
    public OperatorAttributes build() {
        return new OperatorAttributes(outputOnlyAfterEndOfStream, internalSorterSupported);
    }
}
|
OperatorAttributesBuilder
|
java
|
alibaba__nacos
|
persistence/src/main/java/com/alibaba/nacos/persistence/repository/embedded/operate/BaseDatabaseOperate.java
|
{
"start": 1843,
"end": 11445
}
|
interface ____ extends DatabaseOperate {
Logger LOGGER = LoggerFactory.getLogger(BaseDatabaseOperate.class);
/**
* query one result by sql then convert result to target type.
*
* @param jdbcTemplate {@link JdbcTemplate}
* @param sql sql
* @param cls target type
* @param <R> target type
* @return R
*/
default <R> R queryOne(JdbcTemplate jdbcTemplate, String sql, Class<R> cls) {
try {
return jdbcTemplate.queryForObject(sql, cls);
} catch (IncorrectResultSizeDataAccessException e) {
return null;
} catch (CannotGetJdbcConnectionException e) {
LOGGER.error("[db-error] can't get connection : {}", ExceptionUtil.getAllExceptionMsg(e));
throw e;
} catch (DataAccessException e) {
LOGGER.error("[db-error] DataAccessException : {}", ExceptionUtil.getAllExceptionMsg(e));
throw e;
}
}
/**
* query one result by sql and args then convert result to target type.
*
* @param jdbcTemplate {@link JdbcTemplate}
* @param sql sql
* @param args args
* @param cls target type
* @param <R> target type
* @return R
*/
default <R> R queryOne(JdbcTemplate jdbcTemplate, String sql, Object[] args, Class<R> cls) {
try {
return jdbcTemplate.queryForObject(sql, args, cls);
} catch (IncorrectResultSizeDataAccessException e) {
return null;
} catch (CannotGetJdbcConnectionException e) {
LOGGER.error("[db-error] {}", e.toString());
throw e;
} catch (DataAccessException e) {
LOGGER.error("[db-error] DataAccessException sql : {}, args : {}, error : {}", sql, args,
ExceptionUtil.getAllExceptionMsg(e));
throw e;
}
}
/**
* query one result by sql and args then convert result to target type through {@link RowMapper}.
*
* @param jdbcTemplate {@link JdbcTemplate}
* @param sql sql
* @param args args
* @param mapper {@link RowMapper}
* @param <R> target type
* @return R
*/
default <R> R queryOne(JdbcTemplate jdbcTemplate, String sql, Object[] args, RowMapper<R> mapper) {
try {
return jdbcTemplate.queryForObject(sql, args, mapper);
} catch (IncorrectResultSizeDataAccessException e) {
return null;
} catch (CannotGetJdbcConnectionException e) {
LOGGER.error("[db-error] {}", e.toString());
throw e;
} catch (DataAccessException e) {
LOGGER.error("[db-error] DataAccessException sql : {}, args : {}, error : {}", sql, args,
ExceptionUtil.getAllExceptionMsg(e));
throw e;
}
}
/**
* query many result by sql and args then convert result to target type through {@link RowMapper}.
*
* @param jdbcTemplate {@link JdbcTemplate}
* @param sql sql
* @param args args
* @param mapper {@link RowMapper}
* @param <R> target type
* @return result list
*/
default <R> List<R> queryMany(JdbcTemplate jdbcTemplate, String sql, Object[] args, RowMapper<R> mapper) {
try {
return jdbcTemplate.query(sql, args, mapper);
} catch (CannotGetJdbcConnectionException e) {
LOGGER.error("[db-error] {}", e.toString());
throw e;
} catch (DataAccessException e) {
LOGGER.error("[db-error] DataAccessException sql : {}, args : {}, error : {}", sql, args,
ExceptionUtil.getAllExceptionMsg(e));
throw e;
}
}
/**
* query many result by sql and args then convert result to target type.
*
* @param jdbcTemplate {@link JdbcTemplate}
* @param sql sql
* @param args args
* @param rClass target type class
* @param <R> target type
* @return result list
*/
default <R> List<R> queryMany(JdbcTemplate jdbcTemplate, String sql, Object[] args, Class<R> rClass) {
try {
return jdbcTemplate.queryForList(sql, args, rClass);
} catch (IncorrectResultSizeDataAccessException e) {
return null;
} catch (CannotGetJdbcConnectionException e) {
LOGGER.error("[db-error] {}", e.toString());
throw e;
} catch (DataAccessException e) {
LOGGER.error("[db-error] DataAccessException sql : {}, args : {}, error : {}", sql, args,
ExceptionUtil.getAllExceptionMsg(e));
throw e;
}
}
/**
* query many result by sql and args then convert result to List<Map<String, Object>>.
*
* @param jdbcTemplate {@link JdbcTemplate}
* @param sql sql
* @param args args
* @return List<Map<String, Object>>
*/
default List<Map<String, Object>> queryMany(JdbcTemplate jdbcTemplate, String sql, Object[] args) {
try {
return jdbcTemplate.queryForList(sql, args);
} catch (CannotGetJdbcConnectionException e) {
LOGGER.error("[db-error] {}", e.toString());
throw e;
} catch (DataAccessException e) {
LOGGER.error("[db-error] DataAccessException sql : {}, args : {}, error : {}", sql, args,
ExceptionUtil.getAllExceptionMsg(e));
throw e;
}
}
/**
* execute update operation.
*
* @param transactionTemplate {@link TransactionTemplate}
* @param jdbcTemplate {@link JdbcTemplate}
* @param contexts {@link List} ModifyRequest list
* @return {@link Boolean}
*/
default Boolean update(TransactionTemplate transactionTemplate, JdbcTemplate jdbcTemplate,
List<ModifyRequest> contexts) {
return update(transactionTemplate, jdbcTemplate, contexts, null);
}
/**
* execute update operation, to fix #3617.
*
* @param transactionTemplate {@link TransactionTemplate}
* @param jdbcTemplate {@link JdbcTemplate}
* @param contexts {@link List} ModifyRequest list
* @return {@link Boolean}
*/
default Boolean update(TransactionTemplate transactionTemplate, JdbcTemplate jdbcTemplate,
List<ModifyRequest> contexts, BiConsumer<Boolean, Throwable> consumer) {
boolean updateResult = Boolean.FALSE;
try {
updateResult = transactionTemplate.execute(status -> {
String[] errSql = new String[] {null};
Object[][] args = new Object[][] {null};
try {
contexts.forEach(pair -> {
errSql[0] = pair.getSql();
args[0] = pair.getArgs();
boolean rollBackOnUpdateFail = pair.isRollBackOnUpdateFail();
LoggerUtils.printIfDebugEnabled(LOGGER, "current sql : {}", errSql[0]);
LoggerUtils.printIfDebugEnabled(LOGGER, "current args : {}", args[0]);
int row = jdbcTemplate.update(pair.getSql(), pair.getArgs());
if (rollBackOnUpdateFail && row < 1) {
LoggerUtils.printIfDebugEnabled(LOGGER, "SQL update affected {} rows ", row);
throw new IllegalTransactionStateException("Illegal transaction");
}
});
if (consumer != null) {
consumer.accept(Boolean.TRUE, null);
}
return Boolean.TRUE;
} catch (BadSqlGrammarException | DataIntegrityViolationException e) {
LOGGER.error("[db-error] sql : {}, args : {}, error : {}", errSql[0], args[0], e.toString());
if (consumer != null) {
consumer.accept(Boolean.FALSE, e);
}
return Boolean.FALSE;
} catch (CannotGetJdbcConnectionException e) {
LOGGER.error("[db-error] sql : {}, args : {}, error : {}", errSql[0], args[0], e.toString());
throw e;
} catch (DataAccessException e) {
LOGGER.error("[db-error] DataAccessException sql : {}, args : {}, error : {}", errSql[0], args[0],
ExceptionUtil.getAllExceptionMsg(e));
throw e;
}
});
} catch (IllegalTransactionStateException e) {
LoggerUtils.printIfDebugEnabled(LOGGER, "Roll back transaction for {} ", e.getMessage());
if (consumer != null) {
consumer.accept(Boolean.FALSE, e);
}
}
return updateResult;
}
/**
* Perform data import.
*
* @param template {@link JdbcTemplate}
* @param requests {@link List} ModifyRequest list
* @return {@link Boolean}
*/
default Boolean doDataImport(JdbcTemplate template, List<ModifyRequest> requests) {
final String[] sql = requests.stream().map(ModifyRequest::getSql).map(DerbyUtils::insertStatementCorrection)
.toArray(String[]::new);
int[] affect = template.batchUpdate(sql);
return IntStream.of(affect).count() == requests.size();
}
}
|
BaseDatabaseOperate
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/annotations/JoinColumnOrFormula.java
|
{
"start": 1007,
"end": 1277
}
|
interface ____ {
    /**
     * The formula to use in the join condition.
     * <p>
     * NOTE(review): the default — a {@link JoinFormula} with empty value and
     * referencedColumnName — presumably acts as a "not specified" sentinel so
     * that {@link #column()} applies instead; confirm against the binder.
     */
    JoinFormula formula() default @JoinFormula(value="", referencedColumnName="");
    /**
     * The column to use in the join condition.
     */
    JoinColumn column() default @JoinColumn();
}
|
JoinColumnOrFormula
|
java
|
google__dagger
|
javatests/dagger/functional/membersinject/subpackage/a/AGrandchild.java
|
{
"start": 755,
"end": 1498
}
|
class ____ extends BChild {
  // Field injection of a package-private type — presumably legal because this
  // class lives in the same package as APackagePrivateObject (confirm).
  @Inject APackagePrivateObject aGrandchildField;

  // Captures the value delivered through the injected setter-style method below.
  private APackagePrivateObject aGrandchildMethod;

  @Inject
  void aGrandchildMethod(APackagePrivateObject aGrandchildMethod) {
    this.aGrandchildMethod = aGrandchildMethod;
  }

  // Override that re-applies @Inject: per members-injection rules this keeps the
  // (overriding) method injectable — NOTE(review): exercised by the
  // membersinject functional tests; confirm expected injection there.
  @Override
  @Inject
  protected void aParentMethod(APublicObject aParentMethod) {
    super.aParentMethod(aParentMethod);
  }

  // Override WITHOUT @Inject: deliberately not re-annotated (hence the
  // suppression), so injection should no longer target this method —
  // NOTE(review): confirm against the test's expectations.
  @SuppressWarnings("OverridesJavaxInjectableMethod")
  @Override
  protected void aChildMethod(APublicObject aChildMethod) {
    super.aChildMethod(aChildMethod);
  }

  // Accessors used by the tests to observe what was injected.
  public APackagePrivateObject aGrandchildField() {
    return aGrandchildField;
  }

  public APackagePrivateObject aGrandchildMethod() {
    return aGrandchildMethod;
  }
}
|
AGrandchild
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/MembersInjectionValidationTest.java
|
{
"start": 14570,
"end": 15659
}
|
interface ____ {",
" void inject(KotlinClassWithMemberInjectedNamedCompanion injected);",
" void injectCompanion(KotlinClassWithMemberInjectedNamedCompanion.TheCompanion"
+ " injected);",
"}");
CompilerTests.daggerCompiler(component, testModule)
.compile(
subject -> {
subject.hasErrorCount(2);
subject.hasErrorContaining("Dagger does not support injection into static fields");
subject.hasErrorContaining(
"KotlinClassWithMemberInjectedNamedCompanion cannot be provided");
});
}
@Test
public void setterMemberInjectionForKotlinClassWithNamedCompanionObjectFails() {
Source component =
CompilerTests.javaSource(
"test.TestComponent",
"package test;",
"",
"import dagger.Component;",
"import dagger.internal.codegen.KotlinClassWithSetterMemberInjectedNamedCompanion;",
"",
"@Component(modules = TestModule.class)",
"
|
TestComponent
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/AclSetuserArgs.java
|
{
"start": 19971,
"end": 20049
}
|
interface ____ is the base for all ACL SETUSER arguments.
*/
private
|
that
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/deser/filter/NullConversionsSkipTest.java
|
{
"start": 1192,
"end": 1359
}
|
class ____ {
    // Current value; stays "default" until the setter overwrites it.
    String value = "default";

    public void setValue(String newValue) {
        this.value = newValue;
    }
}
// for [databind#2015]
|
StringValue
|
java
|
mapstruct__mapstruct
|
processor/src/main/java/org/mapstruct/ap/internal/model/common/ModelElement.java
|
{
"start": 343,
"end": 513
}
|
class ____ all model elements. Implements the {@link Writable} contract to write model elements into source code
* files.
*
* @author Gunnar Morling
*/
public abstract
|
of
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/taskmanager/DispatcherThreadFactory.java
|
{
"start": 1103,
"end": 1805
}
|
class ____ implements ThreadFactory {

    /** Group every produced thread is placed in. */
    private final ThreadGroup group;

    /** Fixed name given to every produced thread. */
    private final String threadName;

    /**
     * Creates a new thread factory.
     *
     * @param group The group that the threads will be associated with.
     * @param threadName The name for the threads.
     */
    public DispatcherThreadFactory(ThreadGroup group, String threadName) {
        this.group = group;
        this.threadName = threadName;
    }

    @Override
    public Thread newThread(Runnable r) {
        final Thread thread = new Thread(group, r, threadName);
        // Any escaped exception terminates the process rather than being lost.
        thread.setUncaughtExceptionHandler(FatalExitExceptionHandler.INSTANCE);
        thread.setDaemon(true);
        return thread;
    }
}
|
DispatcherThreadFactory
|
java
|
apache__flink
|
flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/AsyncStateTableStreamOperator.java
|
{
"start": 1573,
"end": 1645
}
|
class ____ both batch and stream operators
* without key.
*
* <p>This
|
for
|
java
|
reactor__reactor-core
|
reactor-core/src/main/java/reactor/util/context/Context4.java
|
{
"start": 871,
"end": 5887
}
|
// A Context holding exactly four key/value pairs in flat final fields.
// Effectively immutable (all fields final, no mutators): every "mutation"
// returns a new Context of the appropriate arity (Context3 / Context4 / Context5).
class ____ implements CoreContext {

    /**
     * Checks for duplicate keys and null keys. This method is intended for a short space of keys in the
     * 4-10 range. Shorter number of keys can easily be checked with direct equals comparison(s),
     * saving on allocating a vararg array (although this method would still behave correctly).
     *
     * @param keys the keys to check for duplicates and nulls, by looping over the combinations
     * @throws NullPointerException if any of the keys is null
     * @throws IllegalArgumentException on the first key encountered twice
     */
    static void checkKeys(Object... keys) {
        int size = keys.length;
        // Pairwise O(n^2) scan — acceptable for the intended small key counts.
        //NB: there is no sense in looking for duplicates when size < 2, but the loop below skips these cases anyway
        for (int i = 0; i < size - 1; i++) {
            Object key = Objects.requireNonNull(keys[i], "key" + (i+1));
            for (int j = i + 1; j < size; j++) {
                Object otherKey = keys[j];
                if (key.equals(otherKey)) {
                    throw new IllegalArgumentException("Key #" + (i+1) + " (" + key + ") is duplicated");
                }
            }
        }
        //at the end of the loops, only the last key hasn't been checked for null
        if (size != 0) {
            Objects.requireNonNull(keys[size - 1], "key" + size);
        }
    }

    // The four pairs, stored flat instead of in a map to avoid allocation.
    final Object key1;
    final Object value1;
    final Object key2;
    final Object value2;
    final Object key3;
    final Object value3;
    final Object key4;
    final Object value4;

    Context4(Object key1, Object value1,
            Object key2, Object value2,
            Object key3, Object value3,
            Object key4, Object value4) {
        //TODO merge null check and duplicate check in the util method
        // checkKeys rejects null/duplicate KEYS; the requireNonNull calls below
        // additionally reject null VALUES (and re-check the keys).
        Context4.checkKeys(key1, key2, key3, key4);
        this.key1 = Objects.requireNonNull(key1, "key1");
        this.value1 = Objects.requireNonNull(value1, "value1");
        this.key2 = Objects.requireNonNull(key2, "key2");
        this.value2 = Objects.requireNonNull(value2, "value2");
        this.key3 = Objects.requireNonNull(key3, "key3");
        this.value3 = Objects.requireNonNull(value3, "value3");
        this.key4 = Objects.requireNonNull(key4, "key4");
        this.value4 = Objects.requireNonNull(value4, "value4");
    }

    @Override
    public Context put(Object key, Object value) {
        Objects.requireNonNull(key, "key");
        Objects.requireNonNull(value, "value");
        // Replacing an existing key keeps arity 4; an unknown key grows to Context5.
        if(this.key1.equals(key)){
            return new Context4(key, value, key2, value2, key3, value3, key4, value4);
        }
        if (this.key2.equals(key)) {
            return new Context4(key1, value1, key, value, key3, value3, key4, value4);
        }
        if (this.key3.equals(key)) {
            return new Context4(key1, value1, key2, value2, key, value, key4, value4);
        }
        if (this.key4.equals(key)) {
            return new Context4(key1, value1, key2, value2, key3, value3, key, value);
        }
        return new Context5(this.key1, this.value1, this.key2, this.value2, this.key3, this.value3,
                this.key4, this.value4, key, value);
    }

    @Override
    public Context delete(Object key) {
        Objects.requireNonNull(key, "key");
        // Removing one of the four keys shrinks to Context3; an unknown key
        // returns this instance unchanged.
        if(this.key1.equals(key)){
            return new Context3(key2, value2, key3, value3, key4, value4);
        }
        if (this.key2.equals(key)) {
            return new Context3(key1, value1, key3, value3, key4, value4);
        }
        if (this.key3.equals(key)) {
            return new Context3(key1, value1, key2, value2, key4, value4);
        }
        if (this.key4.equals(key)) {
            return new Context3(key1, value1, key2, value2, key3, value3);
        }
        return this;
    }

    @Override
    public boolean hasKey(Object key) {
        return this.key1.equals(key) || this.key2.equals(key) || this.key3.equals(key) || this.key4.equals(key);
    }

    @Override
    @SuppressWarnings("unchecked")
    public <T> T get(Object key) {
        if (this.key1.equals(key)) {
            return (T)this.value1;
        }
        if (this.key2.equals(key)) {
            return (T)this.value2;
        }
        if (this.key3.equals(key)) {
            return (T)this.value3;
        }
        if (this.key4.equals(key)) {
            return (T)this.value4;
        }
        // Absent key is an error here; callers wanting an Optional-style lookup
        // use a different entry point.
        throw new NoSuchElementException("Context does not contain key: "+key);
    }

    @Override
    public int size() {
        return 4;
    }

    @Override
    public Stream<Map.Entry<Object, Object>> stream() {
        // Immutable entries: the stream cannot be used to mutate this context.
        return Stream.of(
                new AbstractMap.SimpleImmutableEntry<>(key1, value1),
                new AbstractMap.SimpleImmutableEntry<>(key2, value2),
                new AbstractMap.SimpleImmutableEntry<>(key3, value3),
                new AbstractMap.SimpleImmutableEntry<>(key4, value4));
    }

    @Override
    public void forEach(BiConsumer<Object, Object> action) {
        action.accept(key1, value1);
        action.accept(key2, value2);
        action.accept(key3, value3);
        action.accept(key4, value4);
    }

    @Override
    public Context putAllInto(Context base) {
        // Later puts win in 'base' if it already contains one of these keys.
        return base
                .put(this.key1, this.value1)
                .put(this.key2, this.value2)
                .put(this.key3, this.value3)
                .put(this.key4, this.value4);
    }

    @Override
    public void unsafePutAllInto(ContextN other) {
        // "unsafe": mutates the target ContextN directly instead of copying.
        other.accept(key1, value1);
        other.accept(key2, value2);
        other.accept(key3, value3);
        other.accept(key4, value4);
    }

    @Override
    public String toString() {
        return "Context4{" + key1 + '='+ value1 + ", " + key2 + '=' + value2 + ", " +
                key3 + '=' + value3 + ", " + key4 + '=' + value4 + '}';
    }
}
|
Context4
|
java
|
spring-projects__spring-security
|
access/src/main/java/org/springframework/security/messaging/access/intercept/MessageSecurityMetadataSource.java
|
{
"start": 1109,
"end": 1185
}
|
/**
 * Marker interface: a {@link SecurityMetadataSource} specialization for
 * Spring Security's messaging support. It declares no members of its own; its
 * value is providing a more specific type for message-based infrastructure.
 */
interface ____ extends SecurityMetadataSource {
}
|
MessageSecurityMetadataSource
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/struct/TestUnwrappedWithPrefix.java
|
{
"start": 1977,
"end": 2396
}
|
class ____
{
    // Both sections are @JsonUnwrapped with distinct prefixes, so their
    // properties serialize flattened onto this object under "general.*" and
    // "misc.*" names — NOTE(review): confirm against the test's JSON fixtures.
    @JsonUnwrapped(prefix="general.")
    public ConfigGeneral general = new ConfigGeneral();
    @JsonUnwrapped(prefix="misc.")
    public ConfigMisc misc = new ConfigMisc();

    // No-args constructor for deserialization.
    public ConfigRoot() { }

    // Convenience constructor for building fixtures in tests.
    protected ConfigRoot(String name, int value)
    {
        general = new ConfigGeneral(name);
        misc.value = value;
    }
}
static
|
ConfigRoot
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/test/java/org/springframework/boot/context/properties/ConfigurationPropertiesTests.java
|
{
"start": 93984,
"end": 94190
}
|
class ____ {
}
@Configuration(proxyBeanMethods = false)
@EnableConfigurationProperties(WithCustomConverterAndObjectToObjectMethodProperties.class)
static
|
WithPublicStringConstructorPropertiesConfiguration
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MaxDocsCondition.java
|
{
"start": 903,
"end": 2127
}
|
// Rollover condition that triggers once an index holds at least 'value' documents.
class ____ extends Condition<Long> {
    /** Name under which the condition is registered, parsed and serialized. */
    public static final String NAME = "max_docs";

    public MaxDocsCondition(Long value) {
        super(NAME, Type.MAX);
        this.value = value;
    }

    /** Stream-deserialization constructor; mirrors {@link #writeTo}. */
    public MaxDocsCondition(StreamInput in) throws IOException {
        super(NAME, Type.MAX);
        this.value = in.readLong();
    }

    @Override
    public Result evaluate(final Stats stats) {
        // Met when the document count has reached (or exceeded) the threshold.
        return new Result(this, this.value <= stats.numDocs());
    }

    @Override
    public String getWriteableName() {
        return NAME;
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeLong(value);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        return builder.field(NAME, value);
    }

    // Expects the numeric threshold to be the very next token; anything else is
    // a malformed condition body.
    public static MaxDocsCondition fromXContent(XContentParser parser) throws IOException {
        if (parser.nextToken() == XContentParser.Token.VALUE_NUMBER) {
            return new MaxDocsCondition(parser.longValue());
        } else {
            throw new IllegalArgumentException("invalid token when parsing " + NAME + " condition: " + parser.currentToken());
        }
    }
}
|
MaxDocsCondition
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/failover/RestartPipelinedRegionFailoverStrategy.java
|
{
"start": 12774,
"end": 15443
}
|
/**
 * Availability view combining the shuffle master's knowledge with region-failover
 * bookkeeping: a partition is usable only if it has not been marked failed here,
 * is reported available by the underlying checker, and its type permits
 * re-consumption (or is PIPELINED_APPROXIMATE).
 */
class ____
        implements ResultPartitionAvailabilityChecker {

    /** Result partition state checker from the shuffle master. */
    private final ResultPartitionAvailabilityChecker resultPartitionAvailabilityChecker;

    /** Records partitions which has caused {@link PartitionException}. */
    private final HashSet<IntermediateResultPartitionID> failedPartitions;

    /** Retrieve {@link ResultPartitionType} by {@link IntermediateResultPartitionID}. */
    private final Function<IntermediateResultPartitionID, ResultPartitionType>
            resultPartitionTypeRetriever;

    RegionFailoverResultPartitionAvailabilityChecker(
            ResultPartitionAvailabilityChecker checker,
            Function<IntermediateResultPartitionID, ResultPartitionType>
                    resultPartitionTypeRetriever) {
        this.resultPartitionAvailabilityChecker = checkNotNull(checker);
        this.failedPartitions = new HashSet<>();
        this.resultPartitionTypeRetriever = checkNotNull(resultPartitionTypeRetriever);
    }

    @Override
    public boolean isAvailable(IntermediateResultPartitionID resultPartitionID) {
        return !failedPartitions.contains(resultPartitionID)
                && resultPartitionAvailabilityChecker.isAvailable(resultPartitionID)
                // If the result partition is available in the partition tracker and does not
                // fail, it will be available if it can be re-consumption, and it may also be
                // available for PIPELINED_APPROXIMATE type.
                && isResultPartitionIsReConsumableOrPipelinedApproximate(resultPartitionID);
    }

    // Records a failure observed for this partition; isAvailable() returns false afterwards.
    public void markResultPartitionFailed(IntermediateResultPartitionID resultPartitionID) {
        failedPartitions.add(resultPartitionID);
    }

    // Clears a previously recorded failure (e.g. after the partition was reset).
    public void removeResultPartitionFromFailedState(
            IntermediateResultPartitionID resultPartitionID) {
        failedPartitions.remove(resultPartitionID);
    }

    private boolean isResultPartitionIsReConsumableOrPipelinedApproximate(
            IntermediateResultPartitionID resultPartitionID) {
        ResultPartitionType resultPartitionType =
                resultPartitionTypeRetriever.apply(resultPartitionID);
        return resultPartitionType.isReconsumable()
                || resultPartitionType == ResultPartitionType.PIPELINED_APPROXIMATE;
    }
}
/** The factory to instantiate {@link RestartPipelinedRegionFailoverStrategy}. */
public static
|
RegionFailoverResultPartitionAvailabilityChecker
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/main/java/org/springframework/boot/json/GsonJsonParser.java
|
{
"start": 1781,
"end": 1842
}
|
class ____ extends TypeToken<List<Object>> {
}
}
|
ListTypeToken
|
java
|
spring-projects__spring-boot
|
core/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/condition/OnClassCondition.java
|
{
"start": 6183,
"end": 6290
}
|
/**
 * Strategy for resolving the condition outcomes of a batch of candidates.
 * NOTE(review): the {@code @Nullable} presumably marks entries of the returned
 * array (unresolved candidates stay {@code null}) — confirm placement semantics.
 */
interface ____ {

    @Nullable ConditionOutcome[] resolveOutcomes();
}
private static final
|
OutcomesResolver
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/annotation/authorization/EnableMultiFactorAuthenticationTests.java
|
{
"start": 3340,
"end": 5041
}
|
class ____ {
private static final String ATTR_NAME = "org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors$SecurityContextRequestPostProcessorSupport$TestSecurityContextRepository.REPO";
@Autowired
MockMvc mvc;
@Autowired
Service service;
@Test
@WithMockUser(authorities = { "ROLE_USER", FactorGrantedAuthority.OTT_AUTHORITY })
public void formLoginWhenAuthenticatedThenMergedAuthorities() throws Exception {
this.mvc.perform(formLogin())
.andExpect(authenticated().withAuthorities("ROLE_USER", FactorGrantedAuthority.OTT_AUTHORITY,
FactorGrantedAuthority.PASSWORD_AUTHORITY));
}
@Test
@WithMockUser(authorities = { FactorGrantedAuthority.PASSWORD_AUTHORITY, FactorGrantedAuthority.OTT_AUTHORITY })
void webWhenAuthorized() throws Exception {
this.mvc.perform(get("/")).andExpect(status().isOk());
}
@Test
@WithMockUser
void webWhenNotAuthorized() throws Exception {
this.mvc.perform(get("/")).andExpect(status().isUnauthorized());
}
@Test
@WithMockUser(authorities = { FactorGrantedAuthority.PASSWORD_AUTHORITY, FactorGrantedAuthority.OTT_AUTHORITY })
void methodWhenAuthorized() throws Exception {
Assertions.assertThatNoException().isThrownBy(() -> this.service.authenticated());
}
@Test
@WithMockUser
void methodWhenNotAuthorized() throws Exception {
Assertions.assertThatExceptionOfType(AccessDeniedException.class)
.isThrownBy(() -> this.service.authenticated());
}
@EnableWebSecurity
@EnableMethodSecurity
@Configuration
@EnableMultiFactorAuthentication(
authorities = { FactorGrantedAuthority.OTT_AUTHORITY, FactorGrantedAuthority.PASSWORD_AUTHORITY })
static
|
EnableMultiFactorAuthenticationTests
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/nullness/RedundantNullCheckTest.java
|
{
"start": 11803,
"end": 12445
}
|
class ____ {
@Nullable String getNullableString() {
return null;
}
void process() {
if (getNullableString() == null) {
/* This is fine */
}
}
}
""")
.doTest();
}
@Test
public void positive_methodCall_inNullMarkedScope_explicitlyNonNullReturn() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import org.jspecify.annotations.NullMarked;
import org.jspecify.annotations.NonNull;
@NullMarked
|
Test
|
java
|
elastic__elasticsearch
|
x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/slack/SlackService.java
|
{
"start": 999,
"end": 3137
}
|
class ____ extends NotificationService<SlackAccount> {
private static final Setting<String> SETTING_DEFAULT_ACCOUNT = Setting.simpleString(
"xpack.notification.slack.default_account",
Property.Dynamic,
Property.NodeScope
);
private static final Setting.AffixSetting<SecureString> SETTING_URL_SECURE = Setting.affixKeySetting(
"xpack.notification.slack.account.",
"secure_url",
(key) -> SecureSetting.secureString(key, null)
);
private static final Setting.AffixSetting<Settings> SETTING_DEFAULTS = Setting.affixKeySetting(
"xpack.notification.slack.account.",
"message_defaults",
(key) -> Setting.groupSetting(key + ".", Property.Dynamic, Property.NodeScope)
);
private static final Logger logger = LogManager.getLogger(SlackService.class);
private final HttpClient httpClient;
@SuppressWarnings("this-escape")
public SlackService(Settings settings, HttpClient httpClient, ClusterSettings clusterSettings) {
super("slack", settings, clusterSettings, SlackService.getDynamicSettings(), SlackService.getSecureSettings());
this.httpClient = httpClient;
// ensure logging of setting changes
clusterSettings.addSettingsUpdateConsumer(SETTING_DEFAULT_ACCOUNT, (s) -> {});
clusterSettings.addAffixUpdateConsumer(SETTING_DEFAULTS, (s, o) -> {}, (s, o) -> {});
// do an initial load
reload(settings);
}
@Override
protected SlackAccount createAccount(String name, Settings accountSettings) {
return new SlackAccount(name, accountSettings, httpClient, logger);
}
private static List<Setting<?>> getDynamicSettings() {
return Arrays.asList(SETTING_DEFAULT_ACCOUNT, SETTING_DEFAULTS);
}
private static List<Setting<?>> getSecureSettings() {
return Arrays.asList(SETTING_URL_SECURE);
}
public static List<Setting<?>> getSettings() {
List<Setting<?>> allSettings = new ArrayList<Setting<?>>(getDynamicSettings());
allSettings.addAll(getSecureSettings());
return allSettings;
}
}
|
SlackService
|
java
|
apache__camel
|
components/camel-univocity-parsers/src/test/java/org/apache/camel/dataformat/univocity/UniVocityCsvDataFormatMarshalSpringTest.java
|
{
"start": 1667,
"end": 4879
}
|
class ____ extends CamelSpringTestSupport {
@EndpointInject("mock:result")
MockEndpoint result;
/**
* Tests that we can marshal CSV with the default configuration.
*/
@Test
public void shouldMarshalWithDefaultConfiguration() throws Exception {
template.sendBody("direct:default", Arrays.asList(
asMap("A", "1", "B", "2", "C", "3"),
asMap("A", "one", "B", "two", "C", "three")));
result.expectedMessageCount(1);
result.assertIsSatisfied();
String body = assertIsInstanceOf(String.class, result.getExchanges().get(0).getIn().getBody());
assertEquals(join("1,2,3", "one,two,three"), body);
}
/**
* Tests that we can marshal a single line with CSV.
*/
@Test
public void shouldMarshalSingleLine() throws Exception {
template.sendBody("direct:default", asMap("A", "1", "B", "2", "C", "3"));
result.expectedMessageCount(1);
result.assertIsSatisfied();
String body = assertIsInstanceOf(String.class, result.getExchanges().get(0).getIn().getBody());
assertEquals(join("1,2,3"), body);
}
/**
* Tests that the marshalling adds new columns on the fly and keep its order
*/
@Test
public void shouldMarshalAndAddNewColumns() throws Exception {
template.sendBody("direct:default", Arrays.asList(
asMap("A", "1", "B", "2"),
asMap("C", "three", "A", "one", "B", "two")));
result.expectedMessageCount(1);
result.assertIsSatisfied();
String body = assertIsInstanceOf(String.class, result.getExchanges().get(0).getIn().getBody());
assertEquals(join("1,2", "one,two,three"), body);
}
/**
* Tests that we can marshal CSV with specific headers
*/
@Test
public void shouldMarshalWithSpecificHeaders() throws Exception {
template.sendBody("direct:header", Arrays.asList(
asMap("A", "1", "B", "2", "C", "3"),
asMap("A", "one", "B", "two", "C", "three")));
result.expectedMessageCount(1);
result.assertIsSatisfied();
String body = assertIsInstanceOf(String.class, result.getExchanges().get(0).getIn().getBody());
assertEquals(join("1,3", "one,three"), body);
}
/**
* Tests that we can marshal CSV using and advanced configuration
*/
@Test
public void shouldMarshalUsingAdvancedConfiguration() throws Exception {
template.sendBody("direct:advanced", Arrays.asList(
asMap("A", null, "B", "", "C", "_"),
asMap("A", "one", "B", "two", "C", "three")));
result.expectedMessageCount(1);
result.assertIsSatisfied();
String body = assertIsInstanceOf(String.class, result.getExchanges().get(0).getIn().getBody());
assertEquals(join("_N/A_;_empty_;_-__", "_one_;_two_;_three_"), body);
}
@Override
protected AbstractApplicationContext createApplicationContext() {
return new ClassPathXmlApplicationContext(
"org/apache/camel/dataformat/univocity/UniVocityCsvDataFormatMarshalSpringTest.xml");
}
}
|
UniVocityCsvDataFormatMarshalSpringTest
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.