language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | spring-cloud__spring-cloud-gateway | spring-cloud-gateway-server-webflux/src/test/java/org/springframework/cloud/gateway/route/RouteDefinitionRouteLocatorTests.java | {
"start": 11200,
"end": 11434
} | class ____ extends AbstractGatewayFilterFactory {
@Override
public GatewayFilter apply(Object config) {
return new OrderedGatewayFilter((exchange, chain) -> chain.filter(exchange), 9999);
}
}
}
| TestOrderedGatewayFilterFactory |
java | micronaut-projects__micronaut-core | router/src/main/java/io/micronaut/web/router/DefaultRouteBuilder.java | {
"start": 19124,
"end": 19197
} | class ____ base {@link MethodBasedRouteInfo}.
*/
abstract static | for |
java | micronaut-projects__micronaut-core | core/src/main/java/io/micronaut/core/util/clhm/Weighers.java | {
"start": 10356,
"end": 10617
} | enum ____ implements Weigher<Collection<?>> {
INSTANCE;
@Override
public int weightOf(Collection<?> values) {
return values.size();
}
}
/**
* The SingletonEntryWeigher.
*/
private | CollectionWeigher |
java | elastic__elasticsearch | libs/entitlement/tools/common/src/main/java/org/elasticsearch/entitlement/tools/AccessibleJdkMethods.java | {
"start": 9772,
"end": 9939
} | class ____ public and exported, to be accessible outside the JDK the method must be either:
// - public or
// - protected if not a final | is |
java | quarkusio__quarkus | extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/netty/NettyMeters.java | {
"start": 334,
"end": 411
} | class ____ oder to replace the {@code ID} tag
* with {@code NAME}.
*/
public | in |
java | assertj__assertj-core | assertj-tests/assertj-integration-tests/assertj-guava-tests/src/test/java/org/assertj/tests/guava/api/RangeMapAssert_isEmpty_Test.java | {
"start": 944,
"end": 1858
} | class ____ extends RangeMapAssertBaseTest {
@Test
public void should_pass_if_actual_is_empty() {
actual.clear();
assertThat(actual).isEmpty();
}
@Test
public void should_fail_if_actual_is_null() {
// GIVEN
actual = null;
// WHEN
Throwable throwable = catchThrowable(() -> assertThat(actual).isEmpty());
// THEN
assertThat(throwable).isInstanceOf(AssertionError.class)
.hasMessage(actualIsNull());
}
@Test
public void should_fail_if_actual_is_not_empty() {
// WHEN
Throwable throwable = catchThrowable(() -> assertThat(actual).isEmpty());
// THEN
assertThat(throwable).isInstanceOf(AssertionError.class)
.hasMessage("%nExpecting empty but was: [[380..450)=violet, [450..495)=blue, [495..570)=green, [570..590)=yellow, [590..620)=orange, [620..750)=red]".formatted());
}
}
| RangeMapAssert_isEmpty_Test |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/collection/adder/_target/TargetWithAnimals.java | {
"start": 298,
"end": 622
} | class ____ {
private List<String> animals = new ArrayList<>();
public List<String> getAnimals() {
return animals;
}
public void setAnimals(List<String> animals) {
this.animals = animals;
}
public void addAnimal(String animal) {
animals.add( animal );
}
}
| TargetWithAnimals |
java | google__guava | android/guava-testlib/src/com/google/common/collect/testing/google/SetMultimapTestSuiteBuilder.java | {
"start": 4328,
"end": 4793
} | class ____<K, V>
extends MultimapTestSuiteBuilder.EntriesGenerator<K, V, SetMultimap<K, V>>
implements TestSetGenerator<Entry<K, V>> {
EntriesGenerator(
OneSizeTestContainerGenerator<SetMultimap<K, V>, Entry<K, V>> multimapGenerator) {
super(multimapGenerator);
}
@Override
public Set<Entry<K, V>> create(Object... elements) {
return (Set<Entry<K, V>>) super.create(elements);
}
}
static final | EntriesGenerator |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/cluster/coordination/NodeHealthCheckFailureException.java | {
"start": 849,
"end": 1134
} | class ____ extends ElasticsearchException {
public NodeHealthCheckFailureException(String msg, Object... args) {
super(msg, args);
}
public NodeHealthCheckFailureException(StreamInput in) throws IOException {
super(in);
}
}
| NodeHealthCheckFailureException |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/health/metadata/HealthMetadataService.java | {
"start": 9040,
"end": 13657
} | class ____ extends SimpleBatchedExecutor<UpsertHealthMetadataTask, Void> {
@Override
public Tuple<ClusterState, Void> executeTask(UpsertHealthMetadataTask task, ClusterState clusterState) {
final var initialHealthMetadata = HealthMetadata.getFromClusterState(clusterState);
final var finalHealthMetadata = localHealthMetadata; // single volatile read
return Tuple.tuple(
finalHealthMetadata.equals(initialHealthMetadata)
? clusterState
: clusterState.copyAndUpdate(b -> b.putCustom(HealthMetadata.TYPE, finalHealthMetadata)),
null
);
}
@Override
public void taskSucceeded(UpsertHealthMetadataTask task, Void unused) {}
@Override
public String describeTasks(List<UpsertHealthMetadataTask> tasks) {
return ""; // tasks are equivalent and idempotent, no need to list them out
}
}
private void updateOnDiskSettingsUpdated(String settingName, String value) {
var diskBuilder = HealthMetadata.Disk.newBuilder(this.localHealthMetadata.getDiskMetadata());
var healthMetadataBuilder = HealthMetadata.newBuilder(this.localHealthMetadata);
if (CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey().equals(settingName)) {
diskBuilder.highWatermark(value, settingName);
} else if (CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_WATERMARK_SETTING.getKey().equals(settingName)) {
diskBuilder.floodStageWatermark(value, settingName);
} else if (CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_FROZEN_WATERMARK_SETTING.getKey().equals(settingName)) {
diskBuilder.frozenFloodStageWatermark(value, settingName);
} else if (CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_FROZEN_MAX_HEADROOM_SETTING.getKey().equals(settingName)) {
diskBuilder.frozenFloodStageMaxHeadroom(value, settingName);
} else if (CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_MAX_HEADROOM_SETTING.getKey().equals(settingName)) {
diskBuilder.highMaxHeadroom(value, settingName);
} else if (CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_MAX_HEADROOM_SETTING.getKey().equals(settingName)) {
diskBuilder.floodStageMaxHeadroom(value, settingName);
}
this.localHealthMetadata = healthMetadataBuilder.disk(diskBuilder.build()).build();
}
private void updateOnShardLimitsSettingsUpdated(String settingName, Integer value) {
var shardLimitsBuilder = HealthMetadata.ShardLimits.newBuilder(this.localHealthMetadata.getShardLimitsMetadata());
var healthMetadataBuilder = HealthMetadata.newBuilder(this.localHealthMetadata);
if (SETTING_CLUSTER_MAX_SHARDS_PER_NODE.getKey().equals(settingName)) {
shardLimitsBuilder.maxShardsPerNode(value);
} else if (SETTING_CLUSTER_MAX_SHARDS_PER_NODE_FROZEN.getKey().equals(settingName)) {
shardLimitsBuilder.maxShardsPerNodeFrozen(value);
} else if (SETTING_SHARD_CAPACITY_UNHEALTHY_THRESHOLD_YELLOW.getKey().equals(settingName)) {
shardLimitsBuilder.shardCapacityUnhealthyThresholdYellow(value);
} else if (SETTING_SHARD_CAPACITY_UNHEALTHY_THRESHOLD_RED.getKey().equals(settingName)) {
shardLimitsBuilder.shardCapacityUnhealthyThresholdRed(value);
}
this.localHealthMetadata = healthMetadataBuilder.shardLimits(shardLimitsBuilder.build()).build();
}
private static HealthMetadata initialHealthMetadata(Settings settings) {
return new HealthMetadata(
new HealthMetadata.Disk(
CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.get(settings),
CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_MAX_HEADROOM_SETTING.get(settings),
CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_WATERMARK_SETTING.get(settings),
CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_MAX_HEADROOM_SETTING.get(settings),
CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_FROZEN_WATERMARK_SETTING.get(settings),
CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_FROZEN_MAX_HEADROOM_SETTING.get(settings)
),
new HealthMetadata.ShardLimits(
SETTING_CLUSTER_MAX_SHARDS_PER_NODE.get(settings),
SETTING_CLUSTER_MAX_SHARDS_PER_NODE_FROZEN.get(settings),
SETTING_SHARD_CAPACITY_UNHEALTHY_THRESHOLD_YELLOW.get(settings),
SETTING_SHARD_CAPACITY_UNHEALTHY_THRESHOLD_RED.get(settings)
)
);
}
}
| Executor |
java | elastic__elasticsearch | libs/tdigest/src/test/java/org/elasticsearch/tdigest/ComparisonTests.java | {
"start": 1084,
"end": 6478
} | class ____ extends TDigestTestCase {
private static final int SAMPLE_COUNT = 1_000_000;
private TDigest avlTreeDigest;
private TDigest mergingDigest;
private TDigest sortingDigest;
private TDigest hybridDigest;
double[] samples;
private void loadData(Supplier<Double> sampleGenerator) {
final int COMPRESSION = 100;
avlTreeDigest = TDigest.createAvlTreeDigest(arrays(), COMPRESSION);
mergingDigest = TDigest.createMergingDigest(arrays(), COMPRESSION);
sortingDigest = TDigest.createSortingDigest(arrays());
hybridDigest = TDigest.createHybridDigest(arrays(), COMPRESSION);
samples = new double[SAMPLE_COUNT];
for (int i = 0; i < SAMPLE_COUNT; i++) {
samples[i] = sampleGenerator.get();
avlTreeDigest.add(samples[i]);
mergingDigest.add(samples[i]);
sortingDigest.add(samples[i]);
hybridDigest.add(samples[i]);
}
Arrays.sort(samples);
}
private void releaseData() {
Releasables.close(avlTreeDigest, mergingDigest, sortingDigest, hybridDigest);
}
public void testRandomDenseDistribution() {
loadData(() -> random().nextDouble());
for (double percentile : new double[] { 0, 0.01, 0.1, 1, 5, 10, 25, 50, 75, 90, 99, 99.9, 99.99, 100.0 }) {
double q = percentile / 100.0;
double expected = Dist.quantile(q, samples);
double accuracy = percentile > 1 ? Math.abs(expected / 10) : Math.abs(expected);
assertEquals(String.valueOf(percentile), expected, sortingDigest.quantile(q), 0);
assertEquals(String.valueOf(percentile), expected, avlTreeDigest.quantile(q), accuracy);
assertEquals(String.valueOf(percentile), expected, mergingDigest.quantile(q), accuracy);
assertEquals(String.valueOf(percentile), expected, hybridDigest.quantile(q), accuracy);
}
releaseData();
}
public void testRandomSparseDistribution() {
loadData(() -> random().nextDouble() * SAMPLE_COUNT * SAMPLE_COUNT + SAMPLE_COUNT);
for (double percentile : new double[] { 0, 0.01, 0.1, 1, 5, 10, 25, 50, 75, 90, 99, 99.9, 99.99, 100.0 }) {
double q = percentile / 100.0;
double expected = Dist.quantile(q, samples);
double accuracy = percentile > 1 ? Math.abs(expected / 10) : Math.abs(expected);
assertEquals(String.valueOf(percentile), expected, sortingDigest.quantile(q), 0);
assertEquals(String.valueOf(percentile), expected, avlTreeDigest.quantile(q), accuracy);
assertEquals(String.valueOf(percentile), expected, mergingDigest.quantile(q), accuracy);
assertEquals(String.valueOf(percentile), expected, hybridDigest.quantile(q), accuracy);
}
releaseData();
}
public void testDenseGaussianDistribution() {
loadData(() -> random().nextGaussian());
for (double percentile : new double[] { 0, 0.01, 0.1, 1, 5, 10, 25, 75, 90, 99, 99.9, 99.99, 100.0 }) {
double q = percentile / 100.0;
double expected = Dist.quantile(q, samples);
double accuracy = percentile > 1 ? Math.abs(expected / 10) : Math.abs(expected);
assertEquals(String.valueOf(percentile), expected, sortingDigest.quantile(q), 0);
assertEquals(String.valueOf(percentile), expected, avlTreeDigest.quantile(q), accuracy);
assertEquals(String.valueOf(percentile), expected, mergingDigest.quantile(q), accuracy);
assertEquals(String.valueOf(percentile), expected, hybridDigest.quantile(q), accuracy);
}
double expectedMedian = Dist.quantile(0.5, samples);
assertEquals(expectedMedian, sortingDigest.quantile(0.5), 0);
assertEquals(expectedMedian, avlTreeDigest.quantile(0.5), 0.01);
assertEquals(expectedMedian, mergingDigest.quantile(0.5), 0.01);
assertEquals(expectedMedian, hybridDigest.quantile(0.5), 0.01);
releaseData();
}
public void testSparseGaussianDistribution() {
loadData(() -> random().nextGaussian() * SAMPLE_COUNT);
for (double percentile : new double[] { 0, 0.01, 0.1, 1, 5, 10, 25, 75, 90, 99, 99.9, 99.99, 100.0 }) {
double q = percentile / 100.0;
double expected = Dist.quantile(q, samples);
double accuracy = percentile > 1 ? Math.abs(expected / 10) : Math.abs(expected);
assertEquals(String.valueOf(percentile), expected, sortingDigest.quantile(q), 0);
assertEquals(String.valueOf(percentile), expected, avlTreeDigest.quantile(q), accuracy);
assertEquals(String.valueOf(percentile), expected, mergingDigest.quantile(q), accuracy);
assertEquals(String.valueOf(percentile), expected, hybridDigest.quantile(q), accuracy);
}
// The absolute value of median is within [0,5000], which is deemed close enough to 0 compared to the max value.
double expectedMedian = Dist.quantile(0.5, samples);
assertEquals(expectedMedian, sortingDigest.quantile(0.5), 0);
assertEquals(expectedMedian, avlTreeDigest.quantile(0.5), 5000);
assertEquals(expectedMedian, mergingDigest.quantile(0.5), 5000);
assertEquals(expectedMedian, hybridDigest.quantile(0.5), 5000);
releaseData();
}
}
| ComparisonTests |
java | spring-projects__spring-boot | module/spring-boot-couchbase/src/main/java/org/springframework/boot/couchbase/autoconfigure/health/CouchbaseHealthContributorAutoConfiguration.java | {
"start": 2220,
"end": 2709
} | class ____
extends CompositeHealthContributorConfiguration<CouchbaseHealthIndicator, Cluster> {
CouchbaseHealthContributorAutoConfiguration() {
super(CouchbaseHealthIndicator::new);
}
@Bean
@ConditionalOnMissingBean(name = { "couchbaseHealthIndicator", "couchbaseHealthContributor" })
HealthContributor couchbaseHealthContributor(ConfigurableListableBeanFactory beanFactory) {
return createContributor(beanFactory, Cluster.class);
}
}
| CouchbaseHealthContributorAutoConfiguration |
java | apache__hadoop | hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestReservationSystemInvariants.java | {
"start": 1670,
"end": 3313
} | class ____ extends BaseSLSRunnerTest {
public static Collection<Object[]> data() {
// Test with both schedulers, and all three trace types
return Arrays.asList(new Object[][] {
{CapacityScheduler.class.getCanonicalName(), "SYNTH",
"src/test/resources/syn.json", null},
{FairScheduler.class.getCanonicalName(), "SYNTH",
"src/test/resources/syn.json", null}
});
}
public void initTestReservationSystemInvariants(String pSchedulerType,
String pTraceType, String pTraceLocation, String pNodeFile) {
this.schedulerType = pSchedulerType;
this.traceType = pTraceType;
this.traceLocation = pTraceLocation;
this.nodeFile = pNodeFile;
setup();
}
@ParameterizedTest(name = "Testing with: {1}, {0}, (nodeFile {3})")
@MethodSource("data")
@Timeout(value = 120)
@SuppressWarnings("all")
public void testSimulatorRunning(String pSchedulerType,
String pTraceType, String pTraceLocation, String pNodeFile) throws Exception {
initTestReservationSystemInvariants(pSchedulerType, pTraceType, pTraceLocation, pNodeFile);
Configuration conf = new Configuration(false);
conf.set(YarnConfiguration.RM_SCHEDULER, schedulerType);
conf.setBoolean(YarnConfiguration.RM_SCHEDULER_ENABLE_MONITORS, true);
conf.set(YarnConfiguration.RM_SCHEDULER_MONITOR_POLICIES,
ReservationInvariantsChecker.class.getCanonicalName());
conf.setBoolean(InvariantsChecker.THROW_ON_VIOLATION, true);
long timeTillShutDownInSec = 90;
runSLS(conf, timeTillShutDownInSec);
}
@Override
public void setup() {
}
}
| TestReservationSystemInvariants |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSLegacyRawCoder.java | {
"start": 979,
"end": 1297
} | class ____ extends TestRSRawCoderBase {
@BeforeEach
public void setup() {
this.encoderFactoryClass = RSLegacyRawErasureCoderFactory.class;
this.decoderFactoryClass = RSLegacyRawErasureCoderFactory.class;
setAllowDump(false); // Change to true to allow verbose dump for debugging
}
}
| TestRSLegacyRawCoder |
java | alibaba__nacos | plugin/datasource/src/main/java/com/alibaba/nacos/plugin/datasource/mapper/ConfigInfoTagMapper.java | {
"start": 1055,
"end": 3554
} | interface ____ extends Mapper {
/**
* Update tag configuration information.
* The default sql:
* UPDATE config_info_tag SET content=?, md5 = ?, src_ip=?,src_user=?,gmt_modified=?,app_name=? WHERE
* data_id=? AND group_id=? AND tenant_id=? AND tag_id=? AND (md5=? or md5 is null or md5='')
*
* @param context sql paramMap
* @return The sql of updating tag configuration information.
*/
default MapperResult updateConfigInfo4TagCas(MapperContext context) {
Object content = context.getUpdateParameter(FieldConstant.CONTENT);
Object md5 = context.getUpdateParameter(FieldConstant.MD5);
Object srcIp = context.getUpdateParameter(FieldConstant.SRC_IP);
Object srcUser = context.getUpdateParameter(FieldConstant.SRC_USER);
Object gmtModified = context.getUpdateParameter(FieldConstant.GMT_MODIFIED);
Object appName = context.getUpdateParameter(FieldConstant.APP_NAME);
Object dataId = context.getWhereParameter(FieldConstant.DATA_ID);
Object groupId = context.getWhereParameter(FieldConstant.GROUP_ID);
Object tenantId = context.getWhereParameter(FieldConstant.TENANT_ID);
Object tagId = context.getWhereParameter(FieldConstant.TAG_ID);
Object oldMd5 = context.getWhereParameter(FieldConstant.MD5);
String sql =
"UPDATE config_info_tag SET content = ?, md5 = ?, src_ip = ?,src_user = ?,gmt_modified = ?,app_name = ? "
+ "WHERE data_id = ? AND group_id = ? AND tenant_id = ? AND tag_id = ? AND (md5 = ? OR md5 IS NULL OR md5 = '')";
return new MapperResult(sql,
CollectionUtils.list(content, md5, srcIp, srcUser, gmtModified, appName, dataId, groupId, tenantId,
tagId, oldMd5));
}
/**
* Query all tag config info for dump task.
* The default sql:
* SELECT t.id,data_id,group_id,tenant_id,tag_id,app_name,content,md5,gmt_modified
* FROM ( SELECT id FROM config_info_tag ORDER BY id LIMIT startRow,pageSize ) g,
* config_info_tag t WHERE g.id = t.id
*
* @param context The start index.
* @return The sql of querying all tag config info for dump task.
*/
MapperResult findAllConfigInfoTagForDumpAllFetchRows(MapperContext context);
/**
* 获取返回表名.
*
* @return 表名
*/
default String getTableName() {
return TableConstant.CONFIG_INFO_TAG;
}
}
| ConfigInfoTagMapper |
java | apache__maven | its/core-it-suite/src/test/resources/mng-7335-missing-jar-in-parallel-build/child1/src/main/java/com/example/Example.java | {
"start": 882,
"end": 981
} | class ____ {
public static void main(String[] args) {
StringUtils.trim("");
}
}
| Example |
java | apache__thrift | lib/java/src/test/java/org/apache/thrift/protocol/TestShortStack.java | {
"start": 1007,
"end": 1421
} | class ____ {
@Test
public void testOps() throws Exception {
ShortStack s = new ShortStack(1);
s.push((short) 10);
s.push((short) 11);
s.push((short) 12);
assertEquals((short) 12, s.pop());
assertEquals((short) 11, s.pop());
s.push((short) 40);
assertEquals((short) 40, s.pop());
assertEquals((short) 10, s.pop());
assertThrows(Exception.class, s::pop);
}
}
| TestShortStack |
java | spring-projects__spring-boot | module/spring-boot-graphql-test/src/main/java/org/springframework/boot/graphql/test/autoconfigure/tester/HttpGraphQlTesterAutoConfiguration.java | {
"start": 2037,
"end": 2901
} | class ____ {
@Bean
@ConditionalOnBean(WebTestClient.class)
@ConditionalOnMissingBean
HttpGraphQlTester webTestClientGraphQlTester(ApplicationContext applicationContext, WebTestClient webTestClient,
GraphQlProperties properties) {
String graphQlPath = properties.getHttp().getPath();
LocalTestWebServer localTestWebServer = LocalTestWebServer.get(applicationContext);
return HttpGraphQlTester.create(createWebTestClient(webTestClient.mutate(), localTestWebServer, graphQlPath));
}
private WebTestClient createWebTestClient(WebTestClient.Builder builder,
@Nullable LocalTestWebServer localTestWebServer, String graphQlPath) {
return (localTestWebServer != null)
? builder.uriBuilderFactory(localTestWebServer.withPath(graphQlPath).uriBuilderFactory()).build()
: builder.baseUrl(graphQlPath).build();
}
}
| HttpGraphQlTesterAutoConfiguration |
java | spring-projects__spring-boot | module/spring-boot-micrometer-metrics/src/main/java/org/springframework/boot/micrometer/metrics/autoconfigure/export/prometheus/PrometheusProperties.java | {
"start": 5578,
"end": 5732
} | enum ____ {
/**
* Push metrics in text format.
*/
TEXT,
/**
* Push metrics in protobuf format.
*/
PROTOBUF
}
public | Format |
java | apache__flink | flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/api/bridge/java/StreamTableEnvironment.java | {
"start": 18109,
"end": 19055
} | class ____ for the format of the path.
* @param dataStream The {@link DataStream} out of which to create the view.
* @param <T> The type of the {@link DataStream}.
*/
<T> void createTemporaryView(String path, DataStream<T> dataStream);
/**
* Creates a view from the given {@link DataStream} in a given path. Registered views can be
* referenced in SQL queries.
*
* <p>See {@link #fromDataStream(DataStream, Schema)} for more information on how a {@link
* DataStream} is translated into a table.
*
* <p>Temporary objects can shadow permanent ones. If a permanent object in a given path exists,
* it will be inaccessible in the current session. To make the permanent object available again
* you can drop the corresponding temporary object.
*
* @param path The path under which the {@link DataStream} is created. See also the {@link
* TableEnvironment} | description |
java | apache__camel | catalog/camel-catalog-maven/src/main/java/org/apache/camel/catalog/maven/ComponentArtifactHelper.java | {
"start": 1146,
"end": 3332
} | class ____ {
private ComponentArtifactHelper() {
}
public static Properties loadComponentProperties(ClassLoader classLoader, Logger logger) {
Properties answer = new Properties();
try (InputStream is = classLoader.getResourceAsStream("META-INF/services/org/apache/camel/component.properties")) {
// load the component files using the recommended way by a component.properties file
if (is != null) {
answer.load(is);
}
} catch (Exception e) {
logger.warn("Error loading META-INF/services/org/apache/camel/component.properties file due {}", e.getMessage(), e);
}
return answer;
}
public static String extractComponentJavaType(ClassLoader classLoader, String scheme, Logger logger) {
try (InputStream is = classLoader.getResourceAsStream("META-INF/services/org/apache/camel/component/" + scheme)) {
if (is != null) {
Properties props = new Properties();
props.load(is);
return (String) props.get("class");
}
} catch (Exception e) {
logger.warn("Error loading META-INF/services/org/apache/camel/component/{} file due {}", scheme, e.getMessage(), e);
}
return null;
}
public static String loadComponentJSonSchema(ClassLoader classLoader, String scheme, Logger logger) {
String answer = null;
String path = null;
String javaType = extractComponentJavaType(classLoader, scheme, logger);
if (javaType != null) {
int pos = javaType.lastIndexOf('.');
path = javaType.substring(0, pos);
path = path.replace('.', '/');
path = path + "/" + scheme + ".json";
}
if (path != null) {
try (InputStream is = classLoader.getResourceAsStream(path)) {
if (is != null) {
answer = loadText(is);
}
} catch (Exception e) {
logger.warn("Error loading {} file due {}", path, e.getMessage(), e);
}
}
return answer;
}
}
| ComponentArtifactHelper |
java | quarkusio__quarkus | independent-projects/arc/processor/src/main/java/io/quarkus/arc/processor/Injection.java | {
"start": 33774,
"end": 35370
} | class ____ hierarchy, during which all seen
* methods are recorded into {@code previousMethods}.
* <p>
* This is not entirely precise according to the JLS rules for method overriding, but seems good enough.
*/
static boolean isOverriden(MethodOverrideKey method, Set<MethodOverrideKey> previousMethods) {
short flags = method.method.flags();
if (Modifier.isPublic(flags) || Modifier.isProtected(flags)) {
// if there's an override, it must be public or perhaps protected,
// so it always has the same visibility
return previousMethods.contains(method);
} else if (Modifier.isPrivate(flags)) {
// private methods are never overridden
return false;
} else { // package-private
// if there's an override, it must be in the same package and:
// 1. either package-private (so it has the same visibility)
if (previousMethods.contains(method)) {
return true;
}
// 2. or public/protected (so it has a different visibility: empty string)
String packageName = method.packageName();
MethodOverrideKey methodWithoutVisibility = method.withoutVisibility();
for (MethodOverrideKey previousMethod : previousMethods) {
if (methodWithoutVisibility.equals(previousMethod)
&& packageName.equals(previousMethod.packageName())) {
return true;
}
}
return false;
}
}
}
| inheritance |
java | greenrobot__greendao | DaoCore/src/main/java/org/greenrobot/greendao/async/AsyncSession.java | {
"start": 1048,
"end": 1592
} | interface ____ entity operations. All operations will enqueued a @link {@link AsyncOperation} and return
* immediately (fine to call on the UI/main thread). The queue will be processed in a (single) background thread. The
* processing order is the call order of the operations. It's possible to start multiple AsyncSessions that will
* execute
* concurrently.
*
* @author Markus
* @see AbstractDaoSession#startAsyncSession()
*/
// Facade to AsyncOperationExecutor: prepares operations and delegates work to AsyncOperationExecutor.
public | to |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/mock/web/MockHttpServletResponseTests.java | {
"start": 20008,
"end": 22433
} | class ____ {
@Test
void servletOutputStreamCommittedWhenBufferSizeExceeded() throws IOException {
assertThat(response.isCommitted()).isFalse();
response.getOutputStream().write('X');
assertThat(response.isCommitted()).isFalse();
int size = response.getBufferSize();
response.getOutputStream().write(new byte[size]);
assertThat(response.isCommitted()).isTrue();
assertThat(response.getContentAsByteArray()).hasSize((size + 1));
}
@Test
void servletOutputStreamCommittedOnFlushBuffer() throws IOException {
assertThat(response.isCommitted()).isFalse();
response.getOutputStream().write('X');
assertThat(response.isCommitted()).isFalse();
response.flushBuffer();
assertThat(response.isCommitted()).isTrue();
assertThat(response.getContentAsByteArray()).hasSize(1);
}
@Test
void servletWriterCommittedWhenBufferSizeExceeded() throws IOException {
assertThat(response.isCommitted()).isFalse();
response.getWriter().write("X");
assertThat(response.isCommitted()).isFalse();
int size = response.getBufferSize();
char[] data = new char[size];
Arrays.fill(data, 'p');
response.getWriter().write(data);
assertThat(response.isCommitted()).isTrue();
assertThat(response.getContentAsByteArray()).hasSize((size + 1));
}
@Test
void servletOutputStreamCommittedOnOutputStreamFlush() throws IOException {
assertThat(response.isCommitted()).isFalse();
response.getOutputStream().write('X');
assertThat(response.isCommitted()).isFalse();
response.getOutputStream().flush();
assertThat(response.isCommitted()).isTrue();
assertThat(response.getContentAsByteArray()).hasSize(1);
}
@Test
void servletWriterCommittedOnWriterFlush() throws IOException {
assertThat(response.isCommitted()).isFalse();
response.getWriter().write("X");
assertThat(response.isCommitted()).isFalse();
response.getWriter().flush();
assertThat(response.isCommitted()).isTrue();
assertThat(response.getContentAsByteArray()).hasSize(1);
}
@Test // SPR-16683
void servletWriterCommittedOnWriterClose() throws IOException {
assertThat(response.isCommitted()).isFalse();
response.getWriter().write("X");
assertThat(response.isCommitted()).isFalse();
response.getWriter().close();
assertThat(response.isCommitted()).isTrue();
assertThat(response.getContentAsByteArray()).hasSize(1);
}
}
@Nested
| ResponseCommittedTests |
java | apache__camel | components/camel-bindy/src/test/java/org/apache/camel/dataformat/bindy/fix/BindySimpleKeyValuePairFixTest.java | {
"start": 4165,
"end": 4713
} | class ____ {
@KeyValuePairField(tag = 37)
private String id;
@KeyValuePairField(tag = 40)
private String product;
@KeyValuePairField(tag = 38)
private String quantity;
public String getId() {
return id;
}
public String getProduct() {
return product;
}
public String getQuantity() {
return quantity;
}
public void setQuantity(String quantity) {
this.quantity = quantity;
}
}
}
| FixOrder |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/YarnChild.java | {
"start": 2954,
"end": 14883
} | class ____ {
private static final Logger LOG = LoggerFactory.getLogger(YarnChild.class);
static volatile TaskAttemptID taskid = null;
public static void main(String[] args) throws Throwable {
Thread.setDefaultUncaughtExceptionHandler(new YarnUncaughtExceptionHandler());
LOG.debug("Child starting");
final JobConf job = new JobConf(MRJobConfig.JOB_CONF_FILE);
// Initing with our JobConf allows us to avoid loading confs twice
Limits.init(job);
UserGroupInformation.setConfiguration(job);
// MAPREDUCE-6565: need to set configuration for SecurityUtil.
SecurityUtil.setConfiguration(job);
String host = args[0];
int port = Integer.parseInt(args[1]);
final InetSocketAddress address =
NetUtils.createSocketAddrForHost(host, port);
final TaskAttemptID firstTaskid = TaskAttemptID.forName(args[2]);
long jvmIdLong = Long.parseLong(args[3]);
JVMId jvmId = new JVMId(firstTaskid.getJobID(),
firstTaskid.getTaskType() == TaskType.MAP, jvmIdLong);
CallerContext.setCurrent(
new CallerContext.Builder("mr_" + firstTaskid.toString()).build());
// initialize metrics
DefaultMetricsSystem.initialize(
StringUtils.camelize(firstTaskid.getTaskType().name()) +"Task");
// Security framework already loaded the tokens into current ugi
Credentials credentials =
UserGroupInformation.getCurrentUser().getCredentials();
LOG.info("Executing with tokens: {}", credentials.getAllTokens());
// Create TaskUmbilicalProtocol as actual task owner.
UserGroupInformation taskOwner =
UserGroupInformation.createRemoteUser(firstTaskid.getJobID().toString());
Token<JobTokenIdentifier> jt = TokenCache.getJobToken(credentials);
SecurityUtil.setTokenService(jt, address);
taskOwner.addToken(jt);
final TaskUmbilicalProtocol umbilical =
taskOwner.doAs(new PrivilegedExceptionAction<TaskUmbilicalProtocol>() {
@Override
public TaskUmbilicalProtocol run() throws Exception {
return (TaskUmbilicalProtocol)RPC.getProxy(TaskUmbilicalProtocol.class,
TaskUmbilicalProtocol.versionID, address, job);
}
});
// report non-pid to application master
JvmContext context = new JvmContext(jvmId, "-1000");
LOG.debug("PID: " + System.getenv().get("JVM_PID"));
Task task = null;
UserGroupInformation childUGI = null;
ScheduledExecutorService logSyncer = null;
try {
int idleLoopCount = 0;
JvmTask myTask = null;
// poll for new task
for (int idle = 0; null == myTask; ++idle) {
long sleepTimeMilliSecs = Math.min(idle * 500, 1500);
LOG.info("Sleeping for " + sleepTimeMilliSecs
+ "ms before retrying again. Got null now.");
MILLISECONDS.sleep(sleepTimeMilliSecs);
myTask = umbilical.getTask(context);
}
if (myTask.shouldDie()) {
return;
}
task = myTask.getTask();
YarnChild.taskid = task.getTaskID();
// Create the job-conf and set credentials
configureTask(job, task, credentials, jt);
// log the system properties
String systemPropsToLog = MRApps.getSystemPropertiesToLog(job);
if (systemPropsToLog != null) {
LOG.info(systemPropsToLog);
}
// Initiate Java VM metrics
JvmMetrics.initSingleton(jvmId.toString(), job.getSessionId());
childUGI = UserGroupInformation.createRemoteUser(System
.getenv(ApplicationConstants.Environment.USER.toString()));
// Add tokens to new user so that it may execute its task correctly.
childUGI.addCredentials(credentials);
// set job classloader if configured before invoking the task
MRApps.setJobClassLoader(job);
logSyncer = TaskLog.createLogSyncer();
// Create a final reference to the task for the doAs block
final Task taskFinal = task;
childUGI.doAs(new PrivilegedExceptionAction<Object>() {
@Override
public Object run() throws Exception {
// use job-specified working directory
setEncryptedSpillKeyIfRequired(taskFinal);
FileSystem.get(job).setWorkingDirectory(job.getWorkingDirectory());
taskFinal.run(job, umbilical); // run the task
return null;
}
});
} catch (FSError e) {
LOG.error("FSError from child", e);
if (!ShutdownHookManager.get().isShutdownInProgress()) {
umbilical.fsError(taskid, e.getMessage());
}
} catch (Exception exception) {
LOG.warn("Exception running child : "
+ StringUtils.stringifyException(exception));
try {
if (task != null) {
// do cleanup for the task
if (childUGI == null) { // no need to job into doAs block
task.taskCleanup(umbilical);
} else {
final Task taskFinal = task;
childUGI.doAs(new PrivilegedExceptionAction<Object>() {
@Override
public Object run() throws Exception {
taskFinal.taskCleanup(umbilical);
return null;
}
});
}
}
} catch (Exception e) {
LOG.info("Exception cleaning up: " + StringUtils.stringifyException(e));
}
// Report back any failures, for diagnostic purposes
if (taskid != null) {
if (!ShutdownHookManager.get().isShutdownInProgress()) {
reportError(exception, task, umbilical);
}
}
} catch (Throwable throwable) {
LOG.error("Error running child : "
+ StringUtils.stringifyException(throwable));
if (taskid != null) {
if (!ShutdownHookManager.get().isShutdownInProgress()) {
Throwable tCause = throwable.getCause();
String cause =
tCause == null ? throwable.getMessage() : StringUtils
.stringifyException(tCause);
umbilical.fatalError(taskid, cause, false);
}
}
} finally {
RPC.stopProxy(umbilical);
DefaultMetricsSystem.shutdown();
TaskLog.syncLogsShutdown(logSyncer);
}
}
/**
 * Report a fatal task failure to the MR ApplicationMaster via the
 * umbilical protocol. If a {@code ClusterStorageCapacityExceededException}
 * appears anywhere in the failure's cause chain and the job opted in via
 * {@code MRJobConfig.JOB_DFS_STORAGE_CAPACITY_KILL_LIMIT_EXCEED}, the whole
 * job is fast-failed instead of just this task attempt.
 *
 * @param exception the failure to report
 * @param task the task whose configuration controls the fast-fail decision
 * @param umbilical RPC channel back to the ApplicationMaster
 * @throws IOException if the fatalError RPC to the AM fails
 */
@VisibleForTesting
static void reportError(Exception exception, Task task,
TaskUmbilicalProtocol umbilical) throws IOException {
boolean fastFailJob = false;
// indexOfType returns -1 when the type is absent from the cause chain.
boolean hasClusterStorageCapacityExceededException =
ExceptionUtils.indexOfType(exception,
ClusterStorageCapacityExceededException.class) != -1;
if (hasClusterStorageCapacityExceededException) {
boolean killJobWhenExceedClusterStorageCapacity = task.getConf()
.getBoolean(MRJobConfig.JOB_DFS_STORAGE_CAPACITY_KILL_LIMIT_EXCEED,
MRJobConfig.DEFAULT_JOB_DFS_STORAGE_CAPACITY_KILL_LIMIT_EXCEED);
if (killJobWhenExceedClusterStorageCapacity) {
LOG.error(
"Fast fail the job because the cluster storage capacity was exceeded.");
fastFailJob = true;
}
}
// taskid is the class-level attempt id recorded when the task was fetched.
umbilical.fatalError(taskid, StringUtils.stringifyException(exception),
fastFailJob);
}
/**
 * Utility method to check if the Encrypted Spill Key needs to be set into the
 * user credentials of the user running the Map / Reduce Task.
 * If the task carries a real spill key, it is copied into the current
 * user's credentials so intermediate spill files can be encrypted.
 * @param task The Map / Reduce task to set the Encrypted Spill information in
 * @throws Exception if the current UGI or its credentials cannot be accessed
 */
public static void setEncryptedSpillKeyIfRequired(Task task) throws
Exception {
// NOTE(review): the length > 1 guard (rather than > 0) suggests a 1-byte
// key is used as a "no encryption" placeholder — confirm against the
// AM-side key generation before relying on this.
if ((task != null) && (task.getEncryptedSpillKey() != null) && (task
.getEncryptedSpillKey().length > 1)) {
Credentials creds =
UserGroupInformation.getCurrentUser().getCredentials();
TokenCache.setEncryptedSpillKey(task.getEncryptedSpillKey(), creds);
UserGroupInformation.getCurrentUser().addCredentials(creds);
}
}
/**
 * Configure mapred-local dirs. This config is used by the task for finding
 * out an output directory.
 * <p>Propagates the NodeManager-provided {@code LOCAL_DIRS} into the job
 * conf, then locates (or creates) the shared "work" directory under one of
 * those dirs and records it under {@code MRJobConfig.JOB_LOCAL_DIR}.
 * @throws IOException if the work directory cannot be created
 */
private static void configureLocalDirs(Task task, JobConf job) throws IOException {
// NOTE(review): the task parameter is unused in this method.
String[] localSysDirs = StringUtils.getTrimmedStrings(
System.getenv(Environment.LOCAL_DIRS.name()));
job.setStrings(MRConfig.LOCAL_DIR, localSysDirs);
LOG.info(MRConfig.LOCAL_DIR + " for child: " + job.get(MRConfig.LOCAL_DIR));
LocalDirAllocator lDirAlloc = new LocalDirAllocator(MRConfig.LOCAL_DIR);
Path workDir = null;
// First, try to find the JOB_LOCAL_DIR on this host.
try {
workDir = lDirAlloc.getLocalPathToRead("work", job);
} catch (DiskErrorException e) {
// DiskErrorException means dir not found. If not found, it will
// be created below.
}
if (workDir == null) {
// JOB_LOCAL_DIR doesn't exist on this host -- Create it.
workDir = lDirAlloc.getLocalPathForWrite("work", job);
FileSystem lfs = FileSystem.getLocal(job).getRaw();
boolean madeDir = false;
try {
madeDir = lfs.mkdirs(workDir);
} catch (FileAlreadyExistsException e) {
// Since all tasks will be running in their own JVM, the race condition
// exists where multiple tasks could be trying to create this directory
// at the same time. If this task loses the race, it's okay because
// the directory already exists.
madeDir = true;
workDir = lDirAlloc.getLocalPathToRead("work", job);
}
if (!madeDir) {
throw new IOException("Mkdirs failed to create "
+ workDir.toString());
}
}
job.set(MRJobConfig.JOB_LOCAL_DIR,workDir.toString());
}
/**
 * Populate the job configuration and the Task with everything the child
 * needs before running: credentials, application attempt id, job-token and
 * shuffle secrets, local dirs, distributed-cache paths and the localized
 * job conf file.
 *
 * @param job the child's job configuration (mutated in place)
 * @param task the map or reduce task about to be executed
 * @param credentials credentials propagated from the ApplicationMaster
 * @param jt the job token used to derive the job/shuffle secret keys
 * @throws IOException if local dirs or the local job file cannot be set up
 */
private static void configureTask(JobConf job, Task task,
Credentials credentials, Token<JobTokenIdentifier> jt) throws IOException {
job.setCredentials(credentials);
// The attempt id is parsed out of the NM-provided CONTAINER_ID env var.
ApplicationAttemptId appAttemptId = ContainerId.fromString(
System.getenv(Environment.CONTAINER_ID.name()))
.getApplicationAttemptId();
LOG.debug("APPLICATION_ATTEMPT_ID: " + appAttemptId);
// Set it in conf, so as to be able to be used by the OutputCommitter.
job.setInt(MRJobConfig.APPLICATION_ATTEMPT_ID,
appAttemptId.getAttemptId());
// set tcp nodelay
job.setBoolean("ipc.client.tcpnodelay", true);
job.setClass(MRConfig.TASK_LOCAL_OUTPUT_CLASS,
YarnOutputFiles.class, MapOutputFile.class);
// set the jobToken and shuffle secrets into task
task.setJobTokenSecret(
JobTokenSecretManager.createSecretKey(jt.getPassword()));
byte[] shuffleSecret = TokenCache.getShuffleSecretKey(credentials);
if (shuffleSecret == null) {
// Fall back to the job token password when no dedicated shuffle secret
// was shipped with the credentials.
LOG.warn("Shuffle secret missing from task credentials."
+ " Using job token secret as shuffle secret.");
shuffleSecret = jt.getPassword();
}
task.setShuffleSecret(
JobTokenSecretManager.createSecretKey(shuffleSecret));
// setup the child's MRConfig.LOCAL_DIR.
configureLocalDirs(task, job);
// setup the child's attempt directories
// Do the task-type specific localization
task.localizeConfiguration(job);
// Set up the DistributedCache related configs
MRApps.setupDistributedCacheLocal(job);
// Overwrite the localized task jobconf which is linked to in the current
// work-dir.
Path localTaskFile = new Path(MRJobConfig.JOB_CONF_FILE);
writeLocalJobFile(localTaskFile, job);
task.setJobFile(localTaskFile.toString());
task.setConf(job);
}
// Permissions for the task-local job file: owner read/write, group read.
private static final FsPermission urw_gr =
FsPermission.createImmutable((short) 0640);
/**
 * Write the task specific job-configuration file.
 * Any pre-existing file at {@code jobFile} is deleted first, then the
 * configuration is serialized as XML with 0640 permissions.
 * @throws IOException if the file cannot be created or written
 */
private static void writeLocalJobFile(Path jobFile, JobConf conf)
throws IOException {
FileSystem localFs = FileSystem.getLocal(conf);
localFs.delete(jobFile);
OutputStream out = null;
try {
out = FileSystem.create(localFs, jobFile, urw_gr);
conf.writeXml(out);
} finally {
// cleanupWithLogger closes quietly: close() failures are logged, not
// thrown, so a successful writeXml is never masked by a close error.
IOUtils.cleanupWithLogger(LOG, out);
}
}
}
| YarnChild |
java | spring-projects__spring-boot | module/spring-boot-hazelcast/src/main/java/org/springframework/boot/hazelcast/health/HazelcastHealthIndicator.java | {
"start": 1121,
"end": 1832
} | class ____ extends AbstractHealthIndicator {
private final HazelcastInstance hazelcast;
public HazelcastHealthIndicator(HazelcastInstance hazelcast) {
super("Hazelcast health check failed");
Assert.notNull(hazelcast, "'hazelcast' must not be null");
this.hazelcast = hazelcast;
}
@Override
protected void doHealthCheck(Health.Builder builder) {
if (!this.hazelcast.getLifecycleService().isRunning()) {
builder.down();
return;
}
this.hazelcast.executeTransaction((context) -> {
String uuid = this.hazelcast.getLocalEndpoint().getUuid().toString();
builder.up().withDetail("name", this.hazelcast.getName()).withDetail("uuid", uuid);
return null;
});
}
}
| HazelcastHealthIndicator |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/web/context/request/async/TimeoutDeferredResultProcessingInterceptor.java | {
"start": 1512,
"end": 1810
} | class ____ implements DeferredResultProcessingInterceptor {
@Override
public <T> boolean handleTimeout(NativeWebRequest request, DeferredResult<T> result) throws Exception {
result.setErrorResult(new AsyncRequestTimeoutException());
return false;
}
}
| TimeoutDeferredResultProcessingInterceptor |
java | apache__flink | flink-table/flink-table-common/src/test/java/org/apache/flink/table/test/RowAssert.java | {
"start": 1129,
"end": 1570
} | class ____ extends AbstractAssert<RowAssert, Row> {
public RowAssert(Row row) {
super(row, RowAssert.class);
}
public RowAssert hasKind(RowKind kind) {
isNotNull();
assertThat(this.actual.getKind()).isEqualTo(kind);
return this;
}
public RowAssert hasArity(int arity) {
isNotNull();
assertThat(this.actual.getArity()).isEqualTo(arity);
return this;
}
}
| RowAssert |
java | spring-projects__spring-framework | spring-webmvc/src/main/java/org/springframework/web/servlet/tags/form/ErrorsTag.java | {
"start": 5380,
"end": 9939
} | class ____ extends AbstractHtmlElementBodyTag implements BodyTag {
/**
* The key under which this tag exposes error messages in
* the {@link PageContext#PAGE_SCOPE page context scope}.
*/
public static final String MESSAGES_ATTRIBUTE = "messages";
/**
* The HTML '{@code span}' tag.
*/
public static final String SPAN_TAG = "span";
private String element = SPAN_TAG;
private String delimiter = "<br/>";
/**
* Stores any value that existed in the 'errors messages' before the tag was started.
*/
private @Nullable Object oldMessages;
private boolean errorMessagesWereExposed;
/**
* Set the HTML element must be used to render the error messages.
* <p>Defaults to an HTML '{@code <span/>}' tag.
*/
public void setElement(String element) {
Assert.hasText(element, "'element' cannot be null or blank");
this.element = element;
}
/**
* Get the HTML element must be used to render the error messages.
*/
public String getElement() {
return this.element;
}
/**
* Set the delimiter to be used between error messages.
* <p>Defaults to an HTML '{@code <br/>}' tag.
*/
public void setDelimiter(String delimiter) {
this.delimiter = delimiter;
}
/**
* Return the delimiter to be used between error messages.
*/
public String getDelimiter() {
return this.delimiter;
}
/**
* Get the value for the HTML '{@code id}' attribute.
* <p>Appends '{@code .errors}' to the value returned by {@link #getPropertyPath()}
* or to the model attribute name if the {@code <form:errors/>} tag's
* '{@code path}' attribute has been omitted.
* @return the value for the HTML '{@code id}' attribute
* @see #getPropertyPath()
*/
@Override
protected String autogenerateId() throws JspException {
String path = getPropertyPath();
if (!StringUtils.hasLength(path) || "*".equals(path)) {
path = (String) this.pageContext.getAttribute(
FormTag.MODEL_ATTRIBUTE_VARIABLE_NAME, PageContext.REQUEST_SCOPE);
}
return StringUtils.deleteAny(path, "[]") + ".errors";
}
/**
* Get the value for the HTML '{@code name}' attribute.
* <p>Simply returns {@code null} because the '{@code name}' attribute
* is not a validate attribute for the '{@code span}' element.
*/
@Override
protected @Nullable String getName() throws JspException {
return null;
}
/**
* Should rendering of this tag proceed at all?
* <p>Only renders output when there are errors for the configured {@link #setPath path}.
* @return {@code true} only when there are errors for the configured {@link #setPath path}
*/
@Override
protected boolean shouldRender() throws JspException {
try {
return getBindStatus().isError();
}
catch (IllegalStateException ex) {
// Neither BindingResult nor target object available.
return false;
}
}
@Override
protected void renderDefaultContent(TagWriter tagWriter) throws JspException {
tagWriter.startTag(getElement());
writeDefaultAttributes(tagWriter);
String delimiter = ObjectUtils.getDisplayString(evaluate("delimiter", getDelimiter()));
String[] errorMessages = getBindStatus().getErrorMessages();
for (int i = 0; i < errorMessages.length; i++) {
String errorMessage = errorMessages[i];
if (i > 0) {
tagWriter.appendValue(delimiter);
}
tagWriter.appendValue(getDisplayString(errorMessage));
}
tagWriter.endTag();
}
/**
* Exposes any bind status error messages under {@link #MESSAGES_ATTRIBUTE this key}
* in the {@link PageContext#PAGE_SCOPE}.
* <p>Only called if {@link #shouldRender()} returns {@code true}.
* @see #removeAttributes()
*/
@Override
protected void exposeAttributes() throws JspException {
List<String> errorMessages = new ArrayList<>(Arrays.asList(getBindStatus().getErrorMessages()));
this.oldMessages = this.pageContext.getAttribute(MESSAGES_ATTRIBUTE, PageContext.PAGE_SCOPE);
this.pageContext.setAttribute(MESSAGES_ATTRIBUTE, errorMessages, PageContext.PAGE_SCOPE);
this.errorMessagesWereExposed = true;
}
/**
* Removes any bind status error messages that were previously stored under
* {@link #MESSAGES_ATTRIBUTE this key} in the {@link PageContext#PAGE_SCOPE}.
* @see #exposeAttributes()
*/
@Override
protected void removeAttributes() {
if (this.errorMessagesWereExposed) {
if (this.oldMessages != null) {
this.pageContext.setAttribute(MESSAGES_ATTRIBUTE, this.oldMessages, PageContext.PAGE_SCOPE);
this.oldMessages = null;
}
else {
this.pageContext.removeAttribute(MESSAGES_ATTRIBUTE, PageContext.PAGE_SCOPE);
}
}
}
}
| ErrorsTag |
java | FasterXML__jackson-core | src/main/java/tools/jackson/core/io/DataOutputAsStream.java | {
"start": 65,
"end": 197
} | class ____ support use of {@link DataOutput} for output, directly,
* without caller having to provide for implementation.
*/
public | to |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/configurers/RequestCacheConfigurerTests.java | {
"start": 16521,
"end": 16923
} | class ____ {
@Bean
SecurityFilterChain filterChain(HttpSecurity http) throws Exception {
// @formatter:off
http
.authorizeHttpRequests((authorize) -> authorize
.anyRequest().authenticated()
)
.formLogin(withDefaults())
.requestCache(withDefaults());
return http.build();
// @formatter:on
}
}
@Configuration
@EnableWebSecurity
static | RequestCacheInLambdaConfig |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/promql/PromqlParamsTests.java | {
"start": 1222,
"end": 8806
} | class ____ extends ESTestCase {
private static final EsqlParser parser = new EsqlParser();
@BeforeClass
public static void checkPromqlEnabled() {
assumeTrue("requires snapshot build with promql feature enabled", PromqlFeatures.isEnabled());
}
public void testValidRangeQuery() {
PromqlCommand promql = parse("TS test | PROMQL start \"2025-10-31T00:00:00Z\" end \"2025-10-31T01:00:00Z\" step 1m (avg(foo))");
assertThat(promql.start().value(), equalTo(Instant.parse("2025-10-31T00:00:00Z").toEpochMilli()));
assertThat(promql.end().value(), equalTo(Instant.parse("2025-10-31T01:00:00Z").toEpochMilli()));
assertThat(promql.step().value(), equalTo(Duration.ofMinutes(1)));
assertThat(promql.isRangeQuery(), equalTo(true));
assertThat(promql.isInstantQuery(), equalTo(false));
}
public void testValidRangeQueryParams() {
PromqlCommand promql = EsqlTestUtils.as(
parser.createStatement(
"TS test | PROMQL start ?_tstart end ?_tend step ?_step (avg(foo))",
new QueryParams(
List.of(
paramAsConstant("_tstart", "2025-10-31T00:00:00Z"),
paramAsConstant("_tend", "2025-10-31T01:00:00Z"),
paramAsConstant("_step", "1m")
)
)
),
PromqlCommand.class
);
assertThat(promql.start().value(), equalTo(Instant.parse("2025-10-31T00:00:00Z").toEpochMilli()));
assertThat(promql.end().value(), equalTo(Instant.parse("2025-10-31T01:00:00Z").toEpochMilli()));
assertThat(promql.step().value(), equalTo(Duration.ofMinutes(1)));
assertThat(promql.isRangeQuery(), equalTo(true));
assertThat(promql.isInstantQuery(), equalTo(false));
}
public void testValidRangeQueryOnlyStep() {
PromqlCommand promql = parse("TS test | PROMQL `step` \"1\" (avg(foo))");
assertThat(promql.start().value(), nullValue());
assertThat(promql.end().value(), nullValue());
assertThat(promql.step().value(), equalTo(Duration.ofSeconds(1)));
assertThat(promql.isRangeQuery(), equalTo(true));
assertThat(promql.isInstantQuery(), equalTo(false));
}
public void testValidInstantQuery() {
PromqlCommand promql = parse("TS test | PROMQL time \"2025-10-31T00:00:00Z\" (avg(foo))");
assertThat(promql.start().value(), equalTo(Instant.parse("2025-10-31T00:00:00Z").toEpochMilli()));
assertThat(promql.end().value(), equalTo(Instant.parse("2025-10-31T00:00:00Z").toEpochMilli()));
assertThat(promql.step().value(), nullValue());
assertThat(promql.isInstantQuery(), equalTo(true));
assertThat(promql.isRangeQuery(), equalTo(false));
}
public void testValidRangeQueryInvalidQuotedIdentifierValue() {
ParsingException e = assertThrows(ParsingException.class, () -> parse("TS test | PROMQL step `1m` (avg(foo))"));
assertThat(e.getMessage(), containsString("1:23: Parameter value [`1m`] must not be a quoted identifier"));
}
// TODO nicer error messages for missing params
public void testMissingParams() {
assertThrows(ParsingException.class, () -> parse("TS test | PROMQL (avg(foo))"));
}
public void testZeroStep() {
ParsingException e = assertThrows(ParsingException.class, () -> parse("TS test | PROMQL step 0 (avg(foo))"));
assertThat(
e.getMessage(),
containsString(
"1:11: invalid parameter \"step\": zero or negative query resolution step widths are not accepted. "
+ "Try a positive integer"
)
);
}
public void testNegativeStep() {
ParsingException e = assertThrows(ParsingException.class, () -> parse("TS test | PROMQL step \"-1\" (avg(foo))"));
assertThat(
e.getMessage(),
containsString("invalid parameter \"step\": zero or negative query resolution step widths are not accepted")
);
}
public void testEndBeforeStart() {
ParsingException e = assertThrows(
ParsingException.class,
() -> parse("TS test | PROMQL start \"2025-10-31T01:00:00Z\" end \"2025-10-31T00:00:00Z\" step 1m (avg(foo))")
);
assertThat(e.getMessage(), containsString("1:11: invalid parameter \"end\": end timestamp must not be before start time"));
}
public void testInstantAndRangeParams() {
ParsingException e = assertThrows(ParsingException.class, () -> parse("""
TS test
| PROMQL start "2025-10-31T00:00:00Z" end "2025-10-31T01:00:00Z" step 1m time "2025-10-31T00:00:00Z" (
avg(foo)
)"""));
assertThat(
e.getMessage(),
containsString("2:4: Specify either [time] for instant query or [step], [start] or [end] for a range query")
);
}
public void testDuplicateParameter() {
ParsingException e = assertThrows(ParsingException.class, () -> parse("TS test | PROMQL step 1 step 2 (avg(foo))"));
assertThat(e.getMessage(), containsString("[step] already specified"));
}
public void testUnknownParameter() {
ParsingException e = assertThrows(ParsingException.class, () -> parse("TS test | PROMQL stp 1 (avg(foo))"));
assertThat(e.getMessage(), containsString("Unknown parameter [stp], did you mean [step]?"));
}
public void testUnknownParameterNoSuggestion() {
ParsingException e = assertThrows(ParsingException.class, () -> parse("TS test | PROMQL foo 1 (avg(foo))"));
assertThat(e.getMessage(), containsString("Unknown parameter [foo]"));
}
public void testInvalidDateFormat() {
ParsingException e = assertThrows(
ParsingException.class,
() -> parse("TS test | PROMQL start \"not-a-date\" end \"2025-10-31T01:00:00Z\" step 1m (avg(foo))")
);
assertThat(e.getMessage(), containsString("1:24: Invalid date format [not-a-date]"));
}
public void testOnlyStartSpecified() {
ParsingException e = assertThrows(
ParsingException.class,
() -> parse("TS test | PROMQL start \"2025-10-31T00:00:00Z\" step 1m (avg(foo))")
);
assertThat(
e.getMessage(),
containsString("Parameters [start] and [end] must either both be specified or both be omitted for a range query")
);
}
public void testOnlyEndSpecified() {
ParsingException e = assertThrows(
ParsingException.class,
() -> parse("TS test | PROMQL end \"2025-10-31T01:00:00Z\" step 1m (avg(foo))")
);
assertThat(
e.getMessage(),
containsString("Parameters [start] and [end] must either both be specified or both be omitted for a range query")
);
}
public void testRangeQueryMissingStep() {
ParsingException e = assertThrows(
ParsingException.class,
() -> parse("TS test | PROMQL start \"2025-10-31T00:00:00Z\" end \"2025-10-31T01:00:00Z\" (avg(foo))")
);
assertThat(e.getMessage(), containsString("Parameter [step] or [time] is required"));
}
private static PromqlCommand parse(String query) {
return as(parser.createStatement(query), PromqlCommand.class);
}
@Override
protected List<String> filteredWarnings() {
return withDefaultLimitWarning(super.filteredWarnings());
}
}
| PromqlParamsTests |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/deployment/src/main/java/io/quarkus/resteasy/reactive/server/deployment/BuiltInReaderOverrideBuildItem.java | {
"start": 220,
"end": 1401
} | class ____ extends MultiBuildItem {
private final String readerClassName;
private final String overrideClassName;
public BuiltInReaderOverrideBuildItem(String readerClassName, String overrideClassName) {
this.readerClassName = readerClassName;
this.overrideClassName = overrideClassName;
}
public String getReaderClassName() {
return readerClassName;
}
public String getOverrideClassName() {
return overrideClassName;
}
public static Map<String, String> toMap(List<BuiltInReaderOverrideBuildItem> items) {
if (items.isEmpty()) {
return Collections.emptyMap();
}
Map<String, String> result = new HashMap<>();
for (BuiltInReaderOverrideBuildItem item : items) {
String previousOverride = result.put(item.getReaderClassName(), item.getOverrideClassName());
if (previousOverride != null) {
throw new IllegalStateException(
"Providing multiple BuiltInReaderOverrideBuildItem for the same readerClassName is not supported");
}
}
return result;
}
}
| BuiltInReaderOverrideBuildItem |
java | elastic__elasticsearch | x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/CommandBuilder.java | {
"start": 2603,
"end": 8739
} | class ____ extends LogicalPlanBuilder {
protected CommandBuilder(Map<Token, SqlTypedParamValue> params, ZoneId zoneId) {
super(params, zoneId);
}
@Override
public Command visitDebug(DebugContext ctx) {
Source source = source(ctx);
if (ctx.FORMAT().size() > 1) {
throw new ParsingException(source, "Debug FORMAT should be specified at most once");
}
if (ctx.PLAN().size() > 1) {
throw new ParsingException(source, "Debug PLAN should be specified at most once");
}
Debug.Type type = null;
if (ctx.type != null) {
if (ctx.type.getType() == SqlBaseLexer.ANALYZED) {
type = Debug.Type.ANALYZED;
} else {
type = Debug.Type.OPTIMIZED;
}
}
boolean graphViz = ctx.format != null && ctx.format.getType() == SqlBaseLexer.GRAPHVIZ;
Debug.Format format = graphViz ? Debug.Format.GRAPHVIZ : Debug.Format.TEXT;
return new Debug(source, plan(ctx.statement()), type, format);
}
@Override
public Command visitExplain(ExplainContext ctx) {
Source source = source(ctx);
if (ctx.PLAN().size() > 1) {
throw new ParsingException(source, "Explain TYPE should be specified at most once");
}
if (ctx.FORMAT().size() > 1) {
throw new ParsingException(source, "Explain FORMAT should be specified at most once");
}
if (ctx.VERIFY().size() > 1) {
throw new ParsingException(source, "Explain VERIFY should be specified at most once");
}
Explain.Type type = null;
if (ctx.type != null) {
type = switch (ctx.type.getType()) {
case SqlBaseLexer.PARSED -> Explain.Type.PARSED;
case SqlBaseLexer.ANALYZED -> Explain.Type.ANALYZED;
case SqlBaseLexer.OPTIMIZED -> Explain.Type.OPTIMIZED;
case SqlBaseLexer.MAPPED -> Explain.Type.MAPPED;
case SqlBaseLexer.EXECUTABLE -> Explain.Type.EXECUTABLE;
default -> Explain.Type.ALL;
};
}
boolean graphViz = ctx.format != null && ctx.format.getType() == SqlBaseLexer.GRAPHVIZ;
Explain.Format format = graphViz ? Explain.Format.GRAPHVIZ : Explain.Format.TEXT;
boolean verify = (ctx.verify == null || Booleans.parseBoolean(ctx.verify.getText().toLowerCase(Locale.ROOT), true));
return new Explain(source, plan(ctx.statement()), type, format, verify);
}
@Override
public Object visitShowFunctions(ShowFunctionsContext ctx) {
return new ShowFunctions(source(ctx), visitLikePattern(ctx.likePattern()));
}
@Override
public Object visitShowTables(ShowTablesContext ctx) {
TableIdentifier ti = visitTableIdentifier(ctx.tableIdent);
String index = ti != null ? ti.qualifiedIndex() : null;
boolean includeFrozen = ctx.FROZEN() != null;
maybeWarnDeprecatedFrozenSyntax(includeFrozen, "INCLUDE FROZEN");
return new ShowTables(
source(ctx),
visitLikePattern(ctx.clusterLike),
visitString(ctx.cluster),
index,
visitLikePattern(ctx.tableLike),
includeFrozen
);
}
@Override
public Object visitShowSchemas(ShowSchemasContext ctx) {
return new ShowSchemas(source(ctx));
}
@Override
public Object visitShowColumns(ShowColumnsContext ctx) {
TableIdentifier ti = visitTableIdentifier(ctx.tableIdent);
String index = ti != null ? ti.qualifiedIndex() : null;
boolean includeFrozen = ctx.FROZEN() != null;
maybeWarnDeprecatedFrozenSyntax(includeFrozen, "INCLUDE FROZEN");
return new ShowColumns(source(ctx), string(ctx.cluster), index, visitLikePattern(ctx.tableLike), includeFrozen);
}
@Override
public Object visitShowCatalogs(ShowCatalogsContext ctx) {
return new ShowCatalogs(source(ctx));
}
@Override
public SysTables visitSysTables(SysTablesContext ctx) {
List<IndexType> types = new ArrayList<>();
for (StringContext string : ctx.string()) {
String value = string(string);
if (Strings.isEmpty(value) == false) {
// check special ODBC wildcard case
if (value.equals(StringUtils.SQL_WILDCARD) && ctx.string().size() == 1) {
// treat % as null
// https://docs.microsoft.com/en-us/sql/odbc/reference/develop-app/value-list-arguments
} else {
switch (value.toUpperCase(Locale.ROOT)) {
case IndexResolver.SQL_TABLE -> types.add(IndexType.STANDARD_INDEX);
case IndexResolver.SQL_VIEW -> types.add(IndexType.ALIAS);
default -> types.add(IndexType.UNKNOWN);
}
}
}
}
// if the ODBC enumeration is specified, skip validation
EnumSet<IndexType> set = types.isEmpty() ? null : EnumSet.copyOf(types);
TableIdentifier ti = visitTableIdentifier(ctx.tableIdent);
String index = ti != null ? ti.qualifiedIndex() : null;
return new SysTables(source(ctx), visitLikePattern(ctx.clusterLike), index, visitLikePattern(ctx.tableLike), set);
}
@Override
public Object visitSysColumns(SysColumnsContext ctx) {
TableIdentifier ti = visitTableIdentifier(ctx.tableIdent);
String index = ti != null ? ti.qualifiedIndex() : null;
return new SysColumns(
source(ctx),
string(ctx.cluster),
index,
visitLikePattern(ctx.tableLike),
visitLikePattern(ctx.columnPattern)
);
}
@Override
public SysTypes visitSysTypes(SysTypesContext ctx) {
int type = 0;
if (ctx.type != null) {
Literal value = (Literal) visit(ctx.type);
type = ((Number) value.fold()).intValue();
}
return new SysTypes(source(ctx), Integer.valueOf(type));
}
}
| CommandBuilder |
java | google__error-prone | core/src/test/java/com/google/errorprone/dataflow/nullnesspropagation/NullnessInferenceTest.java | {
"start": 11055,
"end": 11382
} | class ____ {
List<@NonNull Object> return_tests(Object o1, Object o2) {
// BUG: Diagnostic contains: {}
inspectInferredGenerics(List.cons(o1, List.cons(o2, List.nil())));
// BUG: Diagnostic contains: {Z=Non-null}
return inspectInferredGenerics(List.cons(o1, List.cons(o2, List.nil())));
}
}
abstract | ReturnTest |
java | redisson__redisson | redisson/src/main/java/org/redisson/api/RScoredSortedSetRx.java | {
"start": 1207,
"end": 52697
} | interface ____<V> extends RExpirableRx, RSortableRx<Set<V>> {
/**
* Removes and returns first available tail element of <b>any</b> sorted set,
* waiting up to the specified wait time if necessary for an element to become available
* in any of defined sorted sets <b>including</b> this one.
* <p>
* Requires <b>Redis 5.0.0 and higher.</b>
*
* @param queueNames name of queues
* @param timeout how long to wait before giving up, in units of
* {@code unit}
* @param unit a {@code TimeUnit} determining how to interpret the
* {@code timeout} parameter
* @return the tail element, or {@code null} if all sorted sets are empty
*/
Maybe<V> pollLastFromAny(long timeout, TimeUnit unit, String... queueNames);
/**
* Removes and returns first available tail elements of <b>any</b> sorted set,
* waiting up to the specified wait time if necessary for elements to become available
* in any of defined sorted sets <b>including</b> this one.
* <p>
* Requires <b>Redis 7.0.0 and higher.</b>
*
* @param duration how long to wait before giving up
* @param count elements amount
* @param queueNames name of queues
* @return the tail elements
*/
Single<List<V>> pollLastFromAny(Duration duration, int count, String... queueNames);
/**
* Removes and returns first available tail elements
* of <b>any</b> sorted set <b>including</b> this one.
* <p>
* Requires <b>Redis 7.0.0 and higher.</b>
*
* @param count elements amount
* @param queueNames name of queues
* @return the tail elements
*/
Single<List<V>> pollLastFromAny(int count, String... queueNames);
/**
* Removes and returns first available tail entries
* of <b>any</b> sorted set <b>including</b> this one.
* <p>
* Requires <b>Redis 7.0.0 and higher.</b>
*
* @param count entries amount
* @param queueNames name of queues
* @return the head entries
*/
Single<Map<String, Map<V, Double>>> pollLastEntriesFromAny(int count, String... queueNames);
/**
* Removes and returns first available tail entries of <b>any</b> sorted set,
* waiting up to the specified wait time if necessary for elements to become available
* in any of defined sorted sets <b>including</b> this one.
* <p>
* Requires <b>Redis 7.0.0 and higher.</b>
*
* @param duration how long to wait before giving up
* @param count entries amount
* @param queueNames name of queues
* @return the tail entries
*/
Single<Map<String, Map<V, Double>>> pollLastEntriesFromAny(Duration duration, int count, String... queueNames);
/**
* Removes and returns first available head element of <b>any</b> sorted set,
* waiting up to the specified wait time if necessary for an element to become available
* in any of defined sorted sets <b>including</b> this one.
* <p>
* Requires <b>Redis 5.0.0 and higher.</b>
*
* @param queueNames name of queues
* @param timeout how long to wait before giving up, in units of
* {@code unit}
* @param unit a {@code TimeUnit} determining how to interpret the
* {@code timeout} parameter
* @return the head element, or {@code null} if all sorted sets are empty
*
*/
Maybe<V> pollFirstFromAny(long timeout, TimeUnit unit, String... queueNames);
/**
* Removes and returns first available head elements of <b>any</b> sorted set,
* waiting up to the specified wait time if necessary for elements to become available
* in any of defined sorted sets <b>including</b> this one.
* <p>
* Requires <b>Redis 7.0.0 and higher.</b>
*
* @param duration how long to wait before giving up
* @param count elements amount
* @param queueNames name of queues
* @return the head elements
*/
Single<List<V>> pollFirstFromAny(Duration duration, int count, String... queueNames);
/**
* Removes and returns first available head elements
* of <b>any</b> sorted set <b>including</b> this one.
* <p>
* Requires <b>Redis 7.0.0 and higher.</b>
*
* @param count elements amount
* @param queueNames name of queues
* @return the head elements
*/
Single<List<V>> pollFirstFromAny(int count, String... queueNames);
/**
* Removes and returns first available head entries
* of <b>any</b> sorted set <b>including</b> this one.
* <p>
* Requires <b>Redis 7.0.0 and higher.</b>
*
* @param count entries amount
* @param queueNames name of queues
* @return the head elements
*/
Single<Map<String, Map<V, Double>>> pollFirstEntriesFromAny(int count, String... queueNames);
/**
* Removes and returns first available head entries of <b>any</b> sorted set,
* waiting up to the specified wait time if necessary for elements to become available
* in any of defined sorted sets <b>including</b> this one.
* <p>
* Requires <b>Redis 7.0.0 and higher.</b>
*
* @param duration how long to wait before giving up
* @param count entries amount
* @param queueNames name of queues
* @return the head entries
*/
Single<Map<String, Map<V, Double>>> pollFirstEntriesFromAny(Duration duration, int count, String... queueNames);
/**
* Removes and returns the head element or {@code null} if this sorted set is empty.
* <p>
* Requires <b>Redis 5.0.0 and higher.</b>
*
* @param timeout how long to wait before giving up, in units of
* {@code unit}
* @param unit a {@code TimeUnit} determining how to interpret the
* {@code timeout} parameter
* @return the head element,
* or {@code null} if this sorted set is empty
*/
@Deprecated
Maybe<V> pollFirst(long timeout, TimeUnit unit);
/**
* Removes and returns the head element or {@code null} if this sorted set is empty.
* <p>
* Requires <b>Redis 5.0.0 and higher.</b>
*
* @param duration how long to wait before giving up
* @return the head element,
* or {@code null} if this sorted set is empty
*/
Maybe<V> pollFirst(Duration duration);
    /**
     * Removes and returns the head elements.
     * <p>
     * Requires <b>Redis 7.0.0 and higher.</b>
     *
     * @param duration how long to wait before giving up
     * @param count elements amount
     * @return the head elements
     */
    Single<List<V>> pollFirst(Duration duration, int count);
/**
* Removes and returns the tail element or {@code null} if this sorted set is empty.
* <p>
* Requires <b>Redis 5.0.0 and higher.</b>
*
* @param timeout how long to wait before giving up, in units of
* {@code unit}
* @param unit a {@code TimeUnit} determining how to interpret the
* {@code timeout} parameter
* @return the tail element or {@code null} if this sorted set is empty
*/
@Deprecated
Maybe<V> pollLast(long timeout, TimeUnit unit);
/**
* Removes and returns the tail element or {@code null} if this sorted set is empty.
* <p>
* Requires <b>Redis 5.0.0 and higher.</b>
*
* @param duration how long to wait before giving up
* @return the tail element or {@code null} if this sorted set is empty
*/
Maybe<V> pollLast(Duration duration);
    /**
     * Removes and returns the tail elements.
     * <p>
     * Requires <b>Redis 7.0.0 and higher.</b>
     *
     * @param duration how long to wait before giving up
     * @param count elements amount
     * @return the tail elements
     */
    Single<List<V>> pollLast(Duration duration, int count);
/**
* Removes and returns the head elements of this sorted set.
*
* @param count - elements amount
* @return the head elements of this sorted set
*/
Single<Collection<V>> pollFirst(int count);
/**
* Removes and returns the tail elements of this sorted set.
*
* @param count - elements amount
* @return the tail elements of this sorted set
*/
Single<Collection<V>> pollLast(int count);
/**
* Removes and returns the head element or {@code null} if this sorted set is empty.
*
* @return the head element,
* or {@code null} if this sorted set is empty
*/
Maybe<V> pollFirst();
/**
* Removes and returns the head entry (value and its score) or {@code null} if this sorted set is empty.
*
* @return the head entry,
* or {@code null} if this sorted set is empty
*/
Maybe<ScoredEntry<V>> pollFirstEntry();
/**
* Removes and returns the head entries (value and its score) of this sorted set.
*
* @param count entries amount
* @return the head entries of this sorted set
*/
Single<List<ScoredEntry<V>>> pollFirstEntries(int count);
/**
* Removes and returns the head entries (value and its score).
* <p>
* Requires <b>Redis 7.0.0 and higher.</b>
*
* @param duration how long to wait before giving up
* @param count entries amount
* @return the head entries
*/
Single<List<ScoredEntry<V>>> pollFirstEntries(Duration duration, int count);
/**
* Removes and returns the tail element or {@code null} if this sorted set is empty.
*
* @return the tail element or {@code null} if this sorted set is empty
*/
Maybe<V> pollLast();
/**
* Removes and returns the tail entry (value and its score) or {@code null} if this sorted set is empty.
*
* @return the tail entry or {@code null} if this sorted set is empty
*/
Maybe<ScoredEntry<V>> pollLastEntry();
/**
* Removes and returns the tail entries (value and its score) of this sorted set.
*
* @param count entries amount
* @return the tail entries of this sorted set
*/
Single<List<ScoredEntry<V>>> pollLastEntries(int count);
    /**
     * Removes and returns the tail entries (value and its score).
     * <p>
     * Requires <b>Redis 7.0.0 and higher.</b>
     *
     * @param duration how long to wait before giving up
     * @param count entries amount
     * @return the tail entries
     */
    Single<List<ScoredEntry<V>>> pollLastEntries(Duration duration, int count);
/**
* Returns the head element or {@code null} if this sorted set is empty.
*
* @return the head element or {@code null} if this sorted set is empty
*/
Maybe<V> first();
/**
* Returns the head entry (value and its score) or {@code null} if this sorted set is empty.
*
* @return the head entry or {@code null} if this sorted set is empty
*/
Maybe<ScoredEntry<V>> firstEntry();
/**
* Returns the tail element or {@code null} if this sorted set is empty.
*
* @return the tail element or {@code null} if this sorted set is empty
*/
Maybe<V> last();
/**
* Returns the tail entry (value and its score) or {@code null} if this sorted set is empty.
*
* @return the tail entry or {@code null} if this sorted set is empty
*/
Maybe<ScoredEntry<V>> lastEntry();
    /**
     * Returns score of the head element or returns {@code null} if this sorted set is empty.
     *
     * @return score of the head element or {@code null} if this sorted set is empty
     */
    Maybe<Double> firstScore();
    /**
     * Returns score of the tail element or returns {@code null} if this sorted set is empty.
     *
     * @return score of the tail element or {@code null} if this sorted set is empty
     */
    Maybe<Double> lastScore();
/**
* Returns random element from this sorted set
* <p>
* Requires <b>Redis 6.2.0 and higher.</b>
*
* @return random element
*/
Maybe<V> random();
/**
* Returns random elements from this sorted set limited by <code>count</code>
* <p>
* Requires <b>Redis 6.2.0 and higher.</b>
*
* @param count - values amount to return
* @return random elements
*/
Single<Collection<V>> random(int count);
/**
* Returns random entries from this sorted set limited by <code>count</code>.
* Each map entry uses element as key and score as value.
* <p>
* Requires <b>Redis 6.2.0 and higher.</b>
*
* @param count - entries amount to return
* @return random entries
*/
Single<Map<V, Double>> randomEntries(int count);
/**
* Returns an iterator over elements in this set.
* If <code>pattern</code> is not null then only elements match this pattern are loaded.
*
* @param pattern - search pattern
* @return iterator
*/
Flowable<V> iterator(String pattern);
/**
* Returns an iterator over elements in this set.
* Elements are loaded in batch. Batch size is defined by <code>count</code> param.
*
* @param count - size of elements batch
* @return iterator
*/
Flowable<V> iterator(int count);
/**
* Returns an iterator over elements in this set.
* Elements are loaded in batch. Batch size is defined by <code>count</code> param.
* If pattern is not null then only elements match this pattern are loaded.
*
* @param pattern - search pattern
* @param count - size of elements batch
* @return iterator
*/
Flowable<V> iterator(String pattern, int count);
/**
* Returns an iterator over elements in this set.
*
* @return iterator
*/
Flowable<V> iterator();
/**
* Returns an iterator over entries (value and its score) in this set.
*
* @return iterator
*/
Flowable<ScoredEntry<V>> entryIterator();
/**
* Returns an iterator over entries (value and its score) in this set.
* If <code>pattern</code> is not null then only entries match this pattern are loaded.
*
* @param pattern search pattern
* @return iterator
*/
Flowable<ScoredEntry<V>> entryIterator(String pattern);
/**
* Returns an iterator over entries (value and its score) in this set.
* Entries are loaded in batch. Batch size is defined by <code>count</code> param.
*
* @param count size of elements batch
* @return iterator
*/
Flowable<ScoredEntry<V>> entryIterator(int count);
/**
* Returns an iterator over entries (value and its score) in this set.
* Entries are loaded in batch. Batch size is defined by <code>count</code> param.
* If pattern is not null then only entries match this pattern are loaded.
*
* @param pattern search pattern
* @param count size of entries batch
* @return iterator
*/
Flowable<ScoredEntry<V>> entryIterator(String pattern, int count);
/**
* Removes values by score range.
*
* @param startScore - start score.
* Use <code>Double.POSITIVE_INFINITY</code> or <code>Double.NEGATIVE_INFINITY</code>
* to define infinity numbers
* @param startScoreInclusive - start score inclusive
* @param endScore - end score
* Use <code>Double.POSITIVE_INFINITY</code> or <code>Double.NEGATIVE_INFINITY</code>
* to define infinity numbers
*
* @param endScoreInclusive - end score inclusive
* @return number of elements removed
*/
Single<Integer> removeRangeByScore(double startScore, boolean startScoreInclusive, double endScore, boolean endScoreInclusive);
/**
* Removes values by rank range. Indexes are zero based.
* <code>-1</code> means the highest score, <code>-2</code> means the second highest score.
*
* @param startIndex - start index
* @param endIndex - end index
* @return number of elements removed
*/
Single<Integer> removeRangeByRank(int startIndex, int endIndex);
/**
* Returns rank of value, with the scores ordered from low to high.
*
* @param o - object
* @return rank or <code>null</code> if value does not exist
*/
Maybe<Integer> rank(V o);
/**
* Returns rank and score of specified <code>value</code>,
* with the ranks ordered from low to high.
*
* @param value object
* @return ranked entry or <code>null</code> if value does not exist
*/
Maybe<RankedEntry<V>> rankEntry(V value);
/**
* Returns rank of value, with the scores ordered from high to low.
*
* @param o - object
* @return rank or <code>null</code> if value does not exist
*/
Maybe<Integer> revRank(V o);
/**
* Returns rank and score of specified <code>value</code>,
* with the ranks ordered from high to low.
*
* @param value object
* @return ranked entry or <code>null</code> if value does not exist
*/
Maybe<RankedEntry<V>> revRankEntry(V value);
    /**
     * Returns ranks of elements, with the scores ordered from high to low.
     *
     * @param elements - elements
     * @return list of ranks; contains <code>null</code> for each element that does not exist
     */
    Single<List<Integer>> revRank(Collection<V> elements);
/**
* Returns score of element or <code>null</code> if it doesn't exist.
*
* @param o - element
* @return score
*/
Maybe<Double> getScore(V o);
/**
* Returns scores of elements.
*
* @param elements - elements
* @return element scores
*/
Single<List<Double>> getScore(Collection<V> elements);
/**
* Adds element to this set, overrides previous score if it has been already added.
*
* @param score - object score
* @param object - object itself
* @return <code>true</code> if element has added and <code>false</code> if not.
*/
Single<Boolean> add(double score, V object);
/**
* Adds all elements contained in the specified map to this sorted set.
* Map contains of score mapped by object.
*
* @param objects - map of elements to add
* @return amount of added elements, not including already existing in this sorted set
*/
Single<Integer> addAll(Map<V, Double> objects);
/**
* Adds elements to this set only if they haven't been added before.
* <p>
* Requires <b>Redis 3.0.2 and higher.</b>
*
* @param objects map of elements to add
* @return amount of added elements
*/
Single<Integer> addAllIfAbsent(Map<V, Double> objects);
/**
* Adds elements to this set only if they already exist.
* <p>
* Requires <b>Redis 3.0.2 and higher.</b>
*
* @param objects map of elements to add
* @return amount of added elements
*/
Single<Integer> addAllIfExist(Map<V, Double> objects);
/**
* Adds elements to this set only if new scores greater than current score of existed elements.
* <p>
* Requires <b>Redis 6.2.0 and higher.</b>
*
* @param objects map of elements to add
* @return amount of added elements
*/
Single<Integer> addAllIfGreater(Map<V, Double> objects);
/**
* Adds elements to this set only if new scores less than current score of existed elements.
* <p>
* Requires <b>Redis 6.2.0 and higher.</b>
*
* @param objects map of elements to add
* @return amount of added elements
*/
Single<Integer> addAllIfLess(Map<V, Double> objects);
/**
* Adds element to this set, overrides previous score if it has been already added.
* Finally return the rank of the item
*
* @param score - object score
* @param object - object itself
* @return rank
*/
Single<Integer> addAndGetRank(double score, V object);
/**
* Adds element to this set, overrides previous score if it has been already added.
* Finally return the reverse rank of the item
*
* @param score - object score
* @param object - object itself
* @return reverse rank
*/
Single<Integer> addAndGetRevRank(double score, V object);
/**
* Adds elements to this set, overrides previous score if it has been already added.
* Finally returns reverse rank list of the items
* @param map - map of object and scores, make sure to use an ordered map
* @return collection of reverse ranks
*/
Single<List<Integer>> addAndGetRevRank(Map<? extends V, Double> map);
/**
* Use {@link #addIfAbsent(double, Object)} instead
*
* @param score - object score
* @param object - object itself
* @return <code>true</code> if element has added and <code>false</code> if not.
*/
@Deprecated
Single<Boolean> tryAdd(double score, V object);
/**
* Adds element to this set only if has not been added before.
* <p>
* Requires <b>Redis 3.0.2 and higher.</b>
*
* @param score - object score
* @param object - object itself
* @return <code>true</code> if element added and <code>false</code> if not.
*/
Single<Boolean> addIfAbsent(double score, V object);
/**
* Adds element to this set only if it's already exists.
* <p>
* Requires <b>Redis 3.0.2 and higher.</b>
*
* @param score - object score
* @param object - object itself
* @return <code>true</code> if element added and <code>false</code> if not.
*/
Single<Boolean> addIfExists(double score, V object);
/**
* Adds element to this set only if new score less than current score of existed element.
* <p>
* Requires <b>Redis 6.2.0 and higher.</b>
*
* @param score - object score
* @param object - object itself
* @return <code>true</code> if element added and <code>false</code> if not.
*/
Single<Boolean> addIfLess(double score, V object);
/**
* Adds element to this set only if new score greater than current score of existed element.
* <p>
* Requires <b>Redis 6.2.0 and higher.</b>
*
* @param score - object score
* @param object - object itself
* @return <code>true</code> if element added and <code>false</code> if not.
*/
Single<Boolean> addIfGreater(double score, V object);
/**
* Replaces a previous <code>oldObject</code> with a <code>newObject</code>.
* Returns <code>false</code> if previous object doesn't exist.
*
* @param oldObject old object
* @param newObject new object
* @return <code>true</code> if object has been replaced otherwise <code>false</code>.
*/
Single<Boolean> replace(V oldObject, V newObject);
/**
* Removes a single instance of the specified element from this
* sorted set, if it is present.
*
* @param object element to be removed from this sorted set, if present
* @return <code>true</code> if an element was removed as a result of this call
*/
Single<Boolean> remove(V object);
/**
* Returns size of this set.
*
* @return size
*/
Single<Integer> size();
/**
* Returns <code>true</code> if this sorted set contains encoded state of the specified element.
*
* @param o element whose presence in this collection is to be tested
* @return <code>true</code> if this sorted set contains the specified
* element and <code>false</code> otherwise
*/
Single<Boolean> contains(V o);
/**
* Returns <code>true</code> if this sorted set contains all of the elements
* in encoded state in the specified collection.
*
* @param c collection to be checked for containment in this sorted set
* @return <code>true</code> if this sorted set contains all of the elements
* in the specified collection
*/
Single<Boolean> containsAll(Collection<?> c);
/**
* Removes all of this sorted set's elements that are also contained in the
* specified collection.
*
* @param c sorted set containing elements to be removed from this collection
* @return <code>true</code> if this sorted set changed as a result of the
* call
*/
Single<Boolean> removeAll(Collection<?> c);
/**
* Retains only the elements in this sorted set that are contained in the
* specified collection.
*
* @param c collection containing elements to be retained in this collection
* @return <code>true</code> if this sorted set changed as a result of the call
*/
Single<Boolean> retainAll(Collection<?> c);
/**
* Increases score of specified element by value.
*
* @param element - element whose score needs to be increased
* @param value - value
* @return updated score of element
*/
Single<Double> addScore(V element, Number value);
/**
* Adds score to element and returns its reverse rank
*
* @param object - object itself
* @param value - object score
* @return reverse rank
*/
Single<Integer> addScoreAndGetRevRank(V object, Number value);
/**
* Adds score to element and returns its rank
*
* @param object - object itself
* @param value - object score
* @return rank
*/
Single<Integer> addScoreAndGetRank(V object, Number value);
    /**
     * Stores to defined ScoredSortedSet values by rank range. Indexes are zero based.
     * <code>-1</code> means the highest score, <code>-2</code> means the second highest score.
     * <p>
     * Requires <b>Redis 6.2.0 and higher.</b>
     *
     * @param destName - name of the destination ScoredSortedSet
     * @param startIndex - start index
     * @param endIndex - end index
     * @return number of elements stored
     */
    Single<Integer> rangeTo(String destName, int startIndex, int endIndex);
    /**
     * Stores to defined ScoredSortedSet values between <code>startScore</code> and <code>endScore</code>.
     * <p>
     * Requires <b>Redis 6.2.0 and higher.</b>
     *
     * @param destName - name of the destination ScoredSortedSet
     * @param startScore - start score.
     *                     Use <code>Double.POSITIVE_INFINITY</code> or <code>Double.NEGATIVE_INFINITY</code>
     *                     to define infinity numbers
     * @param startScoreInclusive - start score inclusive
     * @param endScore - end score
     *                     Use <code>Double.POSITIVE_INFINITY</code> or <code>Double.NEGATIVE_INFINITY</code>
     *                     to define infinity numbers
     *
     * @param endScoreInclusive - end score inclusive
     * @return number of elements stored
     */
    Single<Integer> rangeTo(String destName, double startScore, boolean startScoreInclusive, double endScore, boolean endScoreInclusive);
    /**
     * Stores to defined ScoredSortedSet values between <code>startScore</code> and <code>endScore</code>.
     * <p>
     * Requires <b>Redis 6.2.0 and higher.</b>
     *
     * @param destName - name of the destination ScoredSortedSet
     * @param startScore - start score.
     *                     Use <code>Double.POSITIVE_INFINITY</code> or <code>Double.NEGATIVE_INFINITY</code>
     *                     to define infinity numbers
     * @param startScoreInclusive - start score inclusive
     * @param endScore - end score
     *                     Use <code>Double.POSITIVE_INFINITY</code> or <code>Double.NEGATIVE_INFINITY</code>
     *                     to define infinity numbers
     *
     * @param endScoreInclusive - end score inclusive
     * @param offset - offset of sorted data
     * @param count - amount of sorted data
     * @return number of elements stored
     */
    Single<Integer> rangeTo(String destName, double startScore, boolean startScoreInclusive, double endScore, boolean endScoreInclusive, int offset, int count);
    /**
     * Stores to defined ScoredSortedSet values in reversed order by rank range. Indexes are zero based.
     * <code>-1</code> means the highest score, <code>-2</code> means the second highest score.
     * <p>
     * Requires <b>Redis 6.2.0 and higher.</b>
     *
     * @param destName - name of the destination ScoredSortedSet
     * @param startIndex - start index
     * @param endIndex - end index
     * @return number of elements stored
     */
    Single<Integer> revRangeTo(String destName, int startIndex, int endIndex);
    /**
     * Stores to defined ScoredSortedSet values in reversed order between <code>startScore</code> and <code>endScore</code>.
     * <p>
     * Requires <b>Redis 6.2.0 and higher.</b>
     *
     * @param destName - name of the destination ScoredSortedSet
     * @param startScore - start score.
     *                     Use <code>Double.POSITIVE_INFINITY</code> or <code>Double.NEGATIVE_INFINITY</code>
     *                     to define infinity numbers
     * @param startScoreInclusive - start score inclusive
     * @param endScore - end score
     *                     Use <code>Double.POSITIVE_INFINITY</code> or <code>Double.NEGATIVE_INFINITY</code>
     *                     to define infinity numbers
     *
     * @param endScoreInclusive - end score inclusive
     * @return number of elements stored
     */
    Single<Integer> revRangeTo(String destName, double startScore, boolean startScoreInclusive, double endScore, boolean endScoreInclusive);
    /**
     * Stores to defined ScoredSortedSet values in reversed order between <code>startScore</code> and <code>endScore</code>.
     * <p>
     * Requires <b>Redis 6.2.0 and higher.</b>
     *
     * @param destName - name of the destination ScoredSortedSet
     * @param startScore - start score.
     *                     Use <code>Double.POSITIVE_INFINITY</code> or <code>Double.NEGATIVE_INFINITY</code>
     *                     to define infinity numbers
     * @param startScoreInclusive - start score inclusive
     * @param endScore - end score
     *                     Use <code>Double.POSITIVE_INFINITY</code> or <code>Double.NEGATIVE_INFINITY</code>
     *                     to define infinity numbers
     *
     * @param endScoreInclusive - end score inclusive
     * @param offset - offset of sorted data
     * @param count - amount of sorted data
     * @return number of elements stored
     */
    Single<Integer> revRangeTo(String destName, double startScore, boolean startScoreInclusive, double endScore, boolean endScoreInclusive, int offset, int count);
/**
* Returns values by rank range. Indexes are zero based.
* <code>-1</code> means the highest score, <code>-2</code> means the second highest score.
*
* @param startIndex - start index
* @param endIndex - end index
* @return elements
*/
Single<Collection<V>> valueRange(int startIndex, int endIndex);
/**
* Returns entries (value and its score) by rank range. Indexes are zero based.
* <code>-1</code> means the highest score, <code>-2</code> means the second highest score.
*
* @param startIndex - start index
* @param endIndex - end index
* @return entries
*/
Single<Collection<ScoredEntry<V>>> entryRange(int startIndex, int endIndex);
/**
* Returns all values between <code>startScore</code> and <code>endScore</code>.
*
* @param startScore - start score.
* Use <code>Double.POSITIVE_INFINITY</code> or <code>Double.NEGATIVE_INFINITY</code>
* to define infinity numbers
* @param startScoreInclusive - start score inclusive
* @param endScore - end score
* Use <code>Double.POSITIVE_INFINITY</code> or <code>Double.NEGATIVE_INFINITY</code>
* to define infinity numbers
*
* @param endScoreInclusive - end score inclusive
* @return values
*/
Single<Collection<V>> valueRange(double startScore, boolean startScoreInclusive, double endScore, boolean endScoreInclusive);
/**
* Returns all entries (value and its score) between <code>startScore</code> and <code>endScore</code>.
*
* @param startScore - start score.
* Use <code>Double.POSITIVE_INFINITY</code> or <code>Double.NEGATIVE_INFINITY</code>
* to define infinity numbers
* @param startScoreInclusive - start score inclusive
* @param endScore - end score
* Use <code>Double.POSITIVE_INFINITY</code> or <code>Double.NEGATIVE_INFINITY</code>
* to define infinity numbers
*
* @param endScoreInclusive - end score inclusive
* @return entries
*/
Single<Collection<ScoredEntry<V>>> entryRange(double startScore, boolean startScoreInclusive, double endScore, boolean endScoreInclusive);
/**
* Returns all values between <code>startScore</code> and <code>endScore</code>.
*
* @param startScore - start score.
* Use <code>Double.POSITIVE_INFINITY</code> or <code>Double.NEGATIVE_INFINITY</code>
* to define infinity numbers
* @param startScoreInclusive - start score inclusive
* @param endScore - end score
* Use <code>Double.POSITIVE_INFINITY</code> or <code>Double.NEGATIVE_INFINITY</code>
* to define infinity numbers
*
* @param endScoreInclusive - end score inclusive
* @param offset - offset of sorted data
* @param count - amount of sorted data
* @return values
*/
Single<Collection<V>> valueRange(double startScore, boolean startScoreInclusive, double endScore, boolean endScoreInclusive, int offset, int count);
/**
* Returns all entries (value and its score) between <code>startScore</code> and <code>endScore</code>.
*
* @param startScore - start score.
* Use <code>Double.POSITIVE_INFINITY</code> or <code>Double.NEGATIVE_INFINITY</code>
* to define infinity numbers
* @param startScoreInclusive - start score inclusive
* @param endScore - end score
* Use <code>Double.POSITIVE_INFINITY</code> or <code>Double.NEGATIVE_INFINITY</code>
* to define infinity numbers
*
* @param endScoreInclusive - end score inclusive
* @param offset - offset of sorted data
* @param count - amount of sorted data
* @return entries
*/
Single<Collection<ScoredEntry<V>>> entryRange(double startScore, boolean startScoreInclusive, double endScore, boolean endScoreInclusive, int offset, int count);
/**
* Returns values by rank range in reverse order. Indexes are zero based.
* <code>-1</code> means the highest score, <code>-2</code> means the second highest score.
*
* @param startIndex - start index
* @param endIndex - end index
* @return elements
*/
Single<Collection<V>> valueRangeReversed(int startIndex, int endIndex);
/**
* Returns all values between <code>startScore</code> and <code>endScore</code> in reversed order.
*
* @param startScore - start score.
* Use <code>Double.POSITIVE_INFINITY</code> or <code>Double.NEGATIVE_INFINITY</code>
* to define infinity numbers
* @param startScoreInclusive - start score inclusive
* @param endScore - end score
* Use <code>Double.POSITIVE_INFINITY</code> or <code>Double.NEGATIVE_INFINITY</code>
* to define infinity numbers
*
* @param endScoreInclusive - end score inclusive
* @return values
*/
Single<Collection<V>> valueRangeReversed(double startScore, boolean startScoreInclusive, double endScore, boolean endScoreInclusive);
/**
* Returns all values between <code>startScore</code> and <code>endScore</code> in reversed order.
*
* @param startScore - start score.
* Use <code>Double.POSITIVE_INFINITY</code> or <code>Double.NEGATIVE_INFINITY</code>
* to define infinity numbers
* @param startScoreInclusive - start score inclusive
* @param endScore - end score
* Use <code>Double.POSITIVE_INFINITY</code> or <code>Double.NEGATIVE_INFINITY</code>
* to define infinity numbers
*
* @param endScoreInclusive - end score inclusive
* @param offset - offset of sorted data
* @param count - amount of sorted data
* @return values
*/
Single<Collection<V>> valueRangeReversed(double startScore, boolean startScoreInclusive, double endScore, boolean endScoreInclusive, int offset, int count);
/**
* Returns entries (value and its score) by rank range in reverse order. Indexes are zero based.
* <code>-1</code> means the highest score, <code>-2</code> means the second highest score.
*
* @param startIndex - start index
* @param endIndex - end index
* @return entries
*/
Single<Collection<ScoredEntry<V>>> entryRangeReversed(int startIndex, int endIndex);
/**
* Returns all entries (value and its score) between <code>startScore</code> and <code>endScore</code> in reversed order.
*
* @param startScore - start score.
* Use <code>Double.POSITIVE_INFINITY</code> or <code>Double.NEGATIVE_INFINITY</code>
* to define infinity numbers
* @param startScoreInclusive - start score inclusive
* @param endScore - end score
* Use <code>Double.POSITIVE_INFINITY</code> or <code>Double.NEGATIVE_INFINITY</code>
* to define infinity numbers
*
* @param endScoreInclusive - end score inclusive
* @return entries
*/
Single<Collection<ScoredEntry<V>>> entryRangeReversed(double startScore, boolean startScoreInclusive, double endScore, boolean endScoreInclusive);
/**
* Returns all entries (value and its score) between <code>startScore</code> and <code>endScore</code> in reversed order.
*
* @param startScore - start score.
* Use <code>Double.POSITIVE_INFINITY</code> or <code>Double.NEGATIVE_INFINITY</code>
* to define infinity numbers
* @param startScoreInclusive - start score inclusive
* @param endScore - end score
* Use <code>Double.POSITIVE_INFINITY</code> or <code>Double.NEGATIVE_INFINITY</code>
* to define infinity numbers
*
* @param endScoreInclusive - end score inclusive
* @param offset - offset of sorted data
* @param count - amount of sorted data
* @return entries
*/
Single<Collection<ScoredEntry<V>>> entryRangeReversed(double startScore, boolean startScoreInclusive, double endScore, boolean endScoreInclusive, int offset, int count);
/**
* Returns the number of elements with a score between <code>startScore</code> and <code>endScore</code>.
*
* @param startScore - start score
* @param startScoreInclusive - start score inclusive
* @param endScore - end score
* @param endScoreInclusive - end score inclusive
* @return count
*/
Single<Integer> count(double startScore, boolean startScoreInclusive, double endScore, boolean endScoreInclusive);
/**
* Read all values at once.
*
* @return values
*/
Single<Collection<V>> readAll();
/**
* Use {@link #intersection(SetIntersectionArgs)} instead.
* <p>
* Intersect provided ScoredSortedSets
* and store result to current ScoredSortedSet
*
* @param names - names of ScoredSortedSet
* @return length of intersection
*/
@Deprecated
Single<Integer> intersection(String... names);
/**
* Use {@link #intersection(SetIntersectionArgs)} instead.
* <p>
* Intersect provided ScoredSortedSets with defined aggregation method
* and store result to current ScoredSortedSet
*
* @param aggregate - score aggregation mode
* @param names - names of ScoredSortedSet
* @return length of intersection
*/
@Deprecated
Single<Integer> intersection(Aggregate aggregate, String... names);
/**
* Use {@link #intersection(SetIntersectionArgs)} instead.
* <p>
* Intersect provided ScoredSortedSets mapped to weight multiplier
* and store result to current ScoredSortedSet
*
* @param nameWithWeight - name of ScoredSortedSet mapped to weight multiplier
* @return length of intersection
*/
@Deprecated
Single<Integer> intersection(Map<String, Double> nameWithWeight);
/**
* Use {@link #intersection(SetIntersectionArgs)} instead.
* <p>
* Intersect provided ScoredSortedSets mapped to weight multiplier
* with defined aggregation method
* and store result to current ScoredSortedSet
*
* @param aggregate - score aggregation mode
* @param nameWithWeight - name of ScoredSortedSet mapped to weight multiplier
* @return length of intersection
*/
@Deprecated
Single<Integer> intersection(Aggregate aggregate, Map<String, Double> nameWithWeight);
/**
* Intersect provided ScoredSortedSets mapped to weight multiplier
* with defined aggregation method
* and store result to current ScoredSortedSet
*
* @param args object
* @return length of intersection
*/
Single<Integer> intersection(SetIntersectionArgs args);
/**
* Use {@link #readIntersection(SetIntersectionArgs)} instead.
* <p>
* Intersect provided ScoredSortedSets
* with current ScoredSortedSet without state change
* <p>
* Requires <b>Redis 6.2.0 and higher.</b>
*
* @param names - names of ScoredSortedSet
* @return result of intersection
*/
@Deprecated
Single<Collection<V>> readIntersection(String... names);
/**
* Use {@link #readIntersection(SetIntersectionArgs)} instead.
* <p>
* Intersect provided ScoredSortedSets with current ScoredSortedSet using defined aggregation method
* without state change
* <p>
* Requires <b>Redis 6.2.0 and higher.</b>
*
* @param aggregate - score aggregation mode
* @param names - names of ScoredSortedSet
* @return result of intersection
*/
@Deprecated
Single<Collection<V>> readIntersection(Aggregate aggregate, String... names);
/**
* Use {@link #readIntersection(SetIntersectionArgs)} instead.
* <p>
* Intersect provided ScoredSortedSets mapped to weight multiplier
* with current ScoredSortedSet without state change
* <p>
* Requires <b>Redis 6.2.0 and higher.</b>
*
* @param nameWithWeight - name of ScoredSortedSet mapped to weight multiplier
* @return result of intersection
*/
@Deprecated
Single<Collection<V>> readIntersection(Map<String, Double> nameWithWeight);
/**
* Use {@link #readIntersection(SetIntersectionArgs)} instead.
* <p>
* Intersect provided ScoredSortedSets mapped to weight multiplier
* with current ScoredSortedSet using defined aggregation method
* without state change
* <p>
* Requires <b>Redis 6.2.0 and higher.</b>
*
* @param aggregate - score aggregation mode
* @param nameWithWeight - name of ScoredSortedSet mapped to weight multiplier
* @return result of intersection
*/
@Deprecated
Single<Collection<V>> readIntersection(Aggregate aggregate, Map<String, Double> nameWithWeight);
/**
* Intersect provided ScoredSortedSets
* with current ScoredSortedSet
* <p>
* Requires <b>Redis 6.2.0 and higher.</b>
*
* @param args object
* @return result of intersection
*/
Single<Collection<V>> readIntersection(SetIntersectionArgs args);
/**
* Intersect provided ScoredSortedSets
* with current ScoredSortedSet
* <p>
* Requires <b>Redis 6.2.0 and higher.</b>
*
* @param args object
* @return result of intersection entries (value and its score)
*/
Single<Collection<ScoredEntry<V>>> readIntersectionEntries(SetIntersectionArgs args);
/**
* Counts elements of set as a result of sets intersection with current set.
* <p>
* Requires <b>Redis 7.0.0 and higher.</b>
*
* @param names - name of sets
* @return amount of elements
*/
Single<Integer> countIntersection(String... names);
/**
* Counts elements of set as a result of sets intersection with current set.
* <p>
* Requires <b>Redis 7.0.0 and higher.</b>
*
* @param names - name of sets
* @param limit - sets intersection limit
* @return amount of elements
*/
Single<Integer> countIntersection(int limit, String... names);
/**
* Use {@link #union(SetUnionArgs)} instead.
* <p>
* Union provided ScoredSortedSets
* and store result to current ScoredSortedSet
*
* @param names - names of ScoredSortedSet
* @return length of union
*/
@Deprecated
Single<Integer> union(String... names);
/**
* Use {@link #union(SetUnionArgs)} instead.
* <p>
* Union provided ScoredSortedSets with defined aggregation method
* and store result to current ScoredSortedSet
*
* @param aggregate - score aggregation mode
* @param names - names of ScoredSortedSet
* @return length of union
*/
@Deprecated
Single<Integer> union(Aggregate aggregate, String... names);
/**
* Use {@link #union(SetUnionArgs)} instead.
* <p>
* Union provided ScoredSortedSets mapped to weight multiplier
* and store result to current ScoredSortedSet
*
* @param nameWithWeight - name of ScoredSortedSet mapped to weight multiplier
* @return length of union
*/
@Deprecated
Single<Integer> union(Map<String, Double> nameWithWeight);
/**
* Use {@link #union(SetUnionArgs)} instead.
* <p>
* Union provided ScoredSortedSets mapped to weight multiplier
* with defined aggregation method
* and store result to current ScoredSortedSet
*
* @param aggregate - score aggregation mode
* @param nameWithWeight - name of ScoredSortedSet mapped to weight multiplier
* @return length of union
*/
@Deprecated
Single<Integer> union(Aggregate aggregate, Map<String, Double> nameWithWeight);
/**
* Union provided ScoredSortedSets mapped to weight multiplier
* with defined aggregation method
* and store result to current ScoredSortedSet
*
* @param args object
* @return length of union
*/
Single<Integer> union(SetUnionArgs args);
/**
* Use {@link #readUnion(SetUnionArgs)} instead.
* <p>
* Union ScoredSortedSets specified by name with current ScoredSortedSet
* without state change.
* <p>
* Requires <b>Redis 6.2.0 and higher.</b>
*
* @param names - names of ScoredSortedSet
* @return result of union
*/
@Deprecated
Single<Collection<V>> readUnion(String... names);
/**
* Use {@link #readUnion(SetUnionArgs)} instead.
* <p>
* Union ScoredSortedSets specified by name with defined aggregation method
* and current ScoredSortedSet without state change.
* <p>
* Requires <b>Redis 6.2.0 and higher.</b>
*
* @param aggregate - score aggregation mode
* @param names - names of ScoredSortedSet
* @return result of union
*/
@Deprecated
Single<Collection<V>> readUnion(Aggregate aggregate, String... names);
/**
* Use {@link #readUnion(SetUnionArgs)} instead.
* <p>
* Union provided ScoredSortedSets mapped to weight multiplier
* and current ScoredSortedSet without state change.
* <p>
* Requires <b>Redis 6.2.0 and higher.</b>
*
* @param nameWithWeight - name of ScoredSortedSet mapped to weight multiplier
* @return result of union
*/
@Deprecated
Single<Collection<V>> readUnion(Map<String, Double> nameWithWeight);
/**
* Use {@link #readUnion(SetUnionArgs)} instead.
* <p>
* Union provided ScoredSortedSets mapped to weight multiplier
* with defined aggregation method
* and current ScoredSortedSet without state change
* <p>
* Requires <b>Redis 6.2.0 and higher.</b>
*
* @param aggregate - score aggregation mode
* @param nameWithWeight - name of ScoredSortedSet mapped to weight multiplier
* @return result of union
*/
@Deprecated
Single<Collection<V>> readUnion(Aggregate aggregate, Map<String, Double> nameWithWeight);
/**
* Union provided ScoredSortedSets mapped to weight multiplier
* with defined aggregation method
* and current ScoredSortedSet without state change
* <p>
* Requires <b>Redis 6.2.0 and higher.</b>
*
* @param args object
* @return result of union
*/
Single<Collection<V>> readUnion(SetUnionArgs args);
/**
* Union provided ScoredSortedSets mapped to weight multiplier
* with defined aggregation method
* and current ScoredSortedSet without state change
* <p>
* Requires <b>Redis 6.2.0 and higher.</b>
*
* @param args object
* @return result of union entries (value and its score)
*/
Single<Collection<ScoredEntry<V>>> readUnionEntries(SetUnionArgs args);
/**
* Diff ScoredSortedSets specified by name
* with current ScoredSortedSet without state change.
* <p>
* Requires <b>Redis 6.2.0 and higher.</b>
*
* @param names - name of sets
* @return result of diff
*/
Single<Collection<V>> readDiff(String... names);
/**
* Diff ScoredSortedSets specified by name
* with current ScoredSortedSet without state change.
* <p>
* Requires <b>Redis 6.2.0 and higher.</b>
*
* @param names - name of sets
* @return result of diff entries (value and its score)
*/
Single<Collection<ScoredEntry<V>>> readDiffEntries(String... names);
/**
* Diff provided ScoredSortedSets
* and store result to current ScoredSortedSet
* <p>
* Requires <b>Redis 6.2.0 and higher.</b>
*
* @param names - name of sets
* @return length of diff
*/
Single<Integer> diff(String... names);
/**
* Removes and returns the head element waiting if necessary for an element to become available.
*
* @return the head element
*/
Single<V> takeFirst();
/**
* Removes and returns the tail element waiting if necessary for an element to become available.
*
* @return the tail element
*/
Single<V> takeLast();
/**
* Retrieves and removes continues stream of elements from the head.
* Waits for next element become available.
*
* @return stream of head elements
*/
Flowable<V> takeFirstElements();
/**
* Retrieves and removes continues stream of elements from the tail.
* Waits for next element become available.
*
* @return stream of tail elements
*/
Flowable<V> takeLastElements();
/**
* Adds object event listener
*
* @see org.redisson.api.listener.TrackingListener
* @see org.redisson.api.listener.ScoredSortedSetAddListener
* @see org.redisson.api.listener.ScoredSortedSetRemoveListener
* @see org.redisson.api.ExpiredObjectListener
* @see org.redisson.api.DeletedObjectListener
*
* @param listener - object event listener
* @return listener id
*/
Single<Integer> addListener(ObjectListener listener);
}
| RScoredSortedSetRx |
java | quarkusio__quarkus | extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/scanning/DuplicateTemplatesResolvedTest.java | {
"start": 365,
"end": 906
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot(root -> root.addAsResource(new StringAsset("Hello!"), "templates/hello.html"))
.withAdditionalDependency(
d -> d.addAsResource(new StringAsset("Hi!"), "templates/hello.html"));
@Inject
Template hello;
@Test
public void testHello() {
// Root archive takes precedence
assertEquals("Hello!", hello.render());
}
}
| DuplicateTemplatesResolvedTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/cid/keymanytoone/association/Key.java | {
"start": 389,
"end": 626
} | class ____ implements Serializable {
@Id
private String id;
private String name;
public Key(String id) {
this.id = id;
}
Key() {
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
}
| Key |
java | elastic__elasticsearch | x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/jinaai/embeddings/JinaAIEmbeddingsServiceSettings.java | {
"start": 1770,
"end": 8159
} | class ____ extends FilteredXContentObject implements ServiceSettings {
public static final String NAME = "jinaai_embeddings_service_settings";
static final String EMBEDDING_TYPE = "embedding_type";
public static JinaAIEmbeddingsServiceSettings fromMap(Map<String, Object> map, ConfigurationParseContext context) {
ValidationException validationException = new ValidationException();
var commonServiceSettings = JinaAIServiceSettings.fromMap(map, context);
SimilarityMeasure similarity = extractSimilarity(map, ModelConfigurations.SERVICE_SETTINGS, validationException);
Integer dims = removeAsType(map, DIMENSIONS, Integer.class);
Integer maxInputTokens = removeAsType(map, MAX_INPUT_TOKENS, Integer.class);
JinaAIEmbeddingType embeddingTypes = parseEmbeddingType(map, validationException);
if (validationException.validationErrors().isEmpty() == false) {
throw validationException;
}
return new JinaAIEmbeddingsServiceSettings(commonServiceSettings, similarity, dims, maxInputTokens, embeddingTypes);
}
static JinaAIEmbeddingType parseEmbeddingType(Map<String, Object> map, ValidationException validationException) {
return Objects.requireNonNullElse(
extractOptionalEnum(
map,
EMBEDDING_TYPE,
ModelConfigurations.SERVICE_SETTINGS,
JinaAIEmbeddingType::fromString,
EnumSet.allOf(JinaAIEmbeddingType.class),
validationException
),
JinaAIEmbeddingType.FLOAT
);
}
private static final TransportVersion JINA_AI_EMBEDDING_TYPE_SUPPORT_ADDED = TransportVersion.fromName(
"jina_ai_embedding_type_support_added"
);
private final JinaAIServiceSettings commonSettings;
private final SimilarityMeasure similarity;
private final Integer dimensions;
private final Integer maxInputTokens;
private final JinaAIEmbeddingType embeddingType;
public JinaAIEmbeddingsServiceSettings(
JinaAIServiceSettings commonSettings,
@Nullable SimilarityMeasure similarity,
@Nullable Integer dimensions,
@Nullable Integer maxInputTokens,
@Nullable JinaAIEmbeddingType embeddingType
) {
this.commonSettings = commonSettings;
this.similarity = similarity;
this.dimensions = dimensions;
this.maxInputTokens = maxInputTokens;
this.embeddingType = embeddingType != null ? embeddingType : JinaAIEmbeddingType.FLOAT;
}
public JinaAIEmbeddingsServiceSettings(StreamInput in) throws IOException {
this.commonSettings = new JinaAIServiceSettings(in);
this.similarity = in.readOptionalEnum(SimilarityMeasure.class);
this.dimensions = in.readOptionalVInt();
this.maxInputTokens = in.readOptionalVInt();
this.embeddingType = (in.getTransportVersion().supports(JINA_AI_EMBEDDING_TYPE_SUPPORT_ADDED))
? Objects.requireNonNullElse(in.readOptionalEnum(JinaAIEmbeddingType.class), JinaAIEmbeddingType.FLOAT)
: JinaAIEmbeddingType.FLOAT;
}
public JinaAIServiceSettings getCommonSettings() {
return commonSettings;
}
@Override
public SimilarityMeasure similarity() {
return similarity;
}
@Override
public Integer dimensions() {
return dimensions;
}
public Integer maxInputTokens() {
return maxInputTokens;
}
@Override
public String modelId() {
return commonSettings.modelId();
}
public JinaAIEmbeddingType getEmbeddingType() {
return embeddingType;
}
@Override
public DenseVectorFieldMapper.ElementType elementType() {
return embeddingType == null ? DenseVectorFieldMapper.ElementType.FLOAT : embeddingType.toElementType();
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder = commonSettings.toXContentFragment(builder, params);
if (similarity != null) {
builder.field(SIMILARITY, similarity);
}
if (dimensions != null) {
builder.field(DIMENSIONS, dimensions);
}
if (maxInputTokens != null) {
builder.field(MAX_INPUT_TOKENS, maxInputTokens);
}
if (embeddingType != null) {
builder.field(EMBEDDING_TYPE, embeddingType);
}
builder.endObject();
return builder;
}
@Override
protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, Params params) throws IOException {
commonSettings.toXContentFragmentOfExposedFields(builder, params);
if (embeddingType != null) {
builder.field(EMBEDDING_TYPE, embeddingType);
}
return builder;
}
@Override
public TransportVersion getMinimalSupportedVersion() {
return TransportVersions.V_8_18_0;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
commonSettings.writeTo(out);
out.writeOptionalEnum(SimilarityMeasure.translateSimilarity(similarity, out.getTransportVersion()));
out.writeOptionalVInt(dimensions);
out.writeOptionalVInt(maxInputTokens);
if (out.getTransportVersion().supports(JINA_AI_EMBEDDING_TYPE_SUPPORT_ADDED)) {
out.writeOptionalEnum(JinaAIEmbeddingType.translateToVersion(embeddingType, out.getTransportVersion()));
}
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
JinaAIEmbeddingsServiceSettings that = (JinaAIEmbeddingsServiceSettings) o;
return Objects.equals(commonSettings, that.commonSettings)
&& Objects.equals(similarity, that.similarity)
&& Objects.equals(dimensions, that.dimensions)
&& Objects.equals(maxInputTokens, that.maxInputTokens)
&& Objects.equals(embeddingType, that.embeddingType);
}
@Override
public int hashCode() {
return Objects.hash(commonSettings, similarity, dimensions, maxInputTokens, embeddingType);
}
}
| JinaAIEmbeddingsServiceSettings |
java | spring-projects__spring-security | docs/src/test/java/org/springframework/security/docs/servlet/configuration/toplevelcustomizerbean/TopLevelCustomizerBeanTests.java | {
"start": 2068,
"end": 2603
} | class ____ {
public final SpringTestContext spring = new SpringTestContext(this);
@Autowired
private MockMvc mockMvc;
@Test
void headersCustomizer() throws Exception {
this.spring.register(TopLevelCustomizerBeanConfiguration.class).autowire();
// @formatter:off
this.mockMvc
.perform(get("/"))
.andExpect(cspIsObjectSrcNone());
// @formatter:on
}
private static @NotNull ResultMatcher cspIsObjectSrcNone() {
return header().string("Content-Security-Policy", "object-src 'none'");
}
}
| TopLevelCustomizerBeanTests |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/context/annotation/ImportAwareAotBeanPostProcessor.java | {
"start": 2288,
"end": 2815
} | class ____ imported
}
try {
MetadataReader metadataReader = this.metadataReaderFactory.getMetadataReader(importingClass);
instance.setImportMetadata(metadataReader.getAnnotationMetadata());
}
catch (IOException ex) {
throw new IllegalStateException(String.format("Failed to read metadata for '%s'", importingClass), ex);
}
}
private @Nullable String getImportingClassFor(ImportAware instance) {
String target = ClassUtils.getUserClass(instance).getName();
return this.importsMapping.get(target);
}
}
| not |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/dialect/unique/AlterTableUniqueDelegate.java | {
"start": 671,
"end": 3340
} | class ____ implements UniqueDelegate {
protected final Dialect dialect;
/**
* @param dialect The dialect for which we are handling unique constraints
*/
public AlterTableUniqueDelegate(Dialect dialect ) {
this.dialect = dialect;
}
// legacy model ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@Override
public String getColumnDefinitionUniquenessFragment(Column column,
SqlStringGenerationContext context) {
return "";
}
@Override
public String getTableCreationUniqueConstraintsFragment(Table table,
SqlStringGenerationContext context) {
return "";
}
@Override
public String getAlterTableToAddUniqueKeyCommand(UniqueKey uniqueKey, Metadata metadata,
SqlStringGenerationContext context) {
final String tableName = context.format( uniqueKey.getTable().getQualifiedTableName() );
final String constraintName = dialect.quote( uniqueKey.getName() );
return dialect.getAlterTableString( tableName )
+ " add constraint " + constraintName + " " + uniqueConstraintSql( uniqueKey );
}
protected String uniqueConstraintSql(UniqueKey uniqueKey) {
final StringBuilder fragment = new StringBuilder();
fragment.append( "unique (" );
boolean first = true;
for ( Column column : uniqueKey.getColumns() ) {
if ( first ) {
first = false;
}
else {
fragment.append(", ");
}
fragment.append( column.getQuotedName( dialect ) );
if ( uniqueKey.getColumnOrderMap().containsKey( column ) ) {
fragment.append( " " ).append( uniqueKey.getColumnOrderMap().get( column ) );
}
}
fragment.append( ')' );
if ( isNotEmpty( uniqueKey.getOptions() ) ) {
fragment.append( " " ).append( uniqueKey.getOptions() );
}
return fragment.toString();
}
@Override
public String getAlterTableToDropUniqueKeyCommand(UniqueKey uniqueKey, Metadata metadata,
SqlStringGenerationContext context) {
final String tableName = context.format( uniqueKey.getTable().getQualifiedTableName() );
final StringBuilder command = new StringBuilder( dialect.getAlterTableString(tableName) );
command.append( ' ' );
command.append( dialect.getDropUniqueKeyString() );
if ( dialect.supportsIfExistsBeforeConstraintName() ) {
command.append( " if exists " );
command.append( dialect.quote( uniqueKey.getName() ) );
}
else if ( dialect.supportsIfExistsAfterConstraintName() ) {
command.append( ' ' );
command.append( dialect.quote( uniqueKey.getName() ) );
command.append( " if exists" );
}
else {
command.append( ' ' );
command.append( dialect.quote( uniqueKey.getName() ) );
}
return command.toString();
}
}
| AlterTableUniqueDelegate |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/test/java/org/springframework/data/jpa/repository/sample/UserRepository.java | {
"start": 30040,
"end": 30406
} | interface ____ {
int getId();
}
record UserExcerpt(String firstname, String lastname) {
}
record AddressDto(String country, String city) {
public AddressDto(Address address) {
this(address != null ? address.getCountry() : null, address != null ? address.getCity() : null);
}
}
record UserRoleCountDtoProjection(User user, Long roleCount) {
}
| IdOnly |
java | spring-projects__spring-security | web/src/main/java/org/springframework/security/web/authentication/ui/DefaultOneTimeTokenSubmitPageGeneratingFilter.java | {
"start": 1732,
"end": 5914
} | class ____ extends OncePerRequestFilter {
public static final String DEFAULT_SUBMIT_PAGE_URL = "/login/ott";
private RequestMatcher requestMatcher = PathPatternRequestMatcher.withDefaults()
.matcher(HttpMethod.GET, DEFAULT_SUBMIT_PAGE_URL);
private Function<HttpServletRequest, Map<String, String>> resolveHiddenInputs = (request) -> Collections.emptyMap();
private String loginProcessingUrl = OneTimeTokenAuthenticationFilter.DEFAULT_LOGIN_PROCESSING_URL;
@Override
protected void doFilterInternal(HttpServletRequest request, HttpServletResponse response, FilterChain filterChain)
throws ServletException, IOException {
if (!this.requestMatcher.matches(request)) {
filterChain.doFilter(request, response);
return;
}
String html = generateHtml(request);
response.setContentType("text/html;charset=UTF-8");
response.setContentLength(html.getBytes(StandardCharsets.UTF_8).length);
response.getWriter().write(html);
}
private String generateHtml(HttpServletRequest request) {
String contextPath = request.getContextPath();
String token = request.getParameter("token");
String tokenValue = StringUtils.hasText(token) ? token : "";
String hiddenInputs = this.resolveHiddenInputs.apply(request)
.entrySet()
.stream()
.map((inputKeyValue) -> renderHiddenInput(inputKeyValue.getKey(), inputKeyValue.getValue()))
.collect(Collectors.joining("\n"));
return HtmlTemplates.fromTemplate(ONE_TIME_TOKEN_SUBMIT_PAGE_TEMPLATE)
.withValue("contextPath", contextPath)
.withValue("tokenValue", tokenValue)
.withValue("loginProcessingUrl", contextPath + this.loginProcessingUrl)
.withRawHtml("hiddenInputs", hiddenInputs)
.render();
}
private String renderHiddenInput(String name, String value) {
return HtmlTemplates.fromTemplate(HIDDEN_HTML_INPUT_TEMPLATE)
.withValue("name", name)
.withValue("value", value)
.render();
}
/**
* Sets a Function used to resolve a Map of the hidden inputs where the key is the
* name of the input and the value is the value of the input.
* @param resolveHiddenInputs the function to resolve the inputs
*/
public void setResolveHiddenInputs(Function<HttpServletRequest, Map<String, String>> resolveHiddenInputs) {
Assert.notNull(resolveHiddenInputs, "resolveHiddenInputs cannot be null");
this.resolveHiddenInputs = resolveHiddenInputs;
}
/**
* Use this {@link RequestMatcher} to choose whether this filter will handle the
* request. By default, it handles {@code /login/ott}.
* @param requestMatcher the {@link RequestMatcher} to use
*/
public void setRequestMatcher(RequestMatcher requestMatcher) {
Assert.notNull(requestMatcher, "requestMatcher cannot be null");
this.requestMatcher = requestMatcher;
}
/**
* Specifies the URL that the submit form should POST to. Defaults to
* {@code /login/ott}.
* @param loginProcessingUrl
*/
public void setLoginProcessingUrl(String loginProcessingUrl) {
Assert.hasText(loginProcessingUrl, "loginProcessingUrl cannot be null or empty");
this.loginProcessingUrl = loginProcessingUrl;
}
private static final String ONE_TIME_TOKEN_SUBMIT_PAGE_TEMPLATE = """
<!DOCTYPE html>
<html lang="en">
<head>
<title>One-Time Token Login</title>
<meta charset="utf-8"/>
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no"/>
<link href="{{contextPath}}/default-ui.css" rel="stylesheet" />
</head>
<body>
<div class="container">
<form class="login-form" action="{{loginProcessingUrl}}" method="post">
<h2>Please input the token</h2>
<p>
<label for="token" class="screenreader">Token</label>
<input type="text" id="token" name="token" value="{{tokenValue}}" placeholder="Token" required="true" autofocus="autofocus"/>
</p>
<button class="primary" type="submit">Sign in</button>
{{hiddenInputs}}
</form>
</div>
</body>
</html>
""";
private static final String HIDDEN_HTML_INPUT_TEMPLATE = """
<input name="{{name}}" type="hidden" value="{{value}}" />
""";
}
| DefaultOneTimeTokenSubmitPageGeneratingFilter |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/SalesforceEndpointBuilderFactory.java | {
"start": 159466,
"end": 167623
} | class ____ {
/**
* The internal instance of the builder used to access to all the
* methods representing the name of headers.
*/
private static final SalesforceHeaderNameBuilder INSTANCE = new SalesforceHeaderNameBuilder();
/**
* The Streaming API replayId.
*
* The option is a: {@code Object} type.
*
* Group: consumer
*
* @return the name of the header {@code SalesforceReplayId}.
*/
public String salesforceReplayId() {
return "CamelSalesforceReplayId";
}
/**
* The change event schema.
*
* The option is a: {@code Object} type.
*
* Group: consumer
*
* @return the name of the header {@code SalesforceChangeEventSchema}.
*/
public String salesforceChangeEventSchema() {
return "CamelSalesforceChangeEventSchema";
}
/**
* The event type.
*
* The option is a: {@code String} type.
*
* Group: consumer
*
* @return the name of the header {@code SalesforceEventType}.
*/
public String salesforceEventType() {
return "CamelSalesforceEventType";
}
/**
* The commit timestamp.
*
* The option is a: {@code Object} type.
*
* Group: consumer
*
* @return the name of the header {@code SalesforceCommitTimestamp}.
*/
public String salesforceCommitTimestamp() {
return "CamelSalesforceCommitTimestamp";
}
/**
* The commit user.
*
* The option is a: {@code Object} type.
*
* Group: consumer
*
* @return the name of the header {@code SalesforceCommitUser}.
*/
public String salesforceCommitUser() {
return "CamelSalesforceCommitUser";
}
/**
* The commit number.
*
* The option is a: {@code Object} type.
*
* Group: consumer
*
* @return the name of the header {@code SalesforceCommitNumber}.
*/
public String salesforceCommitNumber() {
return "CamelSalesforceCommitNumber";
}
/**
* The record ids.
*
* The option is a: {@code Object} type.
*
* Group: consumer
*
* @return the name of the header {@code SalesforceRecordIds}.
*/
public String salesforceRecordIds() {
return "CamelSalesforceRecordIds";
}
/**
* The change type.
*
* The option is a: {@code Object} type.
*
* Group: consumer
*
* @return the name of the header {@code SalesforceChangeType}.
*/
public String salesforceChangeType() {
return "CamelSalesforceChangeType";
}
/**
* The change origin.
*
* The option is a: {@code Object} type.
*
* Group: consumer
*
* @return the name of the header {@code SalesforceChangeOrigin}.
*/
public String salesforceChangeOrigin() {
return "CamelSalesforceChangeOrigin";
}
/**
* The transaction key.
*
* The option is a: {@code Object} type.
*
* Group: consumer
*
* @return the name of the header {@code SalesforceTransactionKey}.
*/
public String salesforceTransactionKey() {
return "CamelSalesforceTransactionKey";
}
/**
* The sequence number.
*
* The option is a: {@code Object} type.
*
* Group: consumer
*
* @return the name of the header {@code SalesforceSequenceNumber}.
*/
public String salesforceSequenceNumber() {
return "CamelSalesforceSequenceNumber";
}
/**
* Is transaction end.
*
* The option is a: {@code Object} type.
*
* Group: consumer
*
* @return the name of the header {@code SalesforceIsTransactionEnd}.
*/
public String salesforceIsTransactionEnd() {
return "CamelSalesforceIsTransactionEnd";
}
/**
* The entity name.
*
* The option is a: {@code Object} type.
*
* Group: consumer
*
* @return the name of the header {@code SalesforceEntityName}.
*/
public String salesforceEntityName() {
return "CamelSalesforceEntityName";
}
/**
* The platform event schema.
*
* The option is a: {@code Object} type.
*
* Group: consumer
*
* @return the name of the header {@code SalesforcePlatformEventSchema}.
*/
public String salesforcePlatformEventSchema() {
return "CamelSalesforcePlatformEventSchema";
}
/**
* The created date.
*
* The option is a: {@code java.time.ZonedDateTime} type.
*
* Group: consumer
*
* @return the name of the header {@code SalesforceCreatedDate}.
*/
public String salesforceCreatedDate() {
return "CamelSalesforceCreatedDate";
}
/**
* The topic name.
*
* The option is a: {@code String} type.
*
* Group: consumer
*
* @return the name of the header {@code SalesforceTopicName}.
*/
public String salesforceTopicName() {
return "CamelSalesforceTopicName";
}
/**
* The channel.
*
* The option is a: {@code String} type.
*
* Group: consumer
*
* @return the name of the header {@code SalesforceChannel}.
*/
public String salesforceChannel() {
return "CamelSalesforceChannel";
}
/**
* The client id.
*
* The option is a: {@code String} type.
*
* Group: consumer
*
* @return the name of the header {@code SalesforceClientId}.
*/
public String salesforceClientId() {
return "CamelSalesforceClientId";
}
/**
* The Pub/Sub API replayId.
*
* The option is a: {@code String} type.
*
* Group: consumer
*
* @return the name of the header {@code SalesforcePubSubReplayId}.
*/
public String salesforcePubSubReplayId() {
return "CamelSalesforcePubSubReplayId";
}
/**
* The Pub/Sub API event id.
*
* The option is a: {@code String} type.
*
* Group: consumer
*
* @return the name of the header {@code SalesforcePubSubEventId}.
*/
public String salesforcePubSubEventId() {
return "CamelSalesforcePubSubEventId";
}
/**
* The Pub/Sub API rpc id.
*
* The option is a: {@code String} type.
*
* Group: consumer
*
* @return the name of the header {@code SalesforcePubSubRpcId}.
*/
public String salesforcePubSubRpcId() {
return "CamelSalesforcePubSubRpcId";
}
/**
* Total number of records matching a query.
*
* The option is a: {@code int} type.
*
* Group: producer
*
* @return the name of the header {@code
* SalesforceQueryResultTotalSize}.
*/
public String salesforceQueryResultTotalSize() {
return "CamelSalesforceQueryResultTotalSize";
}
}
static SalesforceEndpointBuilder endpointBuilder(String componentName, String path) {
| SalesforceHeaderNameBuilder |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/FailOverNotCatchedExceptionTest.java | {
"start": 1362,
"end": 3761
} | class ____ extends ContextTestSupport {
protected MockEndpoint x;
protected MockEndpoint y;
protected MockEndpoint z;
@Override
@BeforeEach
public void setUp() throws Exception {
super.setUp();
x = getMockEndpoint("mock:x");
y = getMockEndpoint("mock:y");
z = getMockEndpoint("mock:z");
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
// START SNIPPET: e1
from("direct:start")
// here we will load balance if IOException was thrown
// any other kind of exception will result in the Exchange
// as failed
// to failover over any kind of exception we can just omit
// the exception
// in the failOver DSL
.loadBalance().failover(IOException.class).to("direct:x", "direct:y", "direct:z");
// END SNIPPET: e1
from("direct:x").to("mock:x").process(new Processor() {
public void process(Exchange exchange) throws Exception {
throw new SocketException("Forced");
}
});
from("direct:y").to("mock:y").process(new Processor() {
public void process(Exchange exchange) {
throw new IllegalArgumentException("Illegal");
}
});
from("direct:z").to("mock:z");
}
};
}
@Test
public void testExceptionNotCatched() throws Exception {
x.expectedMessageCount(1);
y.expectedMessageCount(1);
z.expectedMessageCount(0);
// to test that if a processor throw an exception that the failover
// loadbalancer
// do not catch then the exception is propagated back
try {
template.sendBody("direct:start", "Hello World");
fail("Should have thrown exception");
} catch (CamelExecutionException e) {
assertEquals("Illegal", e.getCause().getMessage());
assertIsInstanceOf(IllegalArgumentException.class, e.getCause());
}
assertMockEndpointsSatisfied();
}
}
| FailOverNotCatchedExceptionTest |
java | apache__camel | components/camel-jdbc/src/test/java/org/apache/camel/component/jdbc/JdbcProducerConcurrenctTest.java | {
"start": 1319,
"end": 3302
} | class ____ extends AbstractJdbcTestSupport {
@EndpointInject("mock:result")
private MockEndpoint mock;
@Test
public void testNoConcurrentProducers() throws Exception {
doSendMessages(1, 1);
}
@Test
public void testConcurrentProducers() throws Exception {
doSendMessages(10, 5);
}
@SuppressWarnings("rawtypes")
private void doSendMessages(int files, int poolSize) throws Exception {
mock.expectedMessageCount(files);
ExecutorService executor = Executors.newFixedThreadPool(poolSize);
// we access the responses Map below only inside the main thread,
// so no need for a thread-safe Map implementation
Map<Integer, Future<List<?>>> responses = new HashMap<>();
for (int i = 0; i < files; i++) {
final int index = i;
Future<List<?>> out = executor.submit(new Callable<List<?>>() {
public List<?> call() {
int id = (index % 2) + 1;
return template.requestBody("direct:start", "select * from customer where id = 'cust" + id + "'",
List.class);
}
});
responses.put(index, out);
}
MockEndpoint.assertIsSatisfied(context);
assertEquals(files, responses.size());
for (int i = 0; i < files; i++) {
List<?> rows = responses.get(i).get();
Map columns = (Map) rows.get(0);
if (i % 2 == 0) {
assertEquals("jstrachan", columns.get("NAME"));
} else {
assertEquals("nsandhu", columns.get("NAME"));
}
}
executor.shutdownNow();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("direct:start").to("jdbc:testdb").to("mock:result");
}
};
}
}
| JdbcProducerConcurrenctTest |
java | apache__dubbo | dubbo-rpc/dubbo-rpc-triple/src/main/java/org/apache/dubbo/rpc/protocol/tri/TripleCustomerProtocolWrapper.java | {
"start": 1101,
"end": 2648
} | class ____ {
static int makeTag(int fieldNumber, int wireType) {
return fieldNumber << 3 | wireType;
}
public static byte[] varIntEncode(int val) {
byte[] data = new byte[varIntComputeLength(val)];
for (int i = 0; i < data.length - 1; i++) {
data[i] = (byte) ((val & 0x7F) | 0x80);
val = val >>> 7;
}
data[data.length - 1] = (byte) (val);
return data;
}
public static int varIntComputeLength(int val) {
if (val == 0) {
return 1;
}
int length = 0;
while (val != 0) {
val = val >>> 7;
length++;
}
return length;
}
public static int readRawVarint32(ByteBuffer byteBuffer) {
int val = 0;
int currentPosition = byteBuffer.position();
int varIntLength = 1;
byte currentByte = byteBuffer.get();
while ((currentByte & 0XF0) >> 7 == 1) {
varIntLength++;
currentByte = byteBuffer.get();
}
for (int index = currentPosition + varIntLength - 1; index >= currentPosition; index--) {
val = val << 7;
val = val | (byteBuffer.get(index) & 0x7F);
}
byteBuffer.position(currentPosition + varIntLength);
return val;
}
public static int extractFieldNumFromTag(int tag) {
return tag >> 3;
}
public static int extractWireTypeFromTag(int tag) {
return tag & 0X07;
}
public static final | TripleCustomerProtocolWrapper |
java | elastic__elasticsearch | x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/custom/response/CompletionResponseParser.java | {
"start": 1023,
"end": 4328
} | class ____ extends BaseCustomResponseParser {
public static final String NAME = "completion_response_parser";
public static final String COMPLETION_PARSER_RESULT = "completion_result";
private final String completionResultPath;
public static CompletionResponseParser fromMap(
Map<String, Object> responseParserMap,
String scope,
ValidationException validationException
) {
var path = extractRequiredString(
responseParserMap,
COMPLETION_PARSER_RESULT,
String.join(".", scope, JSON_PARSER),
validationException
);
if (validationException.validationErrors().isEmpty() == false) {
throw validationException;
}
return new CompletionResponseParser(path);
}
public CompletionResponseParser(String completionResultPath) {
this.completionResultPath = Objects.requireNonNull(completionResultPath);
}
public CompletionResponseParser(StreamInput in) throws IOException {
this.completionResultPath = in.readString();
}
public void writeTo(StreamOutput out) throws IOException {
out.writeString(completionResultPath);
}
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(JSON_PARSER);
{
builder.field(COMPLETION_PARSER_RESULT, completionResultPath);
}
builder.endObject();
return builder;
}
String getCompletionResultPath() {
return completionResultPath;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
CompletionResponseParser that = (CompletionResponseParser) o;
return Objects.equals(completionResultPath, that.completionResultPath);
}
@Override
public int hashCode() {
return Objects.hash(completionResultPath);
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public ChatCompletionResults transform(Map<String, Object> map) {
var result = MapPathExtractor.extract(map, completionResultPath);
var extractedField = result.extractedObject();
validateNonNull(extractedField, completionResultPath);
if (extractedField instanceof List<?> extractedList) {
var completionList = castList(extractedList, (obj, fieldName) -> toType(obj, String.class, fieldName), completionResultPath);
return new ChatCompletionResults(completionList.stream().map(ChatCompletionResults.Result::new).toList());
} else if (extractedField instanceof String extractedString) {
return new ChatCompletionResults(List.of(new ChatCompletionResults.Result(extractedString)));
} else {
throw new IllegalArgumentException(
Strings.format(
"Extracted field [%s] from path [%s] is an invalid type, expected a list or a string but received [%s]",
result.getArrayFieldName(0),
completionResultPath,
extractedField.getClass().getSimpleName()
)
);
}
}
}
| CompletionResponseParser |
java | google__dagger | javatests/dagger/internal/codegen/ComponentProcessorTest.java | {
"start": 23579,
"end": 24017
} | class ____ {",
" @Inject SomeInjectableType(SimpleComponent component) {}",
"}");
Source componentFile =
CompilerTests.javaSource(
"test.SimpleComponent",
"package test;",
"",
"import dagger.Component;",
"import dagger.Lazy;",
"import javax.inject.Provider;",
"",
"@Component",
" | SomeInjectableType |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/bean/MyApplicationException.java | {
"start": 852,
"end": 1159
} | class ____ extends Exception {
private static final long serialVersionUID = 1L;
private final int code;
public MyApplicationException(String message, int code) {
super(message);
this.code = code;
}
public int getCode() {
return code;
}
}
| MyApplicationException |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestAuthenticationSessionCookie.java | {
"start": 3191,
"end": 6611
} | class ____ extends FilterInitializer {
@Override
public void initFilter(FilterContainer container, Configuration conf) {
container.addFilter("Dummy2Auth", Dummy2AuthenticationFilter.class
.getName(), new HashMap<>());
}
}
public void startServer(boolean isTestSessionCookie) throws Exception {
Configuration conf = new Configuration();
if (isTestSessionCookie) {
conf.set(HttpServer2.FILTER_INITIALIZER_PROPERTY,
DummyFilterInitializer.class.getName());
} else {
conf.set(HttpServer2.FILTER_INITIALIZER_PROPERTY,
Dummy2FilterInitializer.class.getName());
}
File base = new File(BASEDIR);
FileUtil.fullyDelete(base);
base.mkdirs();
keystoresDir = new File(BASEDIR).getAbsolutePath();
sslConfDir = KeyStoreTestUtil.getClasspathDir(TestSSLHttpServer.class);
KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);
Configuration sslConf = KeyStoreTestUtil.getSslConfig();
server = new HttpServer2.Builder()
.setName("test")
.addEndpoint(new URI("http://localhost"))
.addEndpoint(new URI("https://localhost"))
.setConf(conf)
.keyPassword(sslConf.get("ssl.server.keystore.keypassword"))
.keyStore(sslConf.get("ssl.server.keystore.location"),
sslConf.get("ssl.server.keystore.password"),
sslConf.get("ssl.server.keystore.type", "jks"))
.trustStore(sslConf.get("ssl.server.truststore.location"),
sslConf.get("ssl.server.truststore.password"),
sslConf.get("ssl.server.truststore.type", "jks")).build();
server.addServlet("echo", "/echo", TestHttpServer.EchoServlet.class);
server.start();
}
@Test
public void testSessionCookie() throws IOException {
try {
startServer(true);
} catch (Exception e) {
// Auto-generated catch block
e.printStackTrace();
}
URL base = new URL("http://" + NetUtils.getHostPortString(server
.getConnectorAddress(0)));
HttpURLConnection conn = (HttpURLConnection) new URL(base,
"/echo").openConnection();
String header = conn.getHeaderField("Set-Cookie");
List<HttpCookie> cookies = HttpCookie.parse(header);
assertTrue(!cookies.isEmpty());
Log.getLog().info(header);
assertFalse(header.contains("; Expires="));
assertTrue("token".equals(cookies.get(0).getValue()));
}
@Test
public void testPersistentCookie() throws IOException {
try {
startServer(false);
} catch (Exception e) {
// Auto-generated catch block
e.printStackTrace();
}
URL base = new URL("http://" + NetUtils.getHostPortString(server
.getConnectorAddress(0)));
HttpURLConnection conn = (HttpURLConnection) new URL(base,
"/echo").openConnection();
String header = conn.getHeaderField("Set-Cookie");
List<HttpCookie> cookies = HttpCookie.parse(header);
assertTrue(!cookies.isEmpty());
Log.getLog().info(header);
assertTrue(header.contains("; Expires="));
assertTrue("token".equals(cookies.get(0).getValue()));
}
@AfterEach
public void cleanup() throws Exception {
server.stop();
FileUtil.fullyDelete(new File(BASEDIR));
KeyStoreTestUtil.cleanupSSLConfig(keystoresDir, sslConfDir);
}
}
| Dummy2FilterInitializer |
java | quarkusio__quarkus | independent-projects/tools/devtools-common/src/main/java/io/quarkus/devtools/project/extensions/ExtensionManager.java | {
"start": 395,
"end": 3523
} | interface ____ {
/**
* @return the {@link BuildTool} of this extension manager
*/
BuildTool getBuildTool();
/**
* Returns the list of the imported platforms in the current project.
*
* @return current list of imported platforms
* @throws IOException if a problem occurs while reading the project build file(s)
*/
Collection<ArtifactCoords> getInstalledPlatforms() throws IOException;
/**
* Read the build file(s) to get the list of installed extensions in this Quarkus project.
*
* @return The list of {@link ArtifactCoords} installed in the project build file(s).
* @throws IOException if a problem occurs while reading the project build file(s)
*/
Collection<ArtifactCoords> getInstalled() throws IOException;
/**
* Read build file(s) to check if an extension is installed in this Quarkus project.
*
* @param key the {@link ArtifactKey} of the extension to check
* @return true if it's installed
* @throws IOException if a problem occurs while reading the project build file(s)
*/
default boolean isInstalled(ArtifactKey key) throws IOException {
return getInstalled().stream().anyMatch(i -> Objects.equals(i.getKey(), key));
}
/**
* This is going to install/add all the specified extensions to the project build file(s).
*
* <pre>
* - Extensions which are already installed will ALWAYS be skipped whatever the specified version
* - The provided version will be used if it wasn't already installed
* </pre>
*
* @param coords the list of {@link ArtifactCoords} for the extensions to install
* @return the {@link InstallResult}
* @throws IOException if a problem occurs while reading/writing the project build file(s)
*/
InstallResult install(Collection<ArtifactCoords> coords) throws IOException;
/**
* This is going to install/add all the specified extensions to the project build file(s).
*
* <pre>
* - If the project Quarkus platform bom is not defined, an {@link IllegalStateException} will be thrown
* - Extensions which are already installed will ALWAYS be skipped whatever the specified version
* - The provided version will be used if wasn't already installed
* </pre>
*
* @param request the list of {@link ArtifactCoords} for the extensions to install
* @return the {@link InstallResult}
* @throws IOException if a problem occurs while reading/writing the project build file(s)
*/
InstallResult install(ExtensionInstallPlan request) throws IOException;
/**
* This is going to uninstall/remove all the specified extensions from the project build file(s).
*
* This is ignoring the version
*
* @param keys the set of {@link ArtifactKey} for the extensions to uninstall
* @return the {@link InstallResult}
* @throws IOException if a problem occurs while reading/writing the project build file(s)
*/
UninstallResult uninstall(Collection<ArtifactKey> keys) throws IOException;
| ExtensionManager |
java | elastic__elasticsearch | build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/PomValidationTask.java | {
"start": 1169,
"end": 3897
} | class ____ extends PrecommitTask {
private final RegularFileProperty pomFile;
private boolean foundError;
@Inject
public PomValidationTask(ObjectFactory objects) {
pomFile = objects.fileProperty();
}
@InputFile
@PathSensitive(PathSensitivity.RELATIVE)
public RegularFileProperty getPomFile() {
return pomFile;
}
@TaskAction
public void checkPom() throws Exception {
try (FileReader fileReader = new FileReader(pomFile.getAsFile().get())) {
MavenXpp3Reader reader = new MavenXpp3Reader();
Model model = reader.read(fileReader);
validateString("groupId", model.getGroupId());
validateString("artifactId", model.getArtifactId());
validateString("version", model.getVersion());
validateString("name", model.getName());
validateString("description", model.getDescription());
validateString("url", model.getUrl());
validateCollection("licenses", model.getLicenses(), v -> {
validateString("licenses.name", v.getName());
validateString("licenses.url", v.getUrl());
});
validateCollection("developers", model.getDevelopers(), v -> {
validateString("developers.name", v.getName());
validateString("developers.url", v.getUrl());
});
validateNonNull("scm", model.getScm(), () -> validateString("scm.url", model.getScm().getUrl()));
}
if (foundError) {
throw new GradleException("Check failed for task '" + getPath() + "', see console log for details");
}
}
private void logError(String element, String message) {
foundError = true;
getLogger().error("{} {} in [{}]", element, message, pomFile.getAsFile().get());
}
private <T> void validateNonEmpty(String element, T value, Predicate<T> isEmpty) {
if (isEmpty.test(value)) {
logError(element, "is empty");
}
}
private <T> void validateNonNull(String element, T value, Runnable validator) {
if (value == null) {
logError(element, "is missing");
} else {
validator.run();
}
}
private void validateString(String element, String value) {
validateNonNull(element, value, () -> validateNonEmpty(element, value, String::isBlank));
}
private <T> void validateCollection(String element, Collection<T> value, Consumer<T> validator) {
validateNonNull(element, value, () -> {
validateNonEmpty(element, value, Collection::isEmpty);
value.forEach(validator);
});
}
}
| PomValidationTask |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/io/support/PathMatchingResourcePatternResolver.java | {
"start": 23348,
"end": 38786
} | class ____) and add each to the given set of resources in the form of
* a pointer to the root of the jar file content.
* @param result the set of resources to add jar roots to
* @since 4.3
*/
protected void addClassPathManifestEntries(Set<Resource> result) {
Set<ClassPathManifestEntry> entries = this.manifestEntriesCache;
if (entries == null) {
entries = getClassPathManifestEntries();
if (this.useCaches == null || this.useCaches) {
this.manifestEntriesCache = entries;
}
}
for (ClassPathManifestEntry entry : entries) {
if (!result.contains(entry.resource()) &&
(entry.alternative() != null && !result.contains(entry.alternative()))) {
result.add(entry.resource());
}
}
}
private Set<ClassPathManifestEntry> getClassPathManifestEntries() {
Set<ClassPathManifestEntry> manifestEntries = new LinkedHashSet<>();
Set<File> seen = new HashSet<>();
try {
String paths = System.getProperty("java.class.path");
for (String path : StringUtils.delimitedListToStringArray(paths, File.pathSeparator)) {
try {
File jar = new File(path).getAbsoluteFile();
if (jar.isFile() && seen.add(jar)) {
manifestEntries.add(ClassPathManifestEntry.of(jar, this.useCaches));
manifestEntries.addAll(getClassPathManifestEntriesFromJar(jar));
}
}
catch (MalformedURLException ex) {
if (logger.isDebugEnabled()) {
logger.debug("Cannot search for matching files underneath [" + path +
"] because it cannot be converted to a valid 'jar:' URL: " + ex.getMessage());
}
}
}
return Collections.unmodifiableSet(manifestEntries);
}
catch (Exception ex) {
if (logger.isDebugEnabled()) {
logger.debug("Failed to evaluate 'java.class.path' manifest entries: " + ex);
}
return Collections.emptySet();
}
}
private Set<ClassPathManifestEntry> getClassPathManifestEntriesFromJar(File jar) throws IOException {
URL base = jar.toURI().toURL();
File parent = jar.getAbsoluteFile().getParentFile();
try (JarFile jarFile = new JarFile(jar)) {
Manifest manifest = jarFile.getManifest();
Attributes attributes = (manifest != null ? manifest.getMainAttributes() : null);
String classPath = (attributes != null ? attributes.getValue(Name.CLASS_PATH) : null);
Set<ClassPathManifestEntry> manifestEntries = new LinkedHashSet<>();
if (StringUtils.hasLength(classPath)) {
StringTokenizer tokenizer = new StringTokenizer(classPath);
while (tokenizer.hasMoreTokens()) {
String path = tokenizer.nextToken();
if (path.indexOf(':') >= 0 && !"file".equalsIgnoreCase(new URL(base, path).getProtocol())) {
// See jdk.internal.loader.URLClassPath.JarLoader.tryResolveFile(URL, String)
continue;
}
// Handle absolute paths correctly: do not apply parent to absolute paths.
File pathFile = new File(path);
File candidate = (pathFile.isAbsolute() ? pathFile : new File(parent, path));
// For relative paths, enforce security check: must be under parent.
// For absolute paths, just verify file exists (matching JVM behavior).
if (candidate.isFile() && (pathFile.isAbsolute() ||
candidate.getCanonicalPath().contains(parent.getCanonicalPath()))) {
manifestEntries.add(ClassPathManifestEntry.of(candidate, this.useCaches));
}
}
}
return Collections.unmodifiableSet(manifestEntries);
}
catch (Exception ex) {
if (logger.isDebugEnabled()) {
logger.debug("Failed to load manifest entries from jar file '" + jar + "': " + ex);
}
return Collections.emptySet();
}
}
/**
* Find all resources that match the given location pattern via the Ant-style
* {@link #getPathMatcher() PathMatcher}.
* <p>Supports resources in OSGi bundles, JBoss VFS, jar files, zip files,
* and file systems.
* @param locationPattern the location pattern to match
* @return the result as Resource array
* @throws IOException in case of I/O errors
* @see #determineRootDir(String)
* @see #resolveRootDirResource(Resource)
* @see #isJarResource(Resource)
* @see #doFindPathMatchingJarResources(Resource, URL, String)
* @see #doFindPathMatchingFileResources(Resource, String)
* @see org.springframework.util.PathMatcher
*/
protected Resource[] findPathMatchingResources(String locationPattern) throws IOException {
String rootDirPath = determineRootDir(locationPattern);
String subPattern = locationPattern.substring(rootDirPath.length());
// Look for pre-cached root dir resources, either a direct match or
// a match for a parent directory in the same classpath locations.
Resource[] rootDirResources = this.rootDirCache.get(rootDirPath);
String actualRootPath = null;
if (rootDirResources == null) {
// No direct match -> search for a common parent directory match
// (cached based on repeated searches in the same base location,
// in particular for different root directories in the same jar).
String commonPrefix = null;
String existingPath = null;
boolean commonUnique = true;
for (String path : this.rootDirCache.keySet()) {
String currentPrefix = null;
for (int i = 0; i < path.length(); i++) {
if (i == rootDirPath.length() || path.charAt(i) != rootDirPath.charAt(i)) {
currentPrefix = path.substring(0, path.lastIndexOf('/', i - 1) + 1);
break;
}
}
if (currentPrefix != null) {
if (checkPathWithinPackage(path.substring(currentPrefix.length()))) {
// A prefix match found, potentially to be turned into a common parent cache entry.
if (commonPrefix == null || !commonUnique || currentPrefix.length() > commonPrefix.length()) {
commonPrefix = currentPrefix;
existingPath = path;
}
else if (currentPrefix.equals(commonPrefix)) {
commonUnique = false;
}
}
}
else if (actualRootPath == null || path.length() > actualRootPath.length()) {
// A direct match found for a parent directory -> use it.
rootDirResources = this.rootDirCache.get(path);
actualRootPath = path;
}
}
if (rootDirResources == null && StringUtils.hasLength(commonPrefix)) {
// Try common parent directory as long as it points to the same classpath locations.
rootDirResources = getResources(commonPrefix);
Resource[] existingResources = this.rootDirCache.get(existingPath);
if (existingResources != null && rootDirResources.length == existingResources.length) {
// Replace existing subdirectory cache entry with common parent directory,
// avoiding repeated determination of root directories in the same jar.
this.rootDirCache.remove(existingPath);
this.rootDirCache.put(commonPrefix, rootDirResources);
actualRootPath = commonPrefix;
}
else if (commonPrefix.equals(rootDirPath)) {
// The identified common directory is equal to the currently requested path ->
// worth caching specifically, even if it cannot replace the existing sub-entry.
this.rootDirCache.put(rootDirPath, rootDirResources);
}
else {
// Mismatch: parent directory points to more classpath locations.
rootDirResources = null;
}
}
if (rootDirResources == null) {
// Lookup for specific directory, creating a cache entry for it.
rootDirResources = getResources(rootDirPath);
if (this.useCaches == null || this.useCaches) {
this.rootDirCache.put(rootDirPath, rootDirResources);
}
}
}
Set<Resource> result = new LinkedHashSet<>(64);
for (Resource rootDirResource : rootDirResources) {
if (actualRootPath != null && actualRootPath.length() < rootDirPath.length()) {
// Create sub-resource for requested sub-location from cached common root directory.
rootDirResource = rootDirResource.createRelative(rootDirPath.substring(actualRootPath.length()));
}
rootDirResource = resolveRootDirResource(rootDirResource);
URL rootDirUrl = rootDirResource.getURL();
if (equinoxResolveMethod != null && rootDirUrl.getProtocol().startsWith("bundle")) {
URL resolvedUrl = (URL) ReflectionUtils.invokeMethod(equinoxResolveMethod, null, rootDirUrl);
if (resolvedUrl != null) {
rootDirUrl = resolvedUrl;
}
UrlResource urlResource = new UrlResource(rootDirUrl);
if (this.useCaches != null) {
urlResource.setUseCaches(this.useCaches);
}
rootDirResource = urlResource;
}
if (rootDirUrl.getProtocol().startsWith(ResourceUtils.URL_PROTOCOL_VFS)) {
result.addAll(VfsResourceMatchingDelegate.findMatchingResources(rootDirUrl, subPattern, getPathMatcher()));
}
else if (ResourceUtils.isJarURL(rootDirUrl) || isJarResource(rootDirResource)) {
result.addAll(doFindPathMatchingJarResources(rootDirResource, rootDirUrl, subPattern));
}
else {
result.addAll(doFindPathMatchingFileResources(rootDirResource, subPattern));
}
}
if (logger.isTraceEnabled()) {
logger.trace("Resolved location pattern [" + locationPattern + "] to resources " + result);
}
return result.toArray(EMPTY_RESOURCE_ARRAY);
}
/**
* Determine the root directory for the given location.
* <p>Used for determining the starting point for file matching, resolving the
* root directory location to be passed into {@link #getResources(String)},
* with the remainder of the location to be used as the sub pattern.
* <p>Will return "/WEB-INF/" for the location "/WEB-INF/*.xml", for example.
* @param location the location to check
* @return the part of the location that denotes the root directory
* @see #findPathMatchingResources(String)
*/
protected String determineRootDir(String location) {
int prefixEnd = location.indexOf(':') + 1;
int rootDirEnd = location.length();
while (rootDirEnd > prefixEnd && getPathMatcher().isPattern(location.substring(prefixEnd, rootDirEnd))) {
rootDirEnd = location.lastIndexOf('/', rootDirEnd - 2) + 1;
}
if (rootDirEnd == 0) {
rootDirEnd = prefixEnd;
}
return location.substring(0, rootDirEnd);
}
/**
* Resolve the supplied root directory resource for path matching.
* <p>By default, {@link #findPathMatchingResources(String)} resolves Equinox
* OSGi "bundleresource:" and "bundleentry:" URLs into standard jar file URLs
* that will be traversed using Spring's standard jar file traversal algorithm.
* <p>For any custom resolution, override this template method and replace the
* supplied resource handle accordingly.
* <p>The default implementation of this method returns the supplied resource
* unmodified.
* @param original the resource to resolve
* @return the resolved resource (may be identical to the supplied resource)
* @throws IOException in case of resolution failure
* @see #findPathMatchingResources(String)
*/
protected Resource resolveRootDirResource(Resource original) throws IOException {
return original;
}
/**
* Determine if the given resource handle indicates a jar resource that the
* {@link #doFindPathMatchingJarResources} method can handle.
* <p>{@link #findPathMatchingResources(String)} delegates to
* {@link ResourceUtils#isJarURL(URL)} to determine whether the given URL
* points to a resource in a jar file, and only invokes this method as a fallback.
* <p>This template method therefore allows for detecting further kinds of
* jar-like resources — for example, via {@code instanceof} checks on
* the resource handle type.
* <p>The default implementation of this method returns {@code false}.
* @param resource the resource handle to check (usually the root directory
* to start path matching from)
* @return {@code true} if the given resource handle indicates a jar resource
* @throws IOException in case of I/O errors
* @see #findPathMatchingResources(String)
* @see #doFindPathMatchingJarResources(Resource, URL, String)
* @see org.springframework.util.ResourceUtils#isJarURL
*/
protected boolean isJarResource(Resource resource) throws IOException {
return false;
}
/**
* Find all resources in jar files that match the given location pattern
* via the Ant-style {@link #getPathMatcher() PathMatcher}.
* @param rootDirResource the root directory as Resource
* @param rootDirUrl the pre-resolved root directory URL
* @param subPattern the sub pattern to match (below the root directory)
* @return a mutable Set of matching Resource instances
* @throws IOException in case of I/O errors
* @since 4.3
* @see java.net.JarURLConnection
* @see org.springframework.util.PathMatcher
*/
protected Set<Resource> doFindPathMatchingJarResources(Resource rootDirResource, URL rootDirUrl, String subPattern)
throws IOException {
String jarFileUrl = null;
String rootEntryPath = "";
String urlFile = rootDirUrl.getFile();
int separatorIndex = urlFile.indexOf(ResourceUtils.WAR_URL_SEPARATOR);
if (separatorIndex == -1) {
separatorIndex = urlFile.indexOf(ResourceUtils.JAR_URL_SEPARATOR);
}
if (separatorIndex >= 0) {
jarFileUrl = urlFile.substring(0, separatorIndex);
rootEntryPath = urlFile.substring(separatorIndex + 2); // both separators are 2 chars
NavigableSet<String> entriesCache = this.jarEntriesCache.get(jarFileUrl);
if (entriesCache != null) {
Set<Resource> result = new LinkedHashSet<>(64);
// Clean root entry path to match jar entries format without "!" separators
rootEntryPath = rootEntryPath.replace(ResourceUtils.JAR_URL_SEPARATOR, "/");
// Search sorted entries from first entry with rootEntryPath prefix
boolean rootEntryPathFound = false;
for (String entryPath : entriesCache.tailSet(rootEntryPath, false)) {
if (!entryPath.startsWith(rootEntryPath)) {
// We are beyond the potential matches in the current TreeSet.
break;
}
rootEntryPathFound = true;
String relativePath = entryPath.substring(rootEntryPath.length());
if (getPathMatcher().match(subPattern, relativePath)) {
result.add(rootDirResource.createRelative(relativePath));
}
}
if (rootEntryPathFound) {
return result;
}
}
}
URLConnection con = rootDirUrl.openConnection();
JarFile jarFile;
boolean closeJarFile;
if (con instanceof JarURLConnection jarCon) {
// Should usually be the case for traditional JAR files.
if (this.useCaches != null) {
jarCon.setUseCaches(this.useCaches);
}
try {
jarFile = jarCon.getJarFile();
jarFileUrl = jarCon.getJarFileURL().toExternalForm();
JarEntry jarEntry = jarCon.getJarEntry();
rootEntryPath = (jarEntry != null ? jarEntry.getName() : "");
closeJarFile = !jarCon.getUseCaches();
}
catch (ZipException | FileNotFoundException | NoSuchFileException ex) {
// Happens in case of a non-jar file or in case of a cached root directory
// without the specific subdirectory present, respectively.
return Collections.emptySet();
}
}
else {
// No JarURLConnection -> need to resort to URL file parsing.
// We'll assume URLs of the format "jar:path!/entry", with the protocol
// being arbitrary as long as following the entry format.
// We'll also handle paths with and without leading "file:" prefix.
try {
if (jarFileUrl != null) {
jarFile = getJarFile(jarFileUrl);
}
else {
jarFile = new JarFile(urlFile);
jarFileUrl = urlFile;
rootEntryPath = "";
}
closeJarFile = true;
}
catch (ZipException ex) {
if (logger.isDebugEnabled()) {
logger.debug("Skipping invalid jar | loader |
java | spring-projects__spring-security | core/src/test/java/org/springframework/security/authorization/method/SecuredAuthorizationManagerTests.java | {
"start": 9496,
"end": 9931
} | class ____ extends TestClass implements TargetClassAware, InterfaceLevelAnnotations {
@Override
public Class<?> getTargetClass() {
return TestClass.class;
}
@Override
public void doSomething() {
super.doSomething();
}
@Override
public void securedUserOrAdmin() {
super.securedUserOrAdmin();
}
@Override
public void inheritedAnnotations() {
super.inheritedAnnotations();
}
}
}
| TestTargetClassAware |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/jta/OneToManyJtaSessionClosedBeforeCommitTest.java | {
"start": 1404,
"end": 2777
} | class ____ {
private static final Integer ENTITY_ID = 1;
@BeforeClassTemplate
public void initData(EntityManagerFactoryScope scope) throws Exception {
TestingJtaPlatformImpl.INSTANCE.getTransactionManager().begin();
var entityManager = scope.getEntityManagerFactory().createEntityManager();
try {
SetRefEdEntity edEntity = new SetRefEdEntity( 2, "edEntity" );
entityManager.persist( edEntity );
SetRefIngEntity ingEntity = new SetRefIngEntity( ENTITY_ID, "ingEntity" );
Set<SetRefIngEntity> sries = new HashSet<>();
sries.add( ingEntity );
ingEntity.setReference( edEntity );
edEntity.setReffering( sries );
entityManager.persist( ingEntity );
entityManager.flush();
}
finally {
entityManager.close();
TestingJtaPlatformImpl.tryCommit();
}
}
@Test
public void testRevisionCounts(EntityManagerFactoryScope scope) {
scope.inEntityManager( entityManager -> assertEquals(
Arrays.asList( 1 ),
AuditReaderFactory.get( entityManager ).getRevisions( SetRefIngEntity.class, ENTITY_ID )
) );
}
@Test
public void testRevisionHistory(EntityManagerFactoryScope scope) {
scope.inEntityManager( entityManager -> assertEquals(
new SetRefIngEntity( ENTITY_ID, "ingEntity" ),
AuditReaderFactory.get( entityManager ).find( SetRefIngEntity.class, ENTITY_ID, 1 )
) );
}
}
| OneToManyJtaSessionClosedBeforeCommitTest |
java | apache__flink | flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/ddl/constraint/SqlTableConstraint.java | {
"start": 2049,
"end": 5886
} | class ____ extends SqlCall {
/** Use this operator only if you don't have a better one. */
private static final SqlOperator OPERATOR =
new SqlSpecialOperator("SqlTableConstraint", SqlKind.OTHER);
private final SqlIdentifier constraintName;
private final SqlLiteral uniqueSpec;
private final SqlNodeList columns;
private final SqlLiteral enforcement;
// Whether this is a table constraint, currently it is only used for SQL unparse.
private final boolean isTableConstraint;
/**
* Creates a table constraint node.
*
* @param constraintName Constraint name
* @param uniqueSpec Unique specification
* @param columns Column list on which the constraint enforces or null if this is a column
* constraint
* @param enforcement Whether the constraint is enforced
* @param isTableConstraint Whether this is a table constraint
* @param pos Parser position
*/
public SqlTableConstraint(
@Nullable SqlIdentifier constraintName,
SqlLiteral uniqueSpec,
SqlNodeList columns,
@Nullable SqlLiteral enforcement,
boolean isTableConstraint,
SqlParserPos pos) {
super(pos);
this.constraintName = constraintName;
this.uniqueSpec = uniqueSpec;
this.columns = columns;
this.enforcement = enforcement;
this.isTableConstraint = isTableConstraint;
}
@Override
public SqlOperator getOperator() {
return OPERATOR;
}
/** Returns whether the constraint is UNIQUE. */
public boolean isUnique() {
return this.uniqueSpec.getValueAs(SqlUniqueSpec.class) == SqlUniqueSpec.UNIQUE;
}
/** Returns whether the constraint is PRIMARY KEY. */
public boolean isPrimaryKey() {
return this.uniqueSpec.getValueAs(SqlUniqueSpec.class) == SqlUniqueSpec.PRIMARY_KEY;
}
/** Returns whether the constraint is enforced. */
public boolean isEnforced() {
// Default is enforced.
return this.enforcement == null
|| this.enforcement.getValueAs(SqlConstraintEnforcement.class)
== SqlConstraintEnforcement.ENFORCED;
}
public Optional<String> getConstraintName() {
String ret = constraintName != null ? constraintName.getSimple() : null;
return Optional.ofNullable(ret);
}
public Optional<SqlIdentifier> getConstraintNameIdentifier() {
return Optional.ofNullable(constraintName);
}
public SqlNodeList getColumns() {
return columns;
}
public boolean isTableConstraint() {
return isTableConstraint;
}
/** Returns the columns as a string array. */
public String[] getColumnNames() {
return columns.getList().stream()
.map(col -> ((SqlIdentifier) col).getSimple())
.toArray(String[]::new);
}
@Override
public List<SqlNode> getOperandList() {
return ImmutableNullableList.of(constraintName, uniqueSpec, columns, enforcement);
}
@Override
public void unparse(SqlWriter writer, int leftPrec, int rightPrec) {
if (this.constraintName != null) {
writer.keyword("CONSTRAINT");
this.constraintName.unparse(writer, leftPrec, rightPrec);
}
this.uniqueSpec.unparse(writer, leftPrec, rightPrec);
if (isTableConstraint) {
SqlWriter.Frame frame = writer.startList("(", ")");
for (SqlNode column : this.columns) {
writer.sep(",", false);
column.unparse(writer, leftPrec, rightPrec);
}
writer.endList(frame);
}
if (this.enforcement != null) {
this.enforcement.unparse(writer, leftPrec, rightPrec);
}
}
}
| SqlTableConstraint |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/node/JsonNodeDeepCopyTest.java | {
"start": 307,
"end": 2606
} | class ____ extends DatabindTestUtil
{
private final ObjectMapper mapper = new ObjectMapper();
@Test
public void testWithObjectSimple()
{
ObjectNode root = mapper.createObjectNode();
root.put("a", 3);
assertEquals(1, root.size());
ObjectNode copy = root.deepCopy();
assertEquals(1, copy.size());
// adding to root won't change copy:
root.put("b", 7);
assertEquals(2, root.size());
assertEquals(1, copy.size());
// nor vice versa
copy.put("c", 3);
assertEquals(2, root.size());
assertEquals(2, copy.size());
}
@Test
public void testWithArraySimple()
{
ArrayNode root = mapper.createArrayNode();
root.add("a");
assertEquals(1, root.size());
ArrayNode copy = root.deepCopy();
assertEquals(1, copy.size());
// adding to root won't change copy:
root.add( 7);
assertEquals(2, root.size());
assertEquals(1, copy.size());
// nor vice versa
copy.add(3);
assertEquals(2, root.size());
assertEquals(2, copy.size());
}
@Test
public void testWithNested()
{
ObjectNode root = mapper.createObjectNode();
ObjectNode leafObject = root.putObject("ob");
ArrayNode leafArray = root.putArray("arr");
assertEquals(2, root.size());
leafObject.put("a", 3);
assertEquals(1, leafObject.size());
leafArray.add(true);
assertEquals(1, leafArray.size());
ObjectNode copy = root.deepCopy();
assertNotSame(copy, root);
assertEquals(2, copy.size());
// should be detached, once again
leafObject.put("x", 9);
assertEquals(2, leafObject.size());
assertEquals(1, copy.get("ob").size());
leafArray.add("foobar");
assertEquals(2, leafArray.size());
assertEquals(1, copy.get("arr").size());
// nor vice versa
((ObjectNode) copy.get("ob")).put("c", 3);
assertEquals(2, leafObject.size());
assertEquals(2, copy.get("ob").size());
((ArrayNode) copy.get("arr")).add(13);
assertEquals(2, leafArray.size());
assertEquals(2, copy.get("arr").size());
}
}
| JsonNodeDeepCopyTest |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/Receiver.java | {
"start": 3075,
"end": 14033
} | class ____ implements DataTransferProtocol {
private final Tracer tracer;
protected DataInputStream in;
protected Receiver(Tracer tracer) {
this.tracer = tracer;
}
/** Initialize a receiver for DataTransferProtocol with a socket. */
protected void initialize(final DataInputStream in) {
this.in = in;
}
/** Read an Op. It also checks protocol version. */
protected final Op readOp() throws IOException {
final short version = in.readShort();
if (version != DataTransferProtocol.DATA_TRANSFER_VERSION) {
throw new IOException( "Version Mismatch (Expected: " +
DataTransferProtocol.DATA_TRANSFER_VERSION +
", Received: " + version + " )");
}
return Op.read(in);
}
private TraceScope continueTraceSpan(ByteString spanContextBytes,
String description) {
TraceScope scope = null;
SpanContext spanContext =
TraceUtils.byteStringToSpanContext(spanContextBytes);
if (spanContext != null) {
scope = tracer.newScope(description, spanContext);
}
return scope;
}
private TraceScope continueTraceSpan(ClientOperationHeaderProto header,
String description) {
return continueTraceSpan(header.getBaseHeader(), description);
}
private TraceScope continueTraceSpan(BaseHeaderProto header,
String description) {
return continueTraceSpan(header.getTraceInfo().getSpanContext(),
description);
}
/** Process op by the corresponding method. */
protected final void processOp(Op op) throws IOException {
switch(op) {
case READ_BLOCK:
opReadBlock();
break;
case WRITE_BLOCK:
opWriteBlock(in);
break;
case REPLACE_BLOCK:
opReplaceBlock(in);
break;
case COPY_BLOCK:
opCopyBlock(in);
break;
case BLOCK_CHECKSUM:
opBlockChecksum(in);
break;
case BLOCK_GROUP_CHECKSUM:
opStripedBlockChecksum(in);
break;
case TRANSFER_BLOCK:
opTransferBlock(in);
break;
case REQUEST_SHORT_CIRCUIT_FDS:
opRequestShortCircuitFds(in);
break;
case RELEASE_SHORT_CIRCUIT_FDS:
opReleaseShortCircuitFds(in);
break;
case REQUEST_SHORT_CIRCUIT_SHM:
opRequestShortCircuitShm(in);
break;
default:
throw new IOException("Unknown op " + op + " in data stream");
}
}
static private CachingStrategy getCachingStrategy(CachingStrategyProto strategy) {
Boolean dropBehind = strategy.hasDropBehind() ?
strategy.getDropBehind() : null;
Long readahead = strategy.hasReadahead() ?
strategy.getReadahead() : null;
return new CachingStrategy(dropBehind, readahead);
}
/** Receive OP_READ_BLOCK */
private void opReadBlock() throws IOException {
OpReadBlockProto proto = OpReadBlockProto.parseFrom(vintPrefixed(in));
TraceScope traceScope = continueTraceSpan(proto.getHeader(),
proto.getClass().getSimpleName());
try {
readBlock(PBHelperClient.convert(proto.getHeader().getBaseHeader().getBlock()),
PBHelperClient.convert(proto.getHeader().getBaseHeader().getToken()),
proto.getHeader().getClientName(),
proto.getOffset(),
proto.getLen(),
proto.getSendChecksums(),
(proto.hasCachingStrategy() ?
getCachingStrategy(proto.getCachingStrategy()) :
CachingStrategy.newDefaultStrategy()));
} finally {
if (traceScope != null) traceScope.close();
}
}
/** Receive OP_WRITE_BLOCK */
private void opWriteBlock(DataInputStream in) throws IOException {
final OpWriteBlockProto proto = OpWriteBlockProto.parseFrom(vintPrefixed(in));
final DatanodeInfo[] targets = PBHelperClient.convert(proto.getTargetsList());
TraceScope traceScope = continueTraceSpan(proto.getHeader(),
proto.getClass().getSimpleName());
try {
writeBlock(PBHelperClient.convert(proto.getHeader().getBaseHeader().getBlock()),
PBHelperClient.convertStorageType(proto.getStorageType()),
PBHelperClient.convert(proto.getHeader().getBaseHeader().getToken()),
proto.getHeader().getClientName(),
targets,
PBHelperClient.convertStorageTypes(proto.getTargetStorageTypesList(), targets.length),
PBHelperClient.convert(proto.getSource()),
fromProto(proto.getStage()),
proto.getPipelineSize(),
proto.getMinBytesRcvd(), proto.getMaxBytesRcvd(),
proto.getLatestGenerationStamp(),
fromProto(proto.getRequestedChecksum()),
(proto.hasCachingStrategy() ?
getCachingStrategy(proto.getCachingStrategy()) :
CachingStrategy.newDefaultStrategy()),
(proto.hasAllowLazyPersist() ? proto.getAllowLazyPersist() : false),
(proto.hasPinning() ? proto.getPinning(): false),
(PBHelperClient.convertBooleanList(proto.getTargetPinningsList())),
proto.getStorageId(),
proto.getTargetStorageIdsList().toArray(new String[0]));
} finally {
if (traceScope != null) traceScope.close();
}
}
/** Receive {@link Op#TRANSFER_BLOCK} */
private void opTransferBlock(DataInputStream in) throws IOException {
final OpTransferBlockProto proto =
OpTransferBlockProto.parseFrom(vintPrefixed(in));
final DatanodeInfo[] targets = PBHelperClient.convert(proto.getTargetsList());
TraceScope traceScope = continueTraceSpan(proto.getHeader(),
proto.getClass().getSimpleName());
try {
final ExtendedBlock block =
PBHelperClient.convert(proto.getHeader().getBaseHeader().getBlock());
final StorageType[] targetStorageTypes =
PBHelperClient.convertStorageTypes(proto.getTargetStorageTypesList(),
targets.length);
transferBlock(block,
PBHelperClient.convert(proto.getHeader().getBaseHeader().getToken()),
proto.getHeader().getClientName(),
targets,
targetStorageTypes,
proto.getTargetStorageIdsList().toArray(new String[0])
);
} finally {
if (traceScope != null) traceScope.close();
}
}
/** Receive {@link Op#REQUEST_SHORT_CIRCUIT_FDS} */
private void opRequestShortCircuitFds(DataInputStream in) throws IOException {
final OpRequestShortCircuitAccessProto proto =
OpRequestShortCircuitAccessProto.parseFrom(vintPrefixed(in));
SlotId slotId = (proto.hasSlotId()) ?
PBHelperClient.convert(proto.getSlotId()) : null;
TraceScope traceScope = continueTraceSpan(proto.getHeader(),
proto.getClass().getSimpleName());
try {
requestShortCircuitFds(PBHelperClient.convert(proto.getHeader().getBlock()),
PBHelperClient.convert(proto.getHeader().getToken()),
slotId, proto.getMaxVersion(),
proto.getSupportsReceiptVerification());
} finally {
if (traceScope != null) traceScope.close();
}
}
/** Receive {@link Op#RELEASE_SHORT_CIRCUIT_FDS} */
private void opReleaseShortCircuitFds(DataInputStream in)
throws IOException {
final ReleaseShortCircuitAccessRequestProto proto =
ReleaseShortCircuitAccessRequestProto.parseFrom(vintPrefixed(in));
TraceScope traceScope = continueTraceSpan(
proto.getTraceInfo().getSpanContext(),
proto.getClass().getSimpleName());
try {
releaseShortCircuitFds(PBHelperClient.convert(proto.getSlotId()));
} finally {
if (traceScope != null) traceScope.close();
}
}
/** Receive {@link Op#REQUEST_SHORT_CIRCUIT_SHM} */
private void opRequestShortCircuitShm(DataInputStream in) throws IOException {
final ShortCircuitShmRequestProto proto =
ShortCircuitShmRequestProto.parseFrom(vintPrefixed(in));
TraceScope traceScope = continueTraceSpan(
proto.getTraceInfo().getSpanContext(),
proto.getClass().getSimpleName());
try {
requestShortCircuitShm(proto.getClientName());
} finally {
if (traceScope != null) traceScope.close();
}
}
/** Receive OP_REPLACE_BLOCK */
private void opReplaceBlock(DataInputStream in) throws IOException {
OpReplaceBlockProto proto = OpReplaceBlockProto.parseFrom(vintPrefixed(in));
TraceScope traceScope = continueTraceSpan(proto.getHeader(),
proto.getClass().getSimpleName());
try {
replaceBlock(PBHelperClient.convert(proto.getHeader().getBlock()),
PBHelperClient.convertStorageType(proto.getStorageType()),
PBHelperClient.convert(proto.getHeader().getToken()),
proto.getDelHint(),
PBHelperClient.convert(proto.getSource()),
proto.getStorageId());
} finally {
if (traceScope != null) traceScope.close();
}
}
/** Receive OP_COPY_BLOCK */
private void opCopyBlock(DataInputStream in) throws IOException {
OpCopyBlockProto proto = OpCopyBlockProto.parseFrom(vintPrefixed(in));
TraceScope traceScope = continueTraceSpan(proto.getHeader(),
proto.getClass().getSimpleName());
try {
copyBlock(PBHelperClient.convert(proto.getHeader().getBlock()),
PBHelperClient.convert(proto.getHeader().getToken()));
} finally {
if (traceScope != null) traceScope.close();
}
}
/** Receive OP_BLOCK_CHECKSUM */
private void opBlockChecksum(DataInputStream in) throws IOException {
OpBlockChecksumProto proto = OpBlockChecksumProto.parseFrom(vintPrefixed(in));
TraceScope traceScope = continueTraceSpan(proto.getHeader(),
proto.getClass().getSimpleName());
try {
blockChecksum(PBHelperClient.convert(proto.getHeader().getBlock()),
PBHelperClient.convert(proto.getHeader().getToken()),
PBHelperClient.convert(proto.getBlockChecksumOptions()));
} finally {
if (traceScope != null) traceScope.close();
}
}
/** Receive OP_STRIPED_BLOCK_CHECKSUM. */
private void opStripedBlockChecksum(DataInputStream dis) throws IOException {
OpBlockGroupChecksumProto proto =
OpBlockGroupChecksumProto.parseFrom(vintPrefixed(dis));
TraceScope traceScope = continueTraceSpan(proto.getHeader(),
proto.getClass().getSimpleName());
StripedBlockInfo stripedBlockInfo = new StripedBlockInfo(
PBHelperClient.convert(proto.getHeader().getBlock()),
PBHelperClient.convert(proto.getDatanodes()),
PBHelperClient.convertTokens(proto.getBlockTokensList()),
PBHelperClient.convertBlockIndices(proto.getBlockIndicesList()),
PBHelperClient.convertErasureCodingPolicy(proto.getEcPolicy())
);
try {
blockGroupChecksum(stripedBlockInfo,
PBHelperClient.convert(proto.getHeader().getToken()),
proto.getRequestedNumBytes(),
PBHelperClient.convert(proto.getBlockChecksumOptions()));
} finally {
if (traceScope != null) {
traceScope.close();
}
}
}
}
| Receiver |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/layout/Encoder.java | {
"start": 1151,
"end": 1499
} | interface ____<T> {
/**
* Encodes the specified source object to some binary representation and writes the result to the specified
* destination.
*
* @param source the object to encode.
* @param destination holds the ByteBuffer to write into.
*/
void encode(T source, ByteBufferDestination destination);
}
| Encoder |
java | junit-team__junit5 | junit-jupiter-params/src/main/java/org/junit/jupiter/params/ParameterizedClass.java | {
"start": 11258,
"end": 12383
} | class ____ implement
* {@link AutoCloseable} will be closed after their corresponding
* invocation.
*
* <p>Defaults to {@code true}.
*
* <p><strong>WARNING</strong>: if an argument that implements
* {@code AutoCloseable} is reused for multiple invocations of the same
* parameterized class, you must set {@code autoCloseArguments} to
* {@code false} to ensure that the argument is not closed between
* invocations.
*
* @see java.lang.AutoCloseable
*/
boolean autoCloseArguments() default true;
/**
* Configure whether zero invocations are allowed for this
* parameterized class.
*
* <p>Set this attribute to {@code true} if the absence of invocations is
* expected in some cases and should not cause a test failure.
*
* <p>Defaults to {@code false}.
*/
boolean allowZeroInvocations() default false;
/**
* Configure how the number of arguments provided by an
* {@link ArgumentsSource} are validated.
*
* <p>Defaults to {@link ArgumentCountValidationMode#DEFAULT}.
*
* <p>When an {@link ArgumentsSource} provides more arguments than declared
* by the parameterized | that |
java | google__guava | android/guava/src/com/google/common/collect/Maps.java | {
"start": 121285,
"end": 122347
} | class ____ extends Maps.KeySet<K, V> {
KeySet() {
super(FilteredEntryMap.this);
}
@Override
public boolean remove(@Nullable Object o) {
if (containsKey(o)) {
unfiltered.remove(o);
return true;
}
return false;
}
@Override
public boolean removeAll(Collection<?> collection) {
return removeAllKeys(unfiltered, predicate, collection);
}
@Override
public boolean retainAll(Collection<?> collection) {
return retainAllKeys(unfiltered, predicate, collection);
}
@Override
public @Nullable Object[] toArray() {
// creating an ArrayList so filtering happens once
return Lists.newArrayList(iterator()).toArray();
}
@Override
@SuppressWarnings("nullness") // b/192354773 in our checker affects toArray declarations
public <T extends @Nullable Object> T[] toArray(T[] array) {
return Lists.newArrayList(iterator()).toArray(array);
}
}
}
private static final | KeySet |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/test/java/org/springframework/data/jpa/repository/GreetingsFrom.java | {
"start": 784,
"end": 907
} | class ____ {
public String groot(String name) {
return "(%s) - I am Groot!".formatted(name);
}
}
| GreetingsFrom |
java | alibaba__nacos | core/src/main/java/com/alibaba/nacos/core/ability/control/ServerAbilityControlManager.java | {
"start": 1405,
"end": 3674
} | class ____ extends AbstractAbilityControlManager {
public ServerAbilityControlManager() {
}
@Override
protected Map<AbilityMode, Map<AbilityKey, Boolean>> initCurrentNodeAbilities() {
// init client abilities
Map<AbilityMode, Map<AbilityKey, Boolean>> res = new HashMap<>(2);
res.put(AbilityMode.CLUSTER_CLIENT, initClusterClientAbilities());
res.put(AbilityMode.SDK_CLIENT, initSdkClientAbilities());
// init server abilities
// static abilities
Map<AbilityKey, Boolean> staticAbilities = ServerAbilities.getStaticAbilities();
// all function server can support
Set<AbilityKey> abilityKeys = staticAbilities.keySet();
Map<AbilityKey, Boolean> abilityTable = new HashMap<>(abilityKeys.size());
// if not define in config, then load from ServerAbilities
Set<AbilityKey> unIncludedInConfig = new HashSet<>();
abilityKeys.forEach(abilityKey -> {
String key = AbilityConfigs.PREFIX + abilityKey.getName();
try {
Boolean property = EnvUtil.getProperty(key, Boolean.class);
// if not null
if (property != null) {
abilityTable.put(abilityKey, property);
} else {
unIncludedInConfig.add(abilityKey);
}
} catch (Exception e) {
// from ServerAbilities
unIncludedInConfig.add(abilityKey);
}
});
// load from ServerAbilities
unIncludedInConfig.forEach(abilityKey -> abilityTable.put(abilityKey, staticAbilities.get(abilityKey)));
res.put(AbilityMode.SERVER, abilityTable);
return res;
}
/**
* init cluster client abilities.
*/
private Map<AbilityKey, Boolean> initClusterClientAbilities() {
// static abilities
return ClusterClientAbilities.getStaticAbilities();
}
/**
* init sdk client abilities.
*/
private Map<AbilityKey, Boolean> initSdkClientAbilities() {
// static abilities
return SdkClientAbilities.getStaticAbilities();
}
@Override
public int getPriority() {
return 1;
}
}
| ServerAbilityControlManager |
java | elastic__elasticsearch | test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java | {
"start": 15018,
"end": 15557
} | class ____ the following system properties (passed with -Dkey=value to the application)
* <ul>
* <li>-D{@value #TESTS_ENABLE_MOCK_MODULES} - a boolean value to enable or disable mock modules. This is
* useful to test the system without asserting modules that to make sure they don't hide any bugs in production.</li>
* <li> - a random seed used to initialize the index random context.
* </ul>
*/
@LuceneTestCase.SuppressFileSystems("ExtrasFS") // doesn't work with potential multi data path from test cluster yet
public abstract | supports |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/retry/RetryTemplateTests.java | {
"start": 15161,
"end": 15438
} | class ____ extends FileNotFoundException {
}
/**
* Custom {@link RuntimeException} that implements {@link #equals(Object)}
* and {@link #hashCode()} for use in assertions that check for equality.
*/
@SuppressWarnings("serial")
private static | CustomFileNotFoundException |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestSimulatedFSDataset.java | {
"start": 2020,
"end": 2083
} | class ____ the methods of the SimulatedFSDataset.
*/
public | tests |
java | quarkusio__quarkus | extensions/redis-client/runtime/src/main/java/io/quarkus/redis/runtime/datasource/ReactiveSortedSetCommandsImpl.java | {
"start": 970,
"end": 14515
} | class ____<K, V> extends AbstractSortedSetCommands<K, V>
implements ReactiveSortedSetCommands<K, V> {
private final ReactiveRedisDataSource reactive;
public ReactiveSortedSetCommandsImpl(ReactiveRedisDataSourceImpl redis, Type k, Type v) {
super(redis, k, v);
this.reactive = redis;
}
@Override
public ReactiveRedisDataSource getDataSource() {
return reactive;
}
@Override
public Uni<Boolean> zadd(K key, double score, V value) {
return zadd(key, new ZAddArgs(), score, value);
}
@Override
public Uni<Integer> zadd(K key, Map<V, Double> items) {
return zadd(key, new ZAddArgs(), items);
}
@Override
public Uni<Integer> zadd(K key, ScoredValue<V>... items) {
return zadd(key, new ZAddArgs(), items);
}
@Override
public Uni<Boolean> zadd(K key, ZAddArgs args, double score, V value) {
return super._zadd(key, args, score, value)
.map(this::decodeIntAsBoolean);
}
@Override
public Uni<Integer> zadd(K key, ZAddArgs args, Map<V, Double> items) {
return super._zadd(key, args, items)
.map(Response::toInteger);
}
@Override
public Uni<Integer> zadd(K key, ZAddArgs args, ScoredValue<V>... items) {
return super._zadd(key, args, items)
.map(Response::toInteger);
}
@Override
public Uni<Double> zaddincr(K key, double score, V value) {
return zaddincr(key, new ZAddArgs(), score, value);
}
@Override
public Uni<Double> zaddincr(K key, ZAddArgs args, double score, V value) {
return super._zaddincr(key, args, score, value)
.map(this::decodeAsDouble);
}
@Override
public Uni<Long> zcard(K key) {
return super._zcard(key)
.map(this::decodeLongOrZero);
}
@Override
public Uni<Long> zcount(K key, ScoreRange<Double> range) {
return super._zcount(key, range)
.map(Response::toLong);
}
@Override
public Uni<List<V>> zdiff(K... keys) {
return super._zdiff(keys)
.map(this::decodeAsListOfValues);
}
@Override
public Uni<List<ScoredValue<V>>> zdiffWithScores(K... keys) {
return super._zdiffWithScores(keys)
.map(this::decodeAsListOfScoredValues);
}
@Override
public Uni<Long> zdiffstore(K destination, K... keys) {
return super._zdiffstore(destination, keys)
.map(Response::toLong);
}
@Override
public Uni<Double> zincrby(K key, double increment, V value) {
return super._zincrby(key, increment, value)
.map(Response::toDouble);
}
@Override
public Uni<List<V>> zinter(ZAggregateArgs args, K... keys) {
return super._zinter(args, keys)
.map(r -> marshaller.decodeAsList(r, typeOfValue));
}
@Override
public Uni<List<V>> zinter(K... keys) {
return zinter(DEFAULT_INSTANCE_AGG, keys);
}
@Override
public Uni<List<ScoredValue<V>>> zinterWithScores(ZAggregateArgs arguments, K... keys) {
return super._zinterWithScores(arguments, keys)
.map(this::decodeAsListOfScoredValues);
}
@Override
public Uni<List<ScoredValue<V>>> zinterWithScores(K... keys) {
return zinterWithScores(DEFAULT_INSTANCE_AGG, keys);
}
@Override
public Uni<Long> zintercard(K... keys) {
return super._zintercard(keys)
.map(Response::toLong);
}
@Override
public Uni<Long> zintercard(long limit, K... keys) {
return super._zintercard(limit, keys)
.map(Response::toLong);
}
@Override
public Uni<Long> zinterstore(K destination, ZAggregateArgs arguments, K... keys) {
return super._zinterstore(destination, arguments, keys)
.map(Response::toLong);
}
@SafeVarargs
@Override
public final Uni<Long> zinterstore(K destination, K... keys) {
return zinterstore(destination, DEFAULT_INSTANCE_AGG, keys);
}
@Override
public Uni<Long> zlexcount(K key, Range<String> range) {
return super._zlexcount(key, range)
.map(Response::toLong);
}
@Override
public Uni<ScoredValue<V>> zmpopMin(K... keys) {
return super._zmpopMin(keys)
.map(this::decodePopResponse);
}
@Override
public Uni<List<ScoredValue<V>>> zmpopMin(int count, K... keys) {
return super._zmpopMin(count, keys)
.map(this::decodePopResponseWithCount);
}
@Override
public Uni<ScoredValue<V>> zmpopMax(K... keys) {
return super._zmpopMax(keys)
.map(this::decodePopResponse);
}
@Override
public Uni<List<ScoredValue<V>>> zmpopMax(int count, K... keys) {
return super._zmpopMax(count, keys)
.map(this::decodePopResponseWithCount);
}
@Override
public Uni<ScoredValue<V>> bzmpopMin(Duration timeout, K... keys) {
return super._bzmpopMin(timeout, keys)
.map(this::decodePopResponse);
}
@Override
public Uni<List<ScoredValue<V>>> bzmpopMin(Duration timeout, int count, K... keys) {
return super._bzmpopMin(timeout, count, keys)
.map(this::decodePopResponseWithCount);
}
@Override
public Uni<ScoredValue<V>> bzmpopMax(Duration timeout, K... keys) {
return super._bzmpopMax(timeout, keys)
.map(this::decodePopResponse);
}
@Override
public Uni<List<ScoredValue<V>>> bzmpopMax(Duration timeout, int count, K... keys) {
return super._bzmpopMax(timeout, count, keys)
.map(this::decodePopResponseWithCount);
}
@Override
public Uni<List<Double>> zmscore(K key, V... values) {
return super._zmscore(key, values)
.map(this::decodeAsListOfDouble);
}
@Override
public Uni<ScoredValue<V>> zpopmax(K key) {
return super._zpopmax(key)
.map(this::decodeAsScoredValue);
}
@Override
public Uni<List<ScoredValue<V>>> zpopmax(K key, int count) {
return super._zpopmax(key, count)
.map(this::decodeAsListOfScoredValues);
}
@Override
public Uni<ScoredValue<V>> zpopmin(K key) {
return super._zpopmin(key)
.map(this::decodeAsScoredValue);
}
@Override
public Uni<List<ScoredValue<V>>> zpopmin(K key, int count) {
return super._zpopmin(key, count)
.map(this::decodeAsListOfScoredValues);
}
@Override
public Uni<V> zrandmember(K key) {
return super._zrandmember(key)
.map(this::decodeV);
}
@Override
public Uni<List<V>> zrandmember(K key, int count) {
return super._zrandmember(key, count)
.map(this::decodeAsListOfValues);
}
@Override
public Uni<ScoredValue<V>> zrandmemberWithScores(K key) {
return super._zrandmemberWithScores(key)
.map(this::decodeAsScoredValueOrEmpty);
}
@Override
public Uni<List<ScoredValue<V>>> zrandmemberWithScores(K key, int count) {
return super._zrandmemberWithScores(key, count)
.map(this::decodeAsListOfScoredValues);
}
@SafeVarargs
@Override
public final Uni<KeyValue<K, ScoredValue<V>>> bzpopmin(Duration timeout, K... keys) {
return super._bzpopmin(timeout, keys)
.map(this::decodeAsKeyValue);
}
@SafeVarargs
@Override
public final Uni<KeyValue<K, ScoredValue<V>>> bzpopmax(Duration timeout, K... keys) {
return super._bzpopmax(timeout, keys)
.map(this::decodeAsKeyValue);
}
@Override
public Uni<List<V>> zrange(K key, long start, long stop, ZRangeArgs args) {
return super._zrange(key, start, stop, args)
.map(this::decodeAsListOfValues);
}
@Override
public Uni<List<ScoredValue<V>>> zrangeWithScores(K key, long start, long stop, ZRangeArgs args) {
return super._zrangeWithScores(key, start, stop, args)
.map(this::decodeAsListOfScoredValues);
}
@Override
public Uni<List<V>> zrange(K key, long start, long stop) {
return zrange(key, start, stop, DEFAULT_INSTANCE_RANGE);
}
@Override
public Uni<List<ScoredValue<V>>> zrangeWithScores(K key, long start, long stop) {
return zrangeWithScores(key, start, stop, DEFAULT_INSTANCE_RANGE);
}
@Override
public Uni<List<V>> zrangebylex(K key, Range<String> range, ZRangeArgs args) {
return super._zrangebylex(key, range, args)
.map(this::decodeAsListOfValues);
}
@Override
public Uni<List<V>> zrangebylex(K key, Range<String> range) {
return zrangebylex(key, range, DEFAULT_INSTANCE_RANGE);
}
@Override
public Uni<List<V>> zrangebyscore(K key, ScoreRange<Double> range, ZRangeArgs args) {
return super._zrangebyscore(key, range, args)
.map(this::decodeAsListOfValues);
}
@Override
public Uni<List<ScoredValue<V>>> zrangebyscoreWithScores(K key, ScoreRange<Double> range, ZRangeArgs args) {
return super._zrangebyscoreWithScores(key, range, args)
.map(this::decodeAsListOfScoredValues);
}
@Override
public Uni<List<V>> zrangebyscore(K key, ScoreRange<Double> range) {
return zrangebyscore(key, range, DEFAULT_INSTANCE_RANGE);
}
@Override
public Uni<List<ScoredValue<V>>> zrangebyscoreWithScores(K key, ScoreRange<Double> range) {
return zrangebyscoreWithScores(key, range, DEFAULT_INSTANCE_RANGE);
}
@Override
public Uni<Long> zrangestore(K dst, K src, long min, long max, ZRangeArgs args) {
return super._zrangestore(dst, src, min, max, args)
.map(Response::toLong);
}
@Override
public Uni<Long> zrangestore(K dst, K src, long min, long max) {
return zrangestore(dst, src, min, max, DEFAULT_INSTANCE_RANGE);
}
@Override
public Uni<Long> zrangestorebylex(K dst, K src, Range<String> range, ZRangeArgs args) {
return super._zrangestorebylex(dst, src, range, args)
.map(Response::toLong);
}
@Override
public Uni<Long> zrangestorebylex(K dst, K src, Range<String> range) {
return zrangestorebylex(dst, src, range, DEFAULT_INSTANCE_RANGE);
}
@Override
public Uni<Long> zrangestorebyscore(K dst, K src, ScoreRange<Double> range, ZRangeArgs args) {
return super._zrangestorebyscore(dst, src, range, args)
.map(Response::toLong);
}
@Override
public Uni<Long> zrangestorebyscore(K dst, K src, ScoreRange<Double> range) {
return zrangestorebyscore(dst, src, range, DEFAULT_INSTANCE_RANGE);
}
@Override
public Uni<Long> zrank(K key, V value) {
return super._zrank(key, value)
.map(this::decodeAsLong);
}
@Override
public Uni<Integer> zrem(K key, V... values) {
return super._zrem(key, values)
.map(Response::toInteger);
}
@Override
public Uni<Long> zremrangebylex(K key, Range<String> range) {
return super._zremrangebylex(key, range)
.map(Response::toLong);
}
@Override
public Uni<Long> zremrangebyrank(K key, long start, long stop) {
return super._zremrangebyrank(key, start, stop)
.map(Response::toLong);
}
@Override
public Uni<Long> zremrangebyscore(K key, ScoreRange<Double> range) {
return super._zremrangebyscore(key, range)
.map(Response::toLong);
}
@Override
public Uni<Long> zrevrank(K key, V value) {
return super._zrevrank(key, value)
.map(this::decodeAsLong);
}
@Override
public ReactiveZScanCursor<V> zscan(K key) {
nonNull(key, "key");
return new ZScanReactiveCursorImpl<>(redis, key, marshaller, typeOfValue, Collections.emptyList());
}
@Override
public ReactiveZScanCursor<V> zscan(K key, ScanArgs args) {
nonNull(key, "key");
nonNull(args, "args");
return new ZScanReactiveCursorImpl<>(redis, key, marshaller, typeOfValue, args.toArgs());
}
@Override
public Uni<Double> zscore(K key, V value) {
return super._zscore(key, value)
.map(this::decodeAsDouble);
}
@Override
public Uni<List<V>> zunion(ZAggregateArgs args, K... keys) {
return super._zunion(args, keys)
.map(this::decodeAsListOfValues);
}
@Override
public Uni<List<V>> zunion(K... keys) {
return zunion(DEFAULT_INSTANCE_AGG, keys);
}
@Override
public Uni<List<ScoredValue<V>>> zunionWithScores(K... keys) {
return zunionWithScores(DEFAULT_INSTANCE_AGG, keys);
}
@Override
public Uni<List<ScoredValue<V>>> zunionWithScores(ZAggregateArgs args, K... keys) {
return super._zunionWithScores(args, keys)
.map(this::decodeAsListOfScoredValues);
}
@Override
public Uni<Long> zunionstore(K destination, ZAggregateArgs args, K... keys) {
return super._zunionstore(destination, args, keys)
.map(Response::toLong);
}
@Override
public Uni<Long> zunionstore(K destination, K... keys) {
return zunionstore(destination, DEFAULT_INSTANCE_AGG, keys);
}
}
| ReactiveSortedSetCommandsImpl |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/common/geo/ShapeRelation.java | {
"start": 963,
"end": 2256
} | enum ____ implements Writeable {
INTERSECTS("intersects"),
DISJOINT("disjoint"),
WITHIN("within"),
CONTAINS("contains");
private final String relationName;
ShapeRelation(String relationName) {
this.relationName = relationName;
}
public static ShapeRelation readFromStream(StreamInput in) throws IOException {
return in.readEnum(ShapeRelation.class);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeEnum(this);
}
public static ShapeRelation getRelationByName(String name) {
name = name.toLowerCase(Locale.ENGLISH);
for (ShapeRelation relation : ShapeRelation.values()) {
if (relation.relationName.equals(name)) {
return relation;
}
}
return null;
}
/** Maps ShapeRelation to Lucene's LatLonShapeRelation */
public QueryRelation getLuceneRelation() {
return switch (this) {
case INTERSECTS -> QueryRelation.INTERSECTS;
case DISJOINT -> QueryRelation.DISJOINT;
case WITHIN -> QueryRelation.WITHIN;
case CONTAINS -> QueryRelation.CONTAINS;
};
}
public String getRelationName() {
return relationName;
}
}
| ShapeRelation |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/dialect/function/DateTruncEmulation.java | {
"start": 1757,
"end": 4905
} | class ____ extends AbstractSqmFunctionDescriptor implements FunctionRenderer {
protected final String toDateFunction;
protected DateTruncEmulation(String toDateFunction, TypeConfiguration typeConfiguration) {
super(
"trunc",
new ArgumentTypesValidator( StandardArgumentsValidators.exactly( 2 ), TEMPORAL, TEMPORAL_UNIT ),
StandardFunctionReturnTypeResolvers.useArgType( 1 ),
StandardFunctionArgumentTypeResolvers.invariant( typeConfiguration, TEMPORAL, TEMPORAL_UNIT )
);
this.toDateFunction = toDateFunction;
}
@Override
public void render(
SqlAppender sqlAppender,
List<? extends SqlAstNode> sqlAstArguments,
ReturnableType<?> returnType,
SqlAstTranslator<?> walker) {
sqlAppender.appendSql( toDateFunction );
sqlAppender.append( '(' );
sqlAstArguments.get( 1 ).accept( walker );
sqlAppender.append( ',' );
sqlAstArguments.get( 2 ).accept( walker );
sqlAppender.append( ')' );
}
@Override
protected <T> SelfRenderingSqmFunction<T> generateSqmFunctionExpression(
List<? extends SqmTypedNode<?>> arguments,
ReturnableType<T> impliedResultType,
QueryEngine queryEngine) {
final NodeBuilder nodeBuilder = queryEngine.getCriteriaBuilder();
final TemporalUnit temporalUnit = ( (SqmExtractUnit<?>) arguments.get( 1 ) ).getUnit();
final String pattern;
final String literal;
switch ( temporalUnit ) {
case YEAR:
pattern = "yyyy";
literal = "-01-01 00:00:00";
break;
case MONTH:
pattern = "yyyy-MM";
literal = "-01 00:00:00";
break;
case DAY:
pattern = "yyyy-MM-dd";
literal = " 00:00:00";
break;
case HOUR:
pattern = "yyyy-MM-dd HH";
literal = ":00:00";
break;
case MINUTE:
pattern = "yyyy-MM-dd HH:mm";
literal = ":00";
break;
case SECOND:
pattern = "yyyy-MM-dd HH:mm:ss";
literal = null;
break;
default:
throw new UnsupportedOperationException( "Temporal unit not supported [" + temporalUnit + "]" );
}
final var datetime = arguments.get( 0 );
final var formatExpression =
queryEngine.getSqmFunctionRegistry()
.getFunctionDescriptor( "format" )
.generateSqmExpression(
asList(
datetime,
new SqmFormat( pattern, nodeBuilder.getStringType(), nodeBuilder )
),
null,
queryEngine
);
final SqmExpression<?> formattedDatetime;
if ( literal != null ) {
formattedDatetime =
queryEngine.getSqmFunctionRegistry()
.getFunctionDescriptor( "concat" )
.generateSqmExpression(
asList(
formatExpression,
new SqmLiteral<>( literal, nodeBuilder.getStringType(), nodeBuilder )
),
null,
queryEngine
);
}
else {
formattedDatetime = formatExpression;
}
return new SelfRenderingSqmFunction<>(
this,
this,
// the first argument is needed for SybaseDateTruncEmulation
asList( datetime, formattedDatetime, new SqmFormat( "yyyy-MM-dd HH:mm:ss", nodeBuilder.getStringType(), nodeBuilder ) ),
impliedResultType,
null,
getReturnTypeResolver(),
nodeBuilder,
getName()
);
}
}
| DateTruncEmulation |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/direct/DirectEndpointRouteInlinedTest.java | {
"start": 1154,
"end": 2480
} | class ____ extends ContextTestSupport {
@Test
public void testDirect() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("direct:start").to("mock:result");
}
});
context.start();
// invoke start a 2nd time wont break stuff
context.start();
getMockEndpoint("mock:result").expectedMessageCount(1);
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
}
@Test
public void testDirectExistingExists() {
FailedToStartRouteException e = assertThrows(FailedToStartRouteException.class,
() -> {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("direct:start").to("mock:result");
from("direct:start").to("mock:bar");
}
});
}, "Should have thrown exception");
assertTrue(e.getMessage().matches(
"Failed to start route: route[0-9]+ because: Multiple consumers for the same endpoint is not allowed: direct://start"));
}
}
| DirectEndpointRouteInlinedTest |
java | netty__netty | common/src/main/java/io/netty/util/internal/CleanerJava25.java | {
"start": 1113,
"end": 10752
} | class ____ implements Cleaner {
private static final InternalLogger logger;
private static final MethodHandle INVOKE_ALLOCATOR;
static {
boolean suitableJavaVersion;
if (System.getProperty("org.graalvm.nativeimage.imagecode") != null) {
// native image supports this since 25, but we don't use PlatformDependent0 here, since
// we need to initialize CleanerJava25 at build time.
String v = System.getProperty("java.specification.version");
try {
suitableJavaVersion = Integer.parseInt(v) >= 25;
} catch (NumberFormatException e) {
suitableJavaVersion = false;
}
// also need to prevent initializing the logger at build time
logger = null;
} else {
// Only attempt to use MemorySegments on Java 25 or greater, because of the following JDK bugs:
// - https://bugs.openjdk.org/browse/JDK-8357145
// - https://bugs.openjdk.org/browse/JDK-8357268
suitableJavaVersion = PlatformDependent0.javaVersion() >= 25;
logger = InternalLoggerFactory.getInstance(CleanerJava25.class);
}
MethodHandle method;
Throwable error;
if (suitableJavaVersion) {
try {
// Here we compose and construct a MethodHandle that takes an 'int' capacity argument,
// and produces a 'CleanableDirectBufferImpl' instance.
// The method handle will create a new shared Arena instance, allocate a MemorySegment from it,
// convert the MemorySegment to a ByteBuffer and a memory address, and then pass both the Arena,
// the ByteBuffer, and the memory address to the CleanableDirectBufferImpl constructor,
// returning the resulting object.
//
// Effectively, we are recreating the following the Java code through MethodHandles alone:
//
// Arena arena = Arena.ofShared();
// MemorySegment segment = arena.allocate(size);
// return new CleanableDirectBufferImpl(
// (AutoCloseable) arena,
// segment.asByteBuffer(),
// segment.address());
//
// First, we need the types we'll use to set this all up.
Class<?> arenaCls = Class.forName("java.lang.foreign.Arena");
Class<?> memsegCls = Class.forName("java.lang.foreign.MemorySegment");
Class<CleanableDirectBufferImpl> bufCls = CleanableDirectBufferImpl.class;
// Acquire the private look up, so we can access the package-private 'CleanableDirectBufferImpl'
// constructor.
MethodHandles.Lookup lookup = MethodHandles.lookup();
// ofShared.type() = ()Arena
MethodHandle ofShared = lookup.findStatic(arenaCls, "ofShared", methodType(arenaCls));
// Try to access shared Arena which might fail on GraalVM 25.0.0 if not enabled
// See https://github.com/netty/netty/issues/15762
Object shared = ofShared.invoke();
((AutoCloseable) shared).close();
// allocate.type() = (Arena,long)MemorySegment
MethodHandle allocate = lookup.findVirtual(arenaCls, "allocate", methodType(memsegCls, long.class));
// asByteBuffer.type() = (MemorySegment)ByteBuffer
MethodHandle asByteBuffer = lookup.findVirtual(memsegCls, "asByteBuffer", methodType(ByteBuffer.class));
// address.type() = (MemorySegment)long
MethodHandle address = lookup.findVirtual(memsegCls, "address", methodType(long.class));
// bufClsCtor.type() = (AutoCloseable,ByteBuffer,long)CleanableDirectBufferImpl
MethodHandle bufClsCtor = lookup.findConstructor(bufCls,
methodType(void.class, AutoCloseable.class, ByteBuffer.class, long.class));
// The 'allocate' method takes a 'long' capacity, but we'll be providing an 'int'.
// Explicitly cast the 'long' to 'int' so we can use 'invokeExact'.
// allocateInt.type() = (Arena,int)MemorySegment
MethodHandle allocateInt = MethodHandles.explicitCastArguments(allocate,
methodType(memsegCls, arenaCls, int.class));
// Use the 'asByteBuffer' and 'address' methods as a filter, to transform the constructor into a method
// that takes two MemorySegment arguments instead of a ByteBuffer and a long argument.
// ctorArenaMemsegMemseg.type() = (Arena,MemorySegment,MemorySegment)CleanableDirectBufferImpl
MethodHandle ctorArenaMemsegMemseg = MethodHandles.explicitCastArguments(
MethodHandles.filterArguments(bufClsCtor, 1, asByteBuffer, address),
methodType(bufCls, arenaCls, memsegCls, memsegCls));
// Our method now takes two MemorySegment arguments, but we actually only want to pass one.
// Specifically, we want to get both the ByteBuffer and the memory address from the same MemorySegment
// instance.
// We permute the argument array such that the first MemorySegment argument gest passed to both
// parameters, and then the second parameter value gets ignored.
// ctorArenaMemsegNull.type() = (Arena,MemorySegment,MemorySegment)CleanableDirectBufferImpl
MethodHandle ctorArenaMemsegNull = MethodHandles.permuteArguments(ctorArenaMemsegMemseg,
methodType(bufCls, arenaCls, memsegCls, memsegCls), 0, 1, 1);
// With the second MemorySegment argument ignored, we can statically bind it to 'null' to effectively
// drop it from our parameter list.
MethodHandle ctorArenaMemseg = MethodHandles.insertArguments(
ctorArenaMemsegNull, 2, new Object[]{null});
// Use the 'allocateInt' method to transform the last MemorySegment argument of the constructor,
// into an (Arena,int) argument pair.
// ctorArenaArenaInt.type() = (Arena,Arena,int)CleanableDirectBufferImpl
MethodHandle ctorArenaArenaInt = MethodHandles.collectArguments(ctorArenaMemseg, 1, allocateInt);
// Our method now takes two Arena arguments, but we actually only want to pass one. Specifically, it's
// very important that it's the same arena we use for both allocation and deallocation.
// We permute the argument array such that the first Arena argument gets passed to both parameters,
// and the second parameter value gets ignored.
// ctorArenaNullInt.type() = (Arena,Arena,int)CleanableDirectBufferImpl
MethodHandle ctorArenaNullInt = MethodHandles.permuteArguments(ctorArenaArenaInt,
methodType(bufCls, arenaCls, arenaCls, int.class), 0, 0, 2);
// With the second Arena parameter value ignored, we can statically bind it to 'null' to effectively
// drop it from our parameter list.
// ctorArenaInt.type() = (Arena,int)CleanableDirectBufferImpl
MethodHandle ctorArenaInt = MethodHandles.insertArguments(ctorArenaNullInt, 1, new Object[]{null});
// Now we just need to create our Arena instance. We fold the Arena parameter into the 'ofShared'
// static method, so we effectively bind the argument to the result of calling that method.
// Since 'ofShared' takes no further parameters, we effectively eliminate the first parameter.
// This creates our method handle that takes an 'int' and returns a 'CleanableDirectBufferImpl'.
// ctorInt.type() = (int)CleanableDirectBufferImpl
method = MethodHandles.foldArguments(ctorArenaInt, ofShared);
error = null;
} catch (Throwable throwable) {
method = null;
error = throwable;
}
} else {
method = null;
error = new UnsupportedOperationException("java.lang.foreign.MemorySegment unavailable");
}
if (logger != null) {
if (error == null) {
logger.debug("java.nio.ByteBuffer.cleaner(): available");
} else {
logger.debug("java.nio.ByteBuffer.cleaner(): unavailable", error);
}
}
INVOKE_ALLOCATOR = method;
}
static boolean isSupported() {
return INVOKE_ALLOCATOR != null;
}
@SuppressWarnings("OverlyStrongTypeCast") // The cast is needed for 'invokeExact' semantics.
@Override
public CleanableDirectBuffer allocate(int capacity) {
try {
return (CleanableDirectBufferImpl) INVOKE_ALLOCATOR.invokeExact(capacity);
} catch (RuntimeException e) {
throw e; // Propagate the runtime exceptions that the Arena would normally throw.
} catch (Throwable e) {
throw new IllegalStateException("Unexpected allocation exception", e);
}
}
@Override
public void freeDirectBuffer(ByteBuffer buffer) {
throw new UnsupportedOperationException("Cannot clean arbitrary ByteBuffer instances");
}
private static final | CleanerJava25 |
java | apache__logging-log4j2 | log4j-perf-test/src/main/java/org/apache/logging/log4j/perf/nogc/OpenHashStringMap.java | {
"start": 25142,
"end": 30088
} | class ____ {
private HashCommon() {}
/** 2<sup>32</sup> · φ, φ = (√5 − 1)/2. */
private static final int INT_PHI = 0x9E3779B9;
/** The reciprocal of {@link #INT_PHI} modulo 2<sup>32</sup>. */
private static final int INV_INT_PHI = 0x144cbc89;
/** Avalanches the bits of an integer by applying the finalisation step of MurmurHash3.
*
* <p>This method implements the finalisation step of Austin Appleby's
* <a href="http://code.google.com/p/smhasher/">MurmurHash3</a>.
* Its purpose is to avalanche the bits of the argument to within 0.25% bias.
*
* @param x an integer.
* @return a hash value with good avalanching properties.
*/
public static int murmurHash3(final int x) {
int h = x;
h ^= h >>> 16;
h *= 0x85ebca6b;
h ^= h >>> 13;
h *= 0xc2b2ae35;
h ^= h >>> 16;
return h;
}
/**
* Quickly mixes the bits of an integer.
*
* <p>This method mixes the bits of the argument by multiplying by the golden ratio and
* xorshifting the result. It is borrowed from <a href="https://github.com/OpenHFT/Koloboke">Koloboke</a>, and
* it has slightly worse behaviour than {@link #murmurHash3(int)} (in open-addressing hash tables the average
* number of probes is slightly larger), but it's much faster.
*
* @param x an integer.
* @return a hash value obtained by mixing the bits of {@code x}.
* @see #invMix(int)
*/
public static int mix(final int x) {
final int h = x * INT_PHI;
return h ^ (h >>> 16);
}
/** The inverse of {@link #mix(int)}. This method is mainly useful to create unit tests.
*
* @param x an integer.
* @return a value that passed through {@link #mix(int)} would give {@code x}.
*/
public static int invMix(final int x) {
return (x ^ x >>> 16) * INV_INT_PHI;
}
/** Return the least power of two greater than or equal to the specified value.
*
* <p>Note that this function will return 1 when the argument is 0.
*
* @param x an integer smaller than or equal to 2<sup>30</sup>.
* @return the least power of two greater than or equal to the specified value.
*/
public static int nextPowerOfTwo(final int x) {
if (x == 0) {
return 1;
}
int r = x;
r--;
r |= r >> 1;
r |= r >> 2;
r |= r >> 4;
r |= r >> 8;
return (r | r >> 16) + 1;
}
/** Return the least power of two greater than or equal to the specified value.
*
* <p>Note that this function will return 1 when the argument is 0.
*
* @param x a long integer smaller than or equal to 2<sup>62</sup>.
* @return the least power of two greater than or equal to the specified value.
*/
public static long nextPowerOfTwo(final long x) {
if (x == 0) {
return 1;
}
long r = x;
r--;
r |= r >> 1;
r |= r >> 2;
r |= r >> 4;
r |= r >> 8;
r |= r >> 16;
return (r | r >> 32) + 1;
}
/** Returns the maximum number of entries that can be filled before rehashing.
*
* @param n the size of the backing array.
* @param f the load factor.
* @return the maximum number of entries before rehashing.
*/
public static int maxFill(final int n, final float f) {
/* We must guarantee that there is always at least
* one free entry (even with pathological load factors). */
return Math.min((int) Math.ceil(n * f), n - 1);
}
/**
* Returns the least power of two smaller than or equal to 2<sup>30</sup> and larger than or equal to
* <code>Math.ceil( expected / f )</code>.
*
* @param expected the expected number of elements in a hash table.
* @param f the load factor.
* @return the minimum possible size for a backing array.
* @throws IllegalArgumentException if the necessary size is larger than 2<sup>30</sup>.
*/
public static int arraySize(final int expected, final float f) {
final long result = Math.max(2, nextPowerOfTwo((long) Math.ceil(expected / f)));
if (result > (1 << 30)) {
throw new IllegalArgumentException(
"Too large (" + expected + " expected elements with load factor " + f + ")");
}
return (int) result;
}
}
}
| HashCommon |
java | spring-projects__spring-boot | core/spring-boot/src/main/java/org/springframework/boot/ssl/NoSuchSslBundleException.java | {
"start": 892,
"end": 1811
} | class ____ extends RuntimeException {
private final String bundleName;
/**
* Create a new {@code SslBundleNotFoundException} instance.
* @param bundleName the name of the bundle that could not be found
* @param message the exception message
*/
public NoSuchSslBundleException(String bundleName, String message) {
this(bundleName, message, null);
}
/**
* Create a new {@code SslBundleNotFoundException} instance.
* @param bundleName the name of the bundle that could not be found
* @param message the exception message
* @param cause the exception cause
*/
public NoSuchSslBundleException(String bundleName, String message, @Nullable Throwable cause) {
super(message, cause);
this.bundleName = bundleName;
}
/**
* Return the name of the bundle that was not found.
* @return the bundle name
*/
public String getBundleName() {
return this.bundleName;
}
}
| NoSuchSslBundleException |
java | netty__netty | transport-native-epoll/src/test/java/io/netty/channel/epoll/EpollSocketDataReadInitialStateTest.java | {
"start": 914,
"end": 1200
} | class ____ extends SocketDataReadInitialStateTest {
@Override
protected List<TestsuitePermutation.BootstrapComboFactory<ServerBootstrap, Bootstrap>> newFactories() {
return EpollSocketTestPermutation.INSTANCE.socketWithFastOpen();
}
}
| EpollSocketDataReadInitialStateTest |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/type/descriptor/jdbc/spi/JsonGeneratingVisitor.java | {
"start": 1449,
"end": 1508
} | class ____ serialize managed type values to JSON.
*/
public | to |
java | apache__camel | components/camel-box/camel-box-component/src/test/java/org/apache/camel/component/box/BoxFilesManagerIT.java | {
"start": 1982,
"end": 2187
} | class ____ {@link BoxFilesManager} APIs.
*/
@EnabledIf(value = "org.apache.camel.component.box.AbstractBoxITSupport#hasCredentials",
disabledReason = "Box credentials were not provided")
public | for |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java | {
"start": 13346,
"end": 15543
} | class ____ extends WithOrdinals {
protected final IndexOrdinalsFieldData indexFieldData;
public FieldData(IndexOrdinalsFieldData indexFieldData) {
this.indexFieldData = indexFieldData;
}
@Override
public SortedBinaryDocValues bytesValues(LeafReaderContext context) {
final LeafOrdinalsFieldData atomicFieldData = indexFieldData.load(context);
return atomicFieldData.getBytesValues();
}
@Override
public SortedSetDocValues ordinalsValues(LeafReaderContext context) {
final LeafOrdinalsFieldData atomicFieldData = indexFieldData.load(context);
return atomicFieldData.getOrdinalsValues();
}
@Override
public SortedSetDocValues globalOrdinalsValues(LeafReaderContext context) {
final IndexOrdinalsFieldData global = indexFieldData.loadGlobal((DirectoryReader) context.parent.reader());
final LeafOrdinalsFieldData atomicFieldData = global.load(context);
return atomicFieldData.getOrdinalsValues();
}
@Override
public boolean supportsGlobalOrdinalsMapping() {
return indexFieldData.supportsGlobalOrdinalsMapping();
}
@Override
public LongUnaryOperator globalOrdinalsMapping(LeafReaderContext context) throws IOException {
final IndexOrdinalsFieldData global = indexFieldData.loadGlobal((DirectoryReader) context.parent.reader());
final OrdinalMap map = global.getOrdinalMap();
if (map == null) {
// segments and global ordinals are the same
return LongUnaryOperator.identity();
}
final org.apache.lucene.util.LongValues segmentToGlobalOrd = map.getGlobalOrds(context.ord);
return segmentToGlobalOrd::get;
}
}
}
public static | FieldData |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/error/ShouldHaveCauseExactlyInstance_create_Test.java | {
"start": 943,
"end": 2478
} | class ____ {
@Test
void should_create_error_message_for_no_cause() {
// GIVEN
Throwable actual = new RuntimeException();
Throwable expected = new IllegalStateException();
// WHEN
String message = shouldHaveCauseExactlyInstance(actual, expected.getClass()).create();
// THEN
then(message).isEqualTo("%nExpecting a throwable with cause being exactly an instance of:%n" +
" %s%n" +
"but current throwable has no cause." +
"%nThrowable that failed the check:%n%s", expected, getStackTrace(actual));
}
@Test
void should_create_error_message_for_wrong_cause() {
// GIVEN
Throwable expected = new IllegalStateException();
Throwable cause = new IllegalArgumentException("oops...% %s %n");
Throwable actual = new RuntimeException(cause);
// WHEN
String message = shouldHaveCauseExactlyInstance(actual, expected.getClass()).create();
// THEN
then(message).isEqualTo(format("%nExpecting a throwable with cause being exactly an instance of:%n" +
" java.lang.IllegalStateException%n" +
"but was an instance of:%n" +
" java.lang.IllegalArgumentException%n" +
"Throwable that failed the check:%n" +
"%n" +
"%s", getStackTrace(actual)));
}
}
| ShouldHaveCauseExactlyInstance_create_Test |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/creators/TestCreators2.java | {
"start": 2424,
"end": 2708
} | class ____ {
@JsonCreator
public BeanFor438(@JsonProperty("name") String s) {
throw new IllegalArgumentException("I don't like that name!");
}
}
// For [JACKSON-470]: should be appropriately detected, reported error about
static | BeanFor438 |
java | quarkusio__quarkus | extensions/scheduler/deployment/src/test/java/io/quarkus/scheduler/test/InvalidDelayedExpressionTest.java | {
"start": 276,
"end": 790
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest test = new QuarkusUnitTest()
.assertException(t -> {
assertThat(t).cause().isInstanceOf(IllegalStateException.class)
.hasMessageContaining("Invalid delayed() expression");
})
.withApplicationRoot((jar) -> jar
.addClasses(InvalidBean.class));
@Test
public void test() throws InterruptedException {
}
static | InvalidDelayedExpressionTest |
java | apache__rocketmq | client/src/main/java/org/apache/rocketmq/client/hook/CheckForbiddenHook.java | {
"start": 916,
"end": 1060
} | interface ____ {
String hookName();
void checkForbidden(final CheckForbiddenContext context) throws MQClientException;
}
| CheckForbiddenHook |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/issues/ExceptionThrownFromOnExceptionTest.java | {
"start": 1362,
"end": 16201
} | class ____ extends ContextTestSupport {
private static final AtomicInteger RETRY = new AtomicInteger();
private static final AtomicInteger ON_EXCEPTION_RETRY = new AtomicInteger();
@Override
public boolean isUseRouteBuilder() {
return false;
}
@Test
public void testExceptionThrownFromOnException() throws Exception {
RETRY.set(0);
ON_EXCEPTION_RETRY.set(0);
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
// on exception to catch all IO exceptions and handle them
// specially
onException(IOException.class).redeliveryDelay(0).maximumRedeliveries(3).to("mock:b").process(new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
ON_EXCEPTION_RETRY.incrementAndGet();
throw new IOException("Some other IOException");
}
}).to("mock:c");
from("direct:start").to("direct:intermediate").to("mock:result");
from("direct:intermediate").to("mock:a").process(new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
RETRY.incrementAndGet();
throw new IOException("IO error");
}
}).to("mock:end");
}
});
context.start();
getMockEndpoint("mock:a").expectedMessageCount(1);
getMockEndpoint("mock:b").expectedMessageCount(1);
getMockEndpoint("mock:c").expectedMessageCount(0);
getMockEndpoint("mock:result").expectedMessageCount(0);
getMockEndpoint("mock:end").expectedMessageCount(0);
CamelExecutionException e
= assertThrows(CamelExecutionException.class, () -> template.sendBody("direct:start", "Hello World"),
"Should have thrown an exception");
IOException cause = assertIsInstanceOf(IOException.class, e.getCause());
assertEquals("Some other IOException", cause.getMessage());
assertMockEndpointsSatisfied();
assertEquals(4, RETRY.get(), "Should try 4 times (1 first, 3 retry)");
assertEquals(1, ON_EXCEPTION_RETRY.get(), "Should only invoke onException once");
}
@Test
public void testExceptionThrownFromOnExceptionAndHandled() throws Exception {
RETRY.set(0);
ON_EXCEPTION_RETRY.set(0);
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
// on exception to catch all IO exceptions and handle them
// specially
onException(IOException.class).redeliveryDelay(0).maximumRedeliveries(3)
// this time we handle the exception
.handled(true).to("mock:b").process(new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
ON_EXCEPTION_RETRY.incrementAndGet();
throw new IOException("Some other IOException");
}
}).to("mock:c");
from("direct:start").to("direct:intermediate").to("mock:result");
from("direct:intermediate").to("mock:a").process(new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
RETRY.incrementAndGet();
throw new IOException("IO error");
}
}).to("mock:end");
}
});
context.start();
getMockEndpoint("mock:a").expectedMessageCount(1);
getMockEndpoint("mock:b").expectedMessageCount(1);
getMockEndpoint("mock:c").expectedMessageCount(0);
getMockEndpoint("mock:result").expectedMessageCount(0);
getMockEndpoint("mock:end").expectedMessageCount(0);
CamelExecutionException e
= assertThrows(CamelExecutionException.class, () -> template.sendBody("direct:start", "Hello World"),
"Should have thrown an exception");
IOException cause = assertIsInstanceOf(IOException.class, e.getCause());
assertEquals("Some other IOException", cause.getMessage());
assertMockEndpointsSatisfied();
assertEquals(4, RETRY.get(), "Should try 4 times (1 first, 3 retry)");
assertEquals(1, ON_EXCEPTION_RETRY.get(), "Should only invoke onException once");
}
@Test
public void testExceptionThrownFromOnExceptionWithDeadLetterChannel() throws Exception {
RETRY.set(0);
ON_EXCEPTION_RETRY.set(0);
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
// DLC
deadLetterChannel("mock:error").redeliveryDelay(0).maximumRedeliveries(3);
// on exception to catch all IO exceptions and handle them
// specially
onException(IOException.class).redeliveryDelay(0).maximumRedeliveries(3).to("mock:b").process(new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
ON_EXCEPTION_RETRY.incrementAndGet();
throw new IOException("Some other IOException");
}
}).to("mock:c");
from("direct:start").to("direct:intermediate").to("mock:result");
from("direct:intermediate").to("mock:a").process(new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
RETRY.incrementAndGet();
throw new IOException("IO error");
}
}).to("mock:end");
}
});
context.start();
getMockEndpoint("mock:a").expectedMessageCount(1);
getMockEndpoint("mock:b").expectedMessageCount(1);
getMockEndpoint("mock:c").expectedMessageCount(0);
getMockEndpoint("mock:result").expectedMessageCount(0);
getMockEndpoint("mock:end").expectedMessageCount(0);
// the error will not be handled by DLC since we had an onException, and
// that failed,
// so the exchange will throw an exception
getMockEndpoint("mock:error").expectedMessageCount(0);
CamelExecutionException e = assertThrows(CamelExecutionException.class,
() -> template.sendBody("direct:start", "Hello World"),
"Should have thrown an exception");
IOException cause = assertIsInstanceOf(IOException.class, e.getCause());
assertEquals("Some other IOException", cause.getMessage());
assertMockEndpointsSatisfied();
assertEquals(4, RETRY.get(), "Should try 4 times (1 first, 3 retry)");
assertEquals(1, ON_EXCEPTION_RETRY.get(), "Should only invoke onException once");
}
@Test
public void testExceptionThrownFromOnExceptionAndHandledWithDeadLetterChannel() throws Exception {
RETRY.set(0);
ON_EXCEPTION_RETRY.set(0);
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
// DLC
deadLetterChannel("mock:error").redeliveryDelay(0).maximumRedeliveries(3);
// on exception to catch all IO exceptions and handle them
// specially
onException(IOException.class).redeliveryDelay(0).maximumRedeliveries(3)
// this time we handle the exception
.handled(true).to("mock:b").process(new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
ON_EXCEPTION_RETRY.incrementAndGet();
throw new IOException("Some other IOException");
}
}).to("mock:c");
from("direct:start").to("direct:intermediate").to("mock:result");
from("direct:intermediate").to("mock:a").process(new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
RETRY.incrementAndGet();
throw new IOException("IO error");
}
}).to("mock:end");
}
});
context.start();
getMockEndpoint("mock:a").expectedMessageCount(1);
getMockEndpoint("mock:b").expectedMessageCount(1);
getMockEndpoint("mock:c").expectedMessageCount(0);
getMockEndpoint("mock:result").expectedMessageCount(0);
getMockEndpoint("mock:end").expectedMessageCount(0);
// the error will not be handled by DLC since we had an onException, and
// that failed,
// so the exchange will throw an exception
getMockEndpoint("mock:error").expectedMessageCount(0);
CamelExecutionException e = assertThrows(CamelExecutionException.class,
() -> template.sendBody("direct:start", "Hello World"),
"Should have thrown an exception");
IOException cause = assertIsInstanceOf(IOException.class, e.getCause());
assertEquals("Some other IOException", cause.getMessage());
assertMockEndpointsSatisfied();
assertEquals(4, RETRY.get(), "Should try 4 times (1 first, 3 retry)");
assertEquals(1, ON_EXCEPTION_RETRY.get(), "Should only invoke onException once");
}
@Test
public void testNoExceptionThrownFromOnExceptionWithDeadLetterChannel() throws Exception {
RETRY.set(0);
ON_EXCEPTION_RETRY.set(0);
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
// DLC
deadLetterChannel("mock:error").redeliveryDelay(0).maximumRedeliveries(3);
// on exception to catch all IO exceptions and handle them
// specially
onException(IOException.class).redeliveryDelay(0).maximumRedeliveries(3).to("mock:b").process(new Processor() {
@Override
public void process(Exchange exchange) {
ON_EXCEPTION_RETRY.incrementAndGet();
// no exception is thrown this time
}
}).to("mock:c");
from("direct:start").to("direct:intermediate").to("mock:result");
from("direct:intermediate").to("mock:a").process(new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
RETRY.incrementAndGet();
throw new IOException("IO error");
}
}).to("mock:end");
}
});
context.start();
getMockEndpoint("mock:a").expectedMessageCount(1);
getMockEndpoint("mock:b").expectedMessageCount(1);
getMockEndpoint("mock:c").expectedMessageCount(1);
getMockEndpoint("mock:result").expectedMessageCount(0);
getMockEndpoint("mock:end").expectedMessageCount(0);
// the exception is handled by the onException and thus not the DLC
getMockEndpoint("mock:error").expectedMessageCount(0);
// and this time there was no exception thrown from onException,
// but the caller still fails since handled is false on onException
CamelExecutionException e = assertThrows(CamelExecutionException.class,
() -> template.sendBody("direct:start", "Hello World"),
"Should have thrown an exception");
IOException cause = assertIsInstanceOf(IOException.class, e.getCause());
assertEquals("IO error", cause.getMessage());
assertMockEndpointsSatisfied();
assertEquals(4, RETRY.get(), "Should try 4 times (1 first, 3 retry)");
assertEquals(1, ON_EXCEPTION_RETRY.get(), "Should only invoke onException once");
}
@Test
public void testNoExceptionThrownFromOnExceptionAndHandledWithDeadLetterChannel() throws Exception {
RETRY.set(0);
ON_EXCEPTION_RETRY.set(0);
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
// DLC
deadLetterChannel("mock:error").redeliveryDelay(0).maximumRedeliveries(3);
// on exception to catch all IO exceptions and handle them
// specially
onException(IOException.class).redeliveryDelay(0).maximumRedeliveries(3)
// we now handle the exception
.handled(true).to("mock:b").process(new Processor() {
@Override
public void process(Exchange exchange) {
ON_EXCEPTION_RETRY.incrementAndGet();
// no exception is thrown this time
}
}).to("mock:c");
from("direct:start").to("direct:intermediate").to("mock:result");
from("direct:intermediate").to("mock:a").process(new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
RETRY.incrementAndGet();
throw new IOException("IO error");
}
}).to("mock:end");
}
});
context.start();
getMockEndpoint("mock:a").expectedMessageCount(1);
getMockEndpoint("mock:b").expectedMessageCount(1);
getMockEndpoint("mock:c").expectedMessageCount(1);
getMockEndpoint("mock:result").expectedMessageCount(0);
getMockEndpoint("mock:end").expectedMessageCount(0);
// the exception is handled by onException so it goes not in DLC
getMockEndpoint("mock:error").expectedMessageCount(0);
// and this time there was no exception thrown from onException,
// and the exception is handled so the caller should not fail
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
assertEquals(4, RETRY.get(), "Should try 4 times (1 first, 3 retry)");
assertEquals(1, ON_EXCEPTION_RETRY.get(), "Should only invoke onException once");
}
}
| ExceptionThrownFromOnExceptionTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/component/StructComponentAssociationErrorTest.java | {
"start": 1325,
"end": 1881
} | class ____ {
@Test
public void testOneToOneMappedBy() {
final StandardServiceRegistry ssr = ServiceRegistryUtil.serviceRegistry();
try {
new MetadataSources( ssr )
.addAnnotatedClass( Book1.class )
.getMetadataBuilder()
.build();
Assertions.fail( "Expected a failure" );
}
catch (MappingException ex) {
assertThat( ex.getMessage(), containsString( "authors.favoriteBook" ) );
}
finally {
StandardServiceRegistryBuilder.destroy( ssr );
}
}
@Entity(name = "Book")
public static | StructComponentAssociationErrorTest |
java | apache__camel | components/camel-azure/camel-azure-storage-datalake/src/test/java/org/apache/camel/component/azure/storage/datalake/integration/DataLakeFileOperationIT.java | {
"start": 2295,
"end": 6274
} | class ____ extends Base {
private DataLakeFileSystemClientWrapper fileSystemClientWrapper;
private String randomFileName;
@BeforeAll
public void setup() throws Exception {
randomFileName = RandomStringUtils.randomAlphabetic(10);
fileSystemClientWrapper = new DataLakeServiceClientWrapper(serviceClient)
.getDataLakeFileSystemClientWrapper(configuration.getFileSystemName());
fileSystemClientWrapper.createFileSystem(null, null, null);
final InputStream inputStream = new ByteArrayInputStream("testing".getBytes(Charset.defaultCharset()));
final FileParallelUploadOptions options
= new FileParallelUploadOptions(inputStream);
fileSystemClientWrapper.getDataLakeFileClientWrapper(randomFileName).uploadWithResponse(options, null);
}
@AfterAll
public void delete() {
fileSystemClientWrapper.deleteFileSystem(null, null);
}
@Test
void testGetFile(@TempDir Path testDir) throws Exception {
final DataLakeFileClientWrapper fileClientWrapper
= fileSystemClientWrapper.getDataLakeFileClientWrapper(randomFileName);
final DataLakeFileOperations operations = new DataLakeFileOperations(configuration, fileClientWrapper);
final Exchange exchange = new DefaultExchange(context);
final DataLakeOperationResponse response = operations.getFile(exchange);
assertNotNull(response);
assertNotNull(response.getBody());
assertNotNull(response.getHeaders());
final InputStream inputStream = (InputStream) response.getBody();
final String bufferedText = new BufferedReader(new InputStreamReader(inputStream, Charset.defaultCharset())).readLine();
assertEquals("testing", bufferedText);
final File testFile = new File(testDir.toFile(), "test_file.txt");
exchange.getIn().setBody(new FileOutputStream(testFile));
final DataLakeOperationResponse responseWithFile = operations.getFile(exchange);
final String fileContent = FileUtils.readFileToString(testFile, Charset.defaultCharset());
assertNotNull(responseWithFile);
assertNotNull(responseWithFile.getHeaders());
assertNotNull(responseWithFile.getBody());
assertTrue(fileContent.contains("testing"));
}
@Test
void testDownloadToFile(@TempDir Path testDir) throws IOException {
final DataLakeFileClientWrapper fileClientWrapper
= fileSystemClientWrapper.getDataLakeFileClientWrapper(randomFileName);
final DataLakeFileOperations operations = new DataLakeFileOperations(configuration, fileClientWrapper);
final Exchange exchange = new DefaultExchange(context);
exchange.getIn().setHeader(DataLakeConstants.FILE_DIR, testDir.toString());
exchange.getIn().setHeader(DataLakeConstants.FILE_NAME, randomFileName);
final DataLakeOperationResponse response = operations.downloadToFile(exchange);
final File testFile = testDir.resolve(randomFileName).toFile();
final String fileContent = FileUtils.readFileToString(testFile, Charset.defaultCharset());
assertNotNull(response);
assertNotNull(response.getBody());
assertNotNull(response.getHeaders());
assertNotNull(response.getHeaders().get(DataLakeConstants.FILE_NAME));
assertTrue(fileContent.contains("testing"));
}
@Test
void testDownloadLink() {
final DataLakeFileClientWrapper clientWrapper = fileSystemClientWrapper.getDataLakeFileClientWrapper(randomFileName);
final DataLakeFileOperations fileOperations = new DataLakeFileOperations(configuration, clientWrapper);
final DataLakeOperationResponse response = fileOperations.downloadLink(null);
assertNotNull(response);
assertNotNull(response.getBody());
assertNotNull(response.getHeaders().get(DataLakeConstants.DOWNLOAD_LINK));
}
}
| DataLakeFileOperationIT |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/state/Stores.java | {
"start": 4116,
"end": 33936
} | class ____ {
/**
* Create a persistent {@link KeyValueBytesStoreSupplier}.
* <p>
* This store supplier can be passed into a {@link #keyValueStoreBuilder(KeyValueBytesStoreSupplier, Serde, Serde)}.
* If you want to create a {@link TimestampedKeyValueStore} or {@link VersionedKeyValueStore}
* you should use {@link #persistentTimestampedKeyValueStore(String)} or
* {@link #persistentVersionedKeyValueStore(String, Duration)}, respectively,
* to create a store supplier instead.
*
* @param name name of the store (cannot be {@code null})
* @return an instance of a {@link KeyValueBytesStoreSupplier} that can be used
* to build a persistent key-value store
*/
public static KeyValueBytesStoreSupplier persistentKeyValueStore(final String name) {
Objects.requireNonNull(name, "name cannot be null");
return new RocksDBKeyValueBytesStoreSupplier(name, false);
}
/**
* Create a persistent {@link KeyValueBytesStoreSupplier}.
* <p>
* This store supplier can be passed into a
* {@link #timestampedKeyValueStoreBuilder(KeyValueBytesStoreSupplier, Serde, Serde)}.
* If you want to create a {@link KeyValueStore} or a {@link VersionedKeyValueStore}
* you should use {@link #persistentKeyValueStore(String)} or
* {@link #persistentVersionedKeyValueStore(String, Duration)}, respectively,
* to create a store supplier instead.
*
* @param name name of the store (cannot be {@code null})
* @return an instance of a {@link KeyValueBytesStoreSupplier} that can be used
* to build a persistent key-(timestamp/value) store
*/
public static KeyValueBytesStoreSupplier persistentTimestampedKeyValueStore(final String name) {
Objects.requireNonNull(name, "name cannot be null");
return new RocksDBKeyValueBytesStoreSupplier(name, true);
}
/**
* Create a persistent versioned key-value store {@link VersionedBytesStoreSupplier}.
* <p>
* This store supplier can be passed into a
* {@link #versionedKeyValueStoreBuilder(VersionedBytesStoreSupplier, Serde, Serde)}.
* <p>
* Note that it is not safe to change the value of {@code historyRetention} between
* application restarts without clearing local state from application instances,
* as this may cause incorrect values to be read from the state store if it impacts
* the underlying storage format.
*
* @param name name of the store (cannot be {@code null})
* @param historyRetention length of time that old record versions are available for query
* (cannot be negative). If a timestamp bound provided to
* {@link VersionedKeyValueStore#get(Object, long)} is older than this
* specified history retention, then the get operation will not return data.
* This parameter also determines the "grace period" after which
* out-of-order writes will no longer be accepted.
* @return an instance of {@link VersionedBytesStoreSupplier}
* @throws IllegalArgumentException if {@code historyRetention} can't be represented as {@code long milliseconds}
*/
public static VersionedBytesStoreSupplier persistentVersionedKeyValueStore(final String name,
final Duration historyRetention) {
Objects.requireNonNull(name, "name cannot be null");
final String hrMsgPrefix = prepareMillisCheckFailMsgPrefix(historyRetention, "historyRetention");
final long historyRetentionMs = validateMillisecondDuration(historyRetention, hrMsgPrefix);
if (historyRetentionMs < 0L) {
throw new IllegalArgumentException("historyRetention cannot be negative");
}
return new RocksDbVersionedKeyValueBytesStoreSupplier(name, historyRetentionMs);
}
/**
* Create a persistent versioned key-value store {@link VersionedBytesStoreSupplier}.
* <p>
* This store supplier can be passed into a
* {@link #versionedKeyValueStoreBuilder(VersionedBytesStoreSupplier, Serde, Serde)}.
* <p>
* Note that it is not safe to change the value of {@code segmentInterval} between
* application restarts without clearing local state from application instances,
* as this may cause incorrect values to be read from the state store otherwise.
*
* @param name name of the store (cannot be {@code null})
* @param historyRetention length of time that old record versions are available for query
* (cannot be negative). If a timestamp bound provided to
* {@link VersionedKeyValueStore#get(Object, long)} is older than this
* specified history retention, then the get operation will not return data.
* This parameter also determines the "grace period" after which
* out-of-order writes will no longer be accepted.
* @param segmentInterval size of segments for storing old record versions (must be positive). Old record versions
* for the same key in a single segment are stored (updated and accessed) together.
* The only impact of this parameter is performance. If segments are large
* and a workload results in many record versions for the same key being collected
* in a single segment, performance may degrade as a result. On the other hand,
* historical reads (which access older segments) and out-of-order writes may
* slow down if there are too many segments.
* @return an instance of {@link VersionedBytesStoreSupplier}
* @throws IllegalArgumentException if {@code historyRetention} or {@code segmentInterval} can't be represented as {@code long milliseconds}
*/
public static VersionedBytesStoreSupplier persistentVersionedKeyValueStore(final String name,
final Duration historyRetention,
final Duration segmentInterval) {
Objects.requireNonNull(name, "name cannot be null");
final String hrMsgPrefix = prepareMillisCheckFailMsgPrefix(historyRetention, "historyRetention");
final long historyRetentionMs = validateMillisecondDuration(historyRetention, hrMsgPrefix);
if (historyRetentionMs < 0L) {
throw new IllegalArgumentException("historyRetention cannot be negative");
}
final String siMsgPrefix = prepareMillisCheckFailMsgPrefix(segmentInterval, "segmentInterval");
final long segmentIntervalMs = validateMillisecondDuration(segmentInterval, siMsgPrefix);
if (segmentIntervalMs < 1L) {
throw new IllegalArgumentException("segmentInterval cannot be zero or negative");
}
return new RocksDbVersionedKeyValueBytesStoreSupplier(name, historyRetentionMs, segmentIntervalMs);
}
/**
* Create an in-memory {@link KeyValueBytesStoreSupplier}.
* <p>
* This store supplier can be passed into a {@link #keyValueStoreBuilder(KeyValueBytesStoreSupplier, Serde, Serde)}
* or {@link #timestampedKeyValueStoreBuilder(KeyValueBytesStoreSupplier, Serde, Serde)}.
*
* @param name name of the store (cannot be {@code null})
* @return an instance of a {@link KeyValueBytesStoreSupplier} than can be used to
* build an in-memory store
*/
public static KeyValueBytesStoreSupplier inMemoryKeyValueStore(final String name) {
Objects.requireNonNull(name, "name cannot be null");
return new InMemoryKeyValueBytesStoreSupplier(name);
}
/**
* Create a LRU Map {@link KeyValueBytesStoreSupplier}.
* <p>
* This store supplier can be passed into a {@link #keyValueStoreBuilder(KeyValueBytesStoreSupplier, Serde, Serde)}
* or {@link #timestampedKeyValueStoreBuilder(KeyValueBytesStoreSupplier, Serde, Serde)}.
*
* @param name name of the store (cannot be {@code null})
* @param maxCacheSize maximum number of items in the LRU (cannot be negative)
* @return an instance of a {@link KeyValueBytesStoreSupplier} that can be used to build
* an LRU Map based store
* @throws IllegalArgumentException if {@code maxCacheSize} is negative
*/
public static KeyValueBytesStoreSupplier lruMap(final String name, final int maxCacheSize) {
Objects.requireNonNull(name, "name cannot be null");
if (maxCacheSize < 0) {
throw new IllegalArgumentException("maxCacheSize cannot be negative");
}
return new KeyValueBytesStoreSupplier() {
@Override
public String name() {
return name;
}
@Override
public KeyValueStore<Bytes, byte[]> get() {
return new MemoryNavigableLRUCache(name, maxCacheSize);
}
@Override
public String metricsScope() {
return "in-memory-lru";
}
};
}
/**
* Create a persistent {@link WindowBytesStoreSupplier}.
* <p>
* This store supplier can be passed into a {@link #windowStoreBuilder(WindowBytesStoreSupplier, Serde, Serde)}.
* If you want to create a {@link TimestampedWindowStore} you should use
* {@link #persistentTimestampedWindowStore(String, Duration, Duration, boolean)} to create a store supplier instead.
* <p>
* Note that it is not safe to change the value of {@code retentionPeriod} between
* application restarts without clearing local state from application instances,
* as this may cause incorrect values to be read from the state store if it impacts
* the underlying storage format.
*
* @param name name of the store (cannot be {@code null})
* @param retentionPeriod length of time to retain data in the store (cannot be negative)
* (note that the retention period must be at least long enough to contain the
* windowed data's entire life cycle, from window-start through window-end,
* and for the entire grace period)
* @param windowSize size of the windows (cannot be negative)
* @param retainDuplicates whether or not to retain duplicates. Turning this on will automatically disable
* caching and means that null values will be ignored.
* @return an instance of {@link WindowBytesStoreSupplier}
* @throws IllegalArgumentException if {@code retentionPeriod} or {@code windowSize} can't be represented as {@code long milliseconds}
* @throws IllegalArgumentException if {@code retentionPeriod} is smaller than {@code windowSize}
*/
public static WindowBytesStoreSupplier persistentWindowStore(final String name,
final Duration retentionPeriod,
final Duration windowSize,
final boolean retainDuplicates) throws IllegalArgumentException {
return persistentWindowStore(name, retentionPeriod, windowSize, retainDuplicates, false);
}
/**
* Create a persistent {@link WindowBytesStoreSupplier}.
* <p>
* This store supplier can be passed into a
* {@link #timestampedWindowStoreBuilder(WindowBytesStoreSupplier, Serde, Serde)}.
* If you want to create a {@link WindowStore} you should use
* {@link #persistentWindowStore(String, Duration, Duration, boolean)} to create a store supplier instead.
* <p>
* Note that it is not safe to change the value of {@code retentionPeriod} between
* application restarts without clearing local state from application instances,
* as this may cause incorrect values to be read from the state store if it impacts
* the underlying storage format.
*
* @param name name of the store (cannot be {@code null})
* @param retentionPeriod length of time to retain data in the store (cannot be negative)
* (note that the retention period must be at least long enough to contain the
* windowed data's entire life cycle, from window-start through window-end,
* and for the entire grace period)
* @param windowSize size of the windows (cannot be negative)
* @param retainDuplicates whether or not to retain duplicates. Turning this on will automatically disable
* caching and means that null values will be ignored.
* @return an instance of {@link WindowBytesStoreSupplier}
* @throws IllegalArgumentException if {@code retentionPeriod} or {@code windowSize} can't be represented as {@code long milliseconds}
* @throws IllegalArgumentException if {@code retentionPeriod} is smaller than {@code windowSize}
*/
public static WindowBytesStoreSupplier persistentTimestampedWindowStore(final String name,
final Duration retentionPeriod,
final Duration windowSize,
final boolean retainDuplicates) throws IllegalArgumentException {
return persistentWindowStore(name, retentionPeriod, windowSize, retainDuplicates, true);
}
private static WindowBytesStoreSupplier persistentWindowStore(final String name,
final Duration retentionPeriod,
final Duration windowSize,
final boolean retainDuplicates,
final boolean timestampedStore) {
Objects.requireNonNull(name, "name cannot be null");
final String rpMsgPrefix = prepareMillisCheckFailMsgPrefix(retentionPeriod, "retentionPeriod");
final long retentionMs = validateMillisecondDuration(retentionPeriod, rpMsgPrefix);
final String wsMsgPrefix = prepareMillisCheckFailMsgPrefix(windowSize, "windowSize");
final long windowSizeMs = validateMillisecondDuration(windowSize, wsMsgPrefix);
final long defaultSegmentInterval = Math.max(retentionMs / 2, 60_000L);
return persistentWindowStore(name, retentionMs, windowSizeMs, retainDuplicates, defaultSegmentInterval, timestampedStore);
}
private static WindowBytesStoreSupplier persistentWindowStore(final String name,
final long retentionPeriod,
final long windowSize,
final boolean retainDuplicates,
final long segmentInterval,
final boolean timestampedStore) {
Objects.requireNonNull(name, "name cannot be null");
if (retentionPeriod < 0L) {
throw new IllegalArgumentException("retentionPeriod cannot be negative");
}
if (windowSize < 0L) {
throw new IllegalArgumentException("windowSize cannot be negative");
}
if (segmentInterval < 1L) {
throw new IllegalArgumentException("segmentInterval cannot be zero or negative");
}
if (windowSize > retentionPeriod) {
throw new IllegalArgumentException("The retention period of the window store "
+ name + " must be no smaller than its window size. Got size=["
+ windowSize + "], retention=[" + retentionPeriod + "]");
}
return new RocksDbWindowBytesStoreSupplier(
name,
retentionPeriod,
segmentInterval,
windowSize,
retainDuplicates,
timestampedStore);
}
/**
* Create an in-memory {@link WindowBytesStoreSupplier}.
* <p>
* This store supplier can be passed into a {@link #windowStoreBuilder(WindowBytesStoreSupplier, Serde, Serde)} or
* {@link #timestampedWindowStoreBuilder(WindowBytesStoreSupplier, Serde, Serde)}.
*
* @param name name of the store (cannot be {@code null})
* @param retentionPeriod length of time to retain data in the store (cannot be negative)
* Note that the retention period must be at least long enough to contain the
* windowed data's entire life cycle, from window-start through window-end,
* and for the entire grace period.
* @param windowSize size of the windows (cannot be negative)
* @param retainDuplicates whether or not to retain duplicates. Turning this on will automatically disable
* caching and means that null values will be ignored.
* @return an instance of {@link WindowBytesStoreSupplier}
* @throws IllegalArgumentException if {@code retentionPeriod} or {@code windowSize} can't be represented as {@code long milliseconds}
* @throws IllegalArgumentException if {@code retentionPeriod} is smaller than {@code windowSize}
*/
public static WindowBytesStoreSupplier inMemoryWindowStore(final String name,
final Duration retentionPeriod,
final Duration windowSize,
final boolean retainDuplicates) throws IllegalArgumentException {
Objects.requireNonNull(name, "name cannot be null");
final String repartitionPeriodErrorMessagePrefix = prepareMillisCheckFailMsgPrefix(retentionPeriod, "retentionPeriod");
final long retentionMs = validateMillisecondDuration(retentionPeriod, repartitionPeriodErrorMessagePrefix);
if (retentionMs < 0L) {
throw new IllegalArgumentException("retentionPeriod cannot be negative");
}
final String windowSizeErrorMessagePrefix = prepareMillisCheckFailMsgPrefix(windowSize, "windowSize");
final long windowSizeMs = validateMillisecondDuration(windowSize, windowSizeErrorMessagePrefix);
if (windowSizeMs < 0L) {
throw new IllegalArgumentException("windowSize cannot be negative");
}
if (windowSizeMs > retentionMs) {
throw new IllegalArgumentException("The retention period of the window store "
+ name + " must be no smaller than its window size. Got size=["
+ windowSize + "], retention=[" + retentionPeriod + "]");
}
return new InMemoryWindowBytesStoreSupplier(name, retentionMs, windowSizeMs, retainDuplicates);
}
/**
* Create a persistent {@link SessionBytesStoreSupplier}.
* <p>
* Note that it is not safe to change the value of {@code retentionPeriod} between
* application restarts without clearing local state from application instances,
* as this may cause incorrect values to be read from the state store if it impacts
* the underlying storage format.
*
* @param name name of the store (cannot be {@code null})
* @param retentionPeriod length of time to retain data in the store (cannot be negative)
* (note that the retention period must be at least as long enough to
* contain the inactivity gap of the session and the entire grace period.)
* @return an instance of a {@link SessionBytesStoreSupplier}
*/
public static SessionBytesStoreSupplier persistentSessionStore(final String name,
final Duration retentionPeriod) {
Objects.requireNonNull(name, "name cannot be null");
final String msgPrefix = prepareMillisCheckFailMsgPrefix(retentionPeriod, "retentionPeriod");
final long retentionPeriodMs = validateMillisecondDuration(retentionPeriod, msgPrefix);
if (retentionPeriodMs < 0) {
throw new IllegalArgumentException("retentionPeriod cannot be negative");
}
return new RocksDbSessionBytesStoreSupplier(name, retentionPeriodMs);
}
/**
* Create an in-memory {@link SessionBytesStoreSupplier}.
*
* @param name name of the store (cannot be {@code null})
* @param retentionPeriod length of time to retain data in the store (cannot be negative)
* (note that the retention period must be at least as long enough to
* contain the inactivity gap of the session and the entire grace period.)
* @return an instance of a {@link SessionBytesStoreSupplier}
*/
public static SessionBytesStoreSupplier inMemorySessionStore(final String name, final Duration retentionPeriod) {
Objects.requireNonNull(name, "name cannot be null");
final String msgPrefix = prepareMillisCheckFailMsgPrefix(retentionPeriod, "retentionPeriod");
final long retentionPeriodMs = validateMillisecondDuration(retentionPeriod, msgPrefix);
if (retentionPeriodMs < 0) {
throw new IllegalArgumentException("retentionPeriod cannot be negative");
}
return new InMemorySessionBytesStoreSupplier(name, retentionPeriodMs);
}
/**
* Creates a {@link StoreBuilder} that can be used to build a {@link KeyValueStore}.
* <p>
* The provided supplier should <strong>not</strong> be a supplier for
* {@link TimestampedKeyValueStore TimestampedKeyValueStores}.
*
* @param supplier a {@link KeyValueBytesStoreSupplier} (cannot be {@code null})
* @param keySerde the key serde to use
* @param valueSerde the value serde to use; if the serialized bytes is {@code null} for put operations,
* it is treated as delete
* @param <K> key type
* @param <V> value type
* @return an instance of a {@link StoreBuilder} that can build a {@link KeyValueStore}
*/
public static <K, V> StoreBuilder<KeyValueStore<K, V>> keyValueStoreBuilder(final KeyValueBytesStoreSupplier supplier,
final Serde<K> keySerde,
final Serde<V> valueSerde) {
Objects.requireNonNull(supplier, "supplier cannot be null");
return new KeyValueStoreBuilder<>(supplier, keySerde, valueSerde, Time.SYSTEM);
}
/**
* Creates a {@link StoreBuilder} that can be used to build a {@link TimestampedKeyValueStore}.
* <p>
* The provided supplier should <strong>not</strong> be a supplier for
* {@link KeyValueStore KeyValueStores}. For this case, passed in timestamps will be dropped and not stored in the
* key-value-store. On read, no valid timestamp but a dummy timestamp will be returned.
*
* @param supplier a {@link KeyValueBytesStoreSupplier} (cannot be {@code null})
* @param keySerde the key serde to use
* @param valueSerde the value serde to use; if the serialized bytes is {@code null} for put operations,
* it is treated as delete
* @param <K> key type
* @param <V> value type
* @return an instance of a {@link StoreBuilder} that can build a {@link KeyValueStore}
*/
public static <K, V> StoreBuilder<TimestampedKeyValueStore<K, V>> timestampedKeyValueStoreBuilder(final KeyValueBytesStoreSupplier supplier,
final Serde<K> keySerde,
final Serde<V> valueSerde) {
Objects.requireNonNull(supplier, "supplier cannot be null");
return new TimestampedKeyValueStoreBuilder<>(supplier, keySerde, valueSerde, Time.SYSTEM);
}
/**
* Creates a {@link StoreBuilder} that can be used to build a {@link VersionedKeyValueStore}.
*
* @param supplier a {@link VersionedBytesStoreSupplier} (cannot be {@code null})
* @param keySerde the key serde to use
* @param valueSerde the value serde to use; if the serialized bytes is {@code null} for put operations,
* it is treated as a deletion
* @param <K> key type
* @param <V> value type
* @return an instance of a {@link StoreBuilder} that can build a {@link VersionedKeyValueStore}
*/
public static <K, V> StoreBuilder<VersionedKeyValueStore<K, V>> versionedKeyValueStoreBuilder(final VersionedBytesStoreSupplier supplier,
final Serde<K> keySerde,
final Serde<V> valueSerde) {
Objects.requireNonNull(supplier, "supplier cannot be null");
return new VersionedKeyValueStoreBuilder<>(supplier, keySerde, valueSerde, Time.SYSTEM);
}
/**
* Creates a {@link StoreBuilder} that can be used to build a {@link WindowStore}.
* <p>
* The provided supplier should <strong>not</strong> be a supplier for
* {@link TimestampedWindowStore TimestampedWindowStores}.
*
* @param supplier a {@link WindowBytesStoreSupplier} (cannot be {@code null})
* @param keySerde the key serde to use
* @param valueSerde the value serde to use; if the serialized bytes is {@code null} for put operations,
* it is treated as delete
* @param <K> key type
* @param <V> value type
* @return an instance of {@link StoreBuilder} than can build a {@link WindowStore}
*/
public static <K, V> StoreBuilder<WindowStore<K, V>> windowStoreBuilder(final WindowBytesStoreSupplier supplier,
final Serde<K> keySerde,
final Serde<V> valueSerde) {
Objects.requireNonNull(supplier, "supplier cannot be null");
return new WindowStoreBuilder<>(supplier, keySerde, valueSerde, Time.SYSTEM);
}
/**
* Creates a {@link StoreBuilder} that can be used to build a {@link TimestampedWindowStore}.
* <p>
* The provided supplier should <strong>not</strong> be a supplier for
* {@link WindowStore WindowStores}. For this case, passed in timestamps will be dropped and not stored in the
* window-store. On read, no valid timestamp but a dummy timestamp will be returned.
*
* @param supplier a {@link WindowBytesStoreSupplier} (cannot be {@code null})
* @param keySerde the key serde to use
* @param valueSerde the value serde to use; if the serialized bytes is {@code null} for put operations,
* it is treated as delete
* @param <K> key type
* @param <V> value type
* @return an instance of {@link StoreBuilder} that can build a {@link TimestampedWindowStore}
*/
public static <K, V> StoreBuilder<TimestampedWindowStore<K, V>> timestampedWindowStoreBuilder(final WindowBytesStoreSupplier supplier,
final Serde<K> keySerde,
final Serde<V> valueSerde) {
Objects.requireNonNull(supplier, "supplier cannot be null");
return new TimestampedWindowStoreBuilder<>(supplier, keySerde, valueSerde, Time.SYSTEM);
}
/**
* Creates a {@link StoreBuilder} that can be used to build a {@link SessionStore}.
*
* @param supplier a {@link SessionBytesStoreSupplier} (cannot be {@code null})
* @param keySerde the key serde to use
* @param valueSerde the value serde to use; if the serialized bytes is {@code null} for put operations,
* it is treated as delete
* @param <K> key type
* @param <V> value type
* @return an instance of {@link StoreBuilder} than can build a {@link SessionStore}
*/
public static <K, V> StoreBuilder<SessionStore<K, V>> sessionStoreBuilder(final SessionBytesStoreSupplier supplier,
final Serde<K> keySerde,
final Serde<V> valueSerde) {
Objects.requireNonNull(supplier, "supplier cannot be null");
return new SessionStoreBuilder<>(supplier, keySerde, valueSerde, Time.SYSTEM);
}
} | Stores |
java | apache__commons-lang | src/test/java/org/apache/commons/lang3/text/translate/NumericEntityEscaperTest.java | {
"start": 1106,
"end": 2490
} | class ____ extends AbstractLangTest {
@Test
void testAbove() {
final NumericEntityEscaper nee = NumericEntityEscaper.above('F');
final String input = "ADFGZ";
final String result = nee.translate(input);
assertEquals("ADFGZ", result, "Failed to escape numeric entities via the above method");
}
@Test
void testBelow() {
final NumericEntityEscaper nee = NumericEntityEscaper.below('F');
final String input = "ADFGZ";
final String result = nee.translate(input);
assertEquals("ADFGZ", result, "Failed to escape numeric entities via the below method");
}
@Test
void testBetween() {
final NumericEntityEscaper nee = NumericEntityEscaper.between('F', 'L');
final String input = "ADFGZ";
final String result = nee.translate(input);
assertEquals("ADFGZ", result, "Failed to escape numeric entities via the between method");
}
// See LANG-617
@Test
void testSupplementary() {
final NumericEntityEscaper nee = new NumericEntityEscaper();
final String input = "\uD803\uDC22";
final String expected = "𐰢";
final String result = nee.translate(input);
assertEquals(expected, result, "Failed to escape numeric entities supplementary characters");
}
}
| NumericEntityEscaperTest |
java | apache__camel | test-infra/camel-test-infra-smb/src/main/java/org/apache/camel/test/infra/smb/common/SmbProperties.java | {
"start": 860,
"end": 1162
} | class ____ {
public static final String SERVICE_ADDRESS = "smb.service.address";
public static final String SHARE_NAME = "smb.service.share";
public static final String SMB_USERNAME = "smb.service.username";
public static final String SMB_PASSWORD = "smb.service.password";
}
| SmbProperties |
java | apache__flink | flink-state-backends/flink-statebackend-rocksdb/src/main/java/org/apache/flink/state/rocksdb/EmbeddedRocksDBStateBackend.java | {
"start": 16230,
"end": 27153
} | class ____.
* @return The re-configured variant of the state backend
*/
@Override
public EmbeddedRocksDBStateBackend configure(ReadableConfig config, ClassLoader classLoader) {
return new EmbeddedRocksDBStateBackend(this, config, classLoader);
}
// ------------------------------------------------------------------------
// State backend methods
// ------------------------------------------------------------------------
@Override
public boolean supportsNoClaimRestoreMode() {
// We are able to create CheckpointType#FULL_CHECKPOINT. (we might potentially reupload some
// shared files when taking incremental snapshots)
return true;
}
@Override
public boolean supportsSavepointFormat(SavepointFormatType formatType) {
return true;
}
private void lazyInitializeForJob(
Environment env, @SuppressWarnings("unused") String operatorIdentifier)
throws IOException {
if (isInitialized) {
return;
}
this.jobId = env.getJobID();
// initialize the paths where the local RocksDB files should be stored
if (localRocksDbDirectories == null) {
initializedDbBasePaths = new File[] {env.getTaskManagerInfo().getTmpWorkingDirectory()};
} else {
List<File> dirs = new ArrayList<>(localRocksDbDirectories.length);
StringBuilder errorMessage = new StringBuilder();
for (File f : localRocksDbDirectories) {
File testDir = new File(f, UUID.randomUUID().toString());
if (!testDir.mkdirs()) {
String msg =
"Local DB files directory '"
+ f
+ "' does not exist and cannot be created. ";
LOG.error(msg);
errorMessage.append(msg);
} else {
dirs.add(f);
}
//noinspection ResultOfMethodCallIgnored
testDir.delete();
}
if (dirs.isEmpty()) {
throw new IOException("No local storage directories available. " + errorMessage);
} else {
initializedDbBasePaths = dirs.toArray(new File[0]);
}
}
nextDirectory = new Random().nextInt(initializedDbBasePaths.length);
isInitialized = true;
}
private File getNextStoragePath() {
int ni = nextDirectory + 1;
ni = ni >= initializedDbBasePaths.length ? 0 : ni;
nextDirectory = ni;
return initializedDbBasePaths[ni];
}
// ------------------------------------------------------------------------
// State holding data structures
// ------------------------------------------------------------------------
@Override
public <K> AbstractKeyedStateBackend<K> createKeyedStateBackend(
KeyedStateBackendParameters<K> parameters) throws IOException {
Environment env = parameters.getEnv();
// first, make sure that the RocksDB JNI library is loaded
// we do this explicitly here to have better error handling
String tempDir = env.getTaskManagerInfo().getTmpWorkingDirectory().getAbsolutePath();
ensureRocksDBIsLoaded(tempDir);
// replace all characters that are not legal for filenames with underscore
String fileCompatibleIdentifier =
parameters.getOperatorIdentifier().replaceAll("[^a-zA-Z0-9\\-]", "_");
lazyInitializeForJob(env, fileCompatibleIdentifier);
File instanceBasePath =
new File(
getNextStoragePath(),
"job_"
+ jobId
+ "_op_"
+ fileCompatibleIdentifier
+ "_uuid_"
+ UUID.randomUUID());
LocalRecoveryConfig localRecoveryConfig =
env.getTaskStateManager().createLocalRecoveryConfig();
final OpaqueMemoryResource<RocksDBSharedResources> sharedResources =
RocksDBOperationUtils.allocateSharedCachesIfConfigured(
memoryConfiguration,
env,
parameters.getManagedMemoryFraction(),
LOG,
rocksDBMemoryFactory);
if (sharedResources != null) {
LOG.info("Obtained shared RocksDB cache of size {} bytes", sharedResources.getSize());
}
final RocksDBResourceContainer resourceContainer =
createOptionsAndResourceContainer(
sharedResources,
instanceBasePath,
nativeMetricOptions.isStatisticsEnabled());
ExecutionConfig executionConfig = env.getExecutionConfig();
StreamCompressionDecorator keyGroupCompressionDecorator =
getCompressionDecorator(executionConfig);
LatencyTrackingStateConfig latencyTrackingStateConfig =
latencyTrackingConfigBuilder.setMetricGroup(parameters.getMetricGroup()).build();
SizeTrackingStateConfig sizeTrackingStateConfig =
sizeTrackingConfigBuilder.setMetricGroup(parameters.getMetricGroup()).build();
RocksDBKeyedStateBackendBuilder<K> builder =
new RocksDBKeyedStateBackendBuilder<>(
parameters.getOperatorIdentifier(),
env.getUserCodeClassLoader().asClassLoader(),
instanceBasePath,
resourceContainer,
stateName -> resourceContainer.getColumnOptions(),
parameters.getKvStateRegistry(),
parameters.getKeySerializer(),
parameters.getNumberOfKeyGroups(),
parameters.getKeyGroupRange(),
executionConfig,
localRecoveryConfig,
priorityQueueConfig,
parameters.getTtlTimeProvider(),
latencyTrackingStateConfig,
sizeTrackingStateConfig,
parameters.getMetricGroup(),
parameters.getCustomInitializationMetrics(),
parameters.getStateHandles(),
keyGroupCompressionDecorator,
parameters.getCancelStreamRegistry())
.setEnableIncrementalCheckpointing(isIncrementalCheckpointsEnabled())
.setNumberOfTransferingThreads(getNumberOfTransferThreads())
.setNativeMetricOptions(
resourceContainer.getMemoryWatcherOptions(nativeMetricOptions))
.setWriteBatchSize(getWriteBatchSize())
.setOverlapFractionThreshold(getOverlapFractionThreshold())
.setIncrementalRestoreAsyncCompactAfterRescale(
getIncrementalRestoreAsyncCompactAfterRescale())
.setUseIngestDbRestoreMode(getUseIngestDbRestoreMode())
.setRescalingUseDeleteFilesInRange(isRescalingUseDeleteFilesInRange())
.setIOExecutor(
MdcUtils.scopeToJob(
jobId,
parameters.getEnv().getIOManager().getExecutorService()))
.setManualCompactionConfig(
manualCompactionConfig == null
? RocksDBManualCompactionConfig.getDefault()
: manualCompactionConfig)
.setAsyncExceptionHandler(
(ign, throwable) -> parameters.getEnv().failExternally(throwable));
return builder.build();
}
@Override
public OperatorStateBackend createOperatorStateBackend(
OperatorStateBackendParameters parameters) throws Exception {
// the default for RocksDB; eventually there can be a operator state backend based on
// RocksDB, too.
final boolean asyncSnapshots = true;
return new DefaultOperatorStateBackendBuilder(
parameters.getEnv().getUserCodeClassLoader().asClassLoader(),
parameters.getEnv().getExecutionConfig(),
asyncSnapshots,
parameters.getStateHandles(),
parameters.getCancelStreamRegistry())
.build();
}
private RocksDBOptionsFactory configureOptionsFactory(
@Nullable RocksDBOptionsFactory originalOptionsFactory,
@Nullable String factoryClassName,
ReadableConfig config,
ClassLoader classLoader)
throws DynamicCodeLoadingException {
RocksDBOptionsFactory optionsFactory = null;
if (originalOptionsFactory != null) {
if (originalOptionsFactory instanceof ConfigurableRocksDBOptionsFactory) {
originalOptionsFactory =
((ConfigurableRocksDBOptionsFactory) originalOptionsFactory)
.configure(config);
}
LOG.info("Using application-defined options factory: {}.", originalOptionsFactory);
optionsFactory = originalOptionsFactory;
} else if (factoryClassName != null) {
// Do nothing if user does not define any factory class.
try {
Class<? extends RocksDBOptionsFactory> clazz =
Class.forName(factoryClassName, false, classLoader)
.asSubclass(RocksDBOptionsFactory.class);
optionsFactory = clazz.newInstance();
if (optionsFactory instanceof ConfigurableRocksDBOptionsFactory) {
optionsFactory =
((ConfigurableRocksDBOptionsFactory) optionsFactory).configure(config);
}
LOG.info("Using configured options factory: {}.", optionsFactory);
} catch (ClassNotFoundException e) {
throw new DynamicCodeLoadingException(
"Cannot find configured options factory class: " + factoryClassName, e);
} catch (ClassCastException | InstantiationException | IllegalAccessException e) {
throw new DynamicCodeLoadingException(
"The | loader |
java | spring-projects__spring-boot | module/spring-boot-security/src/main/java/org/springframework/boot/security/autoconfigure/web/servlet/SecurityFilterAutoConfiguration.java | {
"start": 2457,
"end": 3499
} | class ____ {
private static final String DEFAULT_FILTER_NAME = AbstractSecurityWebApplicationInitializer.DEFAULT_FILTER_NAME;
@Bean
@ConditionalOnBean(name = DEFAULT_FILTER_NAME)
DelegatingFilterProxyRegistrationBean securityFilterChainRegistration(
SecurityFilterProperties securityFilterProperties) {
DelegatingFilterProxyRegistrationBean registration = new DelegatingFilterProxyRegistrationBean(
DEFAULT_FILTER_NAME);
registration.setOrder(securityFilterProperties.getOrder());
registration.setDispatcherTypes(getDispatcherTypes(securityFilterProperties));
return registration;
}
private @Nullable EnumSet<DispatcherType> getDispatcherTypes(SecurityFilterProperties securityFilterProperties) {
if (securityFilterProperties.getDispatcherTypes() == null) {
return null;
}
return securityFilterProperties.getDispatcherTypes()
.stream()
.map((type) -> DispatcherType.valueOf(type.name()))
.collect(Collectors.toCollection(() -> EnumSet.noneOf(DispatcherType.class)));
}
}
| SecurityFilterAutoConfiguration |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/window/groupwindow/triggers/WindowTrigger.java | {
"start": 1206,
"end": 1633
} | class ____<W extends Window> extends Trigger<W> {
/** The {@link TriggerContext} of the window trigger. */
protected transient TriggerContext ctx;
/**
* Returns the trigger time of the window, this should be called after TriggerContext
* initialized.
*/
protected long triggerTime(W window) {
return toEpochMillsForTimer(window.maxTimestamp(), ctx.getShiftTimeZone());
}
}
| WindowTrigger |
java | spring-projects__spring-boot | module/spring-boot-actuator/src/test/java/org/springframework/boot/actuate/endpoint/annotation/DiscovererEndpointFilterTests.java | {
"start": 3341,
"end": 3777
} | class ____ extends EndpointDiscoverer<ExposableEndpoint<Operation>, Operation> {
TestDiscovererB(ApplicationContext applicationContext, ParameterValueMapper parameterValueMapper,
Collection<OperationInvokerAdvisor> invokerAdvisors,
Collection<EndpointFilter<ExposableEndpoint<Operation>>> filters) {
super(applicationContext, parameterValueMapper, invokerAdvisors, filters, Collections.emptyList());
}
}
}
| TestDiscovererB |
java | apache__maven | impl/maven-core/src/main/java/org/apache/maven/internal/impl/DefaultProjectManager.java | {
"start": 2324,
"end": 9178
} | class ____ implements ProjectManager {
private final InternalMavenSession session;
private final ArtifactManager artifactManager;
@Inject
public DefaultProjectManager(InternalMavenSession session, ArtifactManager artifactManager) {
this.session = session;
this.artifactManager = artifactManager;
}
@Nonnull
@Override
public Optional<Path> getPath(@Nonnull Project project) {
requireNonNull(project, "project" + " cannot be null");
Optional<ProducedArtifact> mainArtifact = project.getMainArtifact();
return mainArtifact.flatMap(artifactManager::getPath);
}
@Nonnull
@Override
public Collection<ProducedArtifact> getAttachedArtifacts(@Nonnull Project project) {
requireNonNull(project, "project" + " cannot be null");
Collection<ProducedArtifact> attached =
map(getMavenProject(project).getAttachedArtifacts(), a -> getSession(project)
.getArtifact(ProducedArtifact.class, RepositoryUtils.toArtifact(a)));
return Collections.unmodifiableCollection(attached);
}
@Override
@Nonnull
public Collection<ProducedArtifact> getAllArtifacts(@Nonnull Project project) {
requireNonNull(project, "project cannot be null");
ArrayList<ProducedArtifact> result = new ArrayList<>(2);
result.addAll(project.getArtifacts());
result.addAll(getAttachedArtifacts(project));
return Collections.unmodifiableCollection(result);
}
@Override
@SuppressWarnings("deprecation")
public void attachArtifact(@Nonnull Project project, @Nonnull ProducedArtifact artifact, @Nonnull Path path) {
requireNonNull(project, "project cannot be null");
requireNonNull(artifact, "artifact cannot be null");
requireNonNull(path, "path cannot be null");
if (artifact.getGroupId().isEmpty()
|| artifact.getArtifactId().isEmpty()
|| artifact.getBaseVersion().toString().isEmpty()) {
artifact = session.createProducedArtifact(
artifact.getGroupId().isEmpty() ? project.getGroupId() : artifact.getGroupId(),
artifact.getArtifactId().isEmpty() ? project.getArtifactId() : artifact.getArtifactId(),
artifact.getBaseVersion().toString().isEmpty()
? session.parseVersion(project.getVersion()).toString()
: artifact.getBaseVersion().toString(),
artifact.getClassifier(),
artifact.getExtension(),
null);
}
if (!Objects.equals(project.getGroupId(), artifact.getGroupId())
|| !Objects.equals(project.getArtifactId(), artifact.getArtifactId())
|| !Objects.equals(
project.getVersion(), artifact.getBaseVersion().toString())) {
throw new IllegalArgumentException(
"The produced artifact must have the same groupId/artifactId/version than the project it is attached to. Expecting "
+ project.getGroupId() + ":" + project.getArtifactId() + ":" + project.getVersion()
+ " but received " + artifact.getGroupId() + ":" + artifact.getArtifactId() + ":"
+ artifact.getBaseVersion());
}
getMavenProject(project)
.addAttachedArtifact(
RepositoryUtils.toArtifact(getSession(project).toArtifact(artifact)));
artifactManager.setPath(artifact, path);
}
@Nonnull
@Override
public Collection<SourceRoot> getSourceRoots(@Nonnull Project project) {
MavenProject prj = getMavenProject(requireNonNull(project, "project" + " cannot be null"));
return prj.getSourceRoots();
}
@Nonnull
@Override
public Stream<SourceRoot> getEnabledSourceRoots(@Nonnull Project project, ProjectScope scope, Language language) {
MavenProject prj = getMavenProject(requireNonNull(project, "project" + " cannot be null"));
return prj.getEnabledSourceRoots(scope, language);
}
@Override
public void addSourceRoot(@Nonnull Project project, @Nonnull SourceRoot source) {
MavenProject prj = getMavenProject(requireNonNull(project, "project" + " cannot be null"));
prj.addSourceRoot(requireNonNull(source, "source" + " cannot be null"));
}
@Override
public void addSourceRoot(
@Nonnull Project project,
@Nonnull ProjectScope scope,
@Nonnull Language language,
@Nonnull Path directory) {
MavenProject prj = getMavenProject(requireNonNull(project, "project" + " cannot be null"));
prj.addSourceRoot(
requireNonNull(scope, "scope" + " cannot be null"),
requireNonNull(language, "language" + " cannot be null"),
requireNonNull(directory, "directory" + " cannot be null"));
}
@Override
@Nonnull
public List<RemoteRepository> getRemoteProjectRepositories(@Nonnull Project project) {
return Collections.unmodifiableList(new MappedList<>(
getMavenProject(project).getRemoteProjectRepositories(), session::getRemoteRepository));
}
@Override
@Nonnull
public List<RemoteRepository> getRemotePluginRepositories(@Nonnull Project project) {
return Collections.unmodifiableList(
new MappedList<>(getMavenProject(project).getRemotePluginRepositories(), session::getRemoteRepository));
}
@Override
public void setProperty(@Nonnull Project project, @Nonnull String key, String value) {
Properties properties = getMavenProject(project).getProperties();
if (value == null) {
properties.remove(key);
} else {
properties.setProperty(key, value);
}
}
@Override
@Nonnull
public Map<String, String> getProperties(@Nonnull Project project) {
return Collections.unmodifiableMap(
new PropertiesAsMap(getMavenProject(project).getProperties()));
}
@Override
@Nonnull
public Optional<Project> getExecutionProject(@Nonnull Project project) {
// Session keep tracks of the Project per project id,
// so we cannot use session.getProject(p) for forked projects
// which are temporary clones
return Optional.ofNullable(getMavenProject(project).getExecutionProject())
.map(p -> new DefaultProject(session, p));
}
private MavenProject getMavenProject(Project project) {
return ((DefaultProject) project).getProject();
}
private static InternalSession getSession(Project project) {
return ((DefaultProject) project).getSession();
}
}
| DefaultProjectManager |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/parser/bug/Bug_for_kongmu.java | {
"start": 1173,
"end": 1503
} | class ____ {
public Result() {
this.code = 11;
}
private int code;
public int getCode() {
return code;
}
public void setCode(int code) {
this.code = code;
}
}
}
public static | Result |
java | quarkusio__quarkus | extensions/redis-client/runtime/src/main/java/io/quarkus/redis/runtime/client/ReactiveRedisClientImpl.java | {
"start": 384,
"end": 51003
} | class ____ implements ReactiveRedisClient {
private final RedisAPI redisAPI;
private final Redis redis;
public ReactiveRedisClientImpl(Redis redis, RedisAPI redisAPI) {
this.redis = redis;
this.redisAPI = redisAPI;
}
@Override
public void close() {
redisAPI.close();
}
@Override
public Uni<Response> append(String arg0, String arg1) {
return redisAPI.append(arg0, arg1);
}
@Override
public Response appendAndAwait(String arg0, String arg1) {
return redisAPI.appendAndAwait(arg0, arg1);
}
@Override
public Uni<Response> asking() {
return redisAPI.asking();
}
@Override
public Response askingAndAwait() {
return redisAPI.askingAndAwait();
}
@Override
public Uni<Response> auth(List<String> args) {
return redisAPI.auth(args);
}
@Override
public Response authAndAwait(List<String> args) {
return redisAPI.authAndAwait(args);
}
@Override
public Uni<Response> bgrewriteaof() {
return redisAPI.bgrewriteaof();
}
@Override
public Response bgrewriteaofAndAwait() {
return redisAPI.bgrewriteaofAndAwait();
}
@Override
public Uni<Response> bgsave(List<String> args) {
return redisAPI.bgsave(args);
}
@Override
public Response bgsaveAndAwait(List<String> args) {
return redisAPI.bgsaveAndAwait(args);
}
@Override
public Uni<Response> bitcount(List<String> args) {
return redisAPI.bitcount(args);
}
@Override
public Response bitcountAndAwait(List<String> args) {
return redisAPI.bitcountAndAwait(args);
}
@Override
public Uni<Response> bitfield(List<String> args) {
return redisAPI.bitfield(args);
}
@Override
public Response bitfieldAndAwait(List<String> args) {
return redisAPI.bitfieldAndAwait(args);
}
@Override
public Uni<Response> bitop(List<String> args) {
return redisAPI.bitop(args);
}
@Override
public Response bitopAndAwait(List<String> args) {
return redisAPI.bitopAndAwait(args);
}
@Override
public Uni<Response> bitpos(List<String> args) {
return redisAPI.bitpos(args);
}
@Override
public Response bitposAndAwait(List<String> args) {
return redisAPI.bitposAndAwait(args);
}
@Override
public Uni<Response> blpop(List<String> args) {
return redisAPI.blpop(args);
}
@Override
public Response blpopAndAwait(List<String> args) {
return redisAPI.blpopAndAwait(args);
}
@Override
public Uni<Response> brpop(List<String> args) {
return redisAPI.brpop(args);
}
@Override
public Response brpopAndAwait(List<String> args) {
return redisAPI.brpopAndAwait(args);
}
@Override
public Uni<Response> brpoplpush(String arg0, String arg1, String arg2) {
return redisAPI.brpoplpush(arg0, arg1, arg2);
}
@Override
public Response brpoplpushAndAwait(String arg0, String arg1, String arg2) {
return redisAPI.brpoplpushAndAwait(arg0, arg1, arg2);
}
@Override
public Uni<Response> bzpopmax(List<String> args) {
return redisAPI.bzpopmax(args);
}
@Override
public Response bzpopmaxAndAwait(List<String> args) {
return redisAPI.bzpopmaxAndAwait(args);
}
@Override
public Uni<Response> bzpopmin(List<String> args) {
return redisAPI.bzpopmin(args);
}
@Override
public Response bzpopminAndAwait(List<String> args) {
return redisAPI.bzpopminAndAwait(args);
}
@Override
public Uni<Response> client(List<String> args) {
return redisAPI.client(args);
}
@Override
public Response clientAndAwait(List<String> args) {
return redisAPI.clientAndAwait(args);
}
@Override
public Uni<Response> cluster(List<String> args) {
return redisAPI.cluster(args);
}
@Override
public Response clusterAndAwait(List<String> args) {
return redisAPI.clusterAndAwait(args);
}
@Override
public Uni<Response> command(List<String> args) {
return redisAPI.command(args);
}
@Override
public Response commandAndAwait(List<String> args) {
return redisAPI.commandAndAwait(args);
}
@Override
public Uni<Response> config(List<String> args) {
return redisAPI.config(args);
}
@Override
public Response configAndAwait(List<String> args) {
return redisAPI.configAndAwait(args);
}
@Override
public Uni<Response> dbsize() {
return redisAPI.dbsize();
}
@Override
public Response dbsizeAndAwait() {
return redisAPI.dbsizeAndAwait();
}
@Override
public Uni<Response> debug(List<String> args) {
return redisAPI.debug(args);
}
@Override
public Response debugAndAwait(List<String> args) {
return redisAPI.debugAndAwait(args);
}
@Override
public Uni<Response> decr(String arg0) {
return redisAPI.decr(arg0);
}
@Override
public Response decrAndAwait(String arg0) {
return redisAPI.decrAndAwait(arg0);
}
@Override
public Uni<Response> decrby(String arg0, String arg1) {
return redisAPI.decrby(arg0, arg1);
}
@Override
public Response decrbyAndAwait(String arg0, String arg1) {
return redisAPI.decrbyAndAwait(arg0, arg1);
}
@Override
public Uni<Response> del(List<String> args) {
return redisAPI.del(args);
}
@Override
public Response delAndAwait(List<String> args) {
return redisAPI.delAndAwait(args);
}
@Override
public Uni<Response> discard() {
return redisAPI.discard();
}
@Override
public Response discardAndAwait() {
return redisAPI.discardAndAwait();
}
@Override
public Uni<Response> dump(String arg0) {
return redisAPI.dump(arg0);
}
@Override
public Response dumpAndAwait(String arg0) {
return redisAPI.dumpAndAwait(arg0);
}
@Override
public Uni<Response> echo(String arg0) {
return redisAPI.echo(arg0);
}
@Override
public Response echoAndAwait(String arg0) {
return redisAPI.echoAndAwait(arg0);
}
@Override
public Uni<Response> eval(List<String> args) {
return redisAPI.eval(args);
}
@Override
public Response evalAndAwait(List<String> args) {
return redisAPI.evalAndAwait(args);
}
@Override
public Uni<Response> evalsha(List<String> args) {
return redisAPI.evalsha(args);
}
@Override
public Response evalshaAndAwait(List<String> args) {
return redisAPI.evalshaAndAwait(args);
}
@Override
public Uni<Response> exec() {
return redisAPI.exec();
}
@Override
public Response execAndAwait() {
return redisAPI.execAndAwait();
}
@Override
public Uni<Response> exists(List<String> args) {
return redisAPI.exists(args);
}
@Override
public Response existsAndAwait(List<String> args) {
return redisAPI.existsAndAwait(args);
}
@Override
public Uni<Response> expire(String arg0, String arg1) {
return redisAPI.expire(List.of(arg0, arg1));
}
@Override
public Response expireAndAwait(String arg0, String arg1) {
return redisAPI.expireAndAwait(List.of(arg0, arg1));
}
@Override
public Uni<Response> expireat(String arg0, String arg1) {
return redisAPI.expireat(List.of(arg0, arg1));
}
@Override
public Response expireatAndAwait(String arg0, String arg1) {
return redisAPI.expireatAndAwait(List.of(arg0, arg1));
}
@Override
public Uni<Response> flushall(List<String> args) {
return redisAPI.flushall(args);
}
@Override
public Response flushallAndAwait(List<String> args) {
return redisAPI.flushallAndAwait(args);
}
@Override
public Uni<Response> flushdb(List<String> args) {
return redisAPI.flushdb(args);
}
@Override
public Response flushdbAndAwait(List<String> args) {
return redisAPI.flushdbAndAwait(args);
}
@Override
public Uni<Response> geoadd(List<String> args) {
return redisAPI.geoadd(args);
}
@Override
public Response geoaddAndAwait(List<String> args) {
return redisAPI.geoaddAndAwait(args);
}
@Override
public Uni<Response> geodist(List<String> args) {
return redisAPI.geodist(args);
}
@Override
public Response geodistAndAwait(List<String> args) {
return redisAPI.geodistAndAwait(args);
}
@Override
public Uni<Response> geohash(List<String> args) {
return redisAPI.geohash(args);
}
@Override
public Response geohashAndAwait(List<String> args) {
return redisAPI.geohashAndAwait(args);
}
@Override
public Uni<Response> geopos(List<String> args) {
return redisAPI.geopos(args);
}
@Override
public Response geoposAndAwait(List<String> args) {
return redisAPI.geoposAndAwait(args);
}
@Override
public Uni<Response> georadius(List<String> args) {
return redisAPI.georadius(args);
}
@Override
public Response georadiusAndAwait(List<String> args) {
return redisAPI.georadiusAndAwait(args);
}
@Override
public Uni<Response> georadiusRo(List<String> args) {
return redisAPI.georadiusRo(args);
}
@Override
public Response georadiusRoAndAwait(List<String> args) {
return redisAPI.georadiusRoAndAwait(args);
}
@Override
public Uni<Response> georadiusbymember(List<String> args) {
return redisAPI.georadiusbymember(args);
}
@Override
public Response georadiusbymemberAndAwait(List<String> args) {
return redisAPI.georadiusbymemberAndAwait(args);
}
@Override
public Uni<Response> georadiusbymemberRo(List<String> args) {
return redisAPI.georadiusbymemberRo(args);
}
@Override
public Response georadiusbymemberRoAndAwait(List<String> args) {
return redisAPI.georadiusbymemberRoAndAwait(args);
}
@Override
public Uni<Response> get(String arg0) {
return redisAPI.get(arg0);
}
@Override
public Response getAndAwait(String arg0) {
return redisAPI.getAndAwait(arg0);
}
@Override
public Uni<Response> getbit(String arg0, String arg1) {
return redisAPI.getbit(arg0, arg1);
}
@Override
public Response getbitAndAwait(String arg0, String arg1) {
return redisAPI.getbitAndAwait(arg0, arg1);
}
@Override
public Uni<Response> getrange(String arg0, String arg1, String arg2) {
return redisAPI.getrange(arg0, arg1, arg2);
}
@Override
public Response getrangeAndAwait(String arg0, String arg1, String arg2) {
return redisAPI.getrangeAndAwait(arg0, arg1, arg2);
}
@Override
public Uni<Response> getset(String arg0, String arg1) {
return redisAPI.getset(arg0, arg1);
}
@Override
public Response getsetAndAwait(String arg0, String arg1) {
return redisAPI.getsetAndAwait(arg0, arg1);
}
@Override
public Uni<Response> hdel(List<String> args) {
return redisAPI.hdel(args);
}
@Override
public Response hdelAndAwait(List<String> args) {
return redisAPI.hdelAndAwait(args);
}
@Override
public Uni<Response> hexists(String arg0, String arg1) {
return redisAPI.hexists(arg0, arg1);
}
@Override
public Response hexistsAndAwait(String arg0, String arg1) {
return redisAPI.hexistsAndAwait(arg0, arg1);
}
@Override
public Uni<Response> hget(String arg0, String arg1) {
return redisAPI.hget(arg0, arg1);
}
@Override
public Response hgetAndAwait(String arg0, String arg1) {
return redisAPI.hgetAndAwait(arg0, arg1);
}
@Override
public Uni<Response> hgetall(String arg0) {
return redisAPI.hgetall(arg0);
}
@Override
public Response hgetallAndAwait(String arg0) {
return redisAPI.hgetallAndAwait(arg0);
}
@Override
public Uni<Response> hincrby(String arg0, String arg1, String arg2) {
return redisAPI.hincrby(arg0, arg1, arg2);
}
@Override
public Response hincrbyAndAwait(String arg0, String arg1, String arg2) {
return redisAPI.hincrbyAndAwait(arg0, arg1, arg2);
}
@Override
public Uni<Response> hincrbyfloat(String arg0, String arg1, String arg2) {
return redisAPI.hincrbyfloat(arg0, arg1, arg2);
}
@Override
public Response hincrbyfloatAndAwait(String arg0, String arg1, String arg2) {
return redisAPI.hincrbyfloatAndAwait(arg0, arg1, arg2);
}
@Override
public Uni<Response> hkeys(String arg0) {
return redisAPI.hkeys(arg0);
}
@Override
public Response hkeysAndAwait(String arg0) {
return redisAPI.hkeysAndAwait(arg0);
}
@Override
public Uni<Response> hlen(String arg0) {
return redisAPI.hlen(arg0);
}
@Override
public Response hlenAndAwait(String arg0) {
return redisAPI.hlenAndAwait(arg0);
}
@Override
public Uni<Response> hmget(List<String> args) {
return redisAPI.hmget(args);
}
@Override
public Response hmgetAndAwait(List<String> args) {
return redisAPI.hmgetAndAwait(args);
}
@Override
public Uni<Response> hmset(List<String> args) {
return redisAPI.hmset(args);
}
@Override
public Response hmsetAndAwait(List<String> args) {
return redisAPI.hmsetAndAwait(args);
}
@Override
public Uni<Response> host(List<String> args) {
return redis.send(Request.cmd(Command.create("host"), args.toArray(new String[0])));
}
@Override
public Response hostAndAwait(List<String> args) {
return host(args).await().indefinitely();
}
@Override
public Uni<Response> hscan(List<String> args) {
return redisAPI.hscan(args);
}
@Override
public Response hscanAndAwait(List<String> args) {
return redisAPI.hscanAndAwait(args);
}
@Override
public Uni<Response> hset(List<String> args) {
return redisAPI.hset(args);
}
@Override
public Response hsetAndAwait(List<String> args) {
return redisAPI.hsetAndAwait(args);
}
@Override
public Uni<Response> hsetnx(String arg0, String arg1, String arg2) {
return redisAPI.hsetnx(arg0, arg1, arg2);
}
@Override
public Response hsetnxAndAwait(String arg0, String arg1, String arg2) {
return redisAPI.hsetnxAndAwait(arg0, arg1, arg2);
}
@Override
public Uni<Response> hstrlen(String arg0, String arg1) {
return redisAPI.hstrlen(arg0, arg1);
}
@Override
public Response hstrlenAndAwait(String arg0, String arg1) {
return redisAPI.hstrlenAndAwait(arg0, arg1);
}
@Override
public Uni<Response> hvals(String arg0) {
return redisAPI.hvals(arg0);
}
@Override
public Response hvalsAndAwait(String arg0) {
return redisAPI.hvalsAndAwait(arg0);
}
@Override
public Uni<Response> incr(String arg0) {
return redisAPI.incr(arg0);
}
@Override
public Response incrAndAwait(String arg0) {
return redisAPI.incrAndAwait(arg0);
}
@Override
public Uni<Response> incrby(String arg0, String arg1) {
return redisAPI.incrby(arg0, arg1);
}
@Override
public Response incrbyAndAwait(String arg0, String arg1) {
return redisAPI.incrbyAndAwait(arg0, arg1);
}
@Override
public Uni<Response> incrbyfloat(String arg0, String arg1) {
return redisAPI.incrbyfloat(arg0, arg1);
}
@Override
public Response incrbyfloatAndAwait(String arg0, String arg1) {
return redisAPI.incrbyfloatAndAwait(arg0, arg1);
}
@Override
public Uni<Response> info(List<String> args) {
return redisAPI.info(args);
}
@Override
public Response infoAndAwait(List<String> args) {
return redisAPI.infoAndAwait(args);
}
@Override
public Uni<Response> keys(String arg0) {
return redisAPI.keys(arg0);
}
@Override
public Response keysAndAwait(String arg0) {
return redisAPI.keysAndAwait(arg0);
}
@Override
public Uni<Response> lastsave() {
return redisAPI.lastsave();
}
@Override
public Response lastsaveAndAwait() {
return redisAPI.lastsaveAndAwait();
}
@Override
public Uni<Response> latency(List<String> args) {
return redisAPI.latency(args);
}
@Override
public Response latencyAndAwait(List<String> args) {
return redisAPI.latencyAndAwait(args);
}
@Override
public Uni<Response> lindex(String arg0, String arg1) {
return redisAPI.lindex(arg0, arg1);
}
@Override
public Response lindexAndAwait(String arg0, String arg1) {
return redisAPI.lindexAndAwait(arg0, arg1);
}
@Override
public Uni<Response> linsert(String arg0, String arg1, String arg2, String arg3) {
return redisAPI.linsert(arg0, arg1, arg2, arg3);
}
@Override
public Response linsertAndAwait(String arg0, String arg1, String arg2, String arg3) {
return redisAPI.linsertAndAwait(arg0, arg1, arg2, arg3);
}
@Override
public Uni<Response> llen(String arg0) {
return redisAPI.llen(arg0);
}
@Override
public Response llenAndAwait(String arg0) {
return redisAPI.llenAndAwait(arg0);
}
@Override
public Uni<Response> lolwut(List<String> args) {
return redisAPI.lolwut(args);
}
@Override
public Response lolwutAndAwait(List<String> args) {
return redisAPI.lolwutAndAwait(args);
}
@Override
public Uni<Response> lpop(String arg0) {
return redisAPI.lpop(List.of(arg0));
}
@Override
public Uni<Response> lpop(List<String> arg0) {
return redisAPI.lpop(arg0);
}
@Override
public Response lpopAndAwait(String arg0) {
return redisAPI.lpopAndAwait(List.of(arg0));
}
@Override
public Response lpopAndAwait(List<String> arg0) {
return redisAPI.lpopAndAwait(arg0);
}
@Override
public Uni<Response> lpush(List<String> args) {
return redisAPI.lpush(args);
}
@Override
public Response lpushAndAwait(List<String> args) {
return redisAPI.lpushAndAwait(args);
}
@Override
public Uni<Response> lpushx(List<String> args) {
return redisAPI.lpushx(args);
}
@Override
public Response lpushxAndAwait(List<String> args) {
return redisAPI.lpushxAndAwait(args);
}
@Override
public Uni<Response> lrange(String arg0, String arg1, String arg2) {
return redisAPI.lrange(arg0, arg1, arg2);
}
@Override
public Response lrangeAndAwait(String arg0, String arg1, String arg2) {
return redisAPI.lrangeAndAwait(arg0, arg1, arg2);
}
@Override
public Uni<Response> lrem(String arg0, String arg1, String arg2) {
return redisAPI.lrem(arg0, arg1, arg2);
}
@Override
public Response lremAndAwait(String arg0, String arg1, String arg2) {
return redisAPI.lremAndAwait(arg0, arg1, arg2);
}
@Override
public Uni<Response> lset(String arg0, String arg1, String arg2) {
return redisAPI.lset(arg0, arg1, arg2);
}
@Override
public Response lsetAndAwait(String arg0, String arg1, String arg2) {
return redisAPI.lsetAndAwait(arg0, arg1, arg2);
}
@Override
public Uni<Response> ltrim(String arg0, String arg1, String arg2) {
return redisAPI.ltrim(arg0, arg1, arg2);
}
@Override
public Response ltrimAndAwait(String arg0, String arg1, String arg2) {
return redisAPI.ltrimAndAwait(arg0, arg1, arg2);
}
@Override
public Uni<Response> memory(List<String> args) {
return redisAPI.memory(args);
}
@Override
public Response memoryAndAwait(List<String> args) {
return redisAPI.memoryAndAwait(args);
}
@Override
public Uni<Response> mget(List<String> args) {
return redisAPI.mget(args);
}
@Override
public Response mgetAndAwait(List<String> args) {
return redisAPI.mgetAndAwait(args);
}
@Override
public Uni<Response> migrate(List<String> args) {
return redisAPI.migrate(args);
}
@Override
public Response migrateAndAwait(List<String> args) {
return redisAPI.migrateAndAwait(args);
}
@Override
public Uni<Response> module(List<String> args) {
return redisAPI.module(args);
}
@Override
public Response moduleAndAwait(List<String> args) {
return redisAPI.moduleAndAwait(args);
}
@Override
public Uni<Response> monitor() {
return redisAPI.monitor();
}
@Override
public Response monitorAndAwait() {
return redisAPI.monitorAndAwait();
}
@Override
public Uni<Response> move(String arg0, String arg1) {
return redisAPI.move(arg0, arg1);
}
@Override
public Response moveAndAwait(String arg0, String arg1) {
return redisAPI.moveAndAwait(arg0, arg1);
}
@Override
public Uni<Response> mset(List<String> args) {
return redisAPI.mset(args);
}
@Override
public Response msetAndAwait(List<String> args) {
return redisAPI.msetAndAwait(args);
}
@Override
public Uni<Response> msetnx(List<String> args) {
return redisAPI.msetnx(args);
}
@Override
public Response msetnxAndAwait(List<String> args) {
return redisAPI.msetnxAndAwait(args);
}
@Override
public Uni<Response> multi() {
return redisAPI.multi();
}
@Override
public Response multiAndAwait() {
return redisAPI.multiAndAwait();
}
@Override
public Uni<Response> object(List<String> args) {
return redisAPI.object(args);
}
@Override
public Response objectAndAwait(List<String> args) {
return redisAPI.objectAndAwait(args);
}
@Override
public Uni<Response> persist(String arg0) {
return redisAPI.persist(arg0);
}
@Override
public Response persistAndAwait(String arg0) {
return redisAPI.persistAndAwait(arg0);
}
@Override
public Uni<Response> pexpire(String arg0, String arg1) {
return redisAPI.pexpire(List.of(arg0, arg1));
}
@Override
public Response pexpireAndAwait(String arg0, String arg1) {
return redisAPI.pexpireAndAwait(List.of(arg0, arg1));
}
@Override
public Uni<Response> pexpireat(String arg0, String arg1) {
return redisAPI.pexpireat(List.of(arg0, arg1));
}
@Override
public Response pexpireatAndAwait(String arg0, String arg1) {
return redisAPI.pexpireatAndAwait(List.of(arg0, arg1));
}
@Override
public Uni<Response> pfadd(List<String> args) {
return redisAPI.pfadd(args);
}
@Override
public Response pfaddAndAwait(List<String> args) {
return redisAPI.pfaddAndAwait(args);
}
@Override
public Uni<Response> pfcount(List<String> args) {
return redisAPI.pfcount(args);
}
@Override
public Response pfcountAndAwait(List<String> args) {
return redisAPI.pfcountAndAwait(args);
}
@Override
public Uni<Response> pfdebug(List<String> args) {
return redisAPI.pfdebug(args.get(0), args.get(1));
}
@Override
public Response pfdebugAndAwait(List<String> args) {
return redisAPI.pfdebugAndAwait(args.get(0), args.get(1));
}
@Override
public Uni<Response> pfdebug(String command, String key) {
return redisAPI.pfdebug(command, key);
}
@Override
public Response pfdebugAndAwait(String command, String key) {
return redisAPI.pfdebugAndAwait(command, key);
}
@Override
public Uni<Response> pfmerge(List<String> args) {
return redisAPI.pfmerge(args);
}
@Override
public Response pfmergeAndAwait(List<String> args) {
return redisAPI.pfmergeAndAwait(args);
}
@Override
public Uni<Response> pfselftest() {
return redisAPI.pfselftest();
}
@Override
public Response pfselftestAndAwait() {
return redisAPI.pfselftestAndAwait();
}
@Override
public Uni<Response> ping(List<String> args) {
return redisAPI.ping(args);
}
@Override
public Response pingAndAwait(List<String> args) {
return redisAPI.pingAndAwait(args);
}
@Override
public Uni<Response> post(List<String> args) {
return redis.send(Request.cmd(Command.create("post"), args.toArray(new String[0])));
}
@Override
public Response postAndAwait(List<String> args) {
return post(args).await().indefinitely();
}
@Override
public Uni<Response> psetex(String arg0, String arg1, String arg2) {
return redisAPI.psetex(arg0, arg1, arg2);
}
@Override
public Response psetexAndAwait(String arg0, String arg1, String arg2) {
return redisAPI.psetexAndAwait(arg0, arg1, arg2);
}
@Override
public Uni<Response> psubscribe(List<String> args) {
return redisAPI.psubscribe(args);
}
@Override
public Response psubscribeAndAwait(List<String> args) {
return redisAPI.psubscribeAndAwait(args);
}
@Override
public Uni<Response> psync(String arg0, String arg1) {
return redisAPI.psync(List.of(arg0, arg1));
}
@Override
public Uni<Response> psync(List<String> args) {
return redisAPI.psync(args);
}
@Override
public Response psyncAndAwait(List<String> args) {
return redisAPI.psyncAndAwait(args);
}
@Override
public Response psyncAndAwait(String arg0, String arg1) {
return redisAPI.psyncAndAwait(List.of(arg0, arg1));
}
@Override
public Uni<Response> pttl(String arg0) {
return redisAPI.pttl(arg0);
}
@Override
public Response pttlAndAwait(String arg0) {
return redisAPI.pttlAndAwait(arg0);
}
@Override
public Uni<Response> publish(String arg0, String arg1) {
return redisAPI.publish(arg0, arg1);
}
@Override
public Response publishAndAwait(String arg0, String arg1) {
return redisAPI.publishAndAwait(arg0, arg1);
}
@Override
public Uni<Response> pubsub(List<String> args) {
return redisAPI.pubsub(args);
}
@Override
public Response pubsubAndAwait(List<String> args) {
return redisAPI.pubsubAndAwait(args);
}
@Override
public Uni<Response> punsubscribe(List<String> args) {
return redisAPI.punsubscribe(args);
}
@Override
public Response punsubscribeAndAwait(List<String> args) {
return redisAPI.punsubscribeAndAwait(args);
}
@Override
public Uni<Response> randomkey() {
return redisAPI.randomkey();
}
@Override
public Response randomkeyAndAwait() {
return redisAPI.randomkeyAndAwait();
}
@Override
public Uni<Response> readonly() {
return redisAPI.readonly();
}
@Override
public Response readonlyAndAwait() {
return redisAPI.readonlyAndAwait();
}
@Override
public Uni<Response> readwrite() {
return redisAPI.readwrite();
}
@Override
public Response readwriteAndAwait() {
return redisAPI.readwriteAndAwait();
}
@Override
public Uni<Response> rename(String arg0, String arg1) {
return redisAPI.rename(arg0, arg1);
}
@Override
public Response renameAndAwait(String arg0, String arg1) {
return redisAPI.renameAndAwait(arg0, arg1);
}
@Override
public Uni<Response> renamenx(String arg0, String arg1) {
return redisAPI.renamenx(arg0, arg1);
}
@Override
public Response renamenxAndAwait(String arg0, String arg1) {
return redisAPI.renamenxAndAwait(arg0, arg1);
}
@Override
public Uni<Response> replconf(List<String> args) {
return redisAPI.replconf(args);
}
@Override
public Response replconfAndAwait(List<String> args) {
return redisAPI.replconfAndAwait(args);
}
@Override
public Uni<Response> replicaof(String arg0, String arg1) {
return redisAPI.replicaof(arg0, arg1);
}
@Override
public Response replicaofAndAwait(String arg0, String arg1) {
return redisAPI.replicaofAndAwait(arg0, arg1);
}
@Override
public Uni<Response> restore(List<String> args) {
return redisAPI.restore(args);
}
@Override
public Response restoreAndAwait(List<String> args) {
return redisAPI.restoreAndAwait(args);
}
@Override
public Uni<Response> restoreAsking(List<String> args) {
return redisAPI.restoreAsking(args);
}
@Override
public Response restoreAskingAndAwait(List<String> args) {
return redisAPI.restoreAskingAndAwait(args);
}
@Override
public Uni<Response> role() {
return redisAPI.role();
}
@Override
public Response roleAndAwait() {
return redisAPI.roleAndAwait();
}
@Override
public Uni<Response> rpop(String arg0) {
return redisAPI.rpop(List.of(arg0));
}
@Override
public Uni<Response> rpop(List<String> args) {
return redisAPI.rpop(args);
}
@Override
public Response rpopAndAwait(List<String> args) {
return redisAPI.rpopAndAwait(args);
}
@Override
public Response rpopAndAwait(String arg0) {
return redisAPI.rpopAndAwait(List.of(arg0));
}
@Override
public Uni<Response> rpoplpush(String arg0, String arg1) {
return redisAPI.rpoplpush(arg0, arg1);
}
@Override
public Response rpoplpushAndAwait(String arg0, String arg1) {
return redisAPI.rpoplpushAndAwait(arg0, arg1);
}
@Override
public Uni<Response> rpush(List<String> args) {
return redisAPI.rpush(args);
}
@Override
public Response rpushAndAwait(List<String> args) {
return redisAPI.rpushAndAwait(args);
}
@Override
public Uni<Response> rpushx(List<String> args) {
return redisAPI.rpushx(args);
}
@Override
public Response rpushxAndAwait(List<String> args) {
return redisAPI.rpushxAndAwait(args);
}
@Override
public Uni<Response> sadd(List<String> args) {
return redisAPI.sadd(args);
}
@Override
public Response saddAndAwait(List<String> args) {
return redisAPI.saddAndAwait(args);
}
@Override
public Uni<Response> save() {
return redisAPI.save();
}
@Override
public Response saveAndAwait() {
return redisAPI.saveAndAwait();
}
@Override
public Uni<Response> scan(List<String> args) {
return redisAPI.scan(args);
}
@Override
public Response scanAndAwait(List<String> args) {
return redisAPI.scanAndAwait(args);
}
@Override
public Uni<Response> scard(String arg0) {
return redisAPI.scard(arg0);
}
@Override
public Response scardAndAwait(String arg0) {
return redisAPI.scardAndAwait(arg0);
}
@Override
public Uni<Response> script(List<String> args) {
return redisAPI.script(args);
}
@Override
public Response scriptAndAwait(List<String> args) {
return redisAPI.scriptAndAwait(args);
}
@Override
public Uni<Response> sdiff(List<String> args) {
return redisAPI.sdiff(args);
}
@Override
public Response sdiffAndAwait(List<String> args) {
return redisAPI.sdiffAndAwait(args);
}
@Override
public Uni<Response> sdiffstore(List<String> args) {
return redisAPI.sdiffstore(args);
}
@Override
public Response sdiffstoreAndAwait(List<String> args) {
return redisAPI.sdiffstoreAndAwait(args);
}
@Override
public Uni<Response> select(String arg0) {
return redisAPI.select(arg0);
}
@Override
public Response selectAndAwait(String arg0) {
return redisAPI.selectAndAwait(arg0);
}
@Override
public Uni<Response> set(List<String> args) {
return redisAPI.set(args);
}
@Override
public Response setAndAwait(List<String> args) {
return redisAPI.setAndAwait(args);
}
@Override
public Uni<Response> setbit(String arg0, String arg1, String arg2) {
return redisAPI.setbit(arg0, arg1, arg2);
}
@Override
public Response setbitAndAwait(String arg0, String arg1, String arg2) {
return redisAPI.setbitAndAwait(arg0, arg1, arg2);
}
@Override
public Uni<Response> setex(String arg0, String arg1, String arg2) {
return redisAPI.setex(arg0, arg1, arg2);
}
@Override
public Response setexAndAwait(String arg0, String arg1, String arg2) {
return redisAPI.setexAndAwait(arg0, arg1, arg2);
}
@Override
public Uni<Response> setnx(String arg0, String arg1) {
return redisAPI.setnx(arg0, arg1);
}
@Override
public Response setnxAndAwait(String arg0, String arg1) {
return redisAPI.setnxAndAwait(arg0, arg1);
}
@Override
public Uni<Response> setrange(String arg0, String arg1, String arg2) {
return redisAPI.setrange(arg0, arg1, arg2);
}
@Override
public Response setrangeAndAwait(String arg0, String arg1, String arg2) {
return redisAPI.setrangeAndAwait(arg0, arg1, arg2);
}
@Override
public Uni<Response> shutdown(List<String> args) {
return redisAPI.shutdown(args);
}
@Override
public Response shutdownAndAwait(List<String> args) {
return redisAPI.shutdownAndAwait(args);
}
@Override
public Uni<Response> sinter(List<String> args) {
return redisAPI.sinter(args);
}
@Override
public Response sinterAndAwait(List<String> args) {
return redisAPI.sinterAndAwait(args);
}
@Override
public Uni<Response> sinterstore(List<String> args) {
return redisAPI.sinterstore(args);
}
@Override
public Response sinterstoreAndAwait(List<String> args) {
return redisAPI.sinterstoreAndAwait(args);
}
@Override
public Uni<Response> sismember(String arg0, String arg1) {
return redisAPI.sismember(arg0, arg1);
}
@Override
public Response sismemberAndAwait(String arg0, String arg1) {
return redisAPI.sismemberAndAwait(arg0, arg1);
}
@Override
public Uni<Response> slaveof(String arg0, String arg1) {
return redisAPI.slaveof(arg0, arg1);
}
@Override
public Response slaveofAndAwait(String arg0, String arg1) {
return redisAPI.slaveofAndAwait(arg0, arg1);
}
@Override
public Uni<Response> slowlog(List<String> args) {
return redisAPI.slowlog(args);
}
@Override
public Response slowlogAndAwait(List<String> args) {
return redisAPI.slowlogAndAwait(args);
}
@Override
public Uni<Response> smembers(String arg0) {
return redisAPI.smembers(arg0);
}
@Override
public Response smembersAndAwait(String arg0) {
return redisAPI.smembersAndAwait(arg0);
}
@Override
public Uni<Response> smove(String arg0, String arg1, String arg2) {
return redisAPI.smove(arg0, arg1, arg2);
}
@Override
public Response smoveAndAwait(String arg0, String arg1, String arg2) {
return redisAPI.smoveAndAwait(arg0, arg1, arg2);
}
@Override
public Uni<Response> sort(List<String> args) {
return redisAPI.sort(args);
}
@Override
public Response sortAndAwait(List<String> args) {
return redisAPI.sortAndAwait(args);
}
@Override
public Uni<Response> spop(List<String> args) {
return redisAPI.spop(args);
}
@Override
public Response spopAndAwait(List<String> args) {
return redisAPI.spopAndAwait(args);
}
@Override
public Uni<Response> srandmember(List<String> args) {
return redisAPI.srandmember(args);
}
@Override
public Response srandmemberAndAwait(List<String> args) {
return redisAPI.srandmemberAndAwait(args);
}
@Override
public Uni<Response> srem(List<String> args) {
return redisAPI.srem(args);
}
@Override
public Response sremAndAwait(List<String> args) {
return redisAPI.sremAndAwait(args);
}
@Override
public Uni<Response> sscan(List<String> args) {
return redisAPI.sscan(args);
}
@Override
public Response sscanAndAwait(List<String> args) {
return redisAPI.sscanAndAwait(args);
}
@Override
public Uni<Response> strlen(String arg0) {
return redisAPI.strlen(arg0);
}
@Override
public Response strlenAndAwait(String arg0) {
return redisAPI.strlenAndAwait(arg0);
}
@Override
public Uni<Response> subscribe(List<String> args) {
return redisAPI.subscribe(args);
}
@Override
public Response subscribeAndAwait(List<String> args) {
return redisAPI.subscribeAndAwait(args);
}
@Override
public Uni<Response> substr(String arg0, String arg1, String arg2) {
return redisAPI.substr(arg0, arg1, arg2);
}
@Override
public Response substrAndAwait(String arg0, String arg1, String arg2) {
return redisAPI.substrAndAwait(arg0, arg1, arg2);
}
@Override
public Uni<Response> sunion(List<String> args) {
return redisAPI.sunion(args);
}
@Override
public Response sunionAndAwait(List<String> args) {
return redisAPI.sunionAndAwait(args);
}
@Override
public Uni<Response> sunionstore(List<String> args) {
return redisAPI.sunionstore(args);
}
@Override
public Response sunionstoreAndAwait(List<String> args) {
return redisAPI.sunionstoreAndAwait(args);
}
@Override
public Uni<Response> swapdb(String arg0, String arg1) {
return redisAPI.swapdb(arg0, arg1);
}
@Override
public Response swapdbAndAwait(String arg0, String arg1) {
return redisAPI.swapdbAndAwait(arg0, arg1);
}
@Override
public Uni<Response> sync() {
return redisAPI.sync();
}
@Override
public Response syncAndAwait() {
return redisAPI.syncAndAwait();
}
@Override
public Uni<Response> time() {
return redisAPI.time();
}
@Override
public Response timeAndAwait() {
return redisAPI.timeAndAwait();
}
@Override
public Uni<Response> touch(List<String> args) {
return redisAPI.touch(args);
}
@Override
public Response touchAndAwait(List<String> args) {
return redisAPI.touchAndAwait(args);
}
@Override
public Uni<Response> ttl(String arg0) {
return redisAPI.ttl(arg0);
}
@Override
public Response ttlAndAwait(String arg0) {
return redisAPI.ttlAndAwait(arg0);
}
@Override
public Uni<Response> type(String arg0) {
return redisAPI.type(arg0);
}
@Override
public Response typeAndAwait(String arg0) {
return redisAPI.typeAndAwait(arg0);
}
@Override
public Uni<Response> unlink(List<String> args) {
return redisAPI.unlink(args);
}
@Override
public Response unlinkAndAwait(List<String> args) {
return redisAPI.unlinkAndAwait(args);
}
@Override
public Uni<Response> unsubscribe(List<String> args) {
return redisAPI.unsubscribe(args);
}
@Override
public Response unsubscribeAndAwait(List<String> args) {
return redisAPI.unsubscribeAndAwait(args);
}
@Override
public Uni<Response> unwatch() {
return redisAPI.unwatch();
}
@Override
public Response unwatchAndAwait() {
return redisAPI.unwatchAndAwait();
}
@Override
public Uni<Response> wait(String arg0, String arg1) {
return redisAPI.wait(arg0, arg1);
}
@Override
public Response waitAndAwait(String arg0, String arg1) {
return redisAPI.waitAndAwait(arg0, arg1);
}
@Override
public Uni<Response> watch(List<String> args) {
return redisAPI.watch(args);
}
@Override
public Response watchAndAwait(List<String> args) {
return redisAPI.watchAndAwait(args);
}
@Override
public Uni<Response> xack(List<String> args) {
return redisAPI.xack(args);
}
@Override
public Response xackAndAwait(List<String> args) {
return redisAPI.xackAndAwait(args);
}
@Override
public Uni<Response> xadd(List<String> args) {
return redisAPI.xadd(args);
}
@Override
public Response xaddAndAwait(List<String> args) {
return redisAPI.xaddAndAwait(args);
}
@Override
public Uni<Response> xclaim(List<String> args) {
return redisAPI.xclaim(args);
}
@Override
public Response xclaimAndAwait(List<String> args) {
return redisAPI.xclaimAndAwait(args);
}
@Override
public Uni<Response> xdel(List<String> args) {
return redisAPI.xdel(args);
}
@Override
public Response xdelAndAwait(List<String> args) {
return redisAPI.xdelAndAwait(args);
}
@Override
public Uni<Response> xgroup(List<String> args) {
return redisAPI.xgroup(args);
}
@Override
public Response xgroupAndAwait(List<String> args) {
return redisAPI.xgroupAndAwait(args);
}
@Override
public Uni<Response> xinfo(List<String> args) {
return redisAPI.xinfo(args);
}
@Override
public Response xinfoAndAwait(List<String> args) {
return redisAPI.xinfoAndAwait(args);
}
@Override
public Uni<Response> xlen(String arg0) {
return redisAPI.xlen(arg0);
}
@Override
public Response xlenAndAwait(String arg0) {
return redisAPI.xlenAndAwait(arg0);
}
@Override
public Uni<Response> xpending(List<String> args) {
return redisAPI.xpending(args);
}
@Override
public Response xpendingAndAwait(List<String> args) {
return redisAPI.xpendingAndAwait(args);
}
@Override
public Uni<Response> xrange(List<String> args) {
return redisAPI.xrange(args);
}
@Override
public Response xrangeAndAwait(List<String> args) {
return redisAPI.xrangeAndAwait(args);
}
@Override
public Uni<Response> xread(List<String> args) {
return redisAPI.xread(args);
}
@Override
public Response xreadAndAwait(List<String> args) {
return redisAPI.xreadAndAwait(args);
}
@Override
public Uni<Response> xreadgroup(List<String> args) {
return redisAPI.xreadgroup(args);
}
@Override
public Response xreadgroupAndAwait(List<String> args) {
return redisAPI.xreadgroupAndAwait(args);
}
@Override
public Uni<Response> xrevrange(List<String> args) {
return redisAPI.xrevrange(args);
}
@Override
public Response xrevrangeAndAwait(List<String> args) {
return redisAPI.xrevrangeAndAwait(args);
}
@Override
public Uni<Response> xsetid(String arg0, String arg1) {
return redisAPI.xsetid(List.of(arg0, arg1));
}
@Override
public Response xsetidAndAwait(String arg0, String arg1) {
return redisAPI.xsetidAndAwait(List.of(arg0, arg1));
}
@Override
public Uni<Response> xtrim(List<String> args) {
return redisAPI.xtrim(args);
}
@Override
public Response xtrimAndAwait(List<String> args) {
return redisAPI.xtrimAndAwait(args);
}
@Override
public Uni<Response> zadd(List<String> args) {
return redisAPI.zadd(args);
}
@Override
public Response zaddAndAwait(List<String> args) {
return redisAPI.zaddAndAwait(args);
}
@Override
public Uni<Response> zcard(String arg0) {
return redisAPI.zcard(arg0);
}
@Override
public Response zcardAndAwait(String arg0) {
return redisAPI.zcardAndAwait(arg0);
}
@Override
public Uni<Response> zcount(String arg0, String arg1, String arg2) {
return redisAPI.zcount(arg0, arg1, arg2);
}
@Override
public Response zcountAndAwait(String arg0, String arg1, String arg2) {
return redisAPI.zcountAndAwait(arg0, arg1, arg2);
}
@Override
public Uni<Response> zincrby(String arg0, String arg1, String arg2) {
return redisAPI.zincrby(arg0, arg1, arg2);
}
@Override
public Response zincrbyAndAwait(String arg0, String arg1, String arg2) {
return redisAPI.zincrbyAndAwait(arg0, arg1, arg2);
}
@Override
public Uni<Response> zinterstore(List<String> args) {
return redisAPI.zinterstore(args);
}
@Override
public Response zinterstoreAndAwait(List<String> args) {
return redisAPI.zinterstoreAndAwait(args);
}
@Override
public Uni<Response> zlexcount(String arg0, String arg1, String arg2) {
return redisAPI.zlexcount(arg0, arg1, arg2);
}
@Override
public Response zlexcountAndAwait(String arg0, String arg1, String arg2) {
return redisAPI.zlexcountAndAwait(arg0, arg1, arg2);
}
@Override
public Uni<Response> zpopmax(List<String> args) {
return redisAPI.zpopmax(args);
}
@Override
public Response zpopmaxAndAwait(List<String> args) {
return redisAPI.zpopmaxAndAwait(args);
}
@Override
public Uni<Response> zpopmin(List<String> args) {
return redisAPI.zpopmin(args);
}
@Override
public Response zpopminAndAwait(List<String> args) {
return redisAPI.zpopminAndAwait(args);
}
@Override
public Uni<Response> zrange(List<String> args) {
return redisAPI.zrange(args);
}
@Override
public Response zrangeAndAwait(List<String> args) {
return redisAPI.zrangeAndAwait(args);
}
@Override
public Uni<Response> zrangebylex(List<String> args) {
return redisAPI.zrangebylex(args);
}
@Override
public Response zrangebylexAndAwait(List<String> args) {
return redisAPI.zrangebylexAndAwait(args);
}
@Override
public Uni<Response> zrangebyscore(List<String> args) {
return redisAPI.zrangebyscore(args);
}
@Override
public Response zrangebyscoreAndAwait(List<String> args) {
return redisAPI.zrangebyscoreAndAwait(args);
}
@Override
public Uni<Response> zrank(String arg0, String arg1) {
return redisAPI.zrank(arg0, arg1);
}
@Override
public Response zrankAndAwait(String arg0, String arg1) {
return redisAPI.zrankAndAwait(arg0, arg1);
}
@Override
public Uni<Response> zrem(List<String> args) {
return redisAPI.zrem(args);
}
@Override
public Response zremAndAwait(List<String> args) {
return redisAPI.zremAndAwait(args);
}
@Override
public Uni<Response> zremrangebylex(String arg0, String arg1, String arg2) {
return redisAPI.zremrangebylex(arg0, arg1, arg2);
}
@Override
public Response zremrangebylexAndAwait(String arg0, String arg1, String arg2) {
return redisAPI.zremrangebylexAndAwait(arg0, arg1, arg2);
}
@Override
public Uni<Response> zremrangebyrank(String arg0, String arg1, String arg2) {
return redisAPI.zremrangebyrank(arg0, arg1, arg2);
}
@Override
public Response zremrangebyrankAndAwait(String arg0, String arg1, String arg2) {
return redisAPI.zremrangebyrankAndAwait(arg0, arg1, arg2);
}
@Override
public Uni<Response> zremrangebyscore(String arg0, String arg1, String arg2) {
return redisAPI.zremrangebyscore(arg0, arg1, arg2);
}
@Override
public Response zremrangebyscoreAndAwait(String arg0, String arg1, String arg2) {
return redisAPI.zremrangebyscoreAndAwait(arg0, arg1, arg2);
}
@Override
public Uni<Response> zrevrange(List<String> args) {
return redisAPI.zrevrange(args);
}
@Override
public Response zrevrangeAndAwait(List<String> args) {
return redisAPI.zrevrangeAndAwait(args);
}
@Override
public Uni<Response> zrevrangebylex(List<String> args) {
return redisAPI.zrevrangebylex(args);
}
@Override
public Response zrevrangebylexAndAwait(List<String> args) {
return redisAPI.zrevrangebylexAndAwait(args);
}
@Override
public Uni<Response> zrevrangebyscore(List<String> args) {
return redisAPI.zrevrangebyscore(args);
}
@Override
public Response zrevrangebyscoreAndAwait(List<String> args) {
return redisAPI.zrevrangebyscoreAndAwait(args);
}
@Override
public Uni<Response> zrevrank(String arg0, String arg1) {
return redisAPI.zrevrank(arg0, arg1);
}
@Override
public Response zrevrankAndAwait(String arg0, String arg1) {
return redisAPI.zrevrankAndAwait(arg0, arg1);
}
@Override
public Uni<Response> zscan(List<String> args) {
return redisAPI.zscan(args);
}
@Override
public Response zscanAndAwait(List<String> args) {
return redisAPI.zscanAndAwait(args);
}
@Override
public Uni<Response> zscore(String arg0, String arg1) {
return redisAPI.zscore(arg0, arg1);
}
@Override
public Response zscoreAndAwait(String arg0, String arg1) {
return redisAPI.zscoreAndAwait(arg0, arg1);
}
@Override
public Uni<Response> zunion(List<String> args) {
return redisAPI.zunion(args);
}
@Override
public Response zunionAndAwait(List<String> args) {
return redisAPI.zunionAndAwait(args);
}
@Override
public Uni<Response> zunionstore(List<String> args) {
return redisAPI.zunionstore(args);
}
@Override
public Response zunionstoreAndAwait(List<String> args) {
return redisAPI.zunionstoreAndAwait(args);
}
}
| ReactiveRedisClientImpl |
java | apache__spark | sql/catalyst/src/main/java/org/apache/spark/sql/connector/write/DeltaBatchWrite.java | {
"start": 909,
"end": 1030
} | interface ____ defines how to write a delta of rows during batch processing.
*
* @since 3.4.0
*/
@Experimental
public | that |
java | quarkusio__quarkus | integration-tests/maven/src/test/resources-filtered/projects/native-agent-integration/src/main/java/org/acme/FruitResource.java | {
"start": 811,
"end": 2662
} | class ____ {
private static final Logger LOGGER = Logger.getLogger(FruitResource.class.getName());
@Inject
EntityManager entityManager;
@GET
public List<Fruit> get() {
return entityManager.createNamedQuery("Fruits.findAll", Fruit.class)
.getResultList();
}
@GET
@Path("{id}")
public Fruit getSingle(Integer id) {
Fruit entity = entityManager.find(Fruit.class, id);
if (entity == null) {
throw new WebApplicationException("Fruit with id of " + id + " does not exist.", 404);
}
return entity;
}
@POST
@Transactional
public Response create(Fruit fruit) {
if (fruit.getId() != null) {
throw new WebApplicationException("Id was invalidly set on request.", 422);
}
entityManager.persist(fruit);
return Response.ok(fruit).status(201).build();
}
@PUT
@Path("{id}")
@Transactional
public Fruit update(Integer id, Fruit fruit) {
if (fruit.getName() == null) {
throw new WebApplicationException("Fruit Name was not set on request.", 422);
}
Fruit entity = entityManager.find(Fruit.class, id);
if (entity == null) {
throw new WebApplicationException("Fruit with id of " + id + " does not exist.", 404);
}
entity.setName(fruit.getName());
return entity;
}
@DELETE
@Path("{id}")
@Transactional
public Response delete(Integer id) {
Fruit entity = entityManager.getReference(Fruit.class, id);
if (entity == null) {
throw new WebApplicationException("Fruit with id of " + id + " does not exist.", 404);
}
entityManager.remove(entity);
return Response.status(204).build();
}
@Provider
public static | FruitResource |
java | reactor__reactor-core | reactor-core/src/main/java/reactor/core/publisher/MonoDefer.java | {
"start": 1052,
"end": 1868
} | class ____<T> extends Mono<T> implements SourceProducer<T> {
final Supplier<? extends Mono<? extends T>> supplier;
MonoDefer(Supplier<? extends Mono<? extends T>> supplier) {
this.supplier = Objects.requireNonNull(supplier, "supplier");
}
@SuppressWarnings("unchecked")
@Override
public void subscribe(CoreSubscriber<? super T> actual) {
Mono<? extends T> p;
try {
p = Objects.requireNonNull(supplier.get(),
"The Mono returned by the supplier is null");
}
catch (Throwable e) {
Operators.error(actual, Operators.onOperatorError(e, actual.currentContext()));
return;
}
fromDirect(p).subscribe(actual);
}
@Override
public @Nullable Object scanUnsafe(Attr key) {
if (key == Attr.RUN_STYLE) return Attr.RunStyle.SYNC;
return SourceProducer.super.scanUnsafe(key);
}
}
| MonoDefer |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.