language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/submitted/xml_external_ref/MultipleIncludeTest.java | {
"start": 1344,
"end": 3302
} | class ____ {
@Test
void multipleIncludeXmlConfig() throws Exception {
testMultipleIncludes(getSqlSessionFactoryXmlConfig());
}
@Test
void multipleIncludeJavaConfig() throws Exception {
testMultipleIncludes(getSqlSessionFactoryJavaConfig());
}
private void testMultipleIncludes(SqlSessionFactory sqlSessionFactory) {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
MultipleIncludePersonMapper personMapper = sqlSession.getMapper(MultipleIncludePersonMapper.class);
Person person = personMapper.select(1);
assertEquals((Integer) 1, person.getId());
assertEquals("John", person.getName());
}
}
private SqlSessionFactory getSqlSessionFactoryXmlConfig() throws Exception {
try (Reader configReader = Resources
.getResourceAsReader("org/apache/ibatis/submitted/xml_external_ref/MultipleIncludeMapperConfig.xml")) {
SqlSessionFactory sqlSessionFactory = new SqlSessionFactoryBuilder().build(configReader);
initDb(sqlSessionFactory);
return sqlSessionFactory;
}
}
private SqlSessionFactory getSqlSessionFactoryJavaConfig() throws Exception {
Configuration configuration = new Configuration();
Environment environment = new Environment("development", new JdbcTransactionFactory(),
new UnpooledDataSource("org.hsqldb.jdbcDriver", "jdbc:hsqldb:mem:xmlextref", null));
configuration.setEnvironment(environment);
configuration.addMapper(MultipleIncludePersonMapper.class);
SqlSessionFactory sqlSessionFactory = new SqlSessionFactoryBuilder().build(configuration);
initDb(sqlSessionFactory);
return sqlSessionFactory;
}
private static void initDb(SqlSessionFactory sqlSessionFactory) throws IOException, SQLException {
BaseDataTest.runScript(sqlSessionFactory.getConfiguration().getEnvironment().getDataSource(),
"org/apache/ibatis/submitted/xml_external_ref/CreateDB.sql");
}
}
| MultipleIncludeTest |
java | apache__dubbo | dubbo-common/src/test/java/org/apache/dubbo/common/utils/AtomicPositiveIntegerTest.java | {
"start": 1283,
"end": 6060
} | class ____ {
private AtomicPositiveInteger i1 = new AtomicPositiveInteger();
private AtomicPositiveInteger i2 = new AtomicPositiveInteger(127);
private AtomicPositiveInteger i3 = new AtomicPositiveInteger(Integer.MAX_VALUE);
@Test
void testGet() {
assertEquals(0, i1.get());
assertEquals(127, i2.get());
assertEquals(Integer.MAX_VALUE, i3.get());
}
@Test
void testSet() {
i1.set(100);
assertEquals(100, i1.get());
try {
i1.set(-1);
fail();
} catch (IllegalArgumentException expected) {
assertThat(expected.getMessage(), allOf(containsString("new value"), containsString("< 0")));
}
}
@Test
void testGetAndIncrement() {
int get = i1.getAndIncrement();
assertEquals(0, get);
assertEquals(1, i1.get());
get = i2.getAndIncrement();
assertEquals(127, get);
assertEquals(128, i2.get());
get = i3.getAndIncrement();
assertEquals(Integer.MAX_VALUE, get);
assertEquals(0, i3.get());
}
@Test
void testGetAndDecrement() {
int get = i1.getAndDecrement();
assertEquals(0, get);
assertEquals(Integer.MAX_VALUE, i1.get());
get = i2.getAndDecrement();
assertEquals(127, get);
assertEquals(126, i2.get());
get = i3.getAndDecrement();
assertEquals(Integer.MAX_VALUE, get);
assertEquals(Integer.MAX_VALUE - 1, i3.get());
}
@Test
void testIncrementAndGet() {
int get = i1.incrementAndGet();
assertEquals(1, get);
assertEquals(1, i1.get());
get = i2.incrementAndGet();
assertEquals(128, get);
assertEquals(128, i2.get());
get = i3.incrementAndGet();
assertEquals(0, get);
assertEquals(0, i3.get());
}
@Test
void testDecrementAndGet() {
int get = i1.decrementAndGet();
assertEquals(Integer.MAX_VALUE, get);
assertEquals(Integer.MAX_VALUE, i1.get());
get = i2.decrementAndGet();
assertEquals(126, get);
assertEquals(126, i2.get());
get = i3.decrementAndGet();
assertEquals(Integer.MAX_VALUE - 1, get);
assertEquals(Integer.MAX_VALUE - 1, i3.get());
}
@Test
void testGetAndSet() {
int get = i1.getAndSet(100);
assertEquals(0, get);
assertEquals(100, i1.get());
try {
i1.getAndSet(-1);
} catch (IllegalArgumentException expected) {
assertThat(expected.getMessage(), allOf(containsString("new value"), containsString("< 0")));
}
}
@Test
void testGetAndAnd() {
int get = i1.getAndAdd(3);
assertEquals(0, get);
assertEquals(3, i1.get());
get = i2.getAndAdd(3);
assertEquals(127, get);
assertEquals(127 + 3, i2.get());
get = i3.getAndAdd(3);
assertEquals(Integer.MAX_VALUE, get);
assertEquals(2, i3.get());
}
@Test
void testAddAndGet() {
int get = i1.addAndGet(3);
assertEquals(3, get);
assertEquals(3, i1.get());
get = i2.addAndGet(3);
assertEquals(127 + 3, get);
assertEquals(127 + 3, i2.get());
get = i3.addAndGet(3);
assertEquals(2, get);
assertEquals(2, i3.get());
}
@Test
void testCompareAndSet1() {
Assertions.assertThrows(IllegalArgumentException.class, () -> {
i1.compareAndSet(i1.get(), -1);
});
}
@Test
void testCompareAndSet2() {
assertThat(i1.compareAndSet(i1.get(), 2), is(true));
assertThat(i1.get(), is(2));
}
@Test
void testWeakCompareAndSet1() {
Assertions.assertThrows(IllegalArgumentException.class, () -> {
i1.weakCompareAndSet(i1.get(), -1);
});
}
@Test
void testWeakCompareAndSet2() {
assertThat(i1.weakCompareAndSet(i1.get(), 2), is(true));
assertThat(i1.get(), is(2));
}
@Test
void testValues() {
Integer i = i1.get();
assertThat(i1.byteValue(), equalTo(i.byteValue()));
assertThat(i1.shortValue(), equalTo(i.shortValue()));
assertThat(i1.intValue(), equalTo(i.intValue()));
assertThat(i1.longValue(), equalTo(i.longValue()));
assertThat(i1.floatValue(), equalTo(i.floatValue()));
assertThat(i1.doubleValue(), equalTo(i.doubleValue()));
assertThat(i1.toString(), equalTo(i.toString()));
}
@Test
void testEquals() {
assertEquals(new AtomicPositiveInteger(), new AtomicPositiveInteger());
assertEquals(new AtomicPositiveInteger(1), new AtomicPositiveInteger(1));
}
}
| AtomicPositiveIntegerTest |
java | apache__logging-log4j2 | log4j-core-test/src/test/java/org/apache/logging/log4j/core/appender/rolling/RollingFileAppenderLayoutTest.java | {
"start": 1092,
"end": 1729
} | class ____ {
@Test
void testDefaultLayout() {
// @formatter:off
assertNotNull(RollingFileAppender.newBuilder()
.setName(RollingFileAppenderLayoutTest.class.getName())
.setConfiguration(new DefaultConfiguration())
.setFileName("log.txt")
.setFilePattern("FilePattern")
.setPolicy(OnStartupTriggeringPolicy.createPolicy(1))
.setCreateOnDemand(true) // no need to clutter up test folder with another file
.build()
.getLayout());
// @formatter:on
}
}
| RollingFileAppenderLayoutTest |
java | resilience4j__resilience4j | resilience4j-framework-common/src/main/java/io/github/resilience4j/common/ratelimiter/configuration/CommonRateLimiterConfigurationProperties.java | {
"start": 1387,
"end": 5717
} | class ____ extends CommonProperties {
private static final String DEFAULT = "default";
private Map<String, InstanceProperties> instances = new HashMap<>();
private Map<String, InstanceProperties> configs = new HashMap<>();
public Optional<InstanceProperties> findRateLimiterProperties(String name) {
InstanceProperties instanceProperties = instances.get(name);
if (instanceProperties == null) {
instanceProperties = configs.get(DEFAULT);
} else if (configs.get(DEFAULT) != null) {
ConfigUtils.mergePropertiesIfAny(configs.get(DEFAULT), instanceProperties);
}
return Optional.ofNullable(instanceProperties);
}
public RateLimiterConfig createRateLimiterConfig(
@Nullable InstanceProperties instanceProperties,
CompositeCustomizer<RateLimiterConfigCustomizer> compositeRateLimiterCustomizer,
String instanceName) {
RateLimiterConfig baseConfig = null;
if (instanceProperties != null && StringUtils.isNotEmpty(instanceProperties.getBaseConfig())) {
InstanceProperties baseProperties = configs.get(instanceProperties.getBaseConfig());
if (baseProperties == null) {
throw new ConfigurationNotFoundException(instanceProperties.getBaseConfig());
}
ConfigUtils.mergePropertiesIfAny(baseProperties, instanceProperties);
baseConfig = createRateLimiterConfig(baseProperties, compositeRateLimiterCustomizer, instanceProperties.getBaseConfig());
} else if (!instanceName.equals(DEFAULT) && configs.get(DEFAULT) != null) {
if (instanceProperties != null) {
ConfigUtils.mergePropertiesIfAny(configs.get(DEFAULT), instanceProperties);
}
baseConfig = createRateLimiterConfig(configs.get(DEFAULT), compositeRateLimiterCustomizer, DEFAULT);
}
return buildConfig(baseConfig != null ? from(baseConfig) : custom(), instanceProperties, compositeRateLimiterCustomizer, instanceName);
}
private RateLimiterConfig buildConfig(RateLimiterConfig.Builder builder,
@Nullable InstanceProperties instanceProperties,
CompositeCustomizer<RateLimiterConfigCustomizer> compositeRateLimiterCustomizer,
String instanceName) {
if (instanceProperties != null) {
if (instanceProperties.getLimitForPeriod() != null) {
builder.limitForPeriod(instanceProperties.getLimitForPeriod());
}
if (instanceProperties.getLimitRefreshPeriod() != null) {
builder.limitRefreshPeriod(instanceProperties.getLimitRefreshPeriod());
}
if (instanceProperties.getTimeoutDuration() != null) {
builder.timeoutDuration(instanceProperties.getTimeoutDuration());
}
if (instanceProperties.getWritableStackTraceEnabled() != null) {
builder.writableStackTraceEnabled(instanceProperties.getWritableStackTraceEnabled());
}
}
compositeRateLimiterCustomizer.getCustomizer(instanceName).ifPresent(
rateLimiterConfigCustomizer -> rateLimiterConfigCustomizer.customize(builder));
return builder.build();
}
private InstanceProperties getLimiterProperties(String limiter) {
return instances.get(limiter);
}
public RateLimiterConfig createRateLimiterConfig(String limiter,
CompositeCustomizer<RateLimiterConfigCustomizer> compositeRateLimiterCustomizer) {
return createRateLimiterConfig(getLimiterProperties(limiter),
compositeRateLimiterCustomizer, limiter);
}
@Nullable
public InstanceProperties getInstanceProperties(String instance) {
return instances.get(instance);
}
public Map<String, InstanceProperties> getInstances() {
return instances;
}
/**
* For backwards compatibility when setting limiters in configuration properties.
*/
public Map<String, InstanceProperties> getLimiters() {
return instances;
}
public Map<String, InstanceProperties> getConfigs() {
return configs;
}
/**
* Class storing property values for configuring {@link RateLimiterConfig} instances.
*/
public static | CommonRateLimiterConfigurationProperties |
java | apache__spark | launcher/src/main/java/org/apache/spark/launcher/Main.java | {
"start": 6295,
"end": 6436
} | class ____ user wanted to invoke, since that may require special
* usage strings (handled by SparkSubmitArguments).
*/
private static | the |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/ValueExtractorForInt.java | {
"start": 1566,
"end": 2144
} | class ____ extends ValueExtractorForInt {
private final IntVector vector;
ForVector(TopNEncoder encoder, boolean inKey, IntVector vector) {
super(encoder, inKey);
this.vector = vector;
}
@Override
public void writeValue(BreakingBytesRefBuilder values, int position) {
writeCount(values, 1);
if (inKey) {
// will read results from the key
return;
}
actualWriteValue(values, vector.getInt(position));
}
}
static | ForVector |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/single/SingleToFlowable.java | {
"start": 1398,
"end": 2291
} | class ____<T> extends DeferredScalarSubscription<T>
implements SingleObserver<T> {
private static final long serialVersionUID = 187782011903685568L;
Disposable upstream;
SingleToFlowableObserver(Subscriber<? super T> downstream) {
super(downstream);
}
@Override
public void onSubscribe(Disposable d) {
if (DisposableHelper.validate(this.upstream, d)) {
this.upstream = d;
downstream.onSubscribe(this);
}
}
@Override
public void onSuccess(T value) {
complete(value);
}
@Override
public void onError(Throwable e) {
downstream.onError(e);
}
@Override
public void cancel() {
super.cancel();
upstream.dispose();
}
}
}
| SingleToFlowableObserver |
java | apache__camel | components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/config/OneRouteRefOnExceptionAndDLCErrorHandlerTest.java | {
"start": 1086,
"end": 2180
} | class ____ extends SpringTestSupport {
@Test
public void testOneRouteRefNoOnExceptionAndDLCErrorHandler() throws Exception {
getMockEndpoint("mock:foo").expectedMessageCount(1);
getMockEndpoint("mock:dead").expectedMessageCount(1);
getMockEndpoint("mock:handled").expectedMessageCount(0);
template.sendBody("direct:foo", "Hello World");
assertMockEndpointsSatisfied();
}
@Test
public void testOneRouteRefOnExceptionAndDLCErrorHandler() throws Exception {
getMockEndpoint("mock:bar").expectedMessageCount(1);
getMockEndpoint("mock:dead").expectedMessageCount(0);
getMockEndpoint("mock:handled").expectedMessageCount(1);
template.sendBody("direct:bar", "Hello World");
assertMockEndpointsSatisfied();
}
@Override
protected AbstractXmlApplicationContext createApplicationContext() {
return new ClassPathXmlApplicationContext(
"org/apache/camel/spring/config/OneRouteRefOnExceptionAndDLCErrorHandler.xml");
}
}
| OneRouteRefOnExceptionAndDLCErrorHandlerTest |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/objectarray/ObjectArrayAssert_usingRecursiveFieldByFieldElementComparator_with_RecursiveComparisonConfiguration_Test.java | {
"start": 1320,
"end": 3112
} | class ____
extends ObjectArrayAssertBaseTest {
private ObjectArrays arraysBefore;
private RecursiveComparisonConfiguration recursiveComparisonConfiguration = new RecursiveComparisonConfiguration();
@BeforeEach
void before() {
arraysBefore = getArrays(assertions);
}
@Override
protected ObjectArrayAssert<Object> invoke_api_method() {
return assertions.usingRecursiveFieldByFieldElementComparator(recursiveComparisonConfiguration);
}
@Override
protected void verify_internal_effects() {
then(arraysBefore).isNotSameAs(getArrays(assertions));
then(getArrays(assertions).getComparisonStrategy()).isInstanceOf(ComparatorBasedComparisonStrategy.class);
then(getObjects(assertions).getComparisonStrategy()).isInstanceOf(ObjectArrayElementComparisonStrategy.class);
ConfigurableRecursiveFieldByFieldComparator expectedComparator = new ConfigurableRecursiveFieldByFieldComparator(recursiveComparisonConfiguration);
then(getArrays(assertions).getComparator()).isEqualTo(expectedComparator);
then(getObjects(assertions).getComparisonStrategy()).extracting("elementComparator").isEqualTo(expectedComparator);
}
@Test
void should_be_able_to_use_specific_RecursiveComparisonConfiguration_when_using_recursive_field_by_field_element_comparator() {
// GIVEN
Foo actual = new Foo("1", new Bar(1));
Foo other = new Foo("2", new Bar(1));
RecursiveComparisonConfiguration configuration = new RecursiveComparisonConfiguration();
configuration.ignoreFields("id");
// WHEN/THEN
then(array(actual)).usingRecursiveFieldByFieldElementComparator(configuration)
.contains(other);
}
public static | ObjectArrayAssert_usingRecursiveFieldByFieldElementComparator_with_RecursiveComparisonConfiguration_Test |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftErrorTests.java | {
"start": 799,
"end": 1491
} | class ____ extends ErrorsForCasesWithoutExamplesTestCase {
@Override
protected List<TestCaseSupplier> cases() {
return paramsToSuppliers(LeftTests.parameters());
}
@Override
protected Expression build(Source source, List<Expression> args) {
return new Left(source, args.get(0), args.get(1));
}
@Override
protected Matcher<String> expectedTypeErrorMatcher(List<Set<DataType>> validPerPosition, List<DataType> signature) {
return equalTo(typeErrorMessage(true, validPerPosition, signature, (v, p) -> switch (p) {
case 0 -> "string";
case 1 -> "integer";
default -> "";
}));
}
}
| LeftErrorTests |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/derivedidentities/e4/c/DerivedIdentitySimpleParentSimpleDepSecondPassOrderingTest.java | {
"start": 7213,
"end": 7843
} | class ____ implements Serializable {
@Id
private Integer id;
@ManyToOne
private EntityWithManyToOneDerivedId ref;
public EntityReferencingEntityWithManyToOneDerivedId() {
}
public EntityReferencingEntityWithManyToOneDerivedId(Integer id, EntityWithManyToOneDerivedId ref) {
this.id = id;
this.ref = ref;
}
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public EntityWithManyToOneDerivedId getRef() {
return ref;
}
public void setRef(EntityWithManyToOneDerivedId ref) {
this.ref = ref;
}
}
}
| EntityReferencingEntityWithManyToOneDerivedId |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/common/bytes/ReleasableBytesReference.java | {
"start": 9078,
"end": 9425
} | class ____ extends AbstractRefCounted {
private final Releasable releasable;
RefCountedReleasable(Releasable releasable) {
this.releasable = releasable;
}
@Override
protected void closeInternal() {
Releasables.closeExpectNoException(releasable);
}
}
}
| RefCountedReleasable |
java | quarkusio__quarkus | extensions/oidc-client/deployment/src/main/java/io/quarkus/oidc/client/deployment/OidcClientFilterDeploymentHelper.java | {
"start": 1258,
"end": 2412
} | class ____<T extends AbstractTokensProducer> {
public static final String DEFAULT_OIDC_REQUEST_FILTER_NAME = "default-oidc-request-filter";
private final Map<TokensProducerKey, String> clientNameToGeneratedClass = new HashMap<>();
private final Map<String, Boolean> restClientToIsMethodAnnotated = new HashMap<>();
private final Class<T> baseClass;
private final ClassOutput classOutput;
private final String targetPackage;
private final boolean refreshOnUnauthorized;
private record TokensProducerKey(String clientName, MethodInfo methodInfo) {
}
public OidcClientFilterDeploymentHelper(Class<T> baseClass, BuildProducer<GeneratedBeanBuildItem> generatedBean,
boolean refreshOnUnauthorized) {
this.baseClass = baseClass;
this.classOutput = new GeneratedBeanGizmoAdaptor(generatedBean);
this.targetPackage = DotNames
.internalPackageNameWithTrailingSlash(DotName.createSimple(baseClass.getName()));
this.refreshOnUnauthorized = refreshOnUnauthorized;
}
/**
* For {@code baseClass} Xyz creates tokens producer | OidcClientFilterDeploymentHelper |
java | grpc__grpc-java | interop-testing/src/generated/main/grpc/io/grpc/testing/integration/UnimplementedServiceGrpc.java | {
"start": 267,
"end": 5361
} | class ____ {
private UnimplementedServiceGrpc() {}
public static final java.lang.String SERVICE_NAME = "grpc.testing.UnimplementedService";
// Static method descriptors that strictly reflect the proto.
private static volatile io.grpc.MethodDescriptor<io.grpc.testing.integration.EmptyProtos.Empty,
io.grpc.testing.integration.EmptyProtos.Empty> getUnimplementedCallMethod;
@io.grpc.stub.annotations.RpcMethod(
fullMethodName = SERVICE_NAME + '/' + "UnimplementedCall",
requestType = io.grpc.testing.integration.EmptyProtos.Empty.class,
responseType = io.grpc.testing.integration.EmptyProtos.Empty.class,
methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
public static io.grpc.MethodDescriptor<io.grpc.testing.integration.EmptyProtos.Empty,
io.grpc.testing.integration.EmptyProtos.Empty> getUnimplementedCallMethod() {
io.grpc.MethodDescriptor<io.grpc.testing.integration.EmptyProtos.Empty, io.grpc.testing.integration.EmptyProtos.Empty> getUnimplementedCallMethod;
if ((getUnimplementedCallMethod = UnimplementedServiceGrpc.getUnimplementedCallMethod) == null) {
synchronized (UnimplementedServiceGrpc.class) {
if ((getUnimplementedCallMethod = UnimplementedServiceGrpc.getUnimplementedCallMethod) == null) {
UnimplementedServiceGrpc.getUnimplementedCallMethod = getUnimplementedCallMethod =
io.grpc.MethodDescriptor.<io.grpc.testing.integration.EmptyProtos.Empty, io.grpc.testing.integration.EmptyProtos.Empty>newBuilder()
.setType(io.grpc.MethodDescriptor.MethodType.UNARY)
.setFullMethodName(generateFullMethodName(SERVICE_NAME, "UnimplementedCall"))
.setSampledToLocalTracing(true)
.setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
io.grpc.testing.integration.EmptyProtos.Empty.getDefaultInstance()))
.setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
io.grpc.testing.integration.EmptyProtos.Empty.getDefaultInstance()))
.setSchemaDescriptor(new UnimplementedServiceMethodDescriptorSupplier("UnimplementedCall"))
.build();
}
}
}
return getUnimplementedCallMethod;
}
/**
* Creates a new async stub that supports all call types for the service
*/
public static UnimplementedServiceStub newStub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<UnimplementedServiceStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<UnimplementedServiceStub>() {
@java.lang.Override
public UnimplementedServiceStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new UnimplementedServiceStub(channel, callOptions);
}
};
return UnimplementedServiceStub.newStub(factory, channel);
}
/**
* Creates a new blocking-style stub that supports all types of calls on the service
*/
public static UnimplementedServiceBlockingV2Stub newBlockingV2Stub(
io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<UnimplementedServiceBlockingV2Stub> factory =
new io.grpc.stub.AbstractStub.StubFactory<UnimplementedServiceBlockingV2Stub>() {
@java.lang.Override
public UnimplementedServiceBlockingV2Stub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new UnimplementedServiceBlockingV2Stub(channel, callOptions);
}
};
return UnimplementedServiceBlockingV2Stub.newStub(factory, channel);
}
/**
* Creates a new blocking-style stub that supports unary and streaming output calls on the service
*/
public static UnimplementedServiceBlockingStub newBlockingStub(
io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<UnimplementedServiceBlockingStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<UnimplementedServiceBlockingStub>() {
@java.lang.Override
public UnimplementedServiceBlockingStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new UnimplementedServiceBlockingStub(channel, callOptions);
}
};
return UnimplementedServiceBlockingStub.newStub(factory, channel);
}
/**
* Creates a new ListenableFuture-style stub that supports unary calls on the service
*/
public static UnimplementedServiceFutureStub newFutureStub(
io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<UnimplementedServiceFutureStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<UnimplementedServiceFutureStub>() {
@java.lang.Override
public UnimplementedServiceFutureStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new UnimplementedServiceFutureStub(channel, callOptions);
}
};
return UnimplementedServiceFutureStub.newStub(factory, channel);
}
/**
* <pre>
* A simple service NOT implemented at servers so clients can test for
* that case.
* </pre>
*/
public | UnimplementedServiceGrpc |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/api/operators/InternalTimersSnapshotReaderWriters.java | {
"start": 3040,
"end": 3364
} | interface ____ {
/**
* Writes the timers snapshot to the output view.
*
* @param out the output view to write to
* @throws IOException
*/
void writeTimersSnapshot(DataOutputView out) throws IOException;
}
private abstract static | InternalTimersSnapshotWriter |
java | micronaut-projects__micronaut-core | inject-java/src/test/groovy/io/micronaut/inject/beanbuilder/TestInterceptorAdapter.java | {
"start": 357,
"end": 1646
} | class ____<T> implements MethodInterceptor<Object, Object>, Supplier<T> {
public final BeanRegistration<T> registration;
private final ExecutableMethod<T, Object> proceedMethod;
Environment inaccessibleEnv;
public Environment accessibleEnv;
private Environment fromMethod;
private String valFromMethod;
public TestInterceptorAdapter(BeanRegistration<T> registration, String methodName) {
this.registration = registration;
this.proceedMethod = registration.getBeanDefinition()
.getRequiredMethod(methodName, CustomInvocationContext.class);
}
public void testMethod(Environment environment, String val) {
this.fromMethod = environment;
this.valFromMethod = val;
}
public Environment getFromMethod() {
return fromMethod;
}
public String getValFromMethod() {
return valFromMethod;
}
@Nullable
@Override
public Object intercept(MethodInvocationContext<Object, Object> context) {
return proceedMethod.invoke(
registration.getBean(),
(CustomInvocationContext) context::proceed
);
}
@Override
public T get() {
return registration.bean();
}
}
| TestInterceptorAdapter |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgLongEvaluator.java | {
"start": 976,
"end": 5218
} | class ____ extends AbstractMultivalueFunction.AbstractEvaluator {
private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(MvAvgLongEvaluator.class);
public MvAvgLongEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext driverContext) {
super(driverContext, field);
}
@Override
public String name() {
return "MvAvg";
}
/**
* Evaluate blocks containing at least one multivalued field.
*/
@Override
public Block evalNullable(Block fieldVal) {
LongBlock v = (LongBlock) fieldVal;
int positionCount = v.getPositionCount();
try (DoubleBlock.Builder builder = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
CompensatedSum work = new CompensatedSum();
for (int p = 0; p < positionCount; p++) {
int valueCount = v.getValueCount(p);
if (valueCount == 0) {
builder.appendNull();
continue;
}
int first = v.getFirstValueIndex(p);
if (valueCount == 1) {
long value = v.getLong(first);
double result = MvAvg.single(value);
builder.appendDouble(result);
continue;
}
int end = first + valueCount;
for (int i = first; i < end; i++) {
long value = v.getLong(i);
MvAvg.process(work, value);
}
double result = MvAvg.finish(work, valueCount);
builder.appendDouble(result);
}
return builder.build();
}
}
/**
* Evaluate blocks containing at least one multivalued field.
*/
@Override
public Block evalNotNullable(Block fieldVal) {
LongBlock v = (LongBlock) fieldVal;
int positionCount = v.getPositionCount();
try (DoubleVector.FixedBuilder builder = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) {
CompensatedSum work = new CompensatedSum();
for (int p = 0; p < positionCount; p++) {
int valueCount = v.getValueCount(p);
int first = v.getFirstValueIndex(p);
if (valueCount == 1) {
long value = v.getLong(first);
double result = MvAvg.single(value);
builder.appendDouble(result);
continue;
}
int end = first + valueCount;
for (int i = first; i < end; i++) {
long value = v.getLong(i);
MvAvg.process(work, value);
}
double result = MvAvg.finish(work, valueCount);
builder.appendDouble(result);
}
return builder.build().asBlock();
}
}
/**
* Evaluate blocks containing only single valued fields.
*/
@Override
public Block evalSingleValuedNullable(Block fieldVal) {
LongBlock v = (LongBlock) fieldVal;
int positionCount = v.getPositionCount();
try (DoubleBlock.Builder builder = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
CompensatedSum work = new CompensatedSum();
for (int p = 0; p < positionCount; p++) {
int valueCount = v.getValueCount(p);
if (valueCount == 0) {
builder.appendNull();
continue;
}
assert valueCount == 1;
int first = v.getFirstValueIndex(p);
long value = v.getLong(first);
double result = MvAvg.single(value);
builder.appendDouble(result);
}
return builder.build();
}
}
/**
* Evaluate blocks containing only single valued fields.
*/
@Override
public Block evalSingleValuedNotNullable(Block fieldVal) {
LongBlock v = (LongBlock) fieldVal;
int positionCount = v.getPositionCount();
try (DoubleVector.FixedBuilder builder = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) {
CompensatedSum work = new CompensatedSum();
for (int p = 0; p < positionCount; p++) {
int valueCount = v.getValueCount(p);
assert valueCount == 1;
int first = v.getFirstValueIndex(p);
long value = v.getLong(first);
double result = MvAvg.single(value);
builder.appendDouble(result);
}
return builder.build().asBlock();
}
}
@Override
public long baseRamBytesUsed() {
return BASE_RAM_BYTES_USED + field.baseRamBytesUsed();
}
public static | MvAvgLongEvaluator |
java | micronaut-projects__micronaut-core | inject-java/src/test/groovy/io/micronaut/inject/scope/custom/interceptors/InterceptorScopeTest.java | {
"start": 6089,
"end": 6534
} | class ____ implements MethodInterceptor<Object, Object> {
private static int creationCount = 0;
MyIntercepted2Interceptor() {
creationCount++;
}
@Override
public Object intercept(MethodInvocationContext<Object, Object> context) {
return context.proceed() + " Interceptor2 created: [" + creationCount + "] ";
}
}
@Bean
@MyScope
static | MyIntercepted2Interceptor |
java | google__guava | android/guava/src/com/google/common/collect/ForwardingIterator.java | {
"start": 1694,
"end": 2201
} | class ____<T extends @Nullable Object> extends ForwardingObject
implements Iterator<T> {
/** Constructor for use by subclasses. */
protected ForwardingIterator() {}
@Override
protected abstract Iterator<T> delegate();
@Override
public boolean hasNext() {
return delegate().hasNext();
}
@CanIgnoreReturnValue
@Override
@ParametricNullness
public T next() {
return delegate().next();
}
@Override
public void remove() {
delegate().remove();
}
}
| ForwardingIterator |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/reactive/ServerHttpSecurityConfigurationTests.java | {
"start": 16386,
"end": 16480
} | interface ____ {
String property();
}
@RestController
static | CurrentAuthenticationProperty |
java | apache__flink | flink-connectors/flink-connector-files/src/main/java/org/apache/flink/connector/file/table/stream/compact/CompactBucketWriter.java | {
"start": 2187,
"end": 3050
} | class ____<T> implements CompactWriter.Factory<T> {
private final SupplierWithException<BucketWriter<T, String>, IOException> factory;
private BucketWriter<T, String> bucketWriter;
public Factory(SupplierWithException<BucketWriter<T, String>, IOException> factory) {
this.factory = factory;
}
@Override
public CompactWriter<T> create(CompactContext context) throws IOException {
// The writer is not Serializable
if (bucketWriter == null) {
bucketWriter = factory.get();
}
// creationTime are useless
return new CompactBucketWriter<>(
bucketWriter,
bucketWriter.openNewInProgressFile(
context.getPartition(), context.getPath(), 0));
}
}
}
| Factory |
java | google__auto | value/src/test/java/com/google/auto/value/processor/GeneratedDoesNotExistTest.java | {
"start": 7784,
"end": 8108
} | class ____ {",
" public static Baz create() {",
" return new AutoValue_Baz();",
" }",
"}");
JavaFileObject expectedOutput =
JavaFileObjects.forSourceLines(
"foo.bar.AutoValue_Baz",
"package foo.bar;",
"",
"final | Baz |
java | junit-team__junit5 | platform-tests/src/test/java/org/junit/platform/engine/discovery/ClassNameFilterTests.java | {
"start": 813,
"end": 1618
} | class ____ {
@SuppressWarnings("DataFlowIssue")
@Test
void includeClassNamePatternsChecksPreconditions() {
assertPreconditionViolationNotNullOrEmptyFor("patterns array",
() -> ClassNameFilter.includeClassNamePatterns((String[]) null));
assertPreconditionViolationNotNullOrEmptyFor("patterns array",
() -> ClassNameFilter.includeClassNamePatterns(new String[0]));
assertPreconditionViolationFor(() -> ClassNameFilter.includeClassNamePatterns(new String[] { null }))//
.withMessage("patterns array must not contain null elements");
}
@Test
void includeClassNamePatternsWithSinglePattern() {
var regex = "^java\\.lang\\..*";
var filter = ClassNameFilter.includeClassNamePatterns(regex);
assertThat(filter).hasToString(
"IncludeClassNameFilter that includes | ClassNameFilterTests |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/jaxb/internal/stax/HbmEventReader.java | {
"start": 840,
"end": 3253
} | class ____ extends EventReaderDelegate {
private static final List<String> NAMESPACE_URIS_TO_MAP = Collections.singletonList(
// we need to recognize the initial, prematurely-chosen hbm.xml xsd namespace
"http://www.hibernate.org/xsd/hibernate-mapping"
);
private final XMLEventFactory xmlEventFactory;
public HbmEventReader(XMLEventReader reader) {
this( reader, XMLEventFactory.newInstance() );
}
public HbmEventReader(XMLEventReader reader, XMLEventFactory xmlEventFactory) {
super( reader );
this.xmlEventFactory = xmlEventFactory;
}
@Override
public XMLEvent peek() throws XMLStreamException {
return wrap( super.peek() );
}
@Override
public XMLEvent nextEvent() throws XMLStreamException {
return wrap( super.nextEvent() );
}
private XMLEvent wrap(XMLEvent event) {
if ( event != null && event.isStartElement() ) {
return applyNamespace( event.asStartElement() );
}
return event;
}
@SuppressWarnings("unchecked")
private StartElement applyNamespace(StartElement startElement) {
final List<Namespace> targetNamespaces = new ArrayList<>();
if ( StringHelper.isEmpty( startElement.getName().getNamespaceURI() ) ) {
// add the default namespace mapping
targetNamespaces.add( xmlEventFactory.createNamespace( MappingXsdSupport.INSTANCE.hbmXsd().getNamespaceUri() ) );
}
// transfer any namespaces directly, unless it is in the "to map" list in which case
// we transfer a mapped copy pointing to the new namespace
final Iterator<Namespace> originalNamespaces = startElement.getNamespaces();
while ( originalNamespaces.hasNext() ) {
Namespace namespace = originalNamespaces.next();
if ( NAMESPACE_URIS_TO_MAP.contains( namespace.getNamespaceURI() ) ) {
// this is a namespace "to map" so map it
namespace = xmlEventFactory.createNamespace( namespace.getPrefix(), MappingXsdSupport.INSTANCE.hbmXsd().getNamespaceUri() );
}
targetNamespaces.add( namespace );
}
// Transfer the location info from the incoming event to the event factory
// so that the event we ask it to generate for us has the same location info
xmlEventFactory.setLocation( startElement.getLocation() );
return xmlEventFactory.createStartElement(
new QName( MappingXsdSupport.INSTANCE.hbmXsd().getNamespaceUri(), startElement.getName().getLocalPart() ),
startElement.getAttributes(),
targetNamespaces.iterator()
);
}
}
| HbmEventReader |
java | google__dagger | javatests/dagger/hilt/processor/internal/definecomponent/DefineComponentProcessorTest.java | {
"start": 13978,
"end": 14423
} | interface ____ {",
" Foo build();",
"}");
HiltCompilerTests.hiltCompiler(component)
.compile(
subject -> {
subject.hasErrorCount(1);
subject.hasErrorContaining(
"@DefineComponent.Builder method, test.FooComponentBuilder#build(), must return "
+ "a @DefineComponent type. Found: test.Foo");
});
}
}
| FooComponentBuilder |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/util/ConcurrentLruCache.java | {
"start": 10154,
"end": 13808
} | class ____<K, V> {
private static final int BUFFER_COUNT = detectNumberOfBuffers();
private static int detectNumberOfBuffers() {
int availableProcessors = Runtime.getRuntime().availableProcessors();
int nextPowerOfTwo = 1 << (Integer.SIZE - Integer.numberOfLeadingZeros(availableProcessors - 1));
return Math.min(4, nextPowerOfTwo);
}
private static final int BUFFERS_MASK = BUFFER_COUNT - 1;
private static final int MAX_PENDING_OPERATIONS = 32;
private static final int MAX_DRAIN_COUNT = 2 * MAX_PENDING_OPERATIONS;
private static final int BUFFER_SIZE = 2 * MAX_DRAIN_COUNT;
private static final int BUFFER_INDEX_MASK = BUFFER_SIZE - 1;
/*
* Number of operations recorded, for each buffer
*/
private final AtomicLongArray recordedCount = new AtomicLongArray(BUFFER_COUNT);
/*
* Number of operations read, for each buffer
*/
private final long[] readCount = new long[BUFFER_COUNT];
/*
* Number of operations processed, for each buffer
*/
private final AtomicLongArray processedCount = new AtomicLongArray(BUFFER_COUNT);
private final AtomicReferenceArray<Node<K, V>>[] buffers = new AtomicReferenceArray[BUFFER_COUNT];
private final EvictionQueue<K, V> evictionQueue;
ReadOperations(EvictionQueue<K, V> evictionQueue) {
this.evictionQueue = evictionQueue;
for (int i = 0; i < BUFFER_COUNT; i++) {
this.buffers[i] = new AtomicReferenceArray<>(BUFFER_SIZE);
}
}
private static int getBufferIndex() {
return ((int) Thread.currentThread().getId()) & BUFFERS_MASK;
}
boolean recordRead(Node<K, V> node) {
int bufferIndex = getBufferIndex();
final long writeCount = this.recordedCount.get(bufferIndex);
this.recordedCount.lazySet(bufferIndex, writeCount + 1);
final int index = (int) (writeCount & BUFFER_INDEX_MASK);
this.buffers[bufferIndex].lazySet(index, node);
final long pending = (writeCount - this.processedCount.get(bufferIndex));
return (pending < MAX_PENDING_OPERATIONS);
}
void drain() {
final int start = (int) Thread.currentThread().getId();
final int end = start + BUFFER_COUNT;
for (int i = start; i < end; i++) {
drainReadBuffer(i & BUFFERS_MASK);
}
}
void clear() {
for (int i = 0; i < BUFFER_COUNT; i++) {
AtomicReferenceArray<Node<K, V>> buffer = this.buffers[i];
for (int j = 0; j < BUFFER_SIZE; j++) {
buffer.lazySet(j, null);
}
}
}
private void drainReadBuffer(int bufferIndex) {
final long writeCount = this.recordedCount.get(bufferIndex);
for (int i = 0; i < MAX_DRAIN_COUNT; i++) {
final int index = (int) (this.readCount[bufferIndex] & BUFFER_INDEX_MASK);
final AtomicReferenceArray<Node<K, V>> buffer = this.buffers[bufferIndex];
final Node<K, V> node = buffer.get(index);
if (node == null) {
break;
}
buffer.lazySet(index, null);
this.evictionQueue.moveToBack(node);
this.readCount[bufferIndex]++;
}
this.processedCount.lazySet(bufferIndex, writeCount);
}
}
private static final | ReadOperations |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/KeycloakComponentBuilderFactory.java | {
"start": 20685,
"end": 26237
} | class ____
extends AbstractComponentBuilder<KeycloakComponent>
implements KeycloakComponentBuilder {
@Override
protected KeycloakComponent buildConcreteComponent() {
return new KeycloakComponent();
}
private org.apache.camel.component.keycloak.KeycloakConfiguration getOrCreateConfiguration(KeycloakComponent component) {
if (component.getConfiguration() == null) {
component.setConfiguration(new org.apache.camel.component.keycloak.KeycloakConfiguration());
}
return component.getConfiguration();
}
@Override
protected boolean setPropertyOnComponent(
Component component,
String name,
Object value) {
switch (name) {
case "accessToken": getOrCreateConfiguration((KeycloakComponent) component).setAccessToken((java.lang.String) value); return true;
case "authClient": getOrCreateConfiguration((KeycloakComponent) component).setAuthClient((java.lang.String) value); return true;
case "authIpAddress": getOrCreateConfiguration((KeycloakComponent) component).setAuthIpAddress((java.lang.String) value); return true;
case "authRealm": getOrCreateConfiguration((KeycloakComponent) component).setAuthRealm((java.lang.String) value); return true;
case "authRealmFilter": getOrCreateConfiguration((KeycloakComponent) component).setAuthRealmFilter((java.lang.String) value); return true;
case "authUser": getOrCreateConfiguration((KeycloakComponent) component).setAuthUser((java.lang.String) value); return true;
case "client": getOrCreateConfiguration((KeycloakComponent) component).setClient((java.lang.String) value); return true;
case "clientId": getOrCreateConfiguration((KeycloakComponent) component).setClientId((java.lang.String) value); return true;
case "clientSecret": getOrCreateConfiguration((KeycloakComponent) component).setClientSecret((java.lang.String) value); return true;
case "configuration": ((KeycloakComponent) component).setConfiguration((org.apache.camel.component.keycloak.KeycloakConfiguration) value); return true;
case "dateFrom": getOrCreateConfiguration((KeycloakComponent) component).setDateFrom((java.lang.String) value); return true;
case "dateTo": getOrCreateConfiguration((KeycloakComponent) component).setDateTo((java.lang.String) value); return true;
case "eventType": getOrCreateConfiguration((KeycloakComponent) component).setEventType((java.lang.String) value); return true;
case "first": getOrCreateConfiguration((KeycloakComponent) component).setFirst((int) value); return true;
case "introspectionCacheEnabled": getOrCreateConfiguration((KeycloakComponent) component).setIntrospectionCacheEnabled((boolean) value); return true;
case "introspectionCacheTtl": getOrCreateConfiguration((KeycloakComponent) component).setIntrospectionCacheTtl((long) value); return true;
case "ipAddress": getOrCreateConfiguration((KeycloakComponent) component).setIpAddress((java.lang.String) value); return true;
case "keycloakClient": getOrCreateConfiguration((KeycloakComponent) component).setKeycloakClient((org.keycloak.admin.client.Keycloak) value); return true;
case "maxResults": getOrCreateConfiguration((KeycloakComponent) component).setMaxResults((int) value); return true;
case "operation": getOrCreateConfiguration((KeycloakComponent) component).setOperation((org.apache.camel.component.keycloak.KeycloakOperations) value); return true;
case "operationTypes": getOrCreateConfiguration((KeycloakComponent) component).setOperationTypes((java.lang.String) value); return true;
case "password": getOrCreateConfiguration((KeycloakComponent) component).setPassword((java.lang.String) value); return true;
case "pojoRequest": getOrCreateConfiguration((KeycloakComponent) component).setPojoRequest((boolean) value); return true;
case "realm": getOrCreateConfiguration((KeycloakComponent) component).setRealm((java.lang.String) value); return true;
case "resourcePath": getOrCreateConfiguration((KeycloakComponent) component).setResourcePath((java.lang.String) value); return true;
case "serverUrl": getOrCreateConfiguration((KeycloakComponent) component).setServerUrl((java.lang.String) value); return true;
case "types": getOrCreateConfiguration((KeycloakComponent) component).setTypes((java.lang.String) value); return true;
case "user": getOrCreateConfiguration((KeycloakComponent) component).setUser((java.lang.String) value); return true;
case "username": getOrCreateConfiguration((KeycloakComponent) component).setUsername((java.lang.String) value); return true;
case "useTokenIntrospection": getOrCreateConfiguration((KeycloakComponent) component).setUseTokenIntrospection((boolean) value); return true;
case "bridgeErrorHandler": ((KeycloakComponent) component).setBridgeErrorHandler((boolean) value); return true;
case "lazyStartProducer": ((KeycloakComponent) component).setLazyStartProducer((boolean) value); return true;
case "autowiredEnabled": ((KeycloakComponent) component).setAutowiredEnabled((boolean) value); return true;
default: return false;
}
}
}
} | KeycloakComponentBuilderImpl |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/annotations/GenericGenerator.java | {
"start": 947,
"end": 2734
} | class ____:
* <ul>
* <li>defining a named generator using this annotation, specifying an
* implementation of {@code IdentifierGenerator} using {@link #type},
* then
* <li>annotating the identifier property of the entity with the JPA-defined
* {@link jakarta.persistence.GeneratedValue @GeneratedValue} annotation,
* and
* <li>using {@link jakarta.persistence.GeneratedValue#generator() generator}
* to specify the {@link #name()} of the generator defined using this
* annotation.
* </ul>
* <p>
* If neither {@link #type} not {@link #strategy} is specified, Hibernate asks
* {@linkplain org.hibernate.dialect.Dialect#getNativeIdentifierGeneratorStrategy
* the dialect} to decide an appropriate strategy. This is equivalent to using
* {@link jakarta.persistence.GenerationType#AUTO AUTO} in JPA.
* <p>
* For example, if we define a generator using:
* <pre>
* @GenericGenerator(name = "custom-generator",
* type = org.hibernate.eg.CustomStringGenerator.class)
* }</pre>
* <p>
* Then we may make use of it by annotating an identifier field as follows:
* <pre>
* @Id @GeneratedValue(generator = "custom-generator")
* private String id;
* </pre>
* <p>
* The disadvantage of this approach is the use of stringly-typed names. An
* alternative, completely typesafe, way to declare a generator and associate
* it with an entity is provided by the {@link IdGeneratorType @IdGeneratorType}
* meta-annotation.
*
* @see jakarta.persistence.GeneratedValue
*
* @deprecated Use the new approach based on {@link IdGeneratorType}.
*
* @author Emmanuel Bernard
*/
@Target({PACKAGE, TYPE, METHOD, FIELD})
@Retention(RUNTIME)
@Repeatable(GenericGenerators.class)
@Deprecated(since = "6.5", forRemoval = true)
public @ | by |
java | google__guava | android/guava-tests/test/com/google/common/primitives/BooleansTest.java | {
"start": 1544,
"end": 25565
} | class ____ extends TestCase {
private static final boolean[] EMPTY = {};
private static final boolean[] ARRAY_FALSE = {false};
private static final boolean[] ARRAY_TRUE = {true};
private static final boolean[] ARRAY_FALSE_FALSE = {false, false};
private static final boolean[] ARRAY_FALSE_TRUE = {false, true};
private static final boolean[] VALUES = {false, true};
@SuppressWarnings("InlineMeInliner") // We need to test our method.
public void testHashCode() {
assertThat(Booleans.hashCode(true)).isEqualTo(Boolean.TRUE.hashCode());
assertThat(Booleans.hashCode(false)).isEqualTo(Boolean.FALSE.hashCode());
}
public void testTrueFirst() {
assertThat(Booleans.trueFirst().compare(true, true)).isEqualTo(0);
assertThat(Booleans.trueFirst().compare(false, false)).isEqualTo(0);
assertThat(Booleans.trueFirst().compare(true, false)).isLessThan(0);
assertThat(Booleans.trueFirst().compare(false, true)).isGreaterThan(0);
}
public void testFalseFirst() {
assertThat(Booleans.falseFirst().compare(true, true)).isEqualTo(0);
assertThat(Booleans.falseFirst().compare(false, false)).isEqualTo(0);
assertThat(Booleans.falseFirst().compare(false, true)).isLessThan(0);
assertThat(Booleans.falseFirst().compare(true, false)).isGreaterThan(0);
}
// We need to test that our method behaves like the JDK method.
@SuppressWarnings("InlineMeInliner")
public void testCompare() {
for (boolean x : VALUES) {
for (boolean y : VALUES) {
// note: spec requires only that the sign is the same
assertWithMessage(x + ", " + y)
.that(Booleans.compare(x, y))
.isEqualTo(Boolean.valueOf(x).compareTo(y));
}
}
}
public void testContains() {
assertThat(Booleans.contains(EMPTY, false)).isFalse();
assertThat(Booleans.contains(ARRAY_FALSE, true)).isFalse();
assertThat(Booleans.contains(ARRAY_FALSE, false)).isTrue();
assertThat(Booleans.contains(ARRAY_FALSE_TRUE, false)).isTrue();
assertThat(Booleans.contains(ARRAY_FALSE_TRUE, true)).isTrue();
}
public void testIndexOf() {
assertThat(Booleans.indexOf(EMPTY, ARRAY_FALSE)).isEqualTo(-1);
assertThat(Booleans.indexOf(ARRAY_FALSE, ARRAY_FALSE_TRUE)).isEqualTo(-1);
assertThat(Booleans.indexOf(ARRAY_FALSE_FALSE, ARRAY_FALSE)).isEqualTo(0);
assertThat(Booleans.indexOf(ARRAY_FALSE, ARRAY_FALSE)).isEqualTo(0);
assertThat(Booleans.indexOf(ARRAY_FALSE_TRUE, ARRAY_FALSE)).isEqualTo(0);
assertThat(Booleans.indexOf(ARRAY_FALSE_TRUE, ARRAY_TRUE)).isEqualTo(1);
assertThat(Booleans.indexOf(ARRAY_TRUE, new boolean[0])).isEqualTo(0);
}
public void testIndexOf_arrays() {
assertThat(Booleans.indexOf(EMPTY, false)).isEqualTo(-1);
assertThat(Booleans.indexOf(ARRAY_FALSE, true)).isEqualTo(-1);
assertThat(Booleans.indexOf(ARRAY_FALSE_FALSE, true)).isEqualTo(-1);
assertThat(Booleans.indexOf(ARRAY_FALSE, false)).isEqualTo(0);
assertThat(Booleans.indexOf(ARRAY_FALSE_TRUE, false)).isEqualTo(0);
assertThat(Booleans.indexOf(ARRAY_FALSE_TRUE, true)).isEqualTo(1);
assertThat(Booleans.indexOf(new boolean[] {false, false, true}, true)).isEqualTo(2);
}
public void testLastIndexOf() {
assertThat(Booleans.lastIndexOf(EMPTY, false)).isEqualTo(-1);
assertThat(Booleans.lastIndexOf(ARRAY_FALSE, true)).isEqualTo(-1);
assertThat(Booleans.lastIndexOf(ARRAY_FALSE_FALSE, true)).isEqualTo(-1);
assertThat(Booleans.lastIndexOf(ARRAY_FALSE, false)).isEqualTo(0);
assertThat(Booleans.lastIndexOf(ARRAY_FALSE_TRUE, false)).isEqualTo(0);
assertThat(Booleans.lastIndexOf(ARRAY_FALSE_TRUE, true)).isEqualTo(1);
assertThat(Booleans.lastIndexOf(new boolean[] {false, true, true}, true)).isEqualTo(2);
}
public void testConcat() {
assertThat(Booleans.concat()).isEqualTo(EMPTY);
assertThat(Booleans.concat(EMPTY)).isEqualTo(EMPTY);
assertThat(Booleans.concat(EMPTY, EMPTY, EMPTY)).isEqualTo(EMPTY);
assertThat(Booleans.concat(ARRAY_FALSE)).isEqualTo(ARRAY_FALSE);
assertThat(Booleans.concat(ARRAY_FALSE)).isNotSameInstanceAs(ARRAY_FALSE);
assertThat(Booleans.concat(EMPTY, ARRAY_FALSE, EMPTY)).isEqualTo(ARRAY_FALSE);
assertThat(Booleans.concat(ARRAY_FALSE, ARRAY_FALSE, ARRAY_FALSE))
.isEqualTo(new boolean[] {false, false, false});
assertThat(Booleans.concat(ARRAY_FALSE, ARRAY_FALSE_TRUE))
.isEqualTo(new boolean[] {false, false, true});
}
@GwtIncompatible // different overflow behavior; could probably be made to work by using ~~
public void testConcat_overflow_negative() {
int dim1 = 1 << 16;
int dim2 = 1 << 15;
assertThat(dim1 * dim2).isLessThan(0);
testConcatOverflow(dim1, dim2);
}
@GwtIncompatible // different overflow behavior; could probably be made to work by using ~~
public void testConcat_overflow_nonNegative() {
int dim1 = 1 << 16;
int dim2 = 1 << 16;
assertThat(dim1 * dim2).isAtLeast(0);
testConcatOverflow(dim1, dim2);
}
private static void testConcatOverflow(int arraysDim1, int arraysDim2) {
assertThat((long) arraysDim1 * arraysDim2).isNotEqualTo((long) (arraysDim1 * arraysDim2));
boolean[][] arrays = new boolean[arraysDim1][];
// it's shared to avoid using too much memory in tests
boolean[] sharedArray = new boolean[arraysDim2];
Arrays.fill(arrays, sharedArray);
try {
Booleans.concat(arrays);
fail();
} catch (IllegalArgumentException expected) {
}
}
public void testEnsureCapacity() {
assertThat(Booleans.ensureCapacity(EMPTY, 0, 1)).isSameInstanceAs(EMPTY);
assertThat(Booleans.ensureCapacity(ARRAY_FALSE, 0, 1)).isSameInstanceAs(ARRAY_FALSE);
assertThat(Booleans.ensureCapacity(ARRAY_FALSE, 1, 1)).isSameInstanceAs(ARRAY_FALSE);
assertThat(Booleans.ensureCapacity(new boolean[] {true}, 2, 1))
.isEqualTo(new boolean[] {true, false, false});
}
public void testEnsureCapacity_fail() {
assertThrows(IllegalArgumentException.class, () -> Booleans.ensureCapacity(ARRAY_FALSE, -1, 1));
assertThrows(IllegalArgumentException.class, () -> Booleans.ensureCapacity(ARRAY_FALSE, 1, -1));
}
public void testJoin() {
assertThat(Booleans.join(",", EMPTY)).isEmpty();
assertThat(Booleans.join(",", ARRAY_FALSE)).isEqualTo("false");
assertThat(Booleans.join(",", false, true)).isEqualTo("false,true");
assertThat(Booleans.join("", false, true, false)).isEqualTo("falsetruefalse");
}
public void testLexicographicalComparator() {
List<boolean[]> ordered =
Arrays.asList(
new boolean[] {},
new boolean[] {false},
new boolean[] {false, false},
new boolean[] {false, true},
new boolean[] {true},
new boolean[] {true, false},
new boolean[] {true, true},
new boolean[] {true, true, true});
Comparator<boolean[]> comparator = Booleans.lexicographicalComparator();
Helpers.testComparator(comparator, ordered);
}
@J2ktIncompatible
@GwtIncompatible // SerializableTester
public void testLexicographicalComparatorSerializable() {
Comparator<boolean[]> comparator = Booleans.lexicographicalComparator();
assertThat(SerializableTester.reserialize(comparator)).isSameInstanceAs(comparator);
}
public void testReverse() {
testReverse(new boolean[] {}, new boolean[] {});
testReverse(new boolean[] {true}, new boolean[] {true});
testReverse(new boolean[] {false, true}, new boolean[] {true, false});
testReverse(new boolean[] {true, false, false}, new boolean[] {false, false, true});
testReverse(new boolean[] {true, true, false, false}, new boolean[] {false, false, true, true});
}
private static void testReverse(boolean[] input, boolean[] expectedOutput) {
input = Arrays.copyOf(input, input.length);
Booleans.reverse(input);
assertThat(input).isEqualTo(expectedOutput);
}
private static void testReverse(
boolean[] input, int fromIndex, int toIndex, boolean[] expectedOutput) {
input = Arrays.copyOf(input, input.length);
Booleans.reverse(input, fromIndex, toIndex);
assertThat(input).isEqualTo(expectedOutput);
}
public void testReverseIndexed() {
testReverse(new boolean[] {}, 0, 0, new boolean[] {});
testReverse(new boolean[] {true}, 0, 1, new boolean[] {true});
testReverse(new boolean[] {false, true}, 0, 2, new boolean[] {true, false});
testReverse(new boolean[] {true, false, false}, 0, 2, new boolean[] {false, true, false});
testReverse(new boolean[] {true, false, false}, 0, 1, new boolean[] {true, false, false});
testReverse(
new boolean[] {true, true, false, false}, 1, 3, new boolean[] {true, false, true, false});
}
private static void testRotate(boolean[] input, int distance, boolean[] expectedOutput) {
input = Arrays.copyOf(input, input.length);
Booleans.rotate(input, distance);
assertThat(input).isEqualTo(expectedOutput);
}
private static void testRotate(
boolean[] input, int distance, int fromIndex, int toIndex, boolean[] expectedOutput) {
input = Arrays.copyOf(input, input.length);
Booleans.rotate(input, distance, fromIndex, toIndex);
assertThat(input).isEqualTo(expectedOutput);
}
public void testRotate() {
testRotate(new boolean[] {}, -1, new boolean[] {});
testRotate(new boolean[] {}, 0, new boolean[] {});
testRotate(new boolean[] {}, 1, new boolean[] {});
testRotate(new boolean[] {true}, -2, new boolean[] {true});
testRotate(new boolean[] {true}, -1, new boolean[] {true});
testRotate(new boolean[] {true}, 0, new boolean[] {true});
testRotate(new boolean[] {true}, 1, new boolean[] {true});
testRotate(new boolean[] {true}, 2, new boolean[] {true});
testRotate(new boolean[] {true, false}, -3, new boolean[] {false, true});
testRotate(new boolean[] {true, false}, -1, new boolean[] {false, true});
testRotate(new boolean[] {true, false}, -2, new boolean[] {true, false});
testRotate(new boolean[] {true, false}, 0, new boolean[] {true, false});
testRotate(new boolean[] {true, false}, 1, new boolean[] {false, true});
testRotate(new boolean[] {true, false}, 2, new boolean[] {true, false});
testRotate(new boolean[] {true, false}, 3, new boolean[] {false, true});
testRotate(new boolean[] {true, false, true}, -5, new boolean[] {true, true, false});
testRotate(new boolean[] {true, false, true}, -4, new boolean[] {false, true, true});
testRotate(new boolean[] {true, false, true}, -3, new boolean[] {true, false, true});
testRotate(new boolean[] {true, false, true}, -2, new boolean[] {true, true, false});
testRotate(new boolean[] {true, false, true}, -1, new boolean[] {false, true, true});
testRotate(new boolean[] {true, false, true}, 0, new boolean[] {true, false, true});
testRotate(new boolean[] {true, false, true}, 1, new boolean[] {true, true, false});
testRotate(new boolean[] {true, false, true}, 2, new boolean[] {false, true, true});
testRotate(new boolean[] {true, false, true}, 3, new boolean[] {true, false, true});
testRotate(new boolean[] {true, false, true}, 4, new boolean[] {true, true, false});
testRotate(new boolean[] {true, false, true}, 5, new boolean[] {false, true, true});
testRotate(
new boolean[] {true, false, true, false}, -9, new boolean[] {false, true, false, true});
testRotate(
new boolean[] {true, false, true, false}, -5, new boolean[] {false, true, false, true});
testRotate(
new boolean[] {true, false, true, false}, -1, new boolean[] {false, true, false, true});
testRotate(
new boolean[] {true, false, true, false}, 0, new boolean[] {true, false, true, false});
testRotate(
new boolean[] {true, false, true, false}, 1, new boolean[] {false, true, false, true});
testRotate(
new boolean[] {true, false, true, false}, 5, new boolean[] {false, true, false, true});
testRotate(
new boolean[] {true, false, true, false}, 9, new boolean[] {false, true, false, true});
testRotate(
new boolean[] {true, false, true, false, true},
-6,
new boolean[] {false, true, false, true, true});
testRotate(
new boolean[] {true, false, true, false, true},
-4,
new boolean[] {true, true, false, true, false});
testRotate(
new boolean[] {true, false, true, false, true},
-3,
new boolean[] {false, true, true, false, true});
testRotate(
new boolean[] {true, false, true, false, true},
-1,
new boolean[] {false, true, false, true, true});
testRotate(
new boolean[] {true, false, true, false, true},
0,
new boolean[] {true, false, true, false, true});
testRotate(
new boolean[] {true, false, true, false, true},
1,
new boolean[] {true, true, false, true, false});
testRotate(
new boolean[] {true, false, true, false, true},
3,
new boolean[] {true, false, true, true, false});
testRotate(
new boolean[] {true, false, true, false, true},
4,
new boolean[] {false, true, false, true, true});
testRotate(
new boolean[] {true, false, true, false, true},
6,
new boolean[] {true, true, false, true, false});
}
public void testRotateIndexed() {
testRotate(new boolean[] {}, 0, 0, 0, new boolean[] {});
testRotate(new boolean[] {true}, 0, 0, 1, new boolean[] {true});
testRotate(new boolean[] {true}, 1, 0, 1, new boolean[] {true});
testRotate(new boolean[] {true}, 1, 1, 1, new boolean[] {true});
// Rotate the central 5 elements, leaving the ends as-is
testRotate(
new boolean[] {false, true, false, true, false, true, false},
-6,
1,
6,
new boolean[] {false, false, true, false, true, true, false});
testRotate(
new boolean[] {false, true, false, true, false, true, false},
-1,
1,
6,
new boolean[] {false, false, true, false, true, true, false});
testRotate(
new boolean[] {false, true, false, true, false, true, false},
0,
1,
6,
new boolean[] {false, true, false, true, false, true, false});
testRotate(
new boolean[] {false, true, false, true, false, true, false},
5,
1,
6,
new boolean[] {false, true, false, true, false, true, false});
testRotate(
new boolean[] {false, true, false, true, false, true, false},
14,
1,
6,
new boolean[] {false, false, true, false, true, true, false});
// Rotate the first three elements
testRotate(
new boolean[] {false, true, false, true, false, true, false},
-2,
0,
3,
new boolean[] {false, false, true, true, false, true, false});
testRotate(
new boolean[] {false, true, false, true, false, true, false},
-1,
0,
3,
new boolean[] {true, false, false, true, false, true, false});
testRotate(
new boolean[] {false, true, false, true, false, true, false},
0,
0,
3,
new boolean[] {false, true, false, true, false, true, false});
testRotate(
new boolean[] {false, true, false, true, false, true, false},
1,
0,
3,
new boolean[] {false, false, true, true, false, true, false});
testRotate(
new boolean[] {false, true, false, true, false, true, false},
2,
0,
3,
new boolean[] {true, false, false, true, false, true, false});
// Rotate the last four elements
testRotate(
new boolean[] {false, true, false, true, false, true, false},
-6,
3,
7,
new boolean[] {false, true, false, true, false, true, false});
testRotate(
new boolean[] {false, true, false, true, false, true, false},
-5,
3,
7,
new boolean[] {false, true, false, false, true, false, true});
testRotate(
new boolean[] {false, true, false, true, false, true, false},
-4,
3,
7,
new boolean[] {false, true, false, true, false, true, false});
testRotate(
new boolean[] {false, true, false, true, false, true, false},
-3,
3,
7,
new boolean[] {false, true, false, false, true, false, true});
testRotate(
new boolean[] {false, true, false, true, false, true, false},
-2,
3,
7,
new boolean[] {false, true, false, true, false, true, false});
testRotate(
new boolean[] {false, true, false, true, false, true, false},
-1,
3,
7,
new boolean[] {false, true, false, false, true, false, true});
testRotate(
new boolean[] {false, true, false, true, false, true, false},
0,
3,
7,
new boolean[] {false, true, false, true, false, true, false});
testRotate(
new boolean[] {false, true, false, true, false, true, false},
1,
3,
7,
new boolean[] {false, true, false, false, true, false, true});
testRotate(
new boolean[] {false, true, false, true, false, true, false},
2,
3,
7,
new boolean[] {false, true, false, true, false, true, false});
testRotate(
new boolean[] {false, true, false, true, false, true, false},
3,
3,
7,
new boolean[] {false, true, false, false, true, false, true});
}
public void testToArray() {
// need explicit type parameter to avoid javac warning!?
List<Boolean> none = Arrays.<Boolean>asList();
assertThat(Booleans.toArray(none)).isEqualTo(EMPTY);
List<Boolean> one = Arrays.asList(false);
assertThat(Booleans.toArray(one)).isEqualTo(ARRAY_FALSE);
boolean[] array = {false, false, true};
List<Boolean> three = Arrays.asList(false, false, true);
assertThat(Booleans.toArray(three)).isEqualTo(array);
assertThat(Booleans.toArray(Booleans.asList(array))).isEqualTo(array);
}
public void testToArray_threadSafe() {
// Only for booleans, we lengthen VALUES
boolean[] VALUES = BooleansTest.VALUES;
VALUES = Booleans.concat(VALUES, VALUES);
for (int delta : new int[] {+1, 0, -1}) {
for (int i = 0; i < VALUES.length; i++) {
List<Boolean> list = Booleans.asList(VALUES).subList(0, i);
Collection<Boolean> misleadingSize = Helpers.misleadingSizeCollection(delta);
misleadingSize.addAll(list);
boolean[] arr = Booleans.toArray(misleadingSize);
assertThat(arr).hasLength(i);
for (int j = 0; j < i; j++) {
assertThat(arr[j]).isEqualTo(VALUES[j]);
}
}
}
}
public void testToArray_withNull() {
List<@Nullable Boolean> list = Arrays.asList(false, true, null);
assertThrows(NullPointerException.class, () -> Booleans.toArray(list));
}
@SuppressWarnings({"CollectionIsEmptyTruth", "CollectionIsNotEmptyTruth"})
public void testAsListIsEmpty() {
assertThat(Booleans.asList(EMPTY).isEmpty()).isTrue();
assertThat(Booleans.asList(ARRAY_FALSE).isEmpty()).isFalse();
}
@SuppressWarnings("CollectionSizeTruth")
public void testAsListSize() {
assertThat(Booleans.asList(EMPTY).size()).isEqualTo(0);
assertThat(Booleans.asList(ARRAY_FALSE).size()).isEqualTo(1);
assertThat(Booleans.asList(ARRAY_FALSE_TRUE).size()).isEqualTo(2);
}
@SuppressWarnings("BooleanArrayIndexOfBoolean")
public void testAsListIndexOf() {
assertThat(Booleans.asList(EMPTY).indexOf((Object) "wrong type")).isEqualTo(-1);
assertThat(Booleans.asList(EMPTY).indexOf(true)).isEqualTo(-1);
assertThat(Booleans.asList(ARRAY_FALSE).indexOf(true)).isEqualTo(-1);
assertThat(Booleans.asList(ARRAY_FALSE).indexOf(false)).isEqualTo(0);
assertThat(Booleans.asList(ARRAY_FALSE_TRUE).indexOf(true)).isEqualTo(1);
}
public void testAsListLastIndexOf() {
assertThat(Booleans.asList(EMPTY).lastIndexOf((Object) "wrong type")).isEqualTo(-1);
assertThat(Booleans.asList(EMPTY).lastIndexOf(true)).isEqualTo(-1);
assertThat(Booleans.asList(ARRAY_FALSE).lastIndexOf(true)).isEqualTo(-1);
assertThat(Booleans.asList(ARRAY_FALSE_TRUE).lastIndexOf(true)).isEqualTo(1);
assertThat(Booleans.asList(ARRAY_FALSE_FALSE).lastIndexOf(false)).isEqualTo(1);
}
@SuppressWarnings({"BooleanArrayContainsBoolean", "CollectionDoesNotContainTruth"})
public void testAsListContains() {
assertThat(Booleans.asList(EMPTY).contains((Object) "wrong type")).isFalse();
assertThat(Booleans.asList(EMPTY).contains(true)).isFalse();
assertThat(Booleans.asList(ARRAY_FALSE).contains(true)).isFalse();
assertThat(Booleans.asList(ARRAY_TRUE).contains(true)).isTrue();
assertThat(Booleans.asList(ARRAY_FALSE_TRUE).contains(false)).isTrue();
assertThat(Booleans.asList(ARRAY_FALSE_TRUE).contains(true)).isTrue();
}
public void testAsListEquals() {
assertThat(Booleans.asList(EMPTY).equals(ImmutableList.of())).isTrue();
assertThat(Booleans.asList(ARRAY_FALSE).equals(Booleans.asList(ARRAY_FALSE))).isTrue();
@SuppressWarnings("EqualsIncompatibleType")
boolean listEqualsArray = Booleans.asList(ARRAY_FALSE).equals(ARRAY_FALSE);
assertThat(listEqualsArray).isFalse();
assertThat(Booleans.asList(ARRAY_FALSE).equals(null)).isFalse();
assertThat(Booleans.asList(ARRAY_FALSE).equals(Booleans.asList(ARRAY_FALSE_TRUE))).isFalse();
assertThat(Booleans.asList(ARRAY_FALSE_FALSE).equals(Booleans.asList(ARRAY_FALSE_TRUE)))
.isFalse();
assertEquals(1, Booleans.asList(ARRAY_FALSE_TRUE).lastIndexOf(true));
List<Boolean> reference = Booleans.asList(ARRAY_FALSE);
assertEquals(Booleans.asList(ARRAY_FALSE), reference);
// Explicitly call `equals`; `assertEquals` might return fast
assertThat(reference.equals(reference)).isTrue();
}
public void testAsListHashcode() {
assertThat(Booleans.asList(EMPTY).hashCode()).isEqualTo(1);
assertThat(Booleans.asList(ARRAY_FALSE).hashCode())
.isEqualTo(Booleans.asList(ARRAY_FALSE).hashCode());
List<Boolean> reference = Booleans.asList(ARRAY_FALSE);
assertThat(reference.hashCode()).isEqualTo(Booleans.asList(ARRAY_FALSE).hashCode());
}
public void testAsListToString() {
assertThat(Booleans.asList(ARRAY_FALSE).toString()).isEqualTo("[false]");
assertThat(Booleans.asList(ARRAY_FALSE_TRUE).toString()).isEqualTo("[false, true]");
}
public void testAsListSet() {
List<Boolean> list = Booleans.asList(ARRAY_FALSE);
assertThat(list.set(0, true)).isFalse();
assertThat(list.set(0, false)).isTrue();
assertThrows(NullPointerException.class, () -> list.set(0, null));
assertThrows(IndexOutOfBoundsException.class, () -> list.set(1, true));
}
public void testAsListCanonicalValues() {
List<Boolean> list = Booleans.asList(true, false);
assertThat(list.get(0)).isSameInstanceAs(true);
assertThat(list.get(1)).isSameInstanceAs(false);
@SuppressWarnings("deprecation")
Boolean anotherTrue = new Boolean(true);
@SuppressWarnings("deprecation")
Boolean anotherFalse = new Boolean(false);
list.set(0, anotherTrue);
assertThat(list.get(0)).isSameInstanceAs(true);
list.set(1, anotherFalse);
assertThat(list.get(1)).isSameInstanceAs(false);
}
public void testCountTrue() {
assertThat(Booleans.countTrue()).isEqualTo(0);
assertThat(Booleans.countTrue(false)).isEqualTo(0);
assertThat(Booleans.countTrue(true)).isEqualTo(1);
assertThat(Booleans.countTrue(false, true, false, true, false, true)).isEqualTo(3);
assertThat(Booleans.countTrue(false, false, true, false, false)).isEqualTo(1);
}
@J2ktIncompatible
@GwtIncompatible // NullPointerTester
public void testNulls() {
new NullPointerTester().testAllPublicStaticMethods(Booleans.class);
}
}
| BooleansTest |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/kstream/internals/suppress/TimeDefinitions.java | {
"start": 1772,
"end": 2358
} | class ____<K extends Windowed<?>> implements TimeDefinition<K> {
private static final WindowEndTimeDefinition<?> INSTANCE = new WindowEndTimeDefinition<>();
private WindowEndTimeDefinition() {}
@SuppressWarnings("unchecked")
static <K extends Windowed<?>> WindowEndTimeDefinition<K> instance() {
return (WindowEndTimeDefinition<K>) WindowEndTimeDefinition.INSTANCE;
}
@Override
public long time(final RecordContext context, final K key) {
return key.window().end();
}
}
}
| WindowEndTimeDefinition |
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/functions/aggfunctions/MaxWithRetractAggFunctionTest.java | {
"start": 7119,
"end": 8076
} | class ____
extends MaxWithRetractAggFunctionTestBase<Boolean> {
@Override
protected List<List<Boolean>> getInputValueSets() {
return Arrays.asList(
Arrays.asList(false, false, false),
Arrays.asList(true, true, true),
Arrays.asList(true, false, null, true, false, true, null),
Arrays.asList(null, null, null),
Arrays.asList(null, true));
}
@Override
protected List<Boolean> getExpectedResults() {
return Arrays.asList(false, true, true, null, true);
}
@Override
protected AggregateFunction<Boolean, MaxWithRetractAccumulator<Boolean>> getAggregator() {
return new MaxWithRetractAggFunction<>(DataTypes.BOOLEAN().getLogicalType());
}
}
/** Test for {@link DecimalType}. */
@Nested
final | BooleanMaxWithRetractAggFunctionTest |
java | spring-projects__spring-framework | spring-web/src/test/java/org/springframework/web/context/support/ServletContextResourceTests.java | {
"start": 1151,
"end": 3567
} | class ____ {
private static final String TEST_RESOURCE_PATH = "org/springframework/web/context/support/resource.txt";
private final MockServletContext servletContext = new MockServletContext();
@Test
void resourceShouldHaveExpectedProperties() throws IOException {
Resource resource = new ServletContextResource(this.servletContext, TEST_RESOURCE_PATH);
assertThat(resource.getFile()).isNotNull();
assertThat(resource.exists()).isTrue();
assertThat(resource.isFile()).isTrue();
assertThat(resource.getFilename()).isEqualTo("resource.txt");
assertThat(resource.getURL().getFile()).endsWith("resource.txt");
}
@Test
void relativeResourcesShouldHaveExpectedProperties() throws IOException {
Resource resource = new ServletContextResource(this.servletContext, TEST_RESOURCE_PATH);
Resource relative1 = resource.createRelative("relative.txt");
assertThat(relative1.getFilename()).isEqualTo("relative.txt");
assertThat(relative1.getURL().getFile()).endsWith("relative.txt");
assertThat(relative1.exists()).isTrue();
Resource relative2 = resource.createRelative("folder/other.txt");
assertThat(relative2.getFilename()).isEqualTo("other.txt");
assertThat(relative2.getURL().getFile()).endsWith("other.txt");
assertThat(relative2.exists()).isTrue();
}
@Test
void resourceWithDotPathShouldBeEqual() {
Resource resource = new ServletContextResource(this.servletContext, TEST_RESOURCE_PATH);
assertThat(new ServletContextResource(servletContext, "org/springframework/web/context/../context/support/./resource.txt")).isEqualTo(resource);
}
@Test
void resourceWithRelativePathShouldBeEqual() throws IOException {
Resource resource = new ServletContextResource(this.servletContext, "dir/");
Resource relative = resource.createRelative("subdir");
assertThat(relative).isEqualTo(new ServletContextResource(this.servletContext, "dir/subdir"));
}
@Test
void missingResourceShouldHaveExpectedProperties() {
MockServletContext context = mock();
given(context.getRealPath(eq("/org/springframework/web/context/support/missing.txt")))
.willReturn(this.servletContext.getRealPath("org/springframework/web/context/support/") + "missing.txt");
Resource missing = new ServletContextResource(context, "org/springframework/web/context/support/missing.txt");
assertThat(missing.exists()).isFalse();
assertThat(missing.isFile()).isFalse();
}
}
| ServletContextResourceTests |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunctionSupplier.java | {
"start": 648,
"end": 1595
} | class ____ implements AggregatorFunctionSupplier {
public MinDoubleAggregatorFunctionSupplier() {
}
@Override
public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
return MinDoubleAggregatorFunction.intermediateStateDesc();
}
@Override
public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
return MinDoubleGroupingAggregatorFunction.intermediateStateDesc();
}
@Override
public MinDoubleAggregatorFunction aggregator(DriverContext driverContext,
List<Integer> channels) {
return MinDoubleAggregatorFunction.create(driverContext, channels);
}
@Override
public MinDoubleGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
List<Integer> channels) {
return MinDoubleGroupingAggregatorFunction.create(channels, driverContext);
}
@Override
public String describe() {
return "min of doubles";
}
}
| MinDoubleAggregatorFunctionSupplier |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/io/network/util/TestSubpartitionProducer.java | {
"start": 1601,
"end": 3679
} | class ____ implements Callable<Boolean> {
public static final int MAX_SLEEP_TIME_MS = 20;
/** The subpartition to add data to. */
private final ResultSubpartition subpartition;
/**
* Flag indicating whether the consumer is slow. If true, the consumer will sleep a random
* number of milliseconds between adding data.
*/
private final boolean isSlowProducer;
/** The source data. */
private final TestProducerSource source;
/** Random source for sleeps. */
private final Random random;
public TestSubpartitionProducer(
ResultSubpartition subpartition, boolean isSlowProducer, TestProducerSource source) {
this.subpartition = checkNotNull(subpartition);
this.isSlowProducer = isSlowProducer;
this.random = isSlowProducer ? new Random() : null;
this.source = checkNotNull(source);
}
@Override
public Boolean call() throws Exception {
boolean success = false;
try {
BufferAndChannel bufferAndChannel;
while ((bufferAndChannel = source.getNextBuffer()) != null) {
MemorySegment segment = MemorySegmentFactory.wrap(bufferAndChannel.getBuffer());
subpartition.add(
new BufferConsumer(
new NetworkBuffer(
segment, MemorySegment::free, Buffer.DataType.DATA_BUFFER),
segment.size()));
// Check for interrupted flag after adding data to prevent resource leaks
if (Thread.interrupted()) {
throw new InterruptedException();
}
if (isSlowProducer) {
Thread.sleep(random.nextInt(MAX_SLEEP_TIME_MS + 1));
}
}
subpartition.finish();
success = true;
return true;
} finally {
if (!success) {
subpartition.release();
}
}
}
}
| TestSubpartitionProducer |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/search/SynonymMapParser.java | {
"start": 1002,
"end": 3930
} | class ____<K, V> implements ComplexDataParser<Map<V, List<V>>> {
private final RedisCodec<K, V> codec;
public SynonymMapParser(RedisCodec<K, V> codec) {
this.codec = codec;
}
/**
* Parse the FT.SYNDUMP response data, automatically detecting RESP2 vs RESP3 format.
*
* @param data the response data from Redis
* @return a map where keys are terms and values are lists of synonyms for each term
*/
@Override
public Map<V, List<V>> parse(ComplexData data) {
if (data == null) {
return new LinkedHashMap<>();
}
if (data.isList()) {
return parseResp2(data);
}
return parseResp3(data);
}
/**
* Parse FT.SYNDUMP response in RESP2 format (array-based with alternating key-value pairs).
*/
private Map<V, List<V>> parseResp2(ComplexData data) {
List<Object> synonymArray = data.getDynamicList();
Map<V, List<V>> synonymMap = new LinkedHashMap<>();
// RESP2: Parse alternating key-value pairs
// Structure: [term1, [synonym1, synonym2], term2, [synonym3, synonym4], ...]
for (int i = 0; i < synonymArray.size();) {
if (i + 2 > synonymArray.size()) {
break; // Incomplete pair, skip
}
// Decode the term (key)
V term = codec.decodeValue((ByteBuffer) synonymArray.get(i++));
// Decode the synonyms (value - should be a list)
ComplexData synonymsData = (ComplexData) synonymArray.get(i++);
List<Object> synonims = synonymsData.getDynamicList();
List<V> decodedSynonyms = synonims.stream().map(synonym -> codec.decodeValue((ByteBuffer) synonym))
.collect(Collectors.toList());
synonymMap.put(term, decodedSynonyms);
}
return synonymMap;
}
/**
* Parse FT.SYNDUMP response in RESP3 format (map-based).
*/
private Map<V, List<V>> parseResp3(ComplexData data) {
Map<Object, Object> synonymMapRaw = data.getDynamicMap();
Map<V, List<V>> synonymMap = new LinkedHashMap<>();
// RESP3: Parse native map structure
// Structure: {term1: [synonym1, synonym2], term2: [synonym3, synonym4], ...}
for (Map.Entry<Object, Object> entry : synonymMapRaw.entrySet()) {
// Decode the term (key)
V term = codec.decodeValue((ByteBuffer) entry.getKey());
// Decode the synonyms (value - should be a list)
Object synonymsData = entry.getValue();
List<Object> synonymsList = ((ComplexData) synonymsData).getDynamicList();
List<V> synonyms = synonymsList.stream().map(synonym -> codec.decodeValue((ByteBuffer) synonym))
.collect(Collectors.toList());
synonymMap.put(term, synonyms);
}
return synonymMap;
}
}
| SynonymMapParser |
java | apache__camel | components/camel-hazelcast/src/main/java/org/apache/camel/component/hazelcast/map/HazelcastMapEndpoint.java | {
"start": 1760,
"end": 2524
} | class ____ extends HazelcastDefaultEndpoint {
public HazelcastMapEndpoint(HazelcastInstance hazelcastInstance, String uri, String cacheName,
HazelcastDefaultComponent component) {
super(hazelcastInstance, uri, component, cacheName);
setCommand(HazelcastCommand.map);
}
@Override
public Consumer createConsumer(Processor processor) throws Exception {
HazelcastMapConsumer answer = new HazelcastMapConsumer(hazelcastInstance, this, processor, cacheName);
configureConsumer(answer);
return answer;
}
@Override
public Producer createProducer() throws Exception {
return new HazelcastMapProducer(hazelcastInstance, this, cacheName);
}
}
| HazelcastMapEndpoint |
java | bumptech__glide | library/src/main/java/com/bumptech/glide/request/SingleRequest.java | {
"start": 1424,
"end": 1895
} | class ____<R> implements Request, SizeReadyCallback, ResourceCallback {
/** Tag for logging internal events, not generally suitable for public use. */
private static final String TAG = "GlideRequest";
/** Tag for logging externally useful events (request completion, timing etc). */
private static final String GLIDE_TAG = "Glide";
private static final boolean IS_VERBOSE_LOGGABLE = Log.isLoggable(TAG, Log.VERBOSE);
private int cookie;
private | SingleRequest |
java | quarkusio__quarkus | integration-tests/grpc-hibernate-reactive/src/main/java/com/example/reactive/ContextChecker.java | {
"start": 211,
"end": 852
} | class ____ {
private final Map<Integer, String> requestContexts = new ConcurrentHashMap<>();
@Inject
RequestScopeBean requestScopeBean;
int newContextId(String caller) {
String original;
int contextId = requestScopeBean.getId();
if ((original = requestContexts.put(contextId, caller)) != null) {
throw new RuntimeException(
"request context reused from a different call, original usage: " + original + ", duplicate: " + caller);
}
return contextId;
}
public int requestContextId() {
return requestScopeBean.getId();
}
}
| ContextChecker |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/scheduling/annotation/AsyncExecutionTests.java | {
"start": 16655,
"end": 17632
} | class ____ {
public void doNothing(int i) {
assertThat(Thread.currentThread().getName()).isEqualTo(originalThreadName);
}
@Async
public void doSomething(int i) {
assertThat(Thread.currentThread().getName()).isNotEqualTo(originalThreadName);
}
@Async
@SuppressWarnings("deprecation")
public Future<String> returnSomething(int i) {
assertThat(Thread.currentThread().getName()).isNotEqualTo(originalThreadName);
if (i == 0) {
throw new IllegalArgumentException();
}
else if (i < 0) {
return AsyncResult.forExecutionException(new IOException());
}
return AsyncResult.forValue(Integer.toString(i));
}
@Async
public CompletableFuture<String> returnSomethingCompletable(int i) {
assertThat(Thread.currentThread().getName()).isNotEqualTo(originalThreadName);
if (i == 0) {
throw new IllegalArgumentException();
}
return CompletableFuture.completedFuture(Integer.toString(i));
}
}
public static | AsyncMethodBean |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/AwsBedrockAgentComponentBuilderFactory.java | {
"start": 1387,
"end": 1901
} | interface ____ {
/**
* AWS Bedrock Agent (camel-aws-bedrock)
* Operate on AWS Bedrock through its Agent.
*
* Category: ai,cloud
* Since: 4.5
* Maven coordinates: org.apache.camel:camel-aws-bedrock
*
* @return the dsl builder
*/
static AwsBedrockAgentComponentBuilder awsBedrockAgent() {
return new AwsBedrockAgentComponentBuilderImpl();
}
/**
* Builder for the AWS Bedrock Agent component.
*/
| AwsBedrockAgentComponentBuilderFactory |
java | lettuce-io__lettuce-core | src/test/java/io/lettuce/core/commands/StreamCommandIntegrationTests.java | {
"start": 2383,
"end": 50907
} | class ____ extends TestSupport {
private final RedisCommands<String, String> redis;
@Inject
protected StreamCommandIntegrationTests(RedisCommands<String, String> redis) {
this.redis = redis;
}
@BeforeEach
void setUp() {
this.redis.flushall();
}
@Test
void xadd() {
assertThat(redis.xadd(key, Collections.singletonMap("key", "value"))).endsWith("-0");
assertThat(redis.xadd(key, "foo", "bar")).isNotEmpty();
}
@Test
void xaddMaxLen() {
String id = redis.xadd(key, XAddArgs.Builder.maxlen(5), "foo", "bar");
for (int i = 0; i < 5; i++) {
redis.xadd(key, XAddArgs.Builder.maxlen(5), "foo", "bar");
}
List<StreamMessage<String, String>> messages = redis.xrange(key,
Range.from(Range.Boundary.including(id), Range.Boundary.unbounded()));
assertThat(messages).hasSize(5);
}
@Test
void xaddMaxLenEfficientTrimming() {
String id = redis.xadd(key, XAddArgs.Builder.maxlen(5).approximateTrimming(), "foo", "bar");
assertThat(id).isNotNull();
}
@Test
@EnabledOnCommand("XAUTOCLAIM") // Redis 6.2
public void xaddMinidLimit() {
redis.xadd(key, XAddArgs.Builder.minId("2").id("3"), "foo", "bar");
redis.xadd(key, XAddArgs.Builder.minId("2").id("4"), "foo", "bar");
assertThat(redis.xlen(key)).isEqualTo(2);
redis.xadd(key, XAddArgs.Builder.minId("4").id("5"), "foo", "bar");
assertThat(redis.xlen(key)).isEqualTo(2);
redis.del(key);
redis.configSet("stream-node-max-entries", "1");
for (int i = 0; i < 5; i++) {
redis.xadd(key, "foo", "bar");
}
redis.xadd(key, XAddArgs.Builder.maxlen(2).approximateTrimming().limit(5l), "foo", "bar");
assertThat(redis.xlen(key)).isEqualTo(2);
redis.configSet("stream-node-max-entries", "100");
}
@Test
@EnabledOnCommand("XAUTOCLAIM") // Redis 6.2
void xaddWithNomkstream() {
String id = redis.xadd(key, XAddArgs.Builder.nomkstream(), Collections.singletonMap("foo", "bar"));
assertThat(id).isNull();
assertThat(redis.exists(key)).isEqualTo(0L);
}
@Test
void xdel() {
List<String> ids = new ArrayList<>();
for (int i = 0; i < 2; i++) {
ids.add(redis.xadd(key, Collections.singletonMap("key", "value")));
}
Long deleted = redis.xdel(key, ids.get(0), "123456-0");
assertThat(deleted).isEqualTo(1);
List<StreamMessage<String, String>> messages = redis.xrange(key, Range.unbounded());
assertThat(messages).hasSize(1);
}
@Test
void xtrim() {
List<String> ids = new ArrayList<>();
for (int i = 0; i < 10; i++) {
ids.add(redis.xadd(key, Collections.singletonMap("key", "value")));
}
redis.xdel(key, ids.get(0), ids.get(2));
assertThat(redis.xlen(key)).isBetween(8L, 10L);
redis.xtrim(key, true, 8);
assertThat(redis.xlen(key)).isLessThanOrEqualTo(10);
redis.xtrim(key, XTrimArgs.Builder.maxlen(0).limit(0).approximateTrimming());
assertThat(redis.xlen(key)).isLessThanOrEqualTo(10);
}
@Test
@EnabledOnCommand("XAUTOCLAIM") // Redis 6.2
void xtrimMinidLimit() {
redis.xadd(key, XAddArgs.Builder.maxlen(3).id("3"), "foo", "bar");
redis.xtrim(key, XTrimArgs.Builder.minId("4"));
assertThat(redis.xlen(key)).isZero();
List<String> ids = new ArrayList<>();
for (int i = 0; i < 10; i++) {
ids.add(redis.xadd(key, Collections.singletonMap("key", "value")));
}
redis.xtrim(key, XTrimArgs.Builder.maxlen(8));
assertThat(redis.xlen(key)).isEqualTo(8);
}
@Test
void xrange() {
List<String> ids = new ArrayList<>();
for (int i = 0; i < 5; i++) {
Map<String, String> body = new HashMap<>();
body.put("key-1", "value-1-" + i);
body.put("key-2", "value-2-" + i);
ids.add(redis.xadd(key, body));
}
List<StreamMessage<String, String>> messages = redis.xrange(key, Range.unbounded());
assertThat(messages).hasSize(5);
StreamMessage<String, String> message = messages.get(0);
Map<String, String> expectedBody = new HashMap<>();
expectedBody.put("key-1", "value-1-0");
expectedBody.put("key-2", "value-2-0");
assertThat(message.getId()).contains("-");
assertThat(message.getStream()).isEqualTo(key);
assertThat(message.getBody()).isEqualTo(expectedBody);
assertThat(redis.xrange(key, Range.unbounded(), Limit.from(2))).hasSize(2);
List<StreamMessage<String, String>> range = redis.xrange(key, Range.create(ids.get(0), ids.get(1)));
assertThat(range).hasSize(2);
assertThat(range.get(0).getBody()).isEqualTo(expectedBody);
}
@Test
@EnabledOnCommand("XAUTOCLAIM") // Redis 6.2
void xrangeRanges() {
String id1 = redis.xadd(key, Collections.singletonMap("key", "value"));
String id2 = redis.xadd(key, Collections.singletonMap("key", "value"));
String id3 = redis.xadd(key, Collections.singletonMap("key", "value"));
assertThat(redis.xrange(key, Range.unbounded())).hasSize(3);
assertThat(redis.xrange(key, Range.from(Range.Boundary.including(id1), Range.Boundary.excluding(id3)))).hasSize(2);
assertThat(redis.xrange(key, Range.from(Range.Boundary.excluding(id1), Range.Boundary.excluding(id3)))).hasSize(1);
assertThat(redis.xrange(key, Range.from(Range.Boundary.excluding(id1), Range.Boundary.including(id3)))).hasSize(2);
}
@Test
void xrevrange() {
for (int i = 0; i < 5; i++) {
Map<String, String> body = new HashMap<>();
body.put("key-1", "value-1-" + i);
body.put("key-2", "value-2-" + i);
redis.xadd(key, body);
}
List<StreamMessage<String, String>> messages = redis.xrevrange(key, Range.unbounded());
assertThat(messages).hasSize(5);
StreamMessage<String, String> message = messages.get(0);
Map<String, String> expectedBody = new HashMap<>();
expectedBody.put("key-1", "value-1-4");
expectedBody.put("key-2", "value-2-4");
assertThat(message.getId()).contains("-");
assertThat(message.getStream()).isEqualTo(key);
assertThat(message.getBody()).isEqualTo(expectedBody);
}
@Test
void xreadSingleStream() {
redis.xadd("stream-1", Collections.singletonMap("key1", "value1"));
redis.xadd("stream-1", Collections.singletonMap("key2", "value2"));
List<StreamMessage<String, String>> messages = redis.xread(XReadArgs.Builder.count(2),
StreamOffset.from("stream-1", "0-0"));
assertThat(messages).hasSize(2);
StreamMessage<String, String> firstMessage = messages.get(0);
assertThat(firstMessage.getStream()).isEqualTo("stream-1");
assertThat(firstMessage.getBody()).hasSize(1).containsEntry("key1", "value1");
assertThat(firstMessage.getMillisElapsedFromDelivery()).isNull();
assertThat(firstMessage.getDeliveredCount()).isNull();
StreamMessage<String, String> nextMessage = messages.get(1);
assertThat(nextMessage.getStream()).isEqualTo("stream-1");
assertThat(nextMessage.getBody()).hasSize(1).containsEntry("key2", "value2");
}
@Test
void xreadMultipleStreams() {
Map<String, String> biggerBody = new LinkedHashMap<>();
biggerBody.put("key4", "value4");
biggerBody.put("key5", "value5");
String initial1 = redis.xadd("{s1}stream-1", Collections.singletonMap("key1", "value1"));
String initial2 = redis.xadd("{s1}stream-2", Collections.singletonMap("key2", "value2"));
String message1 = redis.xadd("{s1}stream-1", Collections.singletonMap("key3", "value3"));
String message2 = redis.xadd("{s1}stream-2", biggerBody);
List<StreamMessage<String, String>> messages = redis.xread(StreamOffset.from("{s1}stream-1", "0-0"),
StreamOffset.from("{s1}stream-2", "0-0"));
assertThat(messages).hasSize(4);
StreamMessage<String, String> firstMessage = messages.get(0);
assertThat(firstMessage.getId()).isEqualTo(initial1);
assertThat(firstMessage.getStream()).isEqualTo("{s1}stream-1");
assertThat(firstMessage.getBody()).hasSize(1).containsEntry("key1", "value1");
assertThat(firstMessage.getMillisElapsedFromDelivery()).isNull();
assertThat(firstMessage.getDeliveredCount()).isNull();
StreamMessage<String, String> secondMessage = messages.get(3);
assertThat(secondMessage.getId()).isEqualTo(message2);
assertThat(secondMessage.getStream()).isEqualTo("{s1}stream-2");
assertThat(secondMessage.getBody()).hasSize(2).containsEntry("key4", "value4");
assertThat(secondMessage.getMillisElapsedFromDelivery()).isNull();
assertThat(secondMessage.getDeliveredCount()).isNull();
}
@Test
public void xreadTransactional() {
String initial1 = redis.xadd("stream-1", Collections.singletonMap("key1", "value1"));
String initial2 = redis.xadd("stream-2", Collections.singletonMap("key2", "value2"));
redis.multi();
redis.xadd("stream-1", Collections.singletonMap("key3", "value3"));
redis.xadd("stream-2", Collections.singletonMap("key4", "value4"));
redis.xread(StreamOffset.from("stream-1", initial1), StreamOffset.from("stream-2", initial2));
TransactionResult exec = redis.exec();
String message1 = exec.get(0);
String message2 = exec.get(1);
List<StreamMessage<String, String>> messages = exec.get(2);
StreamMessage<String, String> firstMessage = messages.get(0);
assertThat(firstMessage.getId()).isEqualTo(message1);
assertThat(firstMessage.getStream()).isEqualTo("stream-1");
assertThat(firstMessage.getBody()).containsEntry("key3", "value3");
assertThat(firstMessage.getMillisElapsedFromDelivery()).isNull();
assertThat(firstMessage.getDeliveredCount()).isNull();
StreamMessage<String, String> secondMessage = messages.get(1);
assertThat(secondMessage.getId()).isEqualTo(message2);
assertThat(secondMessage.getStream()).isEqualTo("stream-2");
assertThat(secondMessage.getBody()).containsEntry("key4", "value4");
assertThat(secondMessage.getMillisElapsedFromDelivery()).isNull();
assertThat(secondMessage.getDeliveredCount()).isNull();
}
@Test
public void xreadLastVsLatest() {
// Redis 7.4 - you can use the + sign as a special ID to read the last message in the stream.
assumeTrue(RedisConditions.of(redis).hasVersionGreaterOrEqualsTo("7.4"));
redis.xadd("stream-1", Collections.singletonMap("key1", "value1"));
redis.xadd("stream-1", Collections.singletonMap("key2", "value2"));
List<StreamMessage<String, String>> lastMessages = redis.xread(StreamOffset.last("stream-1"));
List<StreamMessage<String, String>> latestMessages = redis.xread(StreamOffset.latest("stream-1"));
assertThat(lastMessages).hasSize(1);
StreamMessage<String, String> lastMessage = lastMessages.get(0);
assertThat(lastMessage.getStream()).isEqualTo("stream-1");
assertThat(lastMessage.getBody()).hasSize(1).containsEntry("key2", "value2");
assertThat(lastMessage.getMillisElapsedFromDelivery()).isNull();
assertThat(lastMessage.getDeliveredCount()).isNull();
assertThat(latestMessages).isEmpty();
}
@Test
void xinfoStream() {
redis.xadd(key, Collections.singletonMap("key1", "value1"));
List<Object> objects = redis.xinfoStream(key);
assertThat(objects).containsSequence("length", 1L);
}
@Test
void xinfoGroups() {
assertThat(redis.xgroupCreate(StreamOffset.latest(key), "group", XGroupCreateArgs.Builder.mkstream())).isEqualTo("OK");
List<Object> objects = redis.xinfoGroups(key);
assertThat((List<Object>) objects.get(0)).containsSequence("name", "group");
}
@Test
void xinfoConsumers() {
assertThat(redis.xgroupCreate(StreamOffset.from(key, "0-0"), "group", XGroupCreateArgs.Builder.mkstream()))
.isEqualTo("OK");
redis.xadd(key, Collections.singletonMap("key1", "value1"));
redis.xreadgroup(Consumer.from("group", "consumer1"), StreamOffset.lastConsumed(key));
List<Object> objects = redis.xinfoConsumers(key, "group");
assertThat((List<Object>) objects.get(0)).containsSequence("name", "consumer1");
}
@Test
void xgroupCreate() {
assertThat(redis.xgroupCreate(StreamOffset.latest(key), "group", XGroupCreateArgs.Builder.mkstream())).isEqualTo("OK");
List<Object> groups = redis.dispatch(XINFO, new NestedMultiOutput<>(StringCodec.UTF8),
new CommandArgs<>(StringCodec.UTF8).add("GROUPS").add(key));
assertThat(groups).isNotEmpty();
assertThat(redis.type(key)).isEqualTo("stream");
}
@Test
@EnabledOnCommand("EVAL_RO") // Redis 7.0
void xgroupCreateEntriesRead_pre822() {
assumeTrue(RedisConditions.of(redis).getRedisVersion().isLessThan(RedisConditions.Version.parse("8.2.2")),
"Redis 8.2.2+ has different behavior for entries-read");
redis.xgroupCreate(StreamOffset.latest(key), "group", XGroupCreateArgs.Builder.entriesRead(5).mkstream(true));
List<List<Object>> group = (List) redis.xinfoGroups("key");
assertThat(group.get(0)).containsSequence("entries-read", 5L, "lag");
}
@Test
@EnabledOnCommand("EVAL_RO") // Redis 7.0
void xgroupCreateEntriesRead_post822() {
assumeTrue(RedisConditions.of(redis).hasVersionGreaterOrEqualsTo("8.2.2"),
"Redis 8.2.2+ has different behavior for entries-read");
redis.xadd(key, Collections.singletonMap("key", "value"));
redis.xadd(key, Collections.singletonMap("key", "value"));
redis.xgroupCreate(StreamOffset.latest(key), "group", XGroupCreateArgs.Builder.entriesRead(5).mkstream(true));
List<List<Object>> group = (List) redis.xinfoGroups("key");
assertThat(group.get(0)).containsSequence("entries-read", 2L, "lag");
}
@Test
@EnabledOnCommand("XAUTOCLAIM") // Redis 6.2
void xgroupCreateconsumer() {
redis.xgroupCreate(StreamOffset.latest(key), "group", XGroupCreateArgs.Builder.mkstream());
redis.xadd(key, Collections.singletonMap("key", "value"));
assertThat(redis.xgroupCreateconsumer(key, Consumer.from("group", "consumer1"))).isTrue();
assertThat(redis.xgroupCreateconsumer(key, Consumer.from("group", "consumer1"))).isFalse();
}
@Test
void xreadgroup() {
redis.xadd(key, Collections.singletonMap("key", "value"));
redis.xgroupCreate(StreamOffset.latest(key), "group");
redis.xadd(key, Collections.singletonMap("key", "value"));
List<StreamMessage<String, String>> read1 = redis.xreadgroup(Consumer.from("group", "consumer1"),
StreamOffset.lastConsumed(key));
assertThat(read1).hasSize(1);
}
@Test
void xreadgroupDeletedMessage() {
redis.xgroupCreate(StreamOffset.latest(key), "del-group", XGroupCreateArgs.Builder.mkstream());
redis.xadd(key, Collections.singletonMap("key", "value1"));
redis.xreadgroup(Consumer.from("del-group", "consumer1"), StreamOffset.lastConsumed(key));
redis.xadd(key, XAddArgs.Builder.maxlen(1), Collections.singletonMap("key", "value2"));
List<StreamMessage<String, String>> messages = redis.xreadgroup(Consumer.from("del-group", "consumer1"),
StreamOffset.from(key, "0-0"));
assertThat(messages).hasSize(1);
assertThat(messages.get(0).getBody()).isEmpty();
}
@Test
void xreadgroupTrimmedMessage() {
for (int i = 0; i < 10; i++) {
redis.xadd(key, Collections.singletonMap("key", "value1"));
}
redis.xgroupCreate(StreamOffset.from(key, "0-0"), "del-group", XGroupCreateArgs.Builder.mkstream());
redis.xreadgroup(Consumer.from("del-group", "consumer1"), XReadArgs.Builder.count(10), StreamOffset.lastConsumed(key));
redis.xtrim(key, 1);
List<StreamMessage<String, String>> messages = redis.xreadgroup(Consumer.from("del-group", "consumer1"),
XReadArgs.Builder.count(10), StreamOffset.from(key, "0-0"));
assertThat(messages).hasSize(10);
}
@Test
void xpendingWithoutRead() {
redis.xgroupCreate(StreamOffset.latest(key), "group", XGroupCreateArgs.Builder.mkstream());
PendingMessages pendingEntries = redis.xpending(key, "group");
assertThat(pendingEntries.getCount()).isEqualTo(0);
assertThat(pendingEntries.getConsumerMessageCount()).isEmpty();
}
@Test
void xpendingWithGroup() {
redis.xgroupCreate(StreamOffset.latest(key), "group", XGroupCreateArgs.Builder.mkstream());
String id = redis.xadd(key, Collections.singletonMap("key", "value"));
redis.xreadgroup(Consumer.from("group", "consumer1"), StreamOffset.lastConsumed(key));
PendingMessages pendingEntries = redis.xpending(key, "group");
assertThat(pendingEntries.getCount()).isEqualTo(1);
assertThat(pendingEntries.getMessageIds()).isEqualTo(Range.create(id, id));
}
@Test
void xpending() {
redis.xgroupCreate(StreamOffset.latest(key), "group", XGroupCreateArgs.Builder.mkstream());
String id = redis.xadd(key, Collections.singletonMap("key", "value"));
redis.xreadgroup(Consumer.from("group", "consumer1"), StreamOffset.lastConsumed(key));
List<PendingMessage> pendingEntries = redis.xpending(key, "group", Range.unbounded(), Limit.from(10));
PendingMessage message = pendingEntries.get(0);
assertThat(message.getId()).isEqualTo(id);
assertThat(message.getConsumer()).isEqualTo("consumer1");
assertThat(message.getRedeliveryCount()).isEqualTo(1);
}
@Test
@EnabledOnCommand("XAUTOCLAIM") // Redis 6.2
void xpendingRanges() {
redis.xgroupCreate(StreamOffset.latest(key), "group", XGroupCreateArgs.Builder.mkstream());
String id1 = redis.xadd(key, Collections.singletonMap("key", "value"));
String id2 = redis.xadd(key, Collections.singletonMap("key", "value"));
redis.xreadgroup(Consumer.from("group", "consumer1"), StreamOffset.lastConsumed(key));
assertThat(redis.xpending(key, "group", Range.unbounded(), Limit.from(10))).hasSize(2);
assertThat(redis.xpending(key, "group", Range.from(Range.Boundary.including(id1), Range.Boundary.excluding(id2)),
Limit.from(10))).hasSize(1);
assertThat(redis.xpending(key, "group", Range.from(Range.Boundary.including(id1), Range.Boundary.including(id2)),
Limit.from(10))).hasSize(2);
}
@Test
@EnabledOnCommand("XAUTOCLAIM") // Redis 6.2
void xpendingWithArgs() {
redis.xgroupCreate(StreamOffset.latest(key), "group", XGroupCreateArgs.Builder.mkstream());
String id = redis.xadd(key, Collections.singletonMap("key", "value"));
redis.xreadgroup(Consumer.from("group", "consumer1"), StreamOffset.lastConsumed(key));
List<PendingMessage> pendingEntries = redis.xpending(key,
XPendingArgs.Builder.xpending(Consumer.from("group", "consumer1"), Range.unbounded(), Limit.from(10)));
PendingMessage message = pendingEntries.get(0);
assertThat(message.getId()).isEqualTo(id);
assertThat(message.getConsumer()).isEqualTo("consumer1");
assertThat(message.getRedeliveryCount()).isEqualTo(1);
pendingEntries = redis.xpending(key,
XPendingArgs.Builder.xpending("group", Range.unbounded(), Limit.from(10)).idle(Duration.ofMinutes(1)));
assertThat(pendingEntries).isEmpty();
pendingEntries = redis.xpending(key,
XPendingArgs.Builder.xpending("group", Range.unbounded(), Limit.from(10)).idle(Duration.ZERO));
assertThat(pendingEntries).hasSize(1);
message = pendingEntries.get(0);
assertThat(message.getId()).isEqualTo(id);
assertThat(message.getConsumer()).isEqualTo("consumer1");
assertThat(message.getRedeliveryCount()).isEqualTo(1);
}
@Test
@EnabledOnCommand("XAUTOCLAIM") // Redis 6.2
void xpendingWithIdle() {
redis.xgroupCreate(StreamOffset.latest(key), "group", XGroupCreateArgs.Builder.mkstream());
String id = redis.xadd(key, Collections.singletonMap("key", "value"));
redis.xpending(key,
XPendingArgs.Builder.xpending(Consumer.from("group", "consumer1"), Range.unbounded(), Limit.unlimited())
.idle(Duration.ofMinutes(1)));
}
@Test
void xpendingWithoutMessages() {
redis.xgroupCreate(StreamOffset.latest(key), "group", XGroupCreateArgs.Builder.mkstream());
List<PendingMessage> pendingEntries = redis.xpending(key, "group", Range.unbounded(), Limit.from(10));
assertThat(pendingEntries).isEmpty();
}
@Test
void xpendingGroup() {
redis.xgroupCreate(StreamOffset.latest(key), "group", XGroupCreateArgs.Builder.mkstream());
String id = redis.xadd(key, Collections.singletonMap("key", "value"));
redis.xreadgroup(Consumer.from("group", "consumer1"), StreamOffset.lastConsumed(key));
PendingMessages pendingMessages = redis.xpending(key, "group");
assertThat(pendingMessages.getCount()).isEqualTo(1);
assertThat(pendingMessages.getMessageIds()).isEqualTo(Range.create(id, id));
assertThat(pendingMessages.getConsumerMessageCount()).containsEntry("consumer1", 1L);
assertThat(pendingMessages.getCount()).isEqualTo(1);
}
@Test
void xpendingExtended() {
redis.xgroupCreate(StreamOffset.latest(key), "group", XGroupCreateArgs.Builder.mkstream());
String id = redis.xadd(key, Collections.singletonMap("key", "value"));
redis.xreadgroup(Consumer.from("group", "consumer1"), StreamOffset.lastConsumed(key));
List<PendingMessage> pendingMessages = redis.xpending(key, "group", Range.unbounded(), Limit.unlimited());
assertThat(pendingMessages).hasSize(1);
PendingMessage message = pendingMessages.get(0);
assertThat(message.getId()).isEqualTo(id);
assertThat(message.getConsumer()).isEqualTo("consumer1");
assertThat(message.getRedeliveryCount()).isEqualTo(1);
}
@Test
void xack() {
redis.xgroupCreate(StreamOffset.latest(key), "group", XGroupCreateArgs.Builder.mkstream());
redis.xadd(key, Collections.singletonMap("key", "value"));
List<StreamMessage<String, String>> messages = redis.xreadgroup(Consumer.from("group", "consumer1"),
StreamOffset.lastConsumed(key));
Long ackd = redis.xack(key, "group", messages.get(0).getId());
assertThat(ackd).isEqualTo(1);
List<PendingMessage> pendingEntries = redis.xpending(key, "group", Range.unbounded(), Limit.from(10));
assertThat(pendingEntries).isEmpty();
}
@Test
@EnabledOnCommand("XAUTOCLAIM") // Redis 6.2
void xautoclaim() {
redis.xgroupCreate(StreamOffset.latest(key), "group", XGroupCreateArgs.Builder.mkstream());
String id1 = redis.xadd(key, Collections.singletonMap("key1", "value1"));
redis.xadd(key, Collections.singletonMap("key2", "value2"));
List<StreamMessage<String, String>> messages = redis.xreadgroup(Consumer.from("group", "consumer1"),
StreamOffset.lastConsumed(key));
ClaimedMessages<String, String> claimedMessages = redis.xautoclaim(key,
XAutoClaimArgs.Builder.xautoclaim(Consumer.from("group", "consumer2"), Duration.ZERO, id1).count(20));
assertThat(claimedMessages.getId()).isNotNull();
assertThat(claimedMessages.getMessages()).hasSize(2).contains(messages.get(0));
}
@Test
@EnabledOnCommand("XAUTOCLAIM") // Redis 6.2
void xautoclaimJustId() {
redis.xgroupCreate(StreamOffset.latest(key), "group", XGroupCreateArgs.Builder.mkstream());
String id1 = redis.xadd(key, Collections.singletonMap("key1", "value1"));
redis.xadd(key, Collections.singletonMap("key2", "value2"));
redis.xreadgroup(Consumer.from("group", "consumer1"), StreamOffset.lastConsumed(key));
ClaimedMessages<String, String> claimedMessages = redis.xautoclaim(key,
XAutoClaimArgs.Builder.xautoclaim(Consumer.from("group", "consumer2"), Duration.ZERO, id1).justid().count(20));
assertThat(claimedMessages.getId()).isNotNull();
assertThat(claimedMessages.getMessages()).hasSize(2);
StreamMessage<String, String> message = claimedMessages.getMessages().get(0);
assertThat(message.getBody()).isNull();
assertThat(message.getStream()).isEqualTo("key");
assertThat(message.getId()).isEqualTo(id1);
}
@Test
void xclaim() {
redis.xgroupCreate(StreamOffset.latest(key), "group", XGroupCreateArgs.Builder.mkstream());
redis.xadd(key, Collections.singletonMap("key", "value"));
List<StreamMessage<String, String>> messages = redis.xreadgroup(Consumer.from("group", "consumer1"),
StreamOffset.lastConsumed(key));
List<StreamMessage<String, String>> claimedMessages = redis.xclaim(key, Consumer.from("group", "consumer2"), 0,
messages.get(0).getId());
assertThat(claimedMessages).hasSize(1).contains(messages.get(0));
assertThat(redis.xpending(key, Consumer.from("group", "consumer1"), Range.unbounded(), Limit.from(10))).isEmpty();
assertThat(redis.xpending(key, Consumer.from("group", "consumer2"), Range.unbounded(), Limit.from(10))).hasSize(1);
}
@Test
void xclaimWithArgs() {
String id1 = redis.xadd(key, Collections.singletonMap("key", "value"));
redis.xgroupCreate(StreamOffset.latest(key), "group");
String id2 = redis.xadd(key, Collections.singletonMap("key", "value"));
List<StreamMessage<String, String>> messages = redis.xreadgroup(Consumer.from("group", "consumer1"),
StreamOffset.lastConsumed(key));
List<StreamMessage<String, String>> claimedMessages = redis.xclaim(key, Consumer.from("group", "consumer2"),
XClaimArgs.Builder.minIdleTime(0).time(Instant.now().minusSeconds(60)), id1, id2);
assertThat(claimedMessages).hasSize(1).contains(messages.get(0));
List<PendingMessage> pendingMessages = redis.xpending(key, Consumer.from("group", "consumer2"), Range.unbounded(),
Limit.from(10));
PendingMessage message = pendingMessages.get(0);
assertThat(message.getMsSinceLastDelivery()).isBetween(50000L, 80000L);
}
@Test
void xclaimJustId() {
String id1 = redis.xadd(key, Collections.singletonMap("key", "value"));
redis.xgroupCreate(StreamOffset.latest(key), "group");
String id2 = redis.xadd(key, Collections.singletonMap("key", "value"));
String id3 = redis.xadd(key, Collections.singletonMap("key", "value"));
redis.xreadgroup(Consumer.from("group", "consumer1"), StreamOffset.lastConsumed(key));
List<StreamMessage<String, String>> claimedMessages = redis.xclaim(key, Consumer.from("group", "consumer2"),
XClaimArgs.Builder.justid(), id1, id2, id3);
assertThat(claimedMessages).hasSize(2);
StreamMessage<String, String> message = claimedMessages.get(0);
assertThat(message.getBody()).isNull();
assertThat(message.getStream()).isEqualTo("key");
assertThat(message.getId()).isEqualTo(id2);
}
@Test
void xgroupDestroy() {
redis.xgroupCreate(StreamOffset.latest(key), "group", XGroupCreateArgs.Builder.mkstream());
assertThat(redis.xgroupDestroy(key, "group")).isTrue();
assertThat(redis.xgroupDestroy(key, "group")).isFalse();
}
@Test
void xgroupDelconsumer() {
redis.xgroupCreate(StreamOffset.latest(key), "group", XGroupCreateArgs.Builder.mkstream());
redis.xadd(key, Collections.singletonMap("key", "value"));
redis.xreadgroup(Consumer.from("group", "consumer1"), StreamOffset.lastConsumed(key));
assertThat(redis.xgroupDelconsumer(key, Consumer.from("group", "consumer1"))).isOne();
assertThat(redis.xgroupDelconsumer(key, Consumer.from("group", "consumer1"))).isZero();
}
@Test
void xgroupSetid() {
redis.xgroupCreate(StreamOffset.latest(key), "group", XGroupCreateArgs.Builder.mkstream());
assertThat(redis.xgroupSetid(StreamOffset.latest(key), "group")).isEqualTo("OK");
}
// Redis 8.2 Stream Commands Tests
@Test
@EnabledOnCommand("XDELEX") // Redis 8.2
void xdelex() {
// Add some entries to the stream
String id1 = redis.xadd(key, Collections.singletonMap("field1", "value1"));
String id2 = redis.xadd(key, Collections.singletonMap("field2", "value2"));
String nonExistentId = "999999-0";
// Verify initial state
assertThat(redis.xlen(key)).isEqualTo(2L);
// Test XDELEX
List<StreamEntryDeletionResult> results = redis.xdelex(key, id1, id2, nonExistentId);
assertThat(results).hasSize(3);
assertThat(results.get(0)).isEqualTo(StreamEntryDeletionResult.DELETED);
assertThat(results.get(1)).isEqualTo(StreamEntryDeletionResult.DELETED);
assertThat(results.get(2)).isEqualTo(StreamEntryDeletionResult.NOT_FOUND);
// Verify entries were deleted
assertThat(redis.xlen(key)).isEqualTo(0L);
}
@Test
@EnabledOnCommand("XDELEX") // Redis 8.2
void xdelexWithPolicy() {
// Add some entries to the stream
String id1 = redis.xadd(key, Collections.singletonMap("field1", "value1"));
String id2 = redis.xadd(key, Collections.singletonMap("field2", "value2"));
// Verify initial state
assertThat(redis.xlen(key)).isEqualTo(2L);
// Test XDELEX with KEEP_REFERENCES policy
List<StreamEntryDeletionResult> results = redis.xdelex(key, StreamDeletionPolicy.KEEP_REFERENCES, id1, id2);
assertThat(results).hasSize(2);
assertThat(results.get(0)).isEqualTo(StreamEntryDeletionResult.DELETED);
assertThat(results.get(1)).isEqualTo(StreamEntryDeletionResult.DELETED);
// Verify entries were deleted
assertThat(redis.xlen(key)).isEqualTo(0L);
}
@Test
@EnabledOnCommand("XACKDEL") // Redis 8.2
void xackdel() {
// Set up stream with consumer group
String groupName = "test-group";
String consumerName = "test-consumer";
// Add entries to the stream
String id1 = redis.xadd(key, Collections.singletonMap("field1", "value1"));
String id2 = redis.xadd(key, Collections.singletonMap("field2", "value2"));
// Verify initial state
assertThat(redis.xlen(key)).isEqualTo(2L);
// Create consumer group
redis.xgroupCreate(StreamOffset.from(key, "0-0"), groupName, XGroupCreateArgs.Builder.mkstream());
// Read messages to create pending entries
List<StreamMessage<String, String>> messages = redis.xreadgroup(Consumer.from(groupName, consumerName),
StreamOffset.lastConsumed(key));
assertThat(messages).hasSize(2);
// Test XACKDEL
List<StreamEntryDeletionResult> results = redis.xackdel(key, groupName, id1, id2);
assertThat(results).hasSize(2);
assertThat(results.get(0)).isEqualTo(StreamEntryDeletionResult.DELETED);
assertThat(results.get(1)).isEqualTo(StreamEntryDeletionResult.DELETED);
// Verify no pending messages remain
List<PendingMessage> pending = redis.xpending(key, groupName, Range.unbounded(), io.lettuce.core.Limit.from(10));
assertThat(pending).isEmpty();
}
@Test
@EnabledOnCommand("XACKDEL") // Redis 8.2
void xackdelWithPolicy() {
// Set up stream with consumer group
String groupName = "test-group";
String consumerName = "test-consumer";
// Add entries to the stream
String id1 = redis.xadd(key, Collections.singletonMap("field1", "value1"));
// Verify initial state
assertThat(redis.xlen(key)).isEqualTo(1L);
// Create consumer group
redis.xgroupCreate(StreamOffset.from(key, "0-0"), groupName, XGroupCreateArgs.Builder.mkstream());
// Read message to create pending entry
redis.xreadgroup(Consumer.from(groupName, consumerName), StreamOffset.lastConsumed(key));
// Test XACKDEL with DELETE_REFERENCES policy
List<StreamEntryDeletionResult> results = redis.xackdel(key, groupName, StreamDeletionPolicy.DELETE_REFERENCES, id1);
assertThat(results).hasSize(1);
assertThat(results.get(0)).isEqualTo(StreamEntryDeletionResult.DELETED);
}
@Test
@EnabledOnCommand("XACKDEL") // Redis 8.2
void xackdelNotFound() {
String groupName = "test-group";
String nonExistentId = "999999-0";
// Create consumer group on empty stream
redis.xgroupCreate(StreamOffset.from(key, "0-0"), groupName, XGroupCreateArgs.Builder.mkstream());
// Test XACKDEL with non-existent ID
List<StreamEntryDeletionResult> results = redis.xackdel(key, groupName, nonExistentId);
assertThat(results).hasSize(1);
assertThat(results.get(0)).isEqualTo(StreamEntryDeletionResult.NOT_FOUND);
}
@Test
@EnabledOnCommand("XDELEX") // Redis 8.2
void xdelexEmptyStream() {
String nonExistentId = "999999-0";
// Test XDELEX on empty stream
List<StreamEntryDeletionResult> results = redis.xdelex(key, nonExistentId);
assertThat(results).hasSize(1);
assertThat(results.get(0)).isEqualTo(StreamEntryDeletionResult.NOT_FOUND);
}
@Test
@EnabledOnCommand("XDELEX") // Redis 8.2
void xdelexWithDelrefPolicy() {
// Add entries to the stream
String id1 = redis.xadd(key, Collections.singletonMap("field1", "value1"));
String id2 = redis.xadd(key, Collections.singletonMap("field2", "value2"));
// Verify initial state
assertThat(redis.xlen(key)).isEqualTo(2L);
// Test XDELEX with DELETE_REFERENCES policy
List<StreamEntryDeletionResult> results = redis.xdelex(key, StreamDeletionPolicy.DELETE_REFERENCES, id1, id2);
assertThat(results).hasSize(2);
assertThat(results.get(0)).isEqualTo(StreamEntryDeletionResult.DELETED);
assertThat(results.get(1)).isEqualTo(StreamEntryDeletionResult.DELETED);
// Verify entries were deleted
assertThat(redis.xlen(key)).isEqualTo(0L);
}
@Test
@EnabledOnCommand("XACKDEL") // Redis 8.2
void xackdelWithAckedPolicy() {
// Set up stream with consumer group
String groupName = "test-group";
String consumerName = "test-consumer";
// Add entries to the stream
String id1 = redis.xadd(key, Collections.singletonMap("field1", "value1"));
// Verify initial state
assertThat(redis.xlen(key)).isEqualTo(1L);
// Create consumer group
redis.xgroupCreate(StreamOffset.from(key, "0-0"), groupName, XGroupCreateArgs.Builder.mkstream());
// Read message to create pending entry
redis.xreadgroup(Consumer.from(groupName, consumerName), StreamOffset.lastConsumed(key));
// Test XACKDEL with ACKNOWLEDGED policy on pending entry
// The ACKNOWLEDGED policy behavior: it deletes the entry from the stream and acknowledges it
List<StreamEntryDeletionResult> results = redis.xackdel(key, groupName, StreamDeletionPolicy.ACKNOWLEDGED, id1);
assertThat(results).hasSize(1);
assertThat(results.get(0)).isEqualTo(StreamEntryDeletionResult.DELETED);
}
@Test
@EnabledOnCommand("XDELEX") // Redis 8.2
void xaddWithTrimmingMode() {
// Add initial entries to the stream
redis.xadd(key, Collections.singletonMap("field1", "value1"));
redis.xadd(key, Collections.singletonMap("field2", "value2"));
redis.xadd(key, Collections.singletonMap("field3", "value3"));
redis.xadd(key, Collections.singletonMap("field4", "value4"));
redis.xadd(key, Collections.singletonMap("field5", "value5"));
// Verify initial state
assertThat(redis.xlen(key)).isEqualTo(5L);
// Create consumer group and read messages to create PEL entries
redis.xgroupCreate(StreamOffset.from(key, "0-0"), "test-group", XGroupCreateArgs.Builder.mkstream());
List<StreamMessage<String, String>> messages = redis.xreadgroup(Consumer.from("test-group", "test-consumer"),
XReadArgs.Builder.count(3), StreamOffset.lastConsumed(key));
assertThat(messages).hasSize(3);
// Add new entry with maxLen=3 and KEEP_REFERENCES mode - should preserve PEL references
String newId = redis.xadd(key, XAddArgs.Builder.maxlen(3).trimmingMode(StreamDeletionPolicy.KEEP_REFERENCES),
Collections.singletonMap("field6", "value6"));
assertThat(newId).isNotNull();
// Stream should be trimmed to 3 entries
assertThat(redis.xlen(key)).isEqualTo(3L);
// PEL should still contain references to read messages
PendingMessages pending = redis.xpending(key, "test-group");
assertThat(pending.getCount()).isEqualTo(3L);
}
@Test
@EnabledOnCommand("XDELEX") // Redis 8.2
void xaddWithTrimmingModeDelref() {
// Add initial entries to the stream
redis.xadd(key, Collections.singletonMap("field1", "value1"));
redis.xadd(key, Collections.singletonMap("field2", "value2"));
redis.xadd(key, Collections.singletonMap("field3", "value3"));
redis.xadd(key, Collections.singletonMap("field4", "value4"));
redis.xadd(key, Collections.singletonMap("field5", "value5"));
// Create consumer group and read messages
redis.xgroupCreate(StreamOffset.from(key, "0-0"), "test-group", XGroupCreateArgs.Builder.mkstream());
List<StreamMessage<String, String>> messages = redis.xreadgroup(Consumer.from("test-group", "test-consumer"),
XReadArgs.Builder.count(3), StreamOffset.lastConsumed(key));
assertThat(messages).hasSize(3);
// Add new entry with maxLen=3 and DELETE_REFERENCES mode - should remove PEL references
String newId = redis.xadd(key, XAddArgs.Builder.maxlen(3).trimmingMode(StreamDeletionPolicy.DELETE_REFERENCES),
Collections.singletonMap("field6", "value6"));
assertThat(newId).isNotNull();
// Stream should be trimmed to 3 entries
assertThat(redis.xlen(key)).isEqualTo(3L);
// PEL should have fewer references due to DELREF policy
PendingMessages pending = redis.xpending(key, "test-group");
assertThat(pending.getCount()).isLessThan(3L);
}
@Test
@EnabledOnCommand("XDELEX") // Redis 8.2
void xtrimWithTrimmingMode() {
// Add initial entries to the stream
redis.xadd(key, Collections.singletonMap("field1", "value1"));
redis.xadd(key, Collections.singletonMap("field2", "value2"));
redis.xadd(key, Collections.singletonMap("field3", "value3"));
redis.xadd(key, Collections.singletonMap("field4", "value4"));
redis.xadd(key, Collections.singletonMap("field5", "value5"));
// Create consumer group and read messages
redis.xgroupCreate(StreamOffset.from(key, "0-0"), "test-group", XGroupCreateArgs.Builder.mkstream());
List<StreamMessage<String, String>> messages = redis.xreadgroup(Consumer.from("test-group", "test-consumer"),
XReadArgs.Builder.count(3), StreamOffset.lastConsumed(key));
assertThat(messages).hasSize(3);
// Trim with KEEP_REFERENCES mode
Long trimmed = redis.xtrim(key, XTrimArgs.Builder.maxlen(3).trimmingMode(StreamDeletionPolicy.KEEP_REFERENCES));
assertThat(trimmed).isEqualTo(2L);
// Stream should be trimmed to 3 entries
assertThat(redis.xlen(key)).isEqualTo(3L);
// PEL should still contain references
PendingMessages pending = redis.xpending(key, "test-group");
assertThat(pending.getCount()).isEqualTo(3L);
}
@Test
@EnabledOnCommand("XDELEX") // Redis 8.2
void xtrimWithTrimmingModeDelref() {
// Add initial entries to the stream
redis.xadd(key, Collections.singletonMap("field1", "value1"));
redis.xadd(key, Collections.singletonMap("field2", "value2"));
redis.xadd(key, Collections.singletonMap("field3", "value3"));
redis.xadd(key, Collections.singletonMap("field4", "value4"));
redis.xadd(key, Collections.singletonMap("field5", "value5"));
// Create consumer group and read messages
redis.xgroupCreate(StreamOffset.from(key, "0-0"), "test-group", XGroupCreateArgs.Builder.mkstream());
List<StreamMessage<String, String>> messages = redis.xreadgroup(Consumer.from("test-group", "test-consumer"),
XReadArgs.Builder.count(3), StreamOffset.lastConsumed(key));
assertThat(messages).hasSize(3);
// Trim with DELETE_REFERENCES mode
Long trimmed = redis.xtrim(key, XTrimArgs.Builder.maxlen(3).trimmingMode(StreamDeletionPolicy.DELETE_REFERENCES));
assertThat(trimmed).isEqualTo(2L);
// Stream should be trimmed to 3 entries
assertThat(redis.xlen(key)).isEqualTo(3L);
// PEL should have fewer references due to DELREF policy
PendingMessages pending = redis.xpending(key, "test-group");
assertThat(pending.getCount()).isLessThan(3L);
}
// XREADGORUP CLAIM Tests - 8.4 OSS
// since: 7.1
private static final String KEY = "it:stream:claim:move:" + UUID.randomUUID();
private static final String GROUP = "g";
private static final String C1 = "c1";
private static final String C2 = "c2";
private static final Map<String, String> BODY = new HashMap<String, String>() {
{
put("f", "v");
}
};
private static final long IDLE_TIME_MS = 5;
private void beforeEachClaimTest() throws InterruptedException {
assumeTrue(RedisConditions.of(redis).hasVersionGreaterOrEqualsTo("8.4"), "Redis 8.4+ required for XREADGROUP CLAIM");
// Produce two entries
redis.xadd(KEY, BODY);
redis.xadd(KEY, BODY);
// Create group and consume with c1 so entries become pending for c1
redis.xgroupCreate(XReadArgs.StreamOffset.from(KEY, "0-0"), GROUP);
redis.xreadgroup(Consumer.from(GROUP, C1), XReadArgs.Builder.count(10), XReadArgs.StreamOffset.lastConsumed(KEY));
// Ensure idle time so entries are claimable
Thread.sleep(IDLE_TIME_MS);
}
@Test
void xreadgroupClaim_returnsMetadataOrdered() throws Exception {
beforeEachClaimTest();
// Produce fresh entries that are NOT claimed (not pending)
redis.xadd(KEY, BODY);
redis.xadd(KEY, BODY);
List<StreamMessage<String, String>> consumer2 = redis.xreadgroup(Consumer.from(GROUP, C2),
XReadArgs.Builder.claim(Duration.ofMillis(IDLE_TIME_MS)).count(10), XReadArgs.StreamOffset.lastConsumed(KEY));
long claimedCount = consumer2.stream().filter(StreamMessage::isClaimed).count();
long freshCount = consumer2.size() - claimedCount;
StreamMessage<String, String> first = consumer2.get(0);
StreamMessage<String, String> second = consumer2.get(1);
StreamMessage<String, String> third = consumer2.get(2);
StreamMessage<String, String> fourth = consumer2.get(3);
// Assertions
assertThat(consumer2).isNotNull();
assertThat(consumer2).isNotEmpty();
assertThat(claimedCount).isEqualTo(2);
assertThat(freshCount).isEqualTo(2);
// Assert order: pending entries are first
assertThat(first.isClaimed()).isTrue();
assertThat(second.isClaimed()).isTrue();
assertThat(third.isClaimed()).isFalse();
assertThat(fourth.isClaimed()).isFalse();
// Assert claimed message structure
assertThat(first.getMillisElapsedFromDelivery()).isGreaterThanOrEqualTo(5);
assertThat(first.getDeliveredCount()).isGreaterThanOrEqualTo(1);
assertThat(first.getBody()).containsEntry("f", "v");
assertThat(fourth.getMillisElapsedFromDelivery()).isEqualTo(0);
assertThat(fourth.getDeliveredCount()).isEqualTo(0);
assertThat(fourth.getBody()).containsEntry("f", "v");
}
@Test
void xreadgroupClaim_movesPendingFromC1ToC2AndRemainsPendingUntilAck() throws Exception {
beforeEachClaimTest();
PendingMessages before = redis.xpending(KEY, GROUP);
List<StreamMessage<String, String>> res = redis.xreadgroup(Consumer.from(GROUP, C2),
XReadArgs.Builder.claim(Duration.ofMillis(IDLE_TIME_MS)).count(10), XReadArgs.StreamOffset.lastConsumed(KEY));
PendingMessages afterClaim = redis.xpending(KEY, GROUP);
long acked = redis.xack(KEY, GROUP, res.get(0).getId(), res.get(1).getId());
PendingMessages afterAck = redis.xpending(KEY, GROUP);
// Verify pending belongs to c1
assertThat(before.getCount()).isEqualTo(2);
assertThat(before.getConsumerMessageCount().getOrDefault(C1, 0L)).isEqualTo(2);
// Verify claim withv c2
assertThat(res).isNotNull();
assertThat(res).isNotEmpty();
long claimed = res.stream().filter(StreamMessage::isClaimed).count();
assertThat(claimed).isEqualTo(2);
// After claim: entries are pending for c2 (moved), not acked yet
assertThat(afterClaim.getCount()).isEqualTo(2);
assertThat(afterClaim.getConsumerMessageCount().getOrDefault(C1, 0L)).isEqualTo(0);
assertThat(afterClaim.getConsumerMessageCount().getOrDefault(C2, 0L)).isEqualTo(2);
// XACK the claimed entries -> PEL should become empty
assertThat(acked).isEqualTo(2);
assertThat(afterAck.getCount()).isEqualTo(0);
}
@Test
void xreadgroupClaim_claimWithNoackDoesNotCreatePendingAndRemovesClaimedFromPel() throws Exception {
beforeEachClaimTest();
PendingMessages before = redis.xpending(KEY, GROUP);
// Also produce fresh entries that should not be added to PEL when NOACK is set
redis.xadd(KEY, BODY);
redis.xadd(KEY, BODY);
// Claim with NOACK using c2
List<StreamMessage<String, String>> res = redis.xreadgroup(Consumer.from(GROUP, C2),
XReadArgs.Builder.claim(Duration.ofMillis(IDLE_TIME_MS)).noack(true).count(10),
XReadArgs.StreamOffset.lastConsumed(KEY));
PendingMessages afterNoack = redis.xpending(KEY, GROUP);
assertThat(res).isNotNull();
assertThat(res).isNotEmpty();
long claimedCount = res.stream().filter(StreamMessage::isClaimed).count();
long freshCount = res.size() - claimedCount;
assertThat(claimedCount).isEqualTo(2);
assertThat(freshCount).isEqualTo(2);
// After NOACK read, previously pending entries remain pending (NOACK does not remove them)
assertThat(afterNoack.getCount()).isEqualTo(2);
// Before claim: entries are pending for c1
assertThat(before.getCount()).isEqualTo(2);
assertThat(before.getConsumerMessageCount().getOrDefault(C1, 0L)).isEqualTo(2);
assertThat(before.getConsumerMessageCount().getOrDefault(C2, 0L)).isEqualTo(0);
// Claimed entries remain pending and are now owned by c2 (CLAIM reassigns ownership). Fresh entries were not added
// to PEL.
assertThat(afterNoack.getConsumerMessageCount().getOrDefault(C1, 0L)).isEqualTo(0);
assertThat(afterNoack.getConsumerMessageCount().getOrDefault(C2, 0L)).isEqualTo(2);
}
}
| StreamCommandIntegrationTests |
java | google__auto | common/src/main/java/com/google/auto/common/AnnotationMirrors.java | {
"start": 1589,
"end": 8500
} | class ____ {
private static final Equivalence<AnnotationMirror> ANNOTATION_MIRROR_EQUIVALENCE =
new Equivalence<AnnotationMirror>() {
@Override
protected boolean doEquivalent(AnnotationMirror left, AnnotationMirror right) {
return MoreTypes.equivalence()
.equivalent(left.getAnnotationType(), right.getAnnotationType())
&& AnnotationValues.equivalence()
.pairwise()
.equivalent(
getAnnotationValuesWithDefaults(left).values(),
getAnnotationValuesWithDefaults(right).values());
}
@Override
protected int doHash(AnnotationMirror annotation) {
DeclaredType type = annotation.getAnnotationType();
Iterable<AnnotationValue> annotationValues =
getAnnotationValuesWithDefaults(annotation).values();
return Arrays.hashCode(
new int[] {
MoreTypes.equivalence().hash(type),
AnnotationValues.equivalence().pairwise().hash(annotationValues)
});
}
@Override
public String toString() {
return "AnnotationMirrors.equivalence()";
}
};
/**
* Returns an {@link Equivalence} for {@link AnnotationMirror} as some implementations delegate
* equality tests to {@link Object#equals} whereas the documentation explicitly states that
* instance/reference equality is not the proper test.
*/
public static Equivalence<AnnotationMirror> equivalence() {
return ANNOTATION_MIRROR_EQUIVALENCE;
}
/**
* Returns the {@link AnnotationMirror}'s map of {@link AnnotationValue} indexed by {@link
* ExecutableElement}, supplying default values from the annotation if the annotation property has
* not been set. This is equivalent to {@link
* Elements#getElementValuesWithDefaults(AnnotationMirror)} but can be called statically without
* an {@link Elements} instance.
*
* <p>The iteration order of elements of the returned map will be the order in which the {@link
* ExecutableElement}s are defined in {@code annotation}'s {@linkplain
* AnnotationMirror#getAnnotationType() type}.
*/
public static ImmutableMap<ExecutableElement, AnnotationValue> getAnnotationValuesWithDefaults(
AnnotationMirror annotation) {
ImmutableMap.Builder<ExecutableElement, AnnotationValue> values = ImmutableMap.builder();
// Use unmodifiableMap to eliminate wildcards, which cause issues for our nullness checker.
@SuppressWarnings("GetElementValues")
Map<ExecutableElement, AnnotationValue> declaredValues =
unmodifiableMap(annotation.getElementValues());
for (ExecutableElement method :
ElementFilter.methodsIn(annotation.getAnnotationType().asElement().getEnclosedElements())) {
// Must iterate and put in this order, to ensure consistency in generated code.
if (declaredValues.containsKey(method)) {
values.put(method, declaredValues.get(method));
} else if (method.getDefaultValue() != null) {
values.put(method, method.getDefaultValue());
} else {
throw new IllegalStateException(
"Unset annotation value without default should never happen: "
+ MoreElements.asType(method.getEnclosingElement()).getQualifiedName()
+ '.'
+ method.getSimpleName()
+ "()");
}
}
return values.build();
}
/**
* Returns an {@link AnnotationValue} for the named element if such an element was either declared
* in the usage represented by the provided {@link AnnotationMirror}, or if such an element was
* defined with a default.
*
* @throws IllegalArgumentException if no element is defined with the given elementName.
*/
public static AnnotationValue getAnnotationValue(
AnnotationMirror annotationMirror, String elementName) {
return getAnnotationElementAndValue(annotationMirror, elementName).getValue();
}
/**
* Returns a {@link ExecutableElement} and its associated {@link AnnotationValue} if such an
* element was either declared in the usage represented by the provided {@link AnnotationMirror},
* or if such an element was defined with a default.
*
* @throws IllegalArgumentException if no element is defined with the given elementName.
*/
public static Map.Entry<ExecutableElement, AnnotationValue> getAnnotationElementAndValue(
AnnotationMirror annotationMirror, final String elementName) {
checkNotNull(annotationMirror);
checkNotNull(elementName);
for (Map.Entry<ExecutableElement, AnnotationValue> entry :
getAnnotationValuesWithDefaults(annotationMirror).entrySet()) {
if (entry.getKey().getSimpleName().contentEquals(elementName)) {
return entry;
}
}
throw new IllegalArgumentException(
String.format(
"@%s does not define an element %s()",
MoreElements.asType(annotationMirror.getAnnotationType().asElement())
.getQualifiedName(),
elementName));
}
/**
* Returns all {@linkplain AnnotationMirror annotations} that are present on the given {@link
* Element} which are themselves annotated with {@code annotationClass}.
*/
public static ImmutableSet<AnnotationMirror> getAnnotatedAnnotations(
Element element, Class<? extends Annotation> annotationClass) {
String name = annotationClass.getCanonicalName();
if (name == null) {
return ImmutableSet.of();
}
return getAnnotatedAnnotations(element, name);
}
/**
* Returns all {@linkplain AnnotationMirror annotations} that are present on the given {@link
* Element} which are themselves annotated with {@code annotation}.
*/
public static ImmutableSet<AnnotationMirror> getAnnotatedAnnotations(
Element element, TypeElement annotation) {
return element.getAnnotationMirrors().stream()
.filter(input -> isAnnotationPresent(input.getAnnotationType().asElement(), annotation))
.collect(toImmutableSet());
}
/**
* Returns all {@linkplain AnnotationMirror annotations} that are present on the given {@link
* Element} which are themselves annotated with an annotation whose type's canonical name is
* {@code annotationName}.
*/
public static ImmutableSet<AnnotationMirror> getAnnotatedAnnotations(
Element element, String annotationName) {
return element.getAnnotationMirrors().stream()
.filter(input -> isAnnotationPresent(input.getAnnotationType().asElement(), annotationName))
.collect(toImmutableSet());
}
/**
* Returns a string representation of the given annotation mirror, suitable for inclusion in a
* Java source file to reproduce the annotation in source form.
*
* <p>Fully qualified names are used for types in annotations, | AnnotationMirrors |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/EsqlClientException.java | {
"start": 360,
"end": 755
} | class ____ extends QlClientException {
public EsqlClientException(String message, Object... args) {
super(message, args);
}
protected EsqlClientException(String message, Throwable cause) {
super(message, cause);
}
protected EsqlClientException(Throwable cause, String message, Object... args) {
super(cause, message, args);
}
}
| EsqlClientException |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/CacheLoaderNullTest.java | {
"start": 860,
"end": 1216
} | class ____ {
private final CompilationTestHelper compilationHelper =
CompilationTestHelper.newInstance(CacheLoaderNull.class, getClass());
@Test
public void positive() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import com.google.common.cache.CacheLoader;
| CacheLoaderNullTest |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/AllocateActionTests.java | {
"start": 1193,
"end": 10708
} | class ____ extends AbstractActionTestCase<AllocateAction> {
@Override
protected AllocateAction doParseInstance(XContentParser parser) {
return AllocateAction.parse(parser);
}
@Override
protected AllocateAction createTestInstance() {
return randomInstance();
}
static AllocateAction randomInstance() {
boolean hasAtLeastOneMap = false;
Map<String, String> includes;
if (randomBoolean()) {
includes = randomAllocationRoutingMap(1, 100);
hasAtLeastOneMap = true;
} else {
includes = randomBoolean() ? null : Map.of();
}
Map<String, String> excludes;
if (randomBoolean()) {
hasAtLeastOneMap = true;
excludes = randomAllocationRoutingMap(1, 100);
} else {
excludes = randomBoolean() ? null : Map.of();
}
Map<String, String> requires;
if (hasAtLeastOneMap == false || randomBoolean()) {
requires = randomAllocationRoutingMap(1, 100);
} else {
requires = randomBoolean() ? null : Map.of();
}
Integer numberOfReplicas = randomBoolean() ? null : randomIntBetween(0, 10);
Integer totalShardsPerNode = randomBoolean() ? null : randomIntBetween(-1, 10);
return new AllocateAction(numberOfReplicas, totalShardsPerNode, includes, excludes, requires);
}
@Override
protected Reader<AllocateAction> instanceReader() {
return AllocateAction::new;
}
@Override
protected AllocateAction mutateInstance(AllocateAction instance) {
Map<String, String> include = instance.getInclude();
Map<String, String> exclude = instance.getExclude();
Map<String, String> require = instance.getRequire();
Integer numberOfReplicas = instance.getNumberOfReplicas();
Integer totalShardsPerNode = instance.getTotalShardsPerNode();
switch (randomIntBetween(0, 4)) {
case 0 -> {
include = new HashMap<>(include);
include.put(randomAlphaOfLengthBetween(11, 15), randomAlphaOfLengthBetween(1, 20));
}
case 1 -> {
exclude = new HashMap<>(exclude);
exclude.put(randomAlphaOfLengthBetween(11, 15), randomAlphaOfLengthBetween(1, 20));
}
case 2 -> {
require = new HashMap<>(require);
require.put(randomAlphaOfLengthBetween(11, 15), randomAlphaOfLengthBetween(1, 20));
}
case 3 -> numberOfReplicas = randomIntBetween(11, 20);
case 4 -> totalShardsPerNode = randomIntBetween(11, 20);
default -> throw new AssertionError("Illegal randomisation branch");
}
return new AllocateAction(numberOfReplicas, totalShardsPerNode, include, exclude, require);
}
public void testAllMapsNullOrEmpty() {
Map<String, String> include = randomBoolean() ? null : Map.of();
Map<String, String> exclude = randomBoolean() ? null : Map.of();
Map<String, String> require = randomBoolean() ? null : Map.of();
IllegalArgumentException exception = expectThrows(
IllegalArgumentException.class,
() -> new AllocateAction(null, null, include, exclude, require)
);
assertEquals(
"At least one of "
+ AllocateAction.INCLUDE_FIELD.getPreferredName()
+ ", "
+ AllocateAction.EXCLUDE_FIELD.getPreferredName()
+ " or "
+ AllocateAction.REQUIRE_FIELD.getPreferredName()
+ " must contain attributes for action "
+ AllocateAction.NAME
+ ". Otherwise the "
+ NUMBER_OF_REPLICAS_FIELD.getPreferredName()
+ " or the "
+ TOTAL_SHARDS_PER_NODE_FIELD.getPreferredName()
+ " options must be configured.",
exception.getMessage()
);
}
public void testInvalidNumberOfReplicas() {
Map<String, String> include = randomAllocationRoutingMap(1, 5);
Map<String, String> exclude = randomBoolean() ? null : Map.of();
Map<String, String> require = randomBoolean() ? null : Map.of();
IllegalArgumentException exception = expectThrows(
IllegalArgumentException.class,
() -> new AllocateAction(randomIntBetween(-1000, -1), randomIntBetween(0, 300), include, exclude, require)
);
assertEquals("[" + NUMBER_OF_REPLICAS_FIELD.getPreferredName() + "] must be >= 0", exception.getMessage());
}
public void testInvalidTotalShardsPerNode() {
Map<String, String> include = randomAllocationRoutingMap(1, 5);
Map<String, String> exclude = randomBoolean() ? null : Map.of();
Map<String, String> require = randomBoolean() ? null : Map.of();
IllegalArgumentException exception = expectThrows(
IllegalArgumentException.class,
() -> new AllocateAction(randomIntBetween(0, 300), randomIntBetween(-1000, -2), include, exclude, require)
);
assertEquals("[" + TOTAL_SHARDS_PER_NODE_FIELD.getPreferredName() + "] must be >= -1", exception.getMessage());
}
public static Map<String, String> randomAllocationRoutingMap(int minEntries, int maxEntries) {
Map<String, String> map = new HashMap<>();
int numIncludes = randomIntBetween(minEntries, maxEntries);
for (int i = 0; i < numIncludes; i++) {
String attributeName = randomValueOtherThanMany(
DiscoveryNodeRole.roleNames()::contains,
() -> randomAlphaOfLengthBetween(2, 20)
);
map.put(attributeName, randomAlphaOfLengthBetween(2, 20));
}
return map;
}
public void testToSteps() {
AllocateAction action = createTestInstance();
String phase = randomAlphaOfLengthBetween(1, 10);
StepKey nextStepKey = new StepKey(
randomAlphaOfLengthBetween(1, 10),
randomAlphaOfLengthBetween(1, 10),
randomAlphaOfLengthBetween(1, 10)
);
List<Step> steps = action.toSteps(null, phase, nextStepKey);
assertNotNull(steps);
assertEquals(2, steps.size());
StepKey expectedFirstStepKey = new StepKey(phase, AllocateAction.NAME, AllocateAction.NAME);
StepKey expectedSecondStepKey = new StepKey(phase, AllocateAction.NAME, AllocationRoutedStep.NAME);
UpdateSettingsStep firstStep = (UpdateSettingsStep) steps.get(0);
assertEquals(expectedFirstStepKey, firstStep.getKey());
assertEquals(expectedSecondStepKey, firstStep.getNextStepKey());
Settings.Builder expectedSettings = Settings.builder();
if (action.getNumberOfReplicas() != null) {
expectedSettings.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, action.getNumberOfReplicas());
}
action.getInclude()
.forEach((key, value) -> expectedSettings.put(IndexMetadata.INDEX_ROUTING_INCLUDE_GROUP_SETTING.getKey() + key, value));
action.getExclude()
.forEach((key, value) -> expectedSettings.put(IndexMetadata.INDEX_ROUTING_EXCLUDE_GROUP_SETTING.getKey() + key, value));
action.getRequire()
.forEach((key, value) -> expectedSettings.put(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_SETTING.getKey() + key, value));
if (action.getTotalShardsPerNode() != null) {
expectedSettings.put(ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE_SETTING.getKey(), action.getTotalShardsPerNode());
}
assertThat(firstStep.getSettingsSupplier().apply(null), equalTo(expectedSettings.build()));
AllocationRoutedStep secondStep = (AllocationRoutedStep) steps.get(1);
assertEquals(expectedSecondStepKey, secondStep.getKey());
assertEquals(nextStepKey, secondStep.getNextStepKey());
}
public void testTotalNumberOfShards() throws Exception {
Integer totalShardsPerNode = randomIntBetween(-1, 1000);
Integer numberOfReplicas = randomIntBetween(0, 4);
AllocateAction action = new AllocateAction(numberOfReplicas, totalShardsPerNode, null, null, null);
String phase = randomAlphaOfLengthBetween(1, 10);
StepKey nextStepKey = new StepKey(
randomAlphaOfLengthBetween(1, 10),
randomAlphaOfLengthBetween(1, 10),
randomAlphaOfLengthBetween(1, 10)
);
List<Step> steps = action.toSteps(null, phase, nextStepKey);
UpdateSettingsStep firstStep = (UpdateSettingsStep) steps.get(0);
Settings actualSettings = firstStep.getSettingsSupplier().apply(null);
assertEquals(totalShardsPerNode, actualSettings.getAsInt(INDEX_TOTAL_SHARDS_PER_NODE_SETTING.getKey(), null));
totalShardsPerNode = null;
action = new AllocateAction(numberOfReplicas, totalShardsPerNode, null, null, null);
steps = action.toSteps(null, phase, nextStepKey);
firstStep = (UpdateSettingsStep) steps.get(0);
actualSettings = firstStep.getSettingsSupplier().apply(null);
assertNull(actualSettings.get(INDEX_TOTAL_SHARDS_PER_NODE_SETTING.getKey()));
// allow an allocate action that only specifies total shards per node (don't expect any exceptions in this case)
action = new AllocateAction(null, 5, null, null, null);
assertThat(action.getTotalShardsPerNode(), is(5));
}
}
| AllocateActionTests |
java | eclipse-vertx__vert.x | vertx-core/src/main/java/io/vertx/core/impl/WorkerTask.java | {
"start": 555,
"end": 797
} | class ____ context worker tasks.
*
* The cancellation / after task uses an atomic integer to avoid race when executing the optional cancel continuation
*
* @author <a href="mailto:julien@julienviet.com">Julien Viet</a>
*/
public abstract | for |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-client/deployment/src/test/java/io/quarkus/rest/client/reactive/ResponseConsumptionTest.java | {
"start": 3237,
"end": 3794
} | class ____ {
public static final int OK_RESPONSE_SIZE = 2 * 1024 * 1024;
@Path("401")
@GET
public RestResponse<String> get401() {
return RestResponse.status(RestResponse.Status.UNAUTHORIZED, "Unauthorized");
}
@Path("200")
@GET
public RestResponse<byte[]> get200() {
byte[] byteArray = new byte[OK_RESPONSE_SIZE];
Random random = new Random();
random.nextBytes(byteArray);
return RestResponse.ok(byteArray);
}
}
}
| Resource |
java | junit-team__junit5 | junit-platform-commons/src/main/java/org/junit/platform/commons/util/ClassNamePatternFilterUtils.java | {
"start": 1773,
"end": 2191
} | class ____ matching any of the supplied patterns.
*
* @param patterns a comma-separated list of patterns
*/
public static Predicate<String> excludeMatchingClassNames(@Nullable String patterns) {
return matchingClasses(patterns, Function.identity(), FilterType.EXCLUDE);
}
/**
* Create a {@link Predicate} that can be used to include (i.e., filter in)
* objects of type {@code T} whose fully qualified | names |
java | spring-projects__spring-boot | documentation/spring-boot-docs/src/main/java/org/springframework/boot/docs/web/servlet/springmvc/messageconverters/AnotherHttpMessageConverter.java | {
"start": 706,
"end": 784
} | class ____ extends AdditionalHttpMessageConverter {
}
| AnotherHttpMessageConverter |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/archive/spi/AbstractArchiveDescriptor.java | {
"start": 500,
"end": 2465
} | class ____ implements ArchiveDescriptor {
private final ArchiveDescriptorFactory archiveDescriptorFactory;
private final URL archiveUrl;
private final String entryBasePrefix;
protected AbstractArchiveDescriptor(
ArchiveDescriptorFactory archiveDescriptorFactory,
URL archiveUrl,
String entryBasePrefix) {
this.archiveDescriptorFactory = archiveDescriptorFactory;
this.archiveUrl = archiveUrl;
this.entryBasePrefix = normalizeEntryBasePrefix( entryBasePrefix );
}
private static String normalizeEntryBasePrefix(String entryBasePrefix) {
if ( StringHelper.isEmpty( entryBasePrefix ) || entryBasePrefix.length() == 1 ) {
return null;
}
return entryBasePrefix.startsWith( "/" ) ? entryBasePrefix.substring( 1 ) : entryBasePrefix;
}
@SuppressWarnings("unused")
protected ArchiveDescriptorFactory getArchiveDescriptorFactory() {
return archiveDescriptorFactory;
}
protected URL getArchiveUrl() {
return archiveUrl;
}
protected String getEntryBasePrefix() {
return entryBasePrefix;
}
protected String extractRelativeName(ZipEntry zipEntry) {
final String entryName = extractName( zipEntry );
return entryBasePrefix != null && entryName.contains( entryBasePrefix )
? entryName.substring( entryBasePrefix.length() )
: entryName;
}
protected String extractName(ZipEntry zipEntry) {
return normalizePathName( zipEntry.getName() );
}
protected String normalizePathName(String pathName) {
return pathName.startsWith( "/" ) ? pathName.substring( 1 ) : pathName;
}
protected InputStreamAccess buildByteBasedInputStreamAccess(final String name, InputStream inputStream) {
// because of how jar InputStreams work we need to extract the bytes immediately. However, we
// do delay the creation of the ByteArrayInputStreams until needed
final byte[] bytes = ArchiveHelper.getBytesFromInputStreamSafely( inputStream );
return new ByteArrayInputStreamAccess( name, bytes );
}
}
| AbstractArchiveDescriptor |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/TransformMessagesTests.java | {
"start": 494,
"end": 3181
} | class ____ extends ESTestCase {
public void testGetMessage_WithFormatStrings() {
String formattedMessage = TransformMessages.getMessage(
TransformMessages.REST_STOP_TRANSFORM_WAIT_FOR_COMPLETION_TIMEOUT,
"30s",
"my_transform"
);
assertEquals("Timed out after [30s] while waiting for transform [my_transform] to stop", formattedMessage);
}
public void testMessageProperFormat() throws IllegalArgumentException, IllegalAccessException {
Field[] declaredFields = TransformMessages.class.getFields();
int checkedMessages = 0;
for (Field field : declaredFields) {
int modifiers = field.getModifiers();
if (java.lang.reflect.Modifier.isStatic(modifiers)
&& java.lang.reflect.Modifier.isFinal(modifiers)
&& field.getType().isAssignableFrom(String.class)) {
assertSingleMessage((String) field.get(TransformMessages.class));
++checkedMessages;
}
}
assertTrue(checkedMessages > 0);
logger.info("Checked {} messages", checkedMessages);
}
public void testAssertSingleMessage() {
expectThrows(RuntimeException.class, () -> innerAssertSingleMessage("missing zero position {1} {1}"));
expectThrows(RuntimeException.class, () -> innerAssertSingleMessage("incomplete {}"));
expectThrows(RuntimeException.class, () -> innerAssertSingleMessage("count from 1 {1}"));
}
private void assertSingleMessage(String message) {
// for testing the test method, we can not assert directly, but wrap it with an exception, which also
// nicely encapsulate parsing errors thrown by MessageFormat itself
try {
innerAssertSingleMessage(message);
} catch (Exception e) {
fail("message: " + message + " failure: " + e.getMessage());
}
}
private void innerAssertSingleMessage(String message) {
MessageFormat messageWithNoArguments = new MessageFormat(message, Locale.ROOT);
int numberOfArguments = messageWithNoArguments.getFormats().length;
List<String> args = new ArrayList<>();
for (int i = 0; i < numberOfArguments; ++i) {
args.add(randomAlphaOfLength(5));
}
String properFormatedMessage = new MessageFormat(message, Locale.ROOT).format(args.toArray(new String[0]));
for (String arg : args) {
if (properFormatedMessage.contains(arg) == false) {
throw new RuntimeException("Message check: [" + message + "] failed, missing argument");
}
}
}
}
| TransformMessagesTests |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/serializer/date/DateTest_ISO8601_ThreeLetterISO8601TimeZone.java | {
"start": 177,
"end": 941
} | class ____ extends TestCase {
public void f_test_date() throws Exception {
String text = "{\"gmtCreate\":\"2014-08-21T09:51:36.2+08:00\"}";
Calendar date = JSON.parseObject(text, VO.class).getGmtCreate();
Assert.assertNotNull(date);
Assert.assertEquals(8, date.getTimeZone().getRawOffset()/(3600*1000));
}
public void test_date_9() throws Exception {
String text = "{\"gmtCreate\":\"2014-08-21T09:51:36.235+09:00\"}";
Calendar date = JSON.parseObject(text, VO.class).getGmtCreate();
Assert.assertNotNull(date);
Assert.assertEquals(9, date.getTimeZone().getRawOffset()/(3600*1000));
}
public static | DateTest_ISO8601_ThreeLetterISO8601TimeZone |
java | alibaba__fastjson | src/test/java/com/alibaba/fastjson/deserializer/issue2779/LargeJavaBean.java | {
"start": 12972,
"end": 13878
} | class ____ {
// provide by zhaiyao, for fastjson test
private List<Double> a;
private List<Double> b;
private List<Double> c;
private List<Double> d;
private List<Double> e;
private List<Double> f;
private List<Double> g;
private List<Double> h;
private List<Double> i;
private List<Double> j;
private List<Double> k;
private List<Double> l;
private List<Double> m;
private List<Double> n;
private List<Double> o;
private List<Double> p;
private List<Double> q;
private List<Double> r;
private List<Double> s;
private List<Double> t;
private List<Double> u;
private List<Double> v;
private List<Double> w;
private List<Double> x;
private List<Double> y;
private List<Double> z;
}
} | Alphabet |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/convert/CoerceIntToFloatTest.java | {
"start": 578,
"end": 7657
} | class ____
{
private final ObjectMapper DEFAULT_MAPPER = newJsonMapper();
private final ObjectMapper MAPPER_TO_FAIL = jsonMapperBuilder()
.withCoercionConfig(LogicalType.Float, cfg ->
cfg.setCoercion(CoercionInputShape.Integer, CoercionAction.Fail))
.build();
private final ObjectMapper MAPPER_TRY_CONVERT = jsonMapperBuilder()
.withCoercionConfig(LogicalType.Float, cfg ->
cfg.setCoercion(CoercionInputShape.Integer, CoercionAction.TryConvert))
.build();
private final ObjectMapper MAPPER_TO_NULL = jsonMapperBuilder()
.withCoercionConfig(LogicalType.Float, cfg ->
cfg.setCoercion(CoercionInputShape.Integer, CoercionAction.AsNull))
.build();
private final ObjectMapper MAPPER_TO_EMPTY = jsonMapperBuilder()
.withCoercionConfig(LogicalType.Float, cfg ->
cfg.setCoercion(CoercionInputShape.Integer, CoercionAction.AsEmpty))
.build();
private final ObjectMapper LEGACY_SCALAR_COERCION_FAIL = jsonMapperBuilder()
.disable(MapperFeature.ALLOW_COERCION_OF_SCALARS)
.build();
@Test
public void testDefaultIntToFloatCoercion() throws Exception
{
assertSuccessfulIntToFloatConversionsWith(DEFAULT_MAPPER);
}
@Test
public void testCoerceConfigToConvert() throws Exception
{
assertSuccessfulIntToFloatConversionsWith(MAPPER_TRY_CONVERT);
}
@Test
public void testCoerceConfigToNull() throws Exception
{
assertNull(MAPPER_TO_NULL.readValue("1", Float.class));
// `null` not possible for primitives, must use empty (aka default) value
assertEquals(0.0f, MAPPER_TO_NULL.readValue("-2", Float.TYPE));
{
FloatWrapper w = MAPPER_TO_NULL.readValue("{\"f\": -5}", FloatWrapper.class);
assertEquals(0.0f, w.f);
float[] arr = MAPPER_TO_NULL.readValue("[ 2 ]", float[].class);
assertEquals(1, arr.length);
assertEquals(0.0f, arr[0]);
}
assertNull(MAPPER_TO_NULL.readValue("-1", Double.class));
assertEquals(0.0d, MAPPER_TO_NULL.readValue("4", Double.TYPE));
{
DoubleWrapper w = MAPPER_TO_NULL.readValue("{\"d\": 2}", DoubleWrapper.class);
assertEquals(0.0d, w.d);
double[] arr = MAPPER_TO_NULL.readValue("[ -7 ]", double[].class);
assertEquals(1, arr.length);
assertEquals(0.0d, arr[0]);
}
assertNull(MAPPER_TO_NULL.readValue("420", BigDecimal.class));
{
BigDecimal[] arr = MAPPER_TO_NULL.readValue("[ 420 ]", BigDecimal[].class);
assertEquals(1, arr.length);
assertNull(arr[0]);
}
}
@Test
public void testCoerceConfigToEmpty() throws Exception
{
assertEquals(0.0f, MAPPER_TO_EMPTY.readValue("3", Float.class));
assertEquals(0.0f, MAPPER_TO_EMPTY.readValue("-2", Float.TYPE));
{
FloatWrapper w = MAPPER_TO_EMPTY.readValue("{\"f\": -5}", FloatWrapper.class);
assertEquals(0.0f, w.f);
float[] arr = MAPPER_TO_EMPTY.readValue("[ 2 ]", float[].class);
assertEquals(1, arr.length);
assertEquals(0.0f, arr[0]);
}
assertEquals(0.0d, MAPPER_TO_EMPTY.readValue("-1", Double.class));
assertEquals(0.0d, MAPPER_TO_EMPTY.readValue("-5", Double.TYPE));
{
DoubleWrapper w = MAPPER_TO_EMPTY.readValue("{\"d\": 2}", DoubleWrapper.class);
assertEquals(0.0d, w.d);
double[] arr = MAPPER_TO_EMPTY.readValue("[ -2 ]", double[].class);
assertEquals(1, arr.length);
assertEquals(0.0d, arr[0]);
}
assertEquals(BigDecimal.valueOf(0), MAPPER_TO_EMPTY.readValue("3643", BigDecimal.class));
}
@Test
public void testCoerceConfigToFail() throws Exception
{
_verifyCoerceFail(MAPPER_TO_FAIL, Float.class, "3");
_verifyCoerceFail(MAPPER_TO_FAIL, Float.TYPE, "-2");
_verifyCoerceFail(MAPPER_TO_FAIL, FloatWrapper.class, "{\"f\": -5}", "float");
_verifyCoerceFail(MAPPER_TO_FAIL, float[].class, "[ 2 ]", "to `float` value");
_verifyCoerceFail(MAPPER_TO_FAIL, Double.class, "-1");
_verifyCoerceFail(MAPPER_TO_FAIL, Double.TYPE, "4");
_verifyCoerceFail(MAPPER_TO_FAIL, DoubleWrapper.class, "{\"d\": 2}", "double");
_verifyCoerceFail(MAPPER_TO_FAIL, double[].class, "[ -2 ]", "to `double` value");
_verifyCoerceFail(MAPPER_TO_FAIL, BigDecimal.class, "73455342");
}
@Test
public void testLegacyConfiguration() throws Exception
{
assertSuccessfulIntToFloatConversionsWith(LEGACY_SCALAR_COERCION_FAIL);
}
/*
/**********************************************************************
/* Helper methods
/**********************************************************************
*/
private void assertSuccessfulIntToFloatConversionsWith(ObjectMapper objectMapper)
throws Exception
{
assertEquals(3.0f, objectMapper.readValue("3", Float.class));
assertEquals(-2.0f, objectMapper.readValue("-2", Float.TYPE));
{
FloatWrapper w = objectMapper.readValue("{\"f\": -5}", FloatWrapper.class);
assertEquals(-5.0f, w.f);
float[] arr = objectMapper.readValue("[ 2 ]", float[].class);
assertEquals(2.0f, arr[0]);
}
assertEquals(-1.0d, objectMapper.readValue("-1", Double.class));
assertEquals(4.0d, objectMapper.readValue("4", Double.TYPE));
{
DoubleWrapper w = objectMapper.readValue("{\"d\": 2}", DoubleWrapper.class);
assertEquals(2.0d, w.d);
double[] arr = objectMapper.readValue("[ -2 ]", double[].class);
assertEquals(-2.0d, arr[0]);
}
BigDecimal biggie = objectMapper.readValue("423451233", BigDecimal.class);
assertEquals(BigDecimal.valueOf(423451233.0d), biggie);
}
private void _verifyCoerceFail(ObjectMapper m, Class<?> targetType,
String doc) throws Exception
{
_verifyCoerceFail(m.reader(), targetType, doc, targetType.getName());
}
private void _verifyCoerceFail(ObjectMapper m, Class<?> targetType,
String doc, String targetTypeDesc) throws Exception
{
_verifyCoerceFail(m.reader(), targetType, doc, targetTypeDesc);
}
private void _verifyCoerceFail(ObjectReader r, Class<?> targetType,
String doc, String targetTypeDesc) throws Exception
{
try {
r.forType(targetType).readValue(doc);
fail("Should not accept Integer for "+targetType.getName()+" when configured to");
} catch (MismatchedInputException e) {
verifyException(e, "Cannot coerce Integer");
verifyException(e, targetTypeDesc);
}
}
}
| CoerceIntToFloatTest |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/EsqlFeatureSetUsage.java | {
"start": 709,
"end": 2147
} | class ____ extends XPackFeatureUsage {
private final Map<String, Object> stats;
public EsqlFeatureSetUsage(StreamInput in) throws IOException {
super(in);
stats = in.readGenericMap();
}
public EsqlFeatureSetUsage(Map<String, Object> stats) {
this(true, true, stats);
}
private EsqlFeatureSetUsage(boolean available, boolean enabled, Map<String, Object> stats) {
super(XPackField.ESQL, available, enabled);
this.stats = stats;
}
/** Returns a feature set usage where the feature is not available or enabled, and has an empty stats. */
public static EsqlFeatureSetUsage unavailable() {
return new EsqlFeatureSetUsage(false, false, Map.of());
}
public Map<String, Object> stats() {
return stats;
}
@Override
protected void innerXContent(XContentBuilder builder, Params params) throws IOException {
super.innerXContent(builder, params);
if (enabled) {
for (Map.Entry<String, Object> entry : stats.entrySet()) {
builder.field(entry.getKey(), entry.getValue());
}
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeGenericMap(stats);
}
@Override
public TransportVersion getMinimalSupportedVersion() {
return TransportVersions.V_8_11_X;
}
}
| EsqlFeatureSetUsage |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java | {
"start": 52693,
"end": 54642
} | class ____
implements BatchedEntries<CacheDirectiveEntry> {
private final ListCacheDirectivesResponseProto response;
BatchedCacheEntries(
ListCacheDirectivesResponseProto response) {
this.response = response;
}
@Override
public CacheDirectiveEntry get(int i) {
return PBHelperClient.convert(response.getElements(i));
}
@Override
public int size() {
return response.getElementsCount();
}
@Override
public boolean hasMore() {
return response.getHasMore();
}
}
@Override
public BatchedEntries<CacheDirectiveEntry> listCacheDirectives(long prevId,
CacheDirectiveInfo filter) throws IOException {
if (filter == null) {
filter = new CacheDirectiveInfo.Builder().build();
}
CacheDirectiveInfo f = filter;
return new BatchedCacheEntries(
ipc(() -> rpcProxy.listCacheDirectives(null,
ListCacheDirectivesRequestProto.newBuilder().
setPrevId(prevId).
setFilter(PBHelperClient.convert(f)).
build())));
}
@Override
public void addCachePool(CachePoolInfo info) throws IOException {
AddCachePoolRequestProto.Builder builder =
AddCachePoolRequestProto.newBuilder();
builder.setInfo(PBHelperClient.convert(info));
ipc(() -> rpcProxy.addCachePool(null, builder.build()));
}
@Override
public void modifyCachePool(CachePoolInfo req) throws IOException {
ModifyCachePoolRequestProto.Builder builder =
ModifyCachePoolRequestProto.newBuilder();
builder.setInfo(PBHelperClient.convert(req));
ipc(() -> rpcProxy.modifyCachePool(null, builder.build()));
}
@Override
public void removeCachePool(String cachePoolName) throws IOException {
ipc(() -> rpcProxy.removeCachePool(null,
RemoveCachePoolRequestProto.newBuilder().
setPoolName(cachePoolName).build()));
}
protected static | BatchedCacheEntries |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/cluster/DiffableUtils.java | {
"start": 30410,
"end": 31859
} | class ____<K, V extends Diffable<V>> implements ValueSerializer<K, V> {
@SuppressWarnings("rawtypes")
private static final DiffableValueSerializer WRITE_ONLY_INSTANCE = new DiffableValueSerializer() {
@Override
public Object read(StreamInput in, Object key) {
throw new UnsupportedOperationException();
}
@Override
public Diff<Object> readDiff(StreamInput in, Object key) {
throw new UnsupportedOperationException();
}
};
@SuppressWarnings("unchecked")
private static <K, V extends Diffable<V>> DiffableValueSerializer<K, V> getWriteOnlyInstance() {
return WRITE_ONLY_INSTANCE;
}
@Override
public boolean supportsDiffableValues() {
return true;
}
@Override
public Diff<V> diff(V value, V beforePart) {
return value.diff(beforePart);
}
@Override
public void write(V value, StreamOutput out) throws IOException {
value.writeTo(out);
}
@Override
public void writeDiff(Diff<V> value, StreamOutput out) throws IOException {
value.writeTo(out);
}
}
/**
* Serializer for non-diffable map values
*
* @param <K> type of map keys
* @param <V> type of map values
*/
public abstract static | DiffableValueSerializer |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/main/java/org/springframework/data/jpa/repository/Temporal.java | {
"start": 1379,
"end": 1535
} | interface ____ {
/**
* Defines the {@link TemporalType} to use for the annotated parameter.
*/
TemporalType value() default TemporalType.DATE;
}
| Temporal |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/balancer/Matcher.java | {
"start": 992,
"end": 2161
} | interface ____ {
/** Given the cluster topology, does the left node match the right node? */
public boolean match(NetworkTopology cluster, Node left, Node right);
/** Match datanodes in the same node group. */
public static final Matcher SAME_NODE_GROUP = new Matcher() {
@Override
public boolean match(NetworkTopology cluster, Node left, Node right) {
return cluster.isOnSameNodeGroup(left, right);
}
@Override
public String toString() {
return "SAME_NODE_GROUP";
}
};
/** Match datanodes in the same rack. */
public static final Matcher SAME_RACK = new Matcher() {
@Override
public boolean match(NetworkTopology cluster, Node left, Node right) {
return cluster.isOnSameRack(left, right);
}
@Override
public String toString() {
return "SAME_RACK";
}
};
/** Match any datanode with any other datanode. */
public static final Matcher ANY_OTHER = new Matcher() {
@Override
public boolean match(NetworkTopology cluster, Node left, Node right) {
return left != right;
}
@Override
public String toString() {
return "ANY_OTHER";
}
};
} | Matcher |
java | google__dagger | javatests/artifacts/dagger-ksp/transitive-annotation-app/library1/src/main/java/library1/MySubcomponentWithBuilder.java | {
"start": 764,
"end": 1324
} | class ____ to test that Dagger won't fail when non-dagger related annotations cannot be
* resolved.
*
* <p>During the compilation of {@code :app}, {@link MyTransitiveAnnotation} will no longer be on
* the classpath. In most cases, Dagger shouldn't care that the annotation isn't on the classpath
*/
// TODO(b/219587431): Support @MyTransitiveAnnotation (Requires generating metadata).
@MyAnnotation(MyTransitiveType.VALUE)
@MyOtherAnnotation(MyTransitiveType.class)
@MySubcomponentScope
@Subcomponent(modules = MySubcomponentModule.class)
public abstract | used |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/session/MockitoSessionBuilder.java | {
"start": 2231,
"end": 2332
} | class ____.
* <p>
* This method calls {@link #initMocks(Object)} for each passed test | instances |
java | quarkusio__quarkus | extensions/smallrye-metrics/runtime/src/main/java/io/quarkus/smallrye/metrics/runtime/SmallRyeMetricsRecorder.java | {
"start": 2248,
"end": 47720
} | class ____
private static final String CURRENT_LOADED_CLASS_COUNT = "classloader.loadedClasses.count";
private static final String TOTAL_LOADED_CLASS_COUNT = "classloader.loadedClasses.total";
private static final String TOTAL_UNLOADED_CLASS_COUNT = "classloader.unloadedClasses.total";
// runtime
private static final String JVM_UPTIME = "jvm.uptime";
// operating system
private static final String SYSTEM_LOAD_AVERAGE = "cpu.systemLoadAverage";
private static final String CPU_AVAILABLE_PROCESSORS = "cpu.availableProcessors";
private static final String SYSTEM_CPU_LOAD = "cpu.systemCpuLoad";
private static final String PROCESS_CPU_LOAD = "cpu.processCpuLoad";
private static final String PROCESS_CPU_TIME = "cpu.processCpuTime";
private static final String FREE_PHYSICAL_MEM_SIZE = "memory.freePhysicalSize";
private static final String FREE_SWAP_SIZE = "memory.freeSwapSize";
// memory
private static final String MEMORY_COMMITTED_NON_HEAP = "memory.committedNonHeap";
private static final String MEMORY_COMMITTED_HEAP = "memory.committedHeap";
private static final String MEMORY_MAX_HEAP = "memory.maxHeap";
private static final String MEMORY_MAX_NON_HEAP = "memory.maxNonHeap";
private static final String MEMORY_USED_HEAP = "memory.usedHeap";
private static final String MEMORY_USED_NON_HEAP = "memory.usedNonHeap";
private static final SmallRyeMetricsFactory factory = new SmallRyeMetricsFactory();
public SmallRyeMetricsHandler handler(String metricsPath) {
SmallRyeMetricsHandler handler = new SmallRyeMetricsHandler();
handler.setMetricsPath(metricsPath);
// tell the metrics internal handler to not append CORS headers
// these will be handled by the Quarkus CORS filter, if enabled
CDI.current().select(MetricsRequestHandler.class).get().appendCorsHeaders(false);
return handler;
}
public void registerVendorMetrics() {
MetricRegistry registry = MetricRegistries.get(MetricRegistry.Type.VENDOR);
memoryPoolMetrics(registry);
vendorSpecificMemoryMetrics(registry);
vendorOperatingSystemMetrics(registry);
}
public void registerBaseMetrics() {
MetricRegistry registry = MetricRegistries.get(MetricRegistry.Type.BASE);
garbageCollectionMetrics(registry);
classLoadingMetrics(registry);
baseOperatingSystemMetrics(registry);
threadingMetrics(registry);
runtimeMetrics(registry);
baseMemoryMetrics(registry);
}
    /**
     * Registers Micrometer-compatible JVM metrics (jvm.gc.*, jvm.threads.*, jvm.memory.*,
     * jvm.classes.*, process.*) with the BASE registry. The GC metrics listener installed
     * by {@code micrometerJvmGcMetrics} is cleaned up through the given shutdown context.
     */
    public void registerMicrometerJvmMetrics(ShutdownContext shutdown) {
        MetricRegistry registry = MetricRegistries.get(MetricRegistry.Type.BASE);
        micrometerJvmGcMetrics(registry, shutdown);
        micrometerJvmThreadMetrics(registry);
        micrometerJvmMemoryMetrics(registry);
        micrometerJvmClassLoaderMetrics(registry);
        micrometerRuntimeMetrics(registry);
    }
    /**
     * Registers the application-scoped metrics associated with the given bean member,
     * delegating metadata resolution to {@code MetricsMetadata} with a fresh
     * {@link MetricResolver}.
     */
    public void registerMetrics(BeanInfo beanInfo, MemberInfo memberInfo) {
        MetricRegistry registry = MetricRegistries.get(MetricRegistry.Type.APPLICATION);
        MetricsMetadata.registerMetrics(registry,
                new MetricResolver(),
                beanInfo,
                memberInfo);
    }
public void registerMetric(MetricRegistry.Type scope,
MetadataHolder metadataHolder,
TagHolder[] tagHolders,
Object implementor) {
Metadata metadata = metadataHolder.toMetadata();
Tag[] tags = Arrays.stream(tagHolders).map(TagHolder::toTag).toArray(Tag[]::new);
MetricRegistry registry = MetricRegistries.get(scope);
switch (metadata.getTypeRaw()) {
case GAUGE:
registry.register(metadata, (Gauge) implementor, tags);
break;
case TIMER:
if (implementor == null) {
registry.timer(metadata, tags);
} else {
registry.register(metadata, (Timer) implementor);
}
break;
case COUNTER:
if (implementor == null) {
registry.counter(metadata, tags);
} else {
registry.register(metadata, (Counter) implementor, tags);
}
break;
case HISTOGRAM:
if (implementor == null) {
registry.histogram(metadata, tags);
} else {
registry.register(metadata, (Histogram) implementor, tags);
}
break;
case CONCURRENT_GAUGE:
if (implementor == null) {
registry.concurrentGauge(metadata, tags);
} else {
registry.register(metadata, (ConcurrentGauge) implementor, tags);
}
break;
case METERED:
if (implementor == null) {
registry.meter(metadata, tags);
} else {
registry.register(metadata, (Metered) implementor, tags);
}
break;
case INVALID:
break;
default:
break;
}
}
public void registerMetrics(Consumer<MetricsFactory> consumer) {
if (consumer != null) {
consumer.accept(factory);
}
}
    /**
     * Eagerly creates the three standard metric registries (application, base, vendor)
     * and forces instantiation of the {@link MetricRegistries} bean so that its
     * pre-destroy cleanup runs on shutdown.
     */
    public void createRegistries(BeanContainer container) {
        MetricRegistries.get(MetricRegistry.Type.APPLICATION);
        MetricRegistries.get(MetricRegistry.Type.BASE);
        MetricRegistries.get(MetricRegistry.Type.VENDOR);
        //HACK: registration is done via statics, but cleanup is done via pre destroy
        //however if the bean is not used it will not be created, so no cleanup will be done
        //we force bean creation here to make sure the container can restart correctly
        container.beanInstance(MetricRegistries.class).getApplicationRegistry();
    }
public void dropRegistriesAtShutdown(ShutdownContext shutdownContext) {
shutdownContext.addShutdownTask(MetricRegistries::dropAll);
}
    /**
     * Registers per-collector GC count and elapsed-time counters, one pair per
     * {@link GarbageCollectorMXBean}, distinguished by a "name" tag.
     */
    private void garbageCollectionMetrics(MetricRegistry registry) {
        List<GarbageCollectorMXBean> gcs = ManagementFactory.getGarbageCollectorMXBeans();
        if (gcs.isEmpty()) {
            return;
        }
        Metadata countMetadata = Metadata.builder()
                .withName("gc.total")
                .withType(MetricType.COUNTER)
                .withDisplayName("Garbage Collection Count")
                .withUnit("none")
                .withDescription(
                        "Displays the total number of collections that have occurred. This attribute lists -1 if the collection count is undefined for this collector.")
                .build();
        Metadata timeMetadata = Metadata.builder()
                .withName("gc.time")
                .withType(MetricType.COUNTER)
                .withDisplayName("Garbage Collection Time")
                .withUnit("milliseconds")
                .withDescription(
                        "Displays the approximate accumulated collection elapsed time in milliseconds. This attribute " +
                                "displays -1 if the collection elapsed time is undefined for this collector. The Java " +
                                "virtual machine implementation may use a high resolution timer to measure the " +
                                "elapsed time. This attribute may display the same value even if the collection " +
                                "count has been incremented if the collection elapsed time is very short.")
                .build();
        // one counter pair per collector; the loop variable is captured by the anonymous
        // counters, so values stay live for the corresponding MX bean
        for (GarbageCollectorMXBean gc : gcs) {
            registry.register(countMetadata, new GetCountOnlyCounter() {
                @Override
                public long getCount() {
                    return gc.getCollectionCount();
                }
            }, new Tag("name", gc.getName()));
            registry.register(timeMetadata, new GetCountOnlyCounter() {
                @Override
                public long getCount() {
                    return gc.getCollectionTime();
                }
            }, new Tag("name", gc.getName()));
        }
    }
    /**
     * Registers the base class-loading metrics: total loaded and unloaded class
     * counters, and a gauge for the currently loaded class count.
     */
    private void classLoadingMetrics(MetricRegistry registry) {
        ClassLoadingMXBean classLoadingMXBean = ManagementFactory.getClassLoadingMXBean();
        Metadata meta = Metadata.builder()
                .withName(TOTAL_LOADED_CLASS_COUNT)
                .withType(MetricType.COUNTER)
                .withDisplayName("Total Loaded Class Count")
                .withDescription(
                        "Displays the total number of classes that have been loaded since the Java virtual machine has started execution.")
                .build();
        registry.register(meta, new GetCountOnlyCounter() {
            @Override
            public long getCount() {
                return classLoadingMXBean.getTotalLoadedClassCount();
            }
        });
        meta = Metadata.builder()
                .withName(TOTAL_UNLOADED_CLASS_COUNT)
                .withType(MetricType.COUNTER)
                .withDisplayName("Total Unloaded Class Count")
                .withDescription(
                        "Displays the total number of classes unloaded since the Java virtual machine has started execution.")
                .build();
        registry.register(meta, new GetCountOnlyCounter() {
            @Override
            public long getCount() {
                return classLoadingMXBean.getUnloadedClassCount();
            }
        });
        meta = Metadata.builder()
                .withName(CURRENT_LOADED_CLASS_COUNT)
                .withType(MetricType.GAUGE)
                .withDisplayName("Current Loaded Class Count")
                .withDescription("Displays the number of classes that are currently loaded in the Java virtual machine.")
                .build();
        registry.register(meta, new Gauge() {
            @Override
            public Number getValue() {
                return classLoadingMXBean.getLoadedClassCount();
            }
        });
    }
    /**
     * Registers the base operating system metrics: system load average, available
     * processors and - only in JVM mode on JVMs whose OS MXBean is assignable to the
     * JDK-internal {@code com.sun.management.OperatingSystemMXBean} - the process CPU load.
     */
    private void baseOperatingSystemMetrics(MetricRegistry registry) {
        OperatingSystemMXBean operatingSystemMXBean = ManagementFactory.getOperatingSystemMXBean();
        Metadata meta = Metadata.builder()
                .withName(SYSTEM_LOAD_AVERAGE)
                .withType(MetricType.GAUGE)
                .withDisplayName("System Load Average")
                .withDescription("Displays the system load average for the last minute. The system load average " +
                        "is the sum of the number of runnable entities queued to the available processors and the " +
                        "number of runnable entities running on the available processors averaged over a period of time. " +
                        "The way in which the load average is calculated is operating system specific but is typically a " +
                        "damped time-dependent average. If the load average is not available, a negative value is displayed. " +
                        "This attribute is designed to provide a hint about the system load and may be queried frequently. " +
                        "The load average may be unavailable on some platforms where it is expensive to implement this method.")
                .build();
        registry.register(meta, new Gauge() {
            @Override
            public Number getValue() {
                return operatingSystemMXBean.getSystemLoadAverage();
            }
        });
        meta = Metadata.builder()
                .withName(CPU_AVAILABLE_PROCESSORS)
                .withType(MetricType.GAUGE)
                .withDisplayName("Available Processors")
                .withDescription(
                        "Displays the number of processors available to the Java virtual machine. This value may change during "
                                +
                                "a particular invocation of the virtual machine.")
                .build();
        registry.register(meta, new Gauge() {
            @Override
            public Number getValue() {
                return operatingSystemMXBean.getAvailableProcessors();
            }
        });
        // some metrics are only available in jdk internal class 'com.sun.management.OperatingSystemMXBean': cast to it.
        // com.sun.management.OperatingSystemMXBean is not available in SubstratVM
        // the cast will fail for some JVM not derived from HotSpot (J9 for example) so we check if it is assignable to it
        if (ImageMode.current() == ImageMode.JVM
                && com.sun.management.OperatingSystemMXBean.class.isAssignableFrom(operatingSystemMXBean.getClass())) {
            try {
                com.sun.management.OperatingSystemMXBean internalOperatingSystemMXBean = (com.sun.management.OperatingSystemMXBean) operatingSystemMXBean;
                meta = Metadata.builder()
                        .withName(PROCESS_CPU_LOAD)
                        .withType(MetricType.GAUGE)
                        .withUnit(MetricUnits.PERCENT)
                        .withDisplayName("Process CPU load")
                        .withDescription("Displays the \"recent cpu usage\" for the Java Virtual Machine process. " +
                                "This value is a double in the [0.0,1.0] interval. A value of 0.0 means that none of " +
                                "the CPUs were running threads from the JVM process during the recent period of time " +
                                "observed, while a value of 1.0 means that all CPUs were actively running threads from " +
                                "the JVM 100% of the time during the recent period being observed. Threads from the JVM " +
                                "include the application threads as well as the JVM internal threads. " +
                                "All values between 0.0 and 1.0 are possible depending of the activities going on in " +
                                "the JVM process and the whole system. " +
                                "If the Java Virtual Machine recent CPU usage is not available, the method returns a negative value.")
                        .build();
                registry.register(meta, new Gauge() {
                    @Override
                    public Number getValue() {
                        return internalOperatingSystemMXBean.getProcessCpuLoad();
                    }
                });
            } catch (ClassCastException cce) {
                // this should never occur - the isAssignableFrom check above guards the cast
                log.debug("Unable to cast the OperatingSystemMXBean to com.sun.management.OperatingSystemMXBean, " +
                        "not registering extended operating system metrics", cce);
            }
        }
    }
    /**
     * Registers vendor operating system metrics that are only reachable through the
     * JDK-internal {@code com.sun.management.OperatingSystemMXBean}: system CPU load,
     * process CPU time, free physical memory and free swap space. Registers nothing
     * in native image mode or when the OS MXBean is not assignable to the internal type.
     */
    private void vendorOperatingSystemMetrics(MetricRegistry registry) {
        OperatingSystemMXBean operatingSystemMXBean = ManagementFactory.getOperatingSystemMXBean();
        // some metrics are only available in jdk internal class 'com.sun.management.OperatingSystemMXBean': cast to it.
        // com.sun.management.OperatingSystemMXBean is not available in SubstratVM
        // the cast will fail for some JVM not derived from HotSpot (J9 for example) so we check if it is assignable to it
        if (ImageMode.current() == ImageMode.JVM
                && com.sun.management.OperatingSystemMXBean.class.isAssignableFrom(operatingSystemMXBean.getClass())) {
            try {
                com.sun.management.OperatingSystemMXBean internalOperatingSystemMXBean = (com.sun.management.OperatingSystemMXBean) operatingSystemMXBean;
                Metadata meta = Metadata.builder()
                        .withName(SYSTEM_CPU_LOAD)
                        .withType(MetricType.GAUGE)
                        .withUnit(MetricUnits.PERCENT)
                        .withDisplayName("System CPU load")
                        .withDescription("Displays the \"recent cpu usage\" for the whole system. This value is a double " +
                                "in the [0.0,1.0] interval. A value of 0.0 means that all CPUs were idle during the recent " +
                                "period of time observed, while a value of 1.0 means that all CPUs were actively running " +
                                "100% of the time during the recent period being observed. " +
                                "All values betweens 0.0 and 1.0 are possible depending of the activities going on in the " +
                                "system. If the system recent cpu usage is not available, the method returns a negative value.")
                        .build();
                registry.register(meta, new Gauge() {
                    @Override
                    public Number getValue() {
                        return internalOperatingSystemMXBean.getSystemCpuLoad();
                    }
                });
                meta = Metadata.builder()
                        .withName(PROCESS_CPU_TIME)
                        .withType(MetricType.GAUGE)
                        .withUnit(MetricUnits.NANOSECONDS)
                        .withDisplayName("Process CPU time")
                        .withDescription(
                                "Displays the CPU time used by the process on which the Java virtual machine is running " +
                                        "in nanoseconds. The returned value is of nanoseconds precision but not necessarily " +
                                        "nanoseconds accuracy. This method returns -1 if the the platform does not support " +
                                        "this operation.")
                        .build();
                registry.register(meta, new Gauge() {
                    @Override
                    public Number getValue() {
                        return internalOperatingSystemMXBean.getProcessCpuTime();
                    }
                });
                meta = Metadata.builder()
                        .withName(FREE_PHYSICAL_MEM_SIZE)
                        .withType(MetricType.GAUGE)
                        .withUnit(MetricUnits.BYTES)
                        .withDisplayName("Free physical memory size")
                        .withDescription("Displays the amount of free physical memory in bytes.")
                        .build();
                registry.register(meta, new Gauge() {
                    @Override
                    public Number getValue() {
                        return internalOperatingSystemMXBean.getFreePhysicalMemorySize();
                    }
                });
                meta = Metadata.builder()
                        .withName(FREE_SWAP_SIZE)
                        .withType(MetricType.GAUGE)
                        .withUnit(MetricUnits.BYTES)
                        .withDisplayName("Free swap size")
                        .withDescription("Displays the amount of free swap space in bytes.")
                        .build();
                registry.register(meta, new Gauge() {
                    @Override
                    public Number getValue() {
                        return internalOperatingSystemMXBean.getFreeSwapSpaceSize();
                    }
                });
            } catch (ClassCastException cce) {
                // this should never occur - the isAssignableFrom check above guards the cast
                log.debug("Unable to cast the OperatingSystemMXBean to com.sun.management.OperatingSystemMXBean, " +
                        "not registering extended operating system metrics", cce);
            }
        }
    }
private void threadingMetrics(MetricRegistry registry) {
ThreadMXBean thread = ManagementFactory.getThreadMXBean();
Metadata meta = Metadata.builder()
.withName(THREAD_COUNT)
.withType(MetricType.GAUGE)
.withDisplayName("Thread Count")
.withDescription("Displays the current number of live threads including both daemon and non-daemon threads")
.build();
registry.register(meta, new Gauge() {
@Override
public Number getValue() {
return thread.getThreadCount();
}
});
meta = Metadata.builder()
.withName(THREAD_DAEMON_COUNT)
.withType(MetricType.GAUGE)
.withDisplayName("Daemon Thread Count")
.withDescription("Displays the current number of live daemon threads.")
.build();
registry.register(meta, new Gauge() {
@Override
public Number getValue() {
return thread.getDaemonThreadCount();
}
});
meta = Metadata.builder()
.withName(THREAD_MAX_COUNT)
.withType(MetricType.GAUGE)
.withDisplayName("Peak Thread Count")
.withDescription("Displays the peak live thread count since the Java virtual machine started or peak was " +
"reset. This includes daemon and non-daemon threads.")
.build();
registry.register(meta, new Gauge() {
@Override
public Number getValue() {
return thread.getPeakThreadCount();
}
});
}
private void runtimeMetrics(MetricRegistry registry) {
RuntimeMXBean runtimeMXBean = ManagementFactory.getRuntimeMXBean();
Metadata meta = Metadata.builder()
.withName(JVM_UPTIME)
.withType(MetricType.GAUGE)
.withUnit(MetricUnits.MILLISECONDS)
.withDisplayName("JVM Uptime")
.withDescription("Displays the time from the start of the Java virtual machine in milliseconds.")
.build();
registry.register(meta, new Gauge() {
@Override
public Number getValue() {
return runtimeMXBean.getUptime();
}
});
}
private void baseMemoryMetrics(MetricRegistry registry) {
MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean();
Metadata meta = Metadata.builder()
.withName(MEMORY_COMMITTED_HEAP)
.withType(MetricType.GAUGE)
.withUnit(MetricUnits.BYTES)
.withDisplayName("Committed Heap Memory")
.withDescription(
"Displays the amount of memory in bytes that is committed for the Java virtual machine to use. " +
"This amount of memory is guaranteed for the Java virtual machine to use.")
.build();
registry.register(meta, new Gauge() {
@Override
public Number getValue() {
return memoryMXBean.getHeapMemoryUsage().getCommitted();
}
});
meta = Metadata.builder()
.withName(MEMORY_MAX_HEAP)
.withType(MetricType.GAUGE)
.withUnit(MetricUnits.BYTES)
.withDisplayName("Max Heap Memory")
.withDescription("Displays the maximum amount of heap memory in bytes that can be used for memory management. "
+
"This attribute displays -1 if the maximum heap memory size is undefined. This amount of memory is not "
+
"guaranteed to be available for memory management if it is greater than the amount of committed memory. "
+
"The Java virtual machine may fail to allocate memory even if the amount of used memory does " +
"not exceed this maximum size.")
.build();
registry.register(meta, new Gauge() {
@Override
public Number getValue() {
return memoryMXBean.getHeapMemoryUsage().getMax();
}
});
meta = Metadata.builder()
.withName(MEMORY_USED_HEAP)
.withType(MetricType.GAUGE)
.withUnit(MetricUnits.BYTES)
.withDisplayName("Used Heap Memory")
.withDescription("Displays the amount of used heap memory in bytes.")
.build();
registry.register(meta, new Gauge() {
@Override
public Number getValue() {
return memoryMXBean.getHeapMemoryUsage().getUsed();
}
});
}
private void vendorSpecificMemoryMetrics(MetricRegistry registry) {
MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean();
Metadata meta = Metadata.builder()
.withName(MEMORY_COMMITTED_NON_HEAP)
.withType(MetricType.GAUGE)
.withUnit(MetricUnits.BYTES)
.withDisplayName("Committed Non Heap Memory")
.withDescription(
"Displays the amount of non heap memory in bytes that is committed for the Java virtual machine to use.")
.build();
registry.register(meta, new Gauge() {
@Override
public Number getValue() {
return memoryMXBean.getNonHeapMemoryUsage().getCommitted();
}
});
meta = Metadata.builder()
.withName(MEMORY_MAX_NON_HEAP)
.withType(MetricType.GAUGE)
.withUnit(MetricUnits.BYTES)
.withDisplayName("Max Non Heap Memory")
.withDescription("Displays the maximum amount of used non-heap memory in bytes.")
.build();
registry.register(meta, new Gauge() {
@Override
public Number getValue() {
return memoryMXBean.getNonHeapMemoryUsage().getMax();
}
});
meta = Metadata.builder()
.withName(MEMORY_USED_NON_HEAP)
.withType(MetricType.GAUGE)
.withUnit(MetricUnits.BYTES)
.withDisplayName("Used Non Heap Memory")
.withDescription("Displays the amount of used non-heap memory in bytes.")
.build();
registry.register(meta, new Gauge() {
@Override
public Number getValue() {
return memoryMXBean.getNonHeapMemoryUsage().getUsed();
}
});
}
    /**
     * Registers per-memory-pool usage and peak-usage gauges, tagged with the pool name.
     * Registers nothing in native image mode because MemoryPoolMXBean doesn't work there.
     */
    private void memoryPoolMetrics(MetricRegistry registry) {
        // MemoryPoolMXBean doesn't work in native mode
        if (ImageMode.current() == ImageMode.JVM) {
            List<MemoryPoolMXBean> mps = ManagementFactory.getMemoryPoolMXBeans();
            Metadata usageMetadata = Metadata.builder()
                    .withName("memoryPool.usage")
                    .withType(MetricType.GAUGE)
                    .withDisplayName("Current usage of the memory pool denoted by the 'name' tag")
                    .withDescription("Current usage of the memory pool denoted by the 'name' tag")
                    .withUnit(MetricUnits.BYTES)
                    .build();
            Metadata maxMetadata = Metadata.builder()
                    .withName("memoryPool.usage.max")
                    .withType(MetricType.GAUGE)
                    .withDisplayName("Peak usage of the memory pool denoted by the 'name' tag")
                    .withDescription("Peak usage of the memory pool denoted by the 'name' tag")
                    .withUnit(MetricUnits.BYTES)
                    .build();
            for (MemoryPoolMXBean mp : mps) {
                if (mp.getCollectionUsage() != null && mp.getPeakUsage() != null) {
                    // this will be the case for the heap memory pools
                    // NOTE(review): heap pools report collection usage (usage after the last
                    // collection) rather than instantaneous usage - confirm this is intended
                    registry.register(usageMetadata, new Gauge() {
                        @Override
                        public Number getValue() {
                            return mp.getCollectionUsage().getUsed();
                        }
                    },
                            new Tag("name", mp.getName()));
                    registry.register(maxMetadata, new Gauge() {
                        @Override
                        public Number getValue() {
                            return mp.getPeakUsage().getUsed();
                        }
                    },
                            new Tag("name", mp.getName()));
                } else if (mp.getUsage() != null && mp.getPeakUsage() != null) {
                    // this will be the case for the non-heap memory pools
                    registry.register(usageMetadata, new Gauge() {
                        @Override
                        public Number getValue() {
                            return mp.getUsage().getUsed();
                        }
                    },
                            new Tag("name", mp.getName()));
                    registry.register(maxMetadata, new Gauge() {
                        @Override
                        public Number getValue() {
                            return mp.getPeakUsage().getUsed();
                        }
                    },
                            new Tag("name", mp.getName()));
                }
            }
        }
    }
    /**
     * Registers Micrometer-compatible GC metrics (jvm.gc.max.data.size, jvm.gc.live.data.size,
     * jvm.gc.memory.promoted, jvm.gc.memory.allocated) backed by a {@code MicrometerGCMetrics}
     * instance whose values are updated from GC notifications; the notification listener is
     * cleaned up through the given shutdown context. Registers nothing in native image mode.
     */
    private void micrometerJvmGcMetrics(MetricRegistry registry, ShutdownContext shutdownContext) {
        if (ImageMode.current() == ImageMode.JVM) {
            MicrometerGCMetrics gcMetrics = new MicrometerGCMetrics();
            registry.register(new ExtendedMetadataBuilder()
                    .withName("jvm.gc.max.data.size")
                    .withType(MetricType.GAUGE)
                    .withUnit(MetricUnits.BYTES)
                    .withDescription("Max size of old generation memory pool")
                    .skipsScopeInOpenMetricsExportCompletely(true)
                    .build(), new Gauge() {
                        @Override
                        public Number getValue() {
                            return gcMetrics.getMaxDataSize();
                        }
                    });
            registry.register(new ExtendedMetadataBuilder()
                    .withName("jvm.gc.live.data.size")
                    .withType(MetricType.GAUGE)
                    .withUnit(MetricUnits.BYTES)
                    .withDescription("Size of old generation memory pool after a full GC")
                    .skipsScopeInOpenMetricsExportCompletely(true)
                    .build(), new Gauge() {
                        @Override
                        public Number getValue() {
                            return gcMetrics.getLiveDataSize();
                        }
                    });
            registry.register(new ExtendedMetadataBuilder()
                    .withName("jvm.gc.memory.promoted")
                    .withType(MetricType.COUNTER)
                    .withUnit(MetricUnits.BYTES)
                    .withDescription(
                            "Count of positive increases in the size of the old generation memory pool before GC to after GC")
                    .skipsScopeInOpenMetricsExportCompletely(true)
                    .withOpenMetricsKeyOverride("jvm_gc_memory_promoted_bytes_total")
                    .build(), new GetCountOnlyCounter() {
                        @Override
                        public long getCount() {
                            return gcMetrics.getPromotedBytes();
                        }
                    });
            registry.register(new ExtendedMetadataBuilder()
                    .withName("jvm.gc.memory.allocated")
                    .withType(MetricType.COUNTER)
                    .withUnit(MetricUnits.BYTES)
                    .withDescription(
                            "Incremented for an increase in the size of the young generation memory pool after one GC to before the next")
                    .skipsScopeInOpenMetricsExportCompletely(true)
                    .withOpenMetricsKeyOverride("jvm_gc_memory_allocated_bytes_total")
                    .build(), new GetCountOnlyCounter() {
                        @Override
                        public long getCount() {
                            return gcMetrics.getAllocatedBytes();
                        }
                    });
            // start updating the metric values in a listener for GC events
            // Metrics that mimic the jvm.gc.pause timer will be registered lazily as GC events occur
            gcMetrics.startWatchingNotifications();
            shutdownContext.addShutdownTask(gcMetrics::cleanUp);
        }
    }
    /**
     * Mimics Uptime metrics from Micrometer. Most of the logic here is basically copied from
     * {@link <a href=
     * "https://github.com/micrometer-metrics/micrometer/blob/main/micrometer-core/src/main/java/io/micrometer/core/instrument/binder/system/UptimeMetrics.java">Micrometer
     * Uptime metrics</a>}.
     *
     * @param registry the registry that receives the process.runtime and process.start.time gauges
     */
    private void micrometerRuntimeMetrics(MetricRegistry registry) {
        RuntimeMXBean runtimeMXBean = ManagementFactory.getRuntimeMXBean();
        registry.register(
                new ExtendedMetadataBuilder()
                        .withName("process.runtime")
                        .withType(MetricType.GAUGE)
                        .withUnit(MetricUnits.MILLISECONDS)
                        .withDescription("The uptime of the Java virtual machine")
                        .skipsScopeInOpenMetricsExportCompletely(true)
                        .build(),
                new Gauge() {
                    @Override
                    public Number getValue() {
                        return runtimeMXBean.getUptime();
                    }
                });
        registry.register(
                new ExtendedMetadataBuilder()
                        .withName("process.start.time")
                        .withType(MetricType.GAUGE)
                        .withUnit(MetricUnits.MILLISECONDS)
                        .withDescription("Start time of the process since unix epoch.")
                        .skipsScopeInOpenMetricsExportCompletely(true)
                        .build(),
                new Gauge() {
                    @Override
                    public Number getValue() {
                        return runtimeMXBean.getStartTime();
                    }
                });
    }
    /**
     * Registers Micrometer-compatible thread metrics (jvm.threads.peak, jvm.threads.daemon,
     * jvm.threads.live and, in JVM mode only, jvm.threads.states per thread state) backed
     * by the platform {@link ThreadMXBean}.
     */
    private void micrometerJvmThreadMetrics(MetricRegistry registry) {
        ThreadMXBean threadBean = ManagementFactory.getThreadMXBean();
        registry.register(
                new ExtendedMetadataBuilder()
                        .withName("jvm.threads.peak")
                        .withType(MetricType.GAUGE)
                        .withUnit("threads")
                        .withDescription("The peak live thread count since the Java virtual machine started or peak was reset")
                        .skipsScopeInOpenMetricsExportCompletely(true)
                        .build(),
                new Gauge() {
                    @Override
                    public Number getValue() {
                        return threadBean.getPeakThreadCount();
                    }
                });
        registry.register(
                new ExtendedMetadataBuilder()
                        .withName("jvm.threads.daemon")
                        .withType(MetricType.GAUGE)
                        .withUnit("threads")
                        .withDescription("The current number of live daemon threads")
                        .skipsScopeInOpenMetricsExportCompletely(true)
                        .build(),
                new Gauge() {
                    @Override
                    public Number getValue() {
                        return threadBean.getDaemonThreadCount();
                    }
                });
        registry.register(
                new ExtendedMetadataBuilder()
                        .withName("jvm.threads.live")
                        .withType(MetricType.GAUGE)
                        .withUnit("threads")
                        .withDescription("The current number of live threads including both daemon and non-daemon threads")
                        .skipsScopeInOpenMetricsExportCompletely(true)
                        .build(),
                new Gauge() {
                    @Override
                    public Number getValue() {
                        return threadBean.getThreadCount();
                    }
                });
        // per-state gauges require ThreadInfo dumps, which only work in JVM mode
        if (ImageMode.current() == ImageMode.JVM) {
            ExtendedMetadata threadStatesMetadata = new ExtendedMetadataBuilder()
                    .withName("jvm.threads.states")
                    .withType(MetricType.GAUGE)
                    .withUnit("threads")
                    .withDescription("The current number of threads having a particular state")
                    .skipsScopeInOpenMetricsExportCompletely(true)
                    .build();
            for (Thread.State state : Thread.State.values()) {
                registry.register(threadStatesMetadata,
                        new Gauge() {
                            @Override
                            public Number getValue() {
                                return getThreadStateCount(threadBean, state);
                            }
                        },
                        // e.g. TIMED_WAITING -> "timed-waiting"
                        new Tag("state", state.name().toLowerCase().replace("_", "-")));
            }
        }
    }
    /**
     * Registers Micrometer-compatible memory metrics: per-memory-pool jvm.memory.used,
     * jvm.memory.committed and jvm.memory.max gauges (tagged with pool id and heap/nonheap
     * area) and per-buffer-pool jvm.buffer.count, jvm.buffer.memory.used and
     * jvm.buffer.total.capacity gauges. Registers nothing in native image mode.
     */
    private void micrometerJvmMemoryMetrics(MetricRegistry registry) {
        if (ImageMode.current() == ImageMode.JVM) {
            for (MemoryPoolMXBean memoryPoolMXBean : ManagementFactory.getMemoryPoolMXBeans()) {
                String area = MemoryType.HEAP.equals(memoryPoolMXBean.getType()) ? "heap" : "nonheap";
                Tag[] tags = new Tag[] { new Tag("id", memoryPoolMXBean.getName()),
                        new Tag("area", area) };
                registry.register(
                        new ExtendedMetadataBuilder()
                                .withName("jvm.memory.used")
                                .withType(MetricType.GAUGE)
                                .withUnit("bytes")
                                .withDescription("The amount of used memory")
                                .skipsScopeInOpenMetricsExportCompletely(true)
                                .build(),
                        new Gauge() {
                            @Override
                            public Number getValue() {
                                return memoryPoolMXBean.getUsage().getUsed();
                            }
                        },
                        tags);
                registry.register(
                        new ExtendedMetadataBuilder()
                                .withName("jvm.memory.committed")
                                .withType(MetricType.GAUGE)
                                .withUnit("bytes")
                                .withDescription(
                                        "The amount of memory in bytes that is committed for the Java virtual machine to use")
                                .skipsScopeInOpenMetricsExportCompletely(true)
                                .build(),
                        new Gauge() {
                            @Override
                            public Number getValue() {
                                return memoryPoolMXBean.getUsage().getCommitted();
                            }
                        },
                        tags);
                registry.register(
                        new ExtendedMetadataBuilder()
                                .withName("jvm.memory.max")
                                .withType(MetricType.GAUGE)
                                .withUnit("bytes")
                                .withDescription("The maximum amount of memory in bytes that can be used for memory management")
                                .skipsScopeInOpenMetricsExportCompletely(true)
                                .build(),
                        new Gauge() {
                            @Override
                            public Number getValue() {
                                return memoryPoolMXBean.getUsage().getMax();
                            }
                        },
                        tags);
            }
            for (BufferPoolMXBean bufferPoolBean : ManagementFactory.getPlatformMXBeans(BufferPoolMXBean.class)) {
                Tag tag = new Tag("id", bufferPoolBean.getName());
                registry.register(
                        new ExtendedMetadataBuilder()
                                .withName("jvm.buffer.count")
                                .withType(MetricType.GAUGE)
                                .withUnit("buffers")
                                .withDescription("An estimate of the number of buffers in the pool")
                                .skipsScopeInOpenMetricsExportCompletely(true)
                                .build(),
                        new Gauge() {
                            @Override
                            public Number getValue() {
                                return bufferPoolBean.getCount();
                            }
                        },
                        tag);
                registry.register(
                        new ExtendedMetadataBuilder()
                                .withName("jvm.buffer.memory.used")
                                .withType(MetricType.GAUGE)
                                .withUnit("bytes")
                                .withDescription(
                                        "An estimate of the memory that the Java virtual machine is using for this buffer pool")
                                .skipsScopeInOpenMetricsExportCompletely(true)
                                .build(),
                        new Gauge() {
                            @Override
                            public Number getValue() {
                                return bufferPoolBean.getMemoryUsed();
                            }
                        },
                        tag);
                registry.register(
                        new ExtendedMetadataBuilder()
                                .withName("jvm.buffer.total.capacity")
                                .withType(MetricType.GAUGE)
                                .withUnit("bytes")
                                .withDescription("An estimate of the total capacity of the buffers in this pool")
                                .skipsScopeInOpenMetricsExportCompletely(true)
                                .build(),
                        new Gauge() {
                            @Override
                            public Number getValue() {
                                return bufferPoolBean.getTotalCapacity();
                            }
                        },
                        tag);
            }
        }
    }
    /**
     * Registers Micrometer-compatible class-loading metrics (jvm.classes.loaded gauge,
     * jvm.classes.unloaded counter) backed by the platform {@link ClassLoadingMXBean}.
     */
    private void micrometerJvmClassLoaderMetrics(MetricRegistry registry) {
        // The ClassLoadingMXBean can be used in native mode, but it only returns zeroes, so there's no point in including such metrics.
        if (ImageMode.current() == ImageMode.JVM) {
            ClassLoadingMXBean classLoadingBean = ManagementFactory.getClassLoadingMXBean();
            registry.register(
                    new ExtendedMetadataBuilder()
                            .withName("jvm.classes.loaded")
                            .withType(MetricType.GAUGE)
                            .withUnit("classes")
                            .withDescription("The number of classes that are currently loaded in the Java virtual machine")
                            .withOpenMetricsKeyOverride("jvm_classes_loaded_classes")
                            .build(),
                    new Gauge() {
                        @Override
                        public Number getValue() {
                            return classLoadingBean.getLoadedClassCount();
                        }
                    });
            registry.register(
                    new ExtendedMetadataBuilder()
                            .withName("jvm.classes.unloaded")
                            .withType(MetricType.COUNTER)
                            .withUnit("classes")
                            .withDescription(
                                    "The total number of classes unloaded since the Java virtual machine has started execution")
                            .withOpenMetricsKeyOverride("jvm_classes_unloaded_classes_total")
                            .build(),
                    new GetCountOnlyCounter() {
                        @Override
                        public long getCount() {
                            return classLoadingBean.getUnloadedClassCount();
                        }
                    });
        }
    }
private long getThreadStateCount(ThreadMXBean threadBean, Thread.State state) {
int count = 0;
for (ThreadInfo threadInfo : threadBean.getThreadInfo(threadBean.getAllThreadIds())) {
if (threadInfo != null && threadInfo.getThreadState() == state) {
count++;
}
}
return count;
}
}
| loading |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/http/codec/support/BaseDefaultCodecs.java | {
"start": 34564,
"end": 35271
} | class ____ implements CodecConfigurer.MultipartCodecs {
private final List<HttpMessageWriter<?>> writers = new ArrayList<>();
DefaultMultipartCodecs() {
}
DefaultMultipartCodecs(DefaultMultipartCodecs other) {
this.writers.addAll(other.writers);
}
@Override
public CodecConfigurer.MultipartCodecs encoder(Encoder<?> encoder) {
writer(new EncoderHttpMessageWriter<>(encoder));
initTypedWriters();
return this;
}
@Override
public CodecConfigurer.MultipartCodecs writer(HttpMessageWriter<?> writer) {
this.writers.add(writer);
initTypedWriters();
return this;
}
List<HttpMessageWriter<?>> getWriters() {
return this.writers;
}
}
}
| DefaultMultipartCodecs |
java | spring-projects__spring-boot | module/spring-boot-elasticsearch/src/dockerTest/java/org/springframework/boot/elasticsearch/autoconfigure/ElasticsearchRestClientAutoConfigurationIntegrationTests.java | {
"start": 1735,
"end": 2941
} | class ____ {
@Container
static final ElasticsearchContainer elasticsearch = TestImage.container(ElasticsearchContainer.class);
private final ApplicationContextRunner contextRunner = new ApplicationContextRunner()
.withConfiguration(AutoConfigurations.of(ElasticsearchRestClientAutoConfiguration.class));
@Test
void restClientCanQueryElasticsearchNode() {
this.contextRunner
.withPropertyValues("spring.elasticsearch.uris=" + elasticsearch.getHttpHostAddress(),
"spring.elasticsearch.connection-timeout=120s", "spring.elasticsearch.socket-timeout=120s")
.run((context) -> {
Rest5Client client = context.getBean(Rest5Client.class);
Request index = new Request("PUT", "/test/_doc/2");
index.setJsonEntity("{" + " \"a\": \"alpha\"," + " \"b\": \"bravo\"" + "}");
client.performRequest(index);
Request getRequest = new Request("GET", "/test/_doc/2");
Response response = client.performRequest(getRequest);
try (InputStream input = response.getEntity().getContent()) {
JsonNode result = new ObjectMapper().readTree(input);
assertThat(result.path("found").asBoolean()).isTrue();
}
});
}
}
| ElasticsearchRestClientAutoConfigurationIntegrationTests |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/asm/SymbolTable.java | {
"start": 15076,
"end": 23877
} | class ____.
*/
int setMajorVersionAndClassName(final int majorVersion, final String className) {
this.majorVersion = majorVersion;
this.className = className;
return addConstantClass(className).index;
}
/**
* Returns the number of items in this symbol table's constant_pool array (plus 1).
*
* @return the number of items in this symbol table's constant_pool array (plus 1).
*/
int getConstantPoolCount() {
return constantPoolCount;
}
/**
* Returns the length in bytes of this symbol table's constant_pool array.
*
* @return the length in bytes of this symbol table's constant_pool array.
*/
int getConstantPoolLength() {
return constantPool.length;
}
/**
* Puts this symbol table's constant_pool array in the given ByteVector, preceded by the
* constant_pool_count value.
*
* @param output where the JVMS ClassFile's constant_pool array must be put.
*/
void putConstantPool(final ByteVector output) {
output.putShort(constantPoolCount).putByteArray(constantPool.data, 0, constantPool.length);
}
/**
* Returns the size in bytes of this symbol table's BootstrapMethods attribute. Also adds the
* attribute name in the constant pool.
*
* @return the size in bytes of this symbol table's BootstrapMethods attribute.
*/
int computeBootstrapMethodsSize() {
if (bootstrapMethods != null) {
addConstantUtf8(Constants.BOOTSTRAP_METHODS);
return 8 + bootstrapMethods.length;
} else {
return 0;
}
}
/**
* Puts this symbol table's BootstrapMethods attribute in the given ByteVector. This includes the
* 6 attribute header bytes and the num_bootstrap_methods value.
*
* @param output where the JVMS BootstrapMethods attribute must be put.
*/
void putBootstrapMethods(final ByteVector output) {
if (bootstrapMethods != null) {
output
.putShort(addConstantUtf8(Constants.BOOTSTRAP_METHODS))
.putInt(bootstrapMethods.length + 2)
.putShort(bootstrapMethodCount)
.putByteArray(bootstrapMethods.data, 0, bootstrapMethods.length);
}
}
// -----------------------------------------------------------------------------------------------
// Generic symbol table entries management.
// -----------------------------------------------------------------------------------------------
/**
* Returns the list of entries which can potentially have the given hash code.
*
* @param hashCode a {@link Entry#hashCode} value.
* @return the list of entries which can potentially have the given hash code. The list is stored
* via the {@link Entry#next} field.
*/
private Entry get(final int hashCode) {
return entries[hashCode % entries.length];
}
/**
* Puts the given entry in the {@link #entries} hash set. This method does <i>not</i> check
* whether {@link #entries} already contains a similar entry or not. {@link #entries} is resized
* if necessary to avoid hash collisions (multiple entries needing to be stored at the same {@link
* #entries} array index) as much as possible, with reasonable memory usage.
*
* @param entry an Entry (which must not already be contained in {@link #entries}).
* @return the given entry
*/
private Entry put(final Entry entry) {
if (entryCount > (entries.length * 3) / 4) {
int currentCapacity = entries.length;
int newCapacity = currentCapacity * 2 + 1;
Entry[] newEntries = new Entry[newCapacity];
for (int i = currentCapacity - 1; i >= 0; --i) {
Entry currentEntry = entries[i];
while (currentEntry != null) {
int newCurrentEntryIndex = currentEntry.hashCode % newCapacity;
Entry nextEntry = currentEntry.next;
currentEntry.next = newEntries[newCurrentEntryIndex];
newEntries[newCurrentEntryIndex] = currentEntry;
currentEntry = nextEntry;
}
}
entries = newEntries;
}
entryCount++;
int index = entry.hashCode % entries.length;
entry.next = entries[index];
return entries[index] = entry;
}
/**
* Adds the given entry in the {@link #entries} hash set. This method does <i>not</i> check
* whether {@link #entries} already contains a similar entry or not, and does <i>not</i> resize
* {@link #entries} if necessary.
*
* @param entry an Entry (which must not already be contained in {@link #entries}).
*/
private void add(final Entry entry) {
entryCount++;
int index = entry.hashCode % entries.length;
entry.next = entries[index];
entries[index] = entry;
}
// -----------------------------------------------------------------------------------------------
// Constant pool entries management.
// -----------------------------------------------------------------------------------------------
/**
* Adds a number or string constant to the constant pool of this symbol table. Does nothing if the
* constant pool already contains a similar item.
*
* @param value the value of the constant to be added to the constant pool. This parameter must be
* an {@link Integer}, {@link Byte}, {@link Character}, {@link Short}, {@link Boolean}, {@link
* Float}, {@link Long}, {@link Double}, {@link String}, {@link Type} or {@link Handle}.
* @return a new or already existing Symbol with the given value.
*/
Symbol addConstant(final Object value) {
if (value instanceof Integer) {
return addConstantInteger(((Integer) value).intValue());
} else if (value instanceof Byte) {
return addConstantInteger(((Byte) value).intValue());
} else if (value instanceof Character) {
return addConstantInteger(((Character) value).charValue());
} else if (value instanceof Short) {
return addConstantInteger(((Short) value).intValue());
} else if (value instanceof Boolean) {
return addConstantInteger(((Boolean) value).booleanValue() ? 1 : 0);
} else if (value instanceof Float) {
return addConstantFloat(((Float) value).floatValue());
} else if (value instanceof Long) {
return addConstantLong(((Long) value).longValue());
} else if (value instanceof Double) {
return addConstantDouble(((Double) value).doubleValue());
} else if (value instanceof String) {
return addConstantString((String) value);
} else if (value instanceof Type) {
Type type = (Type) value;
int typeSort = type.getSort();
if (typeSort == Type.OBJECT) {
return addConstantClass(type.getInternalName());
} else if (typeSort == Type.METHOD) {
return addConstantMethodType(type.getDescriptor());
} else { // type is a primitive or array type.
return addConstantClass(type.getDescriptor());
}
} else if (value instanceof Handle) {
Handle handle = (Handle) value;
return addConstantMethodHandle(
handle.getTag(),
handle.getOwner(),
handle.getName(),
handle.getDesc(),
handle.isInterface());
} else if (value instanceof ConstantDynamic) {
ConstantDynamic constantDynamic = (ConstantDynamic) value;
return addConstantDynamic(
constantDynamic.getName(),
constantDynamic.getDescriptor(),
constantDynamic.getBootstrapMethod(),
constantDynamic.getBootstrapMethodArgumentsUnsafe());
} else {
throw new IllegalArgumentException("value " + value);
}
}
/**
* Adds a CONSTANT_Class_info to the constant pool of this symbol table. Does nothing if the
* constant pool already contains a similar item.
*
* @param value the internal name of a class.
* @return a new or already existing Symbol with the given value.
*/
Symbol addConstantClass(final String value) {
return addConstantUtf8Reference(Symbol.CONSTANT_CLASS_TAG, value);
}
/**
* Adds a CONSTANT_Fieldref_info to the constant pool of this symbol table. Does nothing if the
* constant pool already contains a similar item.
*
* @param owner the internal name of a class.
* @param name a field name.
* @param descriptor a field descriptor.
* @return a new or already existing Symbol with the given value.
*/
Symbol addConstantFieldref(final String owner, final String name, final String descriptor) {
return addConstantMemberReference(Symbol.CONSTANT_FIELDREF_TAG, owner, name, descriptor);
}
/**
* Adds a CONSTANT_Methodref_info or CONSTANT_InterfaceMethodref_info to the constant pool of this
* symbol table. Does nothing if the constant pool already contains a similar item.
*
* @param owner the internal name of a class.
* @param name a method name.
* @param descriptor a method descriptor.
* @param isInterface whether owner is an | name |
java | elastic__elasticsearch | x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/PutSearchApplicationAction.java | {
"start": 1673,
"end": 5021
} | class ____ extends LegacyActionRequest implements ToXContentObject {
private final SearchApplication searchApp;
private final boolean create;
public Request(StreamInput in) throws IOException {
super(in);
this.searchApp = new SearchApplication(in);
this.create = in.readBoolean();
}
public Request(String name, boolean create, BytesReference content, XContentType contentType) {
this.searchApp = SearchApplication.fromXContentBytes(name, content, contentType);
this.create = create;
}
public Request(SearchApplication app, boolean create) {
this.searchApp = app;
this.create = create;
}
@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = null;
if (searchApp.indices().length == 0) {
validationException = addValidationError("indices are missing", validationException);
}
if (searchApp.searchApplicationTemplateOrDefault().script() == null) {
validationException = addValidationError("script required for template", validationException);
}
return validationException;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
searchApp.writeTo(out);
out.writeBoolean(create);
}
public SearchApplication getSearchApplication() {
return searchApp;
}
public boolean create() {
return create;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Request that = (Request) o;
return Objects.equals(searchApp, that.searchApp) && create == that.create;
}
@Override
public int hashCode() {
return Objects.hash(searchApp, create);
}
public static final ParseField SEARCH_APPLICATION = new ParseField("searchApp");
public static final ParseField CREATE = new ParseField("create");
@SuppressWarnings("unchecked")
private static final ConstructingObjectParser<Request, String> PARSER = new ConstructingObjectParser<>(
"put_search_application_request",
false,
(params) -> new Request((SearchApplication) params[0], (boolean) params[1])
);
static {
PARSER.declareObject(constructorArg(), (p, c) -> SearchApplication.fromXContent(c, p), SEARCH_APPLICATION);
PARSER.declareBoolean(constructorArg(), CREATE);
}
public static Request parse(XContentParser parser, String resourceName) {
return PARSER.apply(parser, resourceName);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(SEARCH_APPLICATION.getPreferredName(), searchApp);
builder.field(CREATE.getPreferredName(), create);
builder.endObject();
return builder;
}
}
public static | Request |
java | elastic__elasticsearch | x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/CertificateTool.java | {
"start": 61042,
"end": 63433
} | class ____ {
final String originalName;
final X500Principal x500Principal;
final String filename;
final String error;
private Name(String name, X500Principal x500Principal, String filename, String error) {
this.originalName = name;
this.x500Principal = x500Principal;
this.filename = filename;
this.error = error;
}
static Name fromUserProvidedName(String name, String filename) {
if ("ca".equals(name)) {
return new Name(name, null, null, "[ca] may not be used as an instance name");
}
if (name == null) {
return new Name("", null, null, "instance name may not be null");
}
final X500Principal principal;
try {
if (name.contains("=")) {
principal = new X500Principal(name);
} else {
principal = new X500Principal("CN=" + name);
}
} catch (IllegalArgumentException e) {
String error = "["
+ name
+ "] could not be converted to a valid DN\n"
+ e.getMessage()
+ "\n"
+ ExceptionsHelper.stackTrace(e);
return new Name(name, null, null, error);
}
boolean validFilename = isValidFilename(filename);
if (validFilename == false) {
return new Name(name, principal, null, "[" + filename + "] is not a valid filename");
}
return new Name(name, principal, resolvePath(filename).toString(), null);
}
static boolean isValidFilename(String name) {
return ALLOWED_FILENAME_CHAR_PATTERN.matcher(name).matches()
&& ALLOWED_FILENAME_CHAR_PATTERN.matcher(resolvePath(name).toString()).matches()
&& name.startsWith(".") == false;
}
@Override
public String toString() {
return getClass().getSimpleName()
+ "{original=["
+ originalName
+ "] principal=["
+ x500Principal
+ "] file=["
+ filename
+ "] err=["
+ error
+ "]}";
}
}
static | Name |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobEndNotifier.java | {
"start": 1439,
"end": 4331
} | class ____ {
private static final Logger LOG =
LoggerFactory.getLogger(JobEndNotifier.class.getName());
private static JobEndStatusInfo createNotification(JobConf conf,
JobStatus status) {
JobEndStatusInfo notification = null;
String uri = conf.getJobEndNotificationURI();
if (uri != null) {
int retryAttempts = conf.getInt(JobContext.MR_JOB_END_RETRY_ATTEMPTS, 0);
long retryInterval = conf.getInt(JobContext.MR_JOB_END_RETRY_INTERVAL, 30000);
int timeout = conf.getInt(JobContext.MR_JOB_END_NOTIFICATION_TIMEOUT,
JobContext.DEFAULT_MR_JOB_END_NOTIFICATION_TIMEOUT);
if (uri.contains("$jobId")) {
uri = uri.replace("$jobId", status.getJobID().toString());
}
if (uri.contains("$jobStatus")) {
String statusStr =
(status.getRunState() == JobStatus.SUCCEEDED) ? "SUCCEEDED" :
(status.getRunState() == JobStatus.FAILED) ? "FAILED" : "KILLED";
uri = uri.replace("$jobStatus", statusStr);
}
notification = new JobEndStatusInfo(
uri, retryAttempts, retryInterval, timeout);
}
return notification;
}
private static int httpNotification(String uri, int timeout)
throws IOException, URISyntaxException {
HttpClientBuilder httpClientBuilder = HttpClientBuilder.create();
httpClientBuilder.setDefaultRequestConfig(
RequestConfig.custom()
.setConnectionRequestTimeout(timeout)
.setSocketTimeout(timeout)
.build());
HttpClient client = httpClientBuilder.build();
HttpGet httpGet = new HttpGet(new URI(uri));
httpGet.setHeader("Accept", "*/*");
return client.execute(httpGet).getStatusLine().getStatusCode();
}
// for use by the LocalJobRunner, without using a thread&queue,
// simple synchronous way
public static void localRunnerNotification(JobConf conf, JobStatus status) {
JobEndStatusInfo notification = createNotification(conf, status);
if (notification != null) {
do {
try {
int code = httpNotification(notification.getUri(),
notification.getTimeout());
if (code != 200) {
throw new IOException("Invalid response status code: " + code);
}
else {
break;
}
}
catch (IOException ioex) {
LOG.error("Notification error [" + notification.getUri() + "]", ioex);
}
catch (Exception ex) {
LOG.error("Notification error [" + notification.getUri() + "]", ex);
}
try {
Thread.sleep(notification.getRetryInterval());
}
catch (InterruptedException iex) {
LOG.error("Notification retry error [" + notification + "]", iex);
}
} while (notification.configureForRetry());
}
}
private static | JobEndNotifier |
java | spring-projects__spring-boot | module/spring-boot-http-client/src/main/java/org/springframework/boot/http/client/HttpComponentsHttpAsyncClientBuilder.java | {
"start": 1813,
"end": 9052
} | class ____ {
private final Consumer<HttpAsyncClientBuilder> customizer;
private final Consumer<PoolingAsyncClientConnectionManagerBuilder> connectionManagerCustomizer;
private final Consumer<ConnectionConfig.Builder> connectionConfigCustomizer;
private final Consumer<RequestConfig.Builder> defaultRequestConfigCustomizer;
private final Function<@Nullable SslBundle, @Nullable TlsStrategy> tlsStrategyFactory;
public HttpComponentsHttpAsyncClientBuilder() {
this(Empty.consumer(), Empty.consumer(), Empty.consumer(), Empty.consumer(),
HttpComponentsSslBundleTlsStrategy::get);
}
private HttpComponentsHttpAsyncClientBuilder(Consumer<HttpAsyncClientBuilder> customizer,
Consumer<PoolingAsyncClientConnectionManagerBuilder> connectionManagerCustomizer,
Consumer<ConnectionConfig.Builder> connectionConfigCustomizer,
Consumer<RequestConfig.Builder> defaultRequestConfigCustomizer,
Function<@Nullable SslBundle, @Nullable TlsStrategy> tlsStrategyFactory) {
this.customizer = customizer;
this.connectionManagerCustomizer = connectionManagerCustomizer;
this.connectionConfigCustomizer = connectionConfigCustomizer;
this.defaultRequestConfigCustomizer = defaultRequestConfigCustomizer;
this.tlsStrategyFactory = tlsStrategyFactory;
}
/**
* Return a new {@link HttpComponentsHttpAsyncClientBuilder} that applies additional
* customization to the underlying {@link HttpAsyncClientBuilder}.
* @param customizer the customizer to apply
* @return a new {@link HttpComponentsHttpAsyncClientBuilder} instance
*/
public HttpComponentsHttpAsyncClientBuilder withCustomizer(Consumer<HttpAsyncClientBuilder> customizer) {
Assert.notNull(customizer, "'customizer' must not be null");
return new HttpComponentsHttpAsyncClientBuilder(this.customizer.andThen(customizer),
this.connectionManagerCustomizer, this.connectionConfigCustomizer, this.defaultRequestConfigCustomizer,
this.tlsStrategyFactory);
}
/**
* Return a new {@link HttpComponentsHttpAsyncClientBuilder} that applies additional
* customization to the underlying {@link PoolingAsyncClientConnectionManagerBuilder}.
* @param connectionManagerCustomizer the customizer to apply
* @return a new {@link HttpComponentsHttpAsyncClientBuilder} instance
*/
public HttpComponentsHttpAsyncClientBuilder withConnectionManagerCustomizer(
Consumer<PoolingAsyncClientConnectionManagerBuilder> connectionManagerCustomizer) {
Assert.notNull(connectionManagerCustomizer, "'connectionManagerCustomizer' must not be null");
return new HttpComponentsHttpAsyncClientBuilder(this.customizer,
this.connectionManagerCustomizer.andThen(connectionManagerCustomizer), this.connectionConfigCustomizer,
this.defaultRequestConfigCustomizer, this.tlsStrategyFactory);
}
/**
* Return a new {@link HttpComponentsHttpAsyncClientBuilder} that applies additional
* customization to the underlying
* {@link org.apache.hc.client5.http.config.ConnectionConfig.Builder}.
* @param connectionConfigCustomizer the customizer to apply
* @return a new {@link HttpComponentsHttpAsyncClientBuilder} instance
*/
public HttpComponentsHttpAsyncClientBuilder withConnectionConfigCustomizer(
Consumer<ConnectionConfig.Builder> connectionConfigCustomizer) {
Assert.notNull(connectionConfigCustomizer, "'connectionConfigCustomizer' must not be null");
return new HttpComponentsHttpAsyncClientBuilder(this.customizer, this.connectionManagerCustomizer,
this.connectionConfigCustomizer.andThen(connectionConfigCustomizer),
this.defaultRequestConfigCustomizer, this.tlsStrategyFactory);
}
/**
* Return a new {@link HttpComponentsHttpAsyncClientBuilder} with a replacement
* {@link TlsStrategy} factory.
* @param tlsStrategyFactory the new factory used to create a {@link TlsStrategy} for
* a given {@link SslBundle}
* @return a new {@link HttpComponentsHttpAsyncClientBuilder} instance
*/
public HttpComponentsHttpAsyncClientBuilder withTlsStrategyFactory(
Function<@Nullable SslBundle, @Nullable TlsStrategy> tlsStrategyFactory) {
Assert.notNull(tlsStrategyFactory, "'tlsStrategyFactory' must not be null");
return new HttpComponentsHttpAsyncClientBuilder(this.customizer, this.connectionManagerCustomizer,
this.connectionConfigCustomizer, this.defaultRequestConfigCustomizer, tlsStrategyFactory);
}
/**
* Return a new {@link HttpComponentsHttpAsyncClientBuilder} that applies additional
* customization to the underlying
* {@link org.apache.hc.client5.http.config.RequestConfig.Builder} used for default
* requests.
* @param defaultRequestConfigCustomizer the customizer to apply
* @return a new {@link HttpComponentsHttpAsyncClientBuilder} instance
*/
public HttpComponentsHttpAsyncClientBuilder withDefaultRequestConfigCustomizer(
Consumer<RequestConfig.Builder> defaultRequestConfigCustomizer) {
Assert.notNull(defaultRequestConfigCustomizer, "'defaultRequestConfigCustomizer' must not be null");
return new HttpComponentsHttpAsyncClientBuilder(this.customizer, this.connectionManagerCustomizer,
this.connectionConfigCustomizer,
this.defaultRequestConfigCustomizer.andThen(defaultRequestConfigCustomizer), this.tlsStrategyFactory);
}
/**
* Build a new {@link HttpAsyncClient} instance with the given settings applied.
* @param settings the settings to apply
* @return a new {@link CloseableHttpAsyncClient} instance
*/
public CloseableHttpAsyncClient build(@Nullable HttpClientSettings settings) {
settings = (settings != null) ? settings : HttpClientSettings.defaults();
HttpAsyncClientBuilder builder = HttpAsyncClientBuilder.create()
.useSystemProperties()
.setRedirectStrategy(HttpComponentsRedirectStrategy.get(settings.redirects()))
.setConnectionManager(createConnectionManager(settings))
.setDefaultRequestConfig(createDefaultRequestConfig());
this.customizer.accept(builder);
return builder.build();
}
private PoolingAsyncClientConnectionManager createConnectionManager(HttpClientSettings settings) {
PoolingAsyncClientConnectionManagerBuilder builder = PoolingAsyncClientConnectionManagerBuilder.create()
.useSystemProperties();
PropertyMapper map = PropertyMapper.get();
builder.setDefaultConnectionConfig(createConnectionConfig(settings));
map.from(settings::sslBundle).as(this.tlsStrategyFactory::apply).to(builder::setTlsStrategy);
this.connectionManagerCustomizer.accept(builder);
return builder.build();
}
private ConnectionConfig createConnectionConfig(HttpClientSettings settings) {
ConnectionConfig.Builder builder = ConnectionConfig.custom();
PropertyMapper map = PropertyMapper.get();
map.from(settings::connectTimeout)
.as(Duration::toMillis)
.to((timeout) -> builder.setConnectTimeout(timeout, TimeUnit.MILLISECONDS));
map.from(settings::readTimeout)
.asInt(Duration::toMillis)
.to((timeout) -> builder.setSocketTimeout(timeout, TimeUnit.MILLISECONDS));
this.connectionConfigCustomizer.accept(builder);
return builder.build();
}
private RequestConfig createDefaultRequestConfig() {
RequestConfig.Builder builder = RequestConfig.custom();
this.defaultRequestConfigCustomizer.accept(builder);
return builder.build();
}
}
| HttpComponentsHttpAsyncClientBuilder |
java | spring-projects__spring-security | oauth2/oauth2-core/src/main/java/org/springframework/security/oauth2/core/endpoint/OAuth2AuthorizationExchange.java | {
"start": 1032,
"end": 2518
} | class ____ implements Serializable {
@Serial
private static final long serialVersionUID = 620L;
private final OAuth2AuthorizationRequest authorizationRequest;
private final OAuth2AuthorizationResponse authorizationResponse;
/**
* Constructs a new {@code OAuth2AuthorizationExchange} with the provided
* Authorization Request and Authorization Response.
* @param authorizationRequest the {@link OAuth2AuthorizationRequest Authorization
* Request}
* @param authorizationResponse the {@link OAuth2AuthorizationResponse Authorization
* Response}
*/
public OAuth2AuthorizationExchange(OAuth2AuthorizationRequest authorizationRequest,
OAuth2AuthorizationResponse authorizationResponse) {
Assert.notNull(authorizationRequest, "authorizationRequest cannot be null");
Assert.notNull(authorizationResponse, "authorizationResponse cannot be null");
this.authorizationRequest = authorizationRequest;
this.authorizationResponse = authorizationResponse;
}
/**
* Returns the {@link OAuth2AuthorizationRequest Authorization Request}.
* @return the {@link OAuth2AuthorizationRequest}
*/
public OAuth2AuthorizationRequest getAuthorizationRequest() {
return this.authorizationRequest;
}
/**
* Returns the {@link OAuth2AuthorizationResponse Authorization Response}.
* @return the {@link OAuth2AuthorizationResponse}
*/
public OAuth2AuthorizationResponse getAuthorizationResponse() {
return this.authorizationResponse;
}
}
| OAuth2AuthorizationExchange |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/ActionRequestLazyBuilder.java | {
"start": 830,
"end": 2074
} | class ____<Request extends ActionRequest, Response extends ActionResponse>
implements
RequestBuilder<Request, Response> {
protected final ActionType<Response> action;
protected final ElasticsearchClient client;
protected ActionRequestLazyBuilder(ElasticsearchClient client, ActionType<Response> action) {
Objects.requireNonNull(action, "action must not be null");
this.action = action;
this.client = client;
}
/**
* This method creates the request. The caller of this method is responsible for calling Request#decRef.
* @return A newly-built Request, fully initialized by this builder.
*/
public abstract Request request();
public ActionFuture<Response> execute() {
return client.execute(action, request());
}
/**
* Short version of execute().actionGet().
*/
public Response get() {
return execute().actionGet();
}
/**
* Short version of execute().actionGet().
*/
public Response get(TimeValue timeout) {
return execute().actionGet(timeout);
}
public void execute(ActionListener<Response> listener) {
client.execute(action, request(), listener);
}
}
| ActionRequestLazyBuilder |
java | spring-projects__spring-framework | spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/ServletAnnotationControllerHandlerMethodTests.java | {
"start": 112998,
"end": 113583
} | class ____ implements MyEditableParameterizedControllerIfc<TestBean> {
@Override
public List<TestBean> getTestBeans() {
List<TestBean> list = new ArrayList<>();
list.add(new TestBean("tb1"));
list.add(new TestBean("tb2"));
return list;
}
@Override
public String get(Model model) {
model.addAttribute("object1", new TestBean());
model.addAttribute("object2", new TestBean());
return "page1";
}
@Override
public String post(TestBean object) {
//do something with object1
return "page2";
}
}
@Controller
static | MyParameterizedControllerImpl |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/mixed/ObservableConcatMapCompletable.java | {
"start": 1356,
"end": 2205
} | class ____<T> extends Completable {
final Observable<T> source;
final Function<? super T, ? extends CompletableSource> mapper;
final ErrorMode errorMode;
final int prefetch;
public ObservableConcatMapCompletable(Observable<T> source,
Function<? super T, ? extends CompletableSource> mapper,
ErrorMode errorMode,
int prefetch) {
this.source = source;
this.mapper = mapper;
this.errorMode = errorMode;
this.prefetch = prefetch;
}
@Override
protected void subscribeActual(CompletableObserver observer) {
if (!ScalarXMapZHelper.tryAsCompletable(source, mapper, observer)) {
source.subscribe(new ConcatMapCompletableObserver<>(observer, mapper, errorMode, prefetch));
}
}
static final | ObservableConcatMapCompletable |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/io/buffer/OutputStreamPublisher.java | {
"start": 1814,
"end": 3632
} | class ____<T> implements Publisher<T> {
private static final int DEFAULT_CHUNK_SIZE = 1024;
private final OutputStreamHandler outputStreamHandler;
private final ByteMapper<T> byteMapper;
private final Executor executor;
private final int chunkSize;
/**
* Create an instance.
* @param outputStreamHandler invoked when the first buffer is requested
* @param byteMapper maps written bytes to {@code T}
* @param executor used to invoke the {@code outputStreamHandler}
* @param chunkSize the chunk sizes to be produced by the publisher
*/
OutputStreamPublisher(
OutputStreamHandler outputStreamHandler, ByteMapper<T> byteMapper,
Executor executor, @Nullable Integer chunkSize) {
Assert.notNull(outputStreamHandler, "OutputStreamHandler must not be null");
Assert.notNull(byteMapper, "ByteMapper must not be null");
Assert.notNull(executor, "Executor must not be null");
Assert.isTrue(chunkSize == null || chunkSize > 0, "ChunkSize must be larger than 0");
this.outputStreamHandler = outputStreamHandler;
this.byteMapper = byteMapper;
this.executor = executor;
this.chunkSize = (chunkSize != null ? chunkSize : DEFAULT_CHUNK_SIZE);
}
@Override
public void subscribe(Subscriber<? super T> subscriber) {
// We don't use Assert.notNull(), because a NullPointerException is required
// for Reactive Streams compliance.
Objects.requireNonNull(subscriber, "Subscriber must not be null");
OutputStreamSubscription<T> subscription = new OutputStreamSubscription<>(
subscriber, this.outputStreamHandler, this.byteMapper, this.chunkSize);
subscriber.onSubscribe(subscription);
this.executor.execute(subscription::invokeHandler);
}
/**
* Contract to provide callback access to the {@link OutputStream}.
*/
@FunctionalInterface
public | OutputStreamPublisher |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullVector.java | {
"start": 659,
"end": 4144
} | class ____ extends AbstractVector
implements
BooleanVector,
BytesRefVector,
DoubleVector,
FloatVector,
IntVector,
LongVector {
private ConstantNullVector(int positionCount, BlockFactory blockFactory) {
super(positionCount, blockFactory);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
assert false : "null vector";
throw new UnsupportedOperationException("null vector");
}
@Override
public ConstantNullBlock asBlock() {
assert false : "null vector";
throw new UnsupportedOperationException("null vector");
}
@Override
public OrdinalBytesRefVector asOrdinals() {
assert false : "null vector";
throw new UnsupportedOperationException("null vector");
}
@Override
public ConstantNullVector filter(int... positions) {
assert false : "null vector";
throw new UnsupportedOperationException("null vector");
}
@Override
public ConstantNullVector deepCopy(BlockFactory blockFactory) {
assert false : "null vector";
throw new UnsupportedOperationException("null vector");
}
@Override
public ConstantNullBlock keepMask(BooleanVector mask) {
assert false : "null vector";
throw new UnsupportedOperationException("null vector");
}
@Override
public ReleasableIterator<ConstantNullBlock> lookup(IntBlock positions, ByteSizeValue targetBlockSize) {
assert false : "null vector";
throw new UnsupportedOperationException("null vector");
}
@Override
public boolean getBoolean(int position) {
assert false : "null vector";
throw new UnsupportedOperationException("null vector");
}
@Override
public BytesRef getBytesRef(int position, BytesRef dest) {
assert false : "null vector";
throw new UnsupportedOperationException("null vector");
}
@Override
public float getFloat(int position) {
assert false : "null vector";
throw new UnsupportedOperationException("null vector");
}
@Override
public double getDouble(int position) {
assert false : "null vector";
throw new UnsupportedOperationException("null vector");
}
@Override
public int getInt(int position) {
assert false : "null vector";
throw new UnsupportedOperationException("null vector");
}
@Override
public long getLong(int position) {
assert false : "null vector";
throw new UnsupportedOperationException("null vector");
}
@Override
public int min() {
assert false : "null vector";
throw new UnsupportedOperationException("null vector");
}
@Override
public int max() {
assert false : "null vector";
throw new UnsupportedOperationException("null vector");
}
@Override
public boolean allTrue() {
assert false : "null vector";
throw new UnsupportedOperationException("null vector");
}
@Override
public boolean allFalse() {
assert false : "null vector";
throw new UnsupportedOperationException("null vector");
}
@Override
public ElementType elementType() {
return ElementType.NULL;
}
@Override
public boolean isConstant() {
return true;
}
@Override
public long ramBytesUsed() {
return 0;
}
}
| ConstantNullVector |
java | netty__netty | common/src/test/java/io/netty/util/internal/OsClassifiersTest.java | {
"start": 1095,
"end": 3688
} | class ____ {
private static final String OS_CLASSIFIERS_PROPERTY = "io.netty.osClassifiers";
private Properties systemProperties;
@BeforeEach
void setUp() {
systemProperties = System.getProperties();
}
@AfterEach
void tearDown() {
systemProperties.remove(OS_CLASSIFIERS_PROPERTY);
}
@Test
void testOsClassifiersPropertyAbsent() {
Set<String> available = new LinkedHashSet<>(2);
boolean added = PlatformDependent.addPropertyOsClassifiers(available);
assertFalse(added);
assertTrue(available.isEmpty());
}
@Test
void testOsClassifiersPropertyEmpty() {
// empty property -Dio.netty.osClassifiers
systemProperties.setProperty(OS_CLASSIFIERS_PROPERTY, "");
Set<String> available = new LinkedHashSet<>(2);
boolean added = PlatformDependent.addPropertyOsClassifiers(available);
assertTrue(added);
assertTrue(available.isEmpty());
}
@Test
void testOsClassifiersPropertyNotEmptyNoClassifiers() {
// ID
systemProperties.setProperty(OS_CLASSIFIERS_PROPERTY, ",");
final Set<String> available = new LinkedHashSet<>(2);
Assertions.assertThrows(IllegalArgumentException.class,
() -> PlatformDependent.addPropertyOsClassifiers(available));
}
@Test
void testOsClassifiersPropertySingle() {
// ID
systemProperties.setProperty(OS_CLASSIFIERS_PROPERTY, "fedora");
Set<String> available = new LinkedHashSet<>(2);
boolean added = PlatformDependent.addPropertyOsClassifiers(available);
assertTrue(added);
assertEquals(1, available.size());
assertEquals("fedora", available.iterator().next());
}
@Test
void testOsClassifiersPropertyPair() {
// ID, ID_LIKE
systemProperties.setProperty(OS_CLASSIFIERS_PROPERTY, "manjaro,arch");
Set<String> available = new LinkedHashSet<>(2);
boolean added = PlatformDependent.addPropertyOsClassifiers(available);
assertTrue(added);
assertEquals(1, available.size());
assertEquals("arch", available.iterator().next());
}
@Test
void testOsClassifiersPropertyExcessive() {
// ID, ID_LIKE, excessive
systemProperties.setProperty(OS_CLASSIFIERS_PROPERTY, "manjaro,arch,slackware");
final Set<String> available = new LinkedHashSet<>(2);
Assertions.assertThrows(IllegalArgumentException.class,
() -> PlatformDependent.addPropertyOsClassifiers(available));
}
}
| OsClassifiersTest |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/condition/AllOf_matches_Test.java | {
"start": 1018,
"end": 1713
} | class ____ {
private TestCondition<Object> condition1;
private TestCondition<Object> condition2;
private Condition<Object> allOf;
@BeforeEach
public void setUp() {
condition1 = new TestCondition<>();
condition2 = new TestCondition<>();
allOf = allOf(condition1, condition2);
}
@Test
void should_match_if_all_Condition_match() {
condition1.shouldMatch(true);
condition2.shouldMatch(true);
assertThat(allOf.matches("Yoda")).isTrue();
}
@Test
void should_not_match_if_at_least_one_Condition_does_not_match() {
condition1.shouldMatch(true);
condition2.shouldMatch(false);
assertThat(allOf.matches("Yoda")).isFalse();
}
}
| AllOf_matches_Test |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/object2darrays/Object2DArrays_assertContains_at_Index_Test.java | {
"start": 1128,
"end": 1509
} | class ____ extends Object2DArraysBaseTest {
@Test
void should_delegate_to_Arrays2D() {
// GIVEN
String[] strings = new String[] { "d", "e", "f" };
// WHEN
object2dArrays.assertContains(info, actual, strings, atIndex(1));
// THEN
verify(arrays2d).assertContains(info, failures, actual, strings, atIndex(1));
}
}
| Object2DArrays_assertContains_at_Index_Test |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/catalog/TemporaryOperationListener.java | {
"start": 979,
"end": 1236
} | interface ____ for a {@link Catalog} to listen on temporary object operations. When a catalog
* implements this interface, it'll get informed when certain operations are performed on temporary
* objects belonging to that catalog.
*/
@PublicEvolving
public | is |
java | elastic__elasticsearch | x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningTests.java | {
"start": 2830,
"end": 14763
} | class ____ extends ESTestCase {
@SuppressWarnings("unchecked")
public void testPrePostSystemIndexUpgrade_givenNotInUpgradeMode() throws IOException {
ThreadPool threadpool = new TestThreadPool("test");
Client client = mock(Client.class);
when(client.threadPool()).thenReturn(threadpool);
doAnswer(invocationOnMock -> {
ActionListener<AcknowledgedResponse> listener = (ActionListener<AcknowledgedResponse>) invocationOnMock.getArguments()[2];
listener.onResponse(AcknowledgedResponse.TRUE);
return null;
}).when(client).execute(same(SetUpgradeModeAction.INSTANCE), any(SetUpgradeModeAction.Request.class), any(ActionListener.class));
try (MachineLearning machineLearning = createTrialLicensedMachineLearning(Settings.EMPTY)) {
SetOnce<Map<String, Object>> response = new SetOnce<>();
machineLearning.prepareForIndicesMigration(emptyProject(), client, ActionTestUtils.assertNoFailureListener(response::set));
assertThat(response.get(), equalTo(Collections.singletonMap("already_in_upgrade_mode", false)));
verify(client).execute(
same(SetUpgradeModeAction.INSTANCE),
eq(new SetUpgradeModeAction.Request(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, true)),
any(ActionListener.class)
);
machineLearning.indicesMigrationComplete(
response.get(),
client,
ActionTestUtils.assertNoFailureListener(ESTestCase::assertTrue)
);
verify(client).execute(
same(SetUpgradeModeAction.INSTANCE),
eq(new SetUpgradeModeAction.Request(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, false)),
any(ActionListener.class)
);
} finally {
threadpool.shutdown();
}
}
public void testPrePostSystemIndexUpgrade_givenAlreadyInUpgradeMode() throws IOException {
final var project = ProjectMetadata.builder(randomProjectIdOrDefault())
.putCustom(MlMetadata.TYPE, new MlMetadata.Builder().isUpgradeMode(true).build())
.build();
Client client = mock(Client.class);
try (MachineLearning machineLearning = createTrialLicensedMachineLearning(Settings.EMPTY)) {
SetOnce<Map<String, Object>> response = new SetOnce<>();
machineLearning.prepareForIndicesMigration(project, client, ActionTestUtils.assertNoFailureListener(response::set));
assertThat(response.get(), equalTo(Collections.singletonMap("already_in_upgrade_mode", true)));
verifyNoMoreInteractions(client);
machineLearning.indicesMigrationComplete(
response.get(),
client,
ActionTestUtils.assertNoFailureListener(ESTestCase::assertTrue)
);
// Neither pre nor post should have called any action
verifyNoMoreInteractions(client);
}
}
public void testMaxOpenWorkersSetting_givenDefault() {
int maxOpenWorkers = MachineLearning.MAX_OPEN_JOBS_PER_NODE.get(Settings.EMPTY);
assertEquals(512, maxOpenWorkers);
}
public void testMaxOpenWorkersSetting_givenSetting() {
Settings.Builder settings = Settings.builder();
settings.put(MachineLearning.MAX_OPEN_JOBS_PER_NODE.getKey(), 7);
int maxOpenWorkers = MachineLearning.MAX_OPEN_JOBS_PER_NODE.get(settings.build());
assertEquals(7, maxOpenWorkers);
}
public void testMaxMachineMemoryPercent_givenDefault() {
int maxMachineMemoryPercent = MachineLearning.MAX_MACHINE_MEMORY_PERCENT.get(Settings.EMPTY);
assertEquals(30, maxMachineMemoryPercent);
}
public void testMaxMachineMemoryPercent_givenValidSetting() {
Settings.Builder settings = Settings.builder();
int expectedMaxMachineMemoryPercent = randomIntBetween(5, 200);
settings.put(MachineLearning.MAX_MACHINE_MEMORY_PERCENT.getKey(), expectedMaxMachineMemoryPercent);
int maxMachineMemoryPercent = MachineLearning.MAX_MACHINE_MEMORY_PERCENT.get(settings.build());
assertEquals(expectedMaxMachineMemoryPercent, maxMachineMemoryPercent);
}
public void testMaxMachineMemoryPercent_givenInvalidSetting() {
Settings.Builder settings = Settings.builder();
int invalidMaxMachineMemoryPercent = randomFrom(4, 201);
settings.put(MachineLearning.MAX_MACHINE_MEMORY_PERCENT.getKey(), invalidMaxMachineMemoryPercent);
IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> MachineLearning.MAX_MACHINE_MEMORY_PERCENT.get(settings.build())
);
assertThat(
e.getMessage(),
startsWith(
"Failed to parse value [" + invalidMaxMachineMemoryPercent + "] for setting [xpack.ml.max_machine_memory_percent] must be"
)
);
}
public void testNoAttributes_givenNoClash() throws IOException {
Settings.Builder builder = Settings.builder();
if (randomBoolean()) {
builder.put("xpack.ml.enabled", randomBoolean());
}
if (randomBoolean()) {
builder.put("xpack.ml.max_open_jobs", randomIntBetween(9, 12));
}
builder.put("node.attr.foo", "abc");
builder.put("node.attr.ml.bar", "def");
try (MachineLearning machineLearning = createTrialLicensedMachineLearning(builder.put("path.home", createTempDir()).build())) {
assertNotNull(machineLearning.additionalSettings());
}
}
public void testNoAttributes_givenSameAndMlEnabled() throws IOException {
Settings.Builder builder = Settings.builder();
if (randomBoolean()) {
builder.put("xpack.ml.enabled", randomBoolean());
}
if (randomBoolean()) {
int maxOpenJobs = randomIntBetween(5, 15);
builder.put("xpack.ml.max_open_jobs", maxOpenJobs);
}
try (MachineLearning machineLearning = createTrialLicensedMachineLearning(builder.put("path.home", createTempDir()).build())) {
assertNotNull(machineLearning.additionalSettings());
}
}
public void testNoAttributes_givenClash() throws IOException {
Settings.Builder builder = Settings.builder();
builder.put("node.attr.ml.max_open_jobs", randomIntBetween(13, 15));
try (MachineLearning machineLearning = createTrialLicensedMachineLearning(builder.put("path.home", createTempDir()).build())) {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, machineLearning::additionalSettings);
assertThat(e.getMessage(), startsWith("Directly setting [node.attr.ml."));
assertThat(
e.getMessage(),
containsString(
"] is not permitted - "
+ "it is reserved for machine learning. If your intention was to customize machine learning, set the [xpack.ml."
)
);
}
}
public void testAnomalyDetectionOnly() throws IOException {
Settings settings = Settings.builder().put("path.home", createTempDir()).build();
MlTestExtensionLoader loader = new MlTestExtensionLoader(new MlTestExtension(false, false, true, false, false));
try (MachineLearning machineLearning = createTrialLicensedMachineLearning(settings, loader)) {
List<RestHandler> restHandlers = machineLearning.getRestHandlers(settings, null, null, null, null, null, null, null, null);
assertThat(restHandlers, hasItem(instanceOf(RestMlInfoAction.class)));
assertThat(restHandlers, hasItem(instanceOf(RestGetJobsAction.class)));
assertThat(restHandlers, not(hasItem(instanceOf(RestGetTrainedModelsAction.class))));
assertThat(restHandlers, not(hasItem(instanceOf(RestGetDataFrameAnalyticsAction.class))));
assertThat(restHandlers, not(hasItem(instanceOf(RestStartTrainedModelDeploymentAction.class))));
List<Object> actions = machineLearning.getActions().stream().map(h -> (Object) h.getAction()).toList();
assertThat(actions, hasItem(XPackUsageFeatureAction.MACHINE_LEARNING));
assertThat(actions, hasItem(MlInfoAction.INSTANCE));
assertThat(actions, hasItem(GetJobsAction.INSTANCE));
assertThat(actions, not(hasItem(GetTrainedModelsAction.INSTANCE)));
assertThat(actions, not(hasItem(GetDataFrameAnalyticsAction.INSTANCE)));
assertThat(actions, not(hasItem(StartTrainedModelDeploymentAction.INSTANCE)));
}
}
public void testDataFrameAnalyticsOnly() throws IOException {
Settings settings = Settings.builder().put("path.home", createTempDir()).build();
MlTestExtensionLoader loader = new MlTestExtensionLoader(new MlTestExtension(false, false, false, true, false));
try (MachineLearning machineLearning = createTrialLicensedMachineLearning(settings, loader)) {
List<RestHandler> restHandlers = machineLearning.getRestHandlers(settings, null, null, null, null, null, null, null, null);
assertThat(restHandlers, hasItem(instanceOf(RestMlInfoAction.class)));
assertThat(restHandlers, not(hasItem(instanceOf(RestGetJobsAction.class))));
assertThat(restHandlers, hasItem(instanceOf(RestGetTrainedModelsAction.class)));
assertThat(restHandlers, hasItem(instanceOf(RestGetDataFrameAnalyticsAction.class)));
assertThat(restHandlers, not(hasItem(instanceOf(RestStartTrainedModelDeploymentAction.class))));
List<Object> actions = machineLearning.getActions().stream().map(h -> (Object) h.getAction()).toList();
assertThat(actions, hasItem(XPackUsageFeatureAction.MACHINE_LEARNING));
assertThat(actions, hasItem(MlInfoAction.INSTANCE));
assertThat(actions, not(hasItem(GetJobsAction.INSTANCE)));
assertThat(actions, hasItem(GetTrainedModelsAction.INSTANCE));
assertThat(actions, hasItem(GetDataFrameAnalyticsAction.INSTANCE));
assertThat(actions, not(hasItem(StartTrainedModelDeploymentAction.INSTANCE)));
}
}
public void testNlpOnly() throws IOException {
Settings settings = Settings.builder().put("path.home", createTempDir()).build();
MlTestExtensionLoader loader = new MlTestExtensionLoader(new MlTestExtension(false, false, false, false, true));
try (MachineLearning machineLearning = createTrialLicensedMachineLearning(settings, loader)) {
List<RestHandler> restHandlers = machineLearning.getRestHandlers(settings, null, null, null, null, null, null, null, null);
assertThat(restHandlers, hasItem(instanceOf(RestMlInfoAction.class)));
assertThat(restHandlers, not(hasItem(instanceOf(RestGetJobsAction.class))));
assertThat(restHandlers, hasItem(instanceOf(RestGetTrainedModelsAction.class)));
assertThat(restHandlers, not(hasItem(instanceOf(RestGetDataFrameAnalyticsAction.class))));
assertThat(restHandlers, hasItem(instanceOf(RestStartTrainedModelDeploymentAction.class)));
List<Object> actions = machineLearning.getActions().stream().map(h -> (Object) h.getAction()).toList();
assertThat(actions, hasItem(XPackUsageFeatureAction.MACHINE_LEARNING));
assertThat(actions, hasItem(MlInfoAction.INSTANCE));
assertThat(actions, not(hasItem(GetJobsAction.INSTANCE)));
assertThat(actions, hasItem(GetTrainedModelsAction.INSTANCE));
assertThat(actions, not(hasItem(GetDataFrameAnalyticsAction.INSTANCE)));
assertThat(actions, hasItem(StartTrainedModelDeploymentAction.INSTANCE));
}
}
public static | MachineLearningTests |
java | apache__camel | dsl/camel-java-joor-dsl/src/test/resources/routes/MyRoutesWithModel.java | {
"start": 857,
"end": 1216
} | class ____ extends RouteBuilder {
@Override
public void configure() throws Exception {
rest("/say")
.get("/emp/{id}")
.outType(org.apache.camel.dsl.java.joor.support.MyUser.class)
.to("direct:getEmployee");
from("direct:getEmployee")
.to("log:getEmployee");
}
} | MyRoutesWithModel |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/InstanceOfAssertFactoriesTest.java | {
"start": 22799,
"end": 23432
} | class ____ {
private final Object actual = BigDecimal.valueOf(0.0);
@Test
void createAssert() {
// WHEN
AbstractBigDecimalAssert<?> result = BIG_DECIMAL.createAssert(actual);
// THEN
result.isEqualTo("0.0");
}
@Test
void createAssert_with_ValueProvider() {
// GIVEN
ValueProvider<?> valueProvider = mockThatDelegatesTo(type -> actual);
// WHEN
AbstractBigDecimalAssert<?> result = BIG_DECIMAL.createAssert(valueProvider);
// THEN
result.isEqualTo("0.0");
verify(valueProvider).apply(BigDecimal.class);
}
}
@Nested
| BigDecimal_Factory |
java | apache__commons-lang | src/test/java/org/apache/commons/lang3/builder/ToStringBuilderTest.java | {
"start": 3647,
"end": 3851
} | class ____ {
ReflectionTestCycleA a;
@Override
public String toString() {
return ToStringBuilder.reflectionToString(this);
}
}
static | ReflectionTestCycleB |
java | netty__netty | codec-http2/src/test/java/io/netty/handler/codec/http2/DefaultHttp2PushPromiseFrameTest.java | {
"start": 7702,
"end": 10971
} | class ____ extends Http2ChannelDuplexHandler {
private final CountDownLatch latch = new CountDownLatch(1);
private volatile ChannelHandlerContext ctx;
@Override
public void channelActive(ChannelHandlerContext ctx) throws InterruptedException {
this.ctx = ctx;
latch.countDown();
}
void write() throws InterruptedException {
latch.await();
Http2Headers http2Headers = new DefaultHttp2Headers();
http2Headers.path("/")
.authority("localhost")
.method("GET")
.scheme("https");
Http2HeadersFrame headersFrame = new DefaultHttp2HeadersFrame(http2Headers, true);
headersFrame.stream(newStream());
ctx.writeAndFlush(headersFrame);
}
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) {
if (msg instanceof Http2PushPromiseFrame) {
Http2PushPromiseFrame pushPromiseFrame = (Http2PushPromiseFrame) msg;
assertEquals("/meow", pushPromiseFrame.http2Headers().path().toString());
assertEquals("GET", pushPromiseFrame.http2Headers().method().toString());
assertEquals("https", pushPromiseFrame.http2Headers().scheme().toString());
assertEquals("localhost:5555", pushPromiseFrame.http2Headers().authority().toString());
Http2PriorityFrame priorityFrame = new DefaultHttp2PriorityFrame(pushPromiseFrame.stream().id(),
Http2CodecUtil.DEFAULT_PRIORITY_WEIGHT, true);
priorityFrame.stream(pushPromiseFrame.pushStream());
ctx.writeAndFlush(priorityFrame);
} else if (msg instanceof Http2HeadersFrame) {
Http2HeadersFrame headersFrame = (Http2HeadersFrame) msg;
if (headersFrame.stream().id() == 3) {
assertEquals("200", headersFrame.headers().status().toString());
assertEquals("false", headersFrame.headers().get("push").toString());
} else if (headersFrame.stream().id() == 2) {
assertEquals("200", headersFrame.headers().status().toString());
assertEquals("true", headersFrame.headers().get("push").toString());
} else {
ctx.writeAndFlush(new DefaultHttp2GoAwayFrame(Http2Error.REFUSED_STREAM));
}
} else if (msg instanceof Http2DataFrame) {
Http2DataFrame dataFrame = (Http2DataFrame) msg;
try {
if (dataFrame.stream().id() == 3) {
assertEquals("Meow", dataFrame.content().toString(CharsetUtil.UTF_8));
} else if (dataFrame.stream().id() == 2) {
assertEquals("Meow, I am Pushed via HTTP/2", dataFrame.content().toString(CharsetUtil.UTF_8));
} else {
ctx.writeAndFlush(new DefaultHttp2GoAwayFrame(Http2Error.REFUSED_STREAM));
}
} finally {
ReferenceCountUtil.release(dataFrame);
}
}
}
}
}
| ClientHandler |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/context/ContextConfiguration.java | {
"start": 10804,
"end": 11161
} | class ____ extends BaseTest {
* // ...
* }
* </pre>
* @since 2.5
*/
boolean inheritLocations() default true;
/**
* Whether {@linkplain #initializers context initializers} from test
* superclasses and enclosing classes should be <em>inherited</em>.
* <p>The default value is {@code true}. This means that an annotated test
* | ExtendedTest |
java | spring-projects__spring-framework | spring-aop/src/main/java/org/springframework/aop/interceptor/AbstractTraceInterceptor.java | {
"start": 4025,
"end": 4457
} | class ____ wherever possible. Default is "false".
*/
public void setHideProxyClassNames(boolean hideProxyClassNames) {
this.hideProxyClassNames = hideProxyClassNames;
}
/**
* Set whether to pass an exception to the logger, suggesting inclusion
* of its stack trace into the log. Default is "true"; set this to "false"
* in order to reduce the log output to just the trace message (which may
* include the exception | names |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ExtendsAutoValueTest.java | {
"start": 3136,
"end": 3193
} | class ____ {
@AutoValue.Builder
abstract static | AutoClass |
java | google__guava | android/guava/src/com/google/common/primitives/Ints.java | {
"start": 17141,
"end": 26033
} | enum ____ implements Comparator<int[]> {
INSTANCE;
@Override
// A call to bare "min" or "max" would resolve to our varargs method, not to any static import.
@SuppressWarnings("StaticImportPreferred")
public int compare(int[] left, int[] right) {
int minLength = Math.min(left.length, right.length);
for (int i = 0; i < minLength; i++) {
int result = Integer.compare(left[i], right[i]);
if (result != 0) {
return result;
}
}
return left.length - right.length;
}
@Override
public String toString() {
return "Ints.lexicographicalComparator()";
}
}
/**
* Sorts the elements of {@code array} in descending order.
*
* @since 23.1
*/
public static void sortDescending(int[] array) {
checkNotNull(array);
sortDescending(array, 0, array.length);
}
/**
* Sorts the elements of {@code array} between {@code fromIndex} inclusive and {@code toIndex}
* exclusive in descending order.
*
* @since 23.1
*/
public static void sortDescending(int[] array, int fromIndex, int toIndex) {
checkNotNull(array);
checkPositionIndexes(fromIndex, toIndex, array.length);
Arrays.sort(array, fromIndex, toIndex);
reverse(array, fromIndex, toIndex);
}
/**
* Reverses the elements of {@code array}. This is equivalent to {@code
* Collections.reverse(Ints.asList(array))}, but is likely to be more efficient.
*
* @since 23.1
*/
public static void reverse(int[] array) {
checkNotNull(array);
reverse(array, 0, array.length);
}
/**
* Reverses the elements of {@code array} between {@code fromIndex} inclusive and {@code toIndex}
* exclusive. This is equivalent to {@code
* Collections.reverse(Ints.asList(array).subList(fromIndex, toIndex))}, but is likely to be more
* efficient.
*
* @throws IndexOutOfBoundsException if {@code fromIndex < 0}, {@code toIndex > array.length}, or
* {@code toIndex > fromIndex}
* @since 23.1
*/
public static void reverse(int[] array, int fromIndex, int toIndex) {
checkNotNull(array);
checkPositionIndexes(fromIndex, toIndex, array.length);
for (int i = fromIndex, j = toIndex - 1; i < j; i++, j--) {
int tmp = array[i];
array[i] = array[j];
array[j] = tmp;
}
}
/**
* Performs a right rotation of {@code array} of "distance" places, so that the first element is
* moved to index "distance", and the element at index {@code i} ends up at index {@code (distance
* + i) mod array.length}. This is equivalent to {@code Collections.rotate(Ints.asList(array),
* distance)}, but is considerably faster and avoids allocation and garbage collection.
*
* <p>The provided "distance" may be negative, which will rotate left.
*
* @since 32.0.0
*/
public static void rotate(int[] array, int distance) {
rotate(array, distance, 0, array.length);
}
/**
* Performs a right rotation of {@code array} between {@code fromIndex} inclusive and {@code
* toIndex} exclusive. This is equivalent to {@code
* Collections.rotate(Ints.asList(array).subList(fromIndex, toIndex), distance)}, but is
* considerably faster and avoids allocations and garbage collection.
*
* <p>The provided "distance" may be negative, which will rotate left.
*
* @throws IndexOutOfBoundsException if {@code fromIndex < 0}, {@code toIndex > array.length}, or
* {@code toIndex > fromIndex}
* @since 32.0.0
*/
public static void rotate(int[] array, int distance, int fromIndex, int toIndex) {
// There are several well-known algorithms for rotating part of an array (or, equivalently,
// exchanging two blocks of memory). This classic text by Gries and Mills mentions several:
// https://ecommons.cornell.edu/bitstream/handle/1813/6292/81-452.pdf.
// (1) "Reversal", the one we have here.
// (2) "Dolphin". If we're rotating an array a of size n by a distance of d, then element a[0]
// ends up at a[d], which in turn ends up at a[2d], and so on until we get back to a[0].
// (All indices taken mod n.) If d and n are mutually prime, all elements will have been
// moved at that point. Otherwise, we can rotate the cycle a[1], a[1 + d], a[1 + 2d], etc,
// then a[2] etc, and so on until we have rotated all elements. There are gcd(d, n) cycles
// in all.
// (3) "Successive". We can consider that we are exchanging a block of size d (a[0..d-1]) with a
// block of size n-d (a[d..n-1]), where in general these blocks have different sizes. If we
// imagine a line separating the first block from the second, we can proceed by exchanging
// the smaller of these blocks with the far end of the other one. That leaves us with a
// smaller version of the same problem.
// Say we are rotating abcdefgh by 5. We start with abcde|fgh. The smaller block is [fgh]:
// [abc]de|[fgh] -> [fgh]de|[abc]. Now [fgh] is in the right place, but we need to swap [de]
// with [abc]: fgh[de]|a[bc] -> fgh[bc]|a[de]. Now we need to swap [a] with [bc]:
// fgh[b]c|[a]de -> fgh[a]c|[b]de. Finally we need to swap [c] with [b]:
// fgha[c]|[b]de -> fgha[b]|[c]de. Because these two blocks are the same size, we are done.
// The Dolphin algorithm is attractive because it does the fewest array reads and writes: each
// array slot is read and written exactly once. However, it can have very poor memory locality:
// benchmarking shows it can take 7 times longer than the other two in some cases. The other two
// do n swaps, minus a delta (0 or 2 for Reversal, gcd(d, n) for Successive), so that's about
// twice as many reads and writes. But benchmarking shows that they usually perform better than
// Dolphin. Reversal is about as good as Successive on average, and it is much simpler,
// especially since we already have a `reverse` method.
checkNotNull(array);
checkPositionIndexes(fromIndex, toIndex, array.length);
if (array.length <= 1) {
return;
}
int length = toIndex - fromIndex;
// Obtain m = (-distance mod length), a non-negative value less than "length". This is how many
// places left to rotate.
int m = -distance % length;
m = (m < 0) ? m + length : m;
// The current index of what will become the first element of the rotated section.
int newFirstIndex = m + fromIndex;
if (newFirstIndex == fromIndex) {
return;
}
reverse(array, fromIndex, newFirstIndex);
reverse(array, newFirstIndex, toIndex);
reverse(array, fromIndex, toIndex);
}
/**
* Returns an array containing each value of {@code collection}, converted to a {@code int} value
* in the manner of {@link Number#intValue}.
*
* <p>Elements are copied from the argument collection as if by {@code collection.toArray()}.
* Calling this method is as thread-safe as calling that method.
*
* @param collection a collection of {@code Number} instances
* @return an array containing the same values as {@code collection}, in the same order, converted
* to primitives
* @throws NullPointerException if {@code collection} or any of its elements is null
* @since 1.0 (parameter was {@code Collection<Integer>} before 12.0)
*/
public static int[] toArray(Collection<? extends Number> collection) {
if (collection instanceof IntArrayAsList) {
return ((IntArrayAsList) collection).toIntArray();
}
Object[] boxedArray = collection.toArray();
int len = boxedArray.length;
int[] array = new int[len];
for (int i = 0; i < len; i++) {
// checkNotNull for GWT (do not optimize)
array[i] = ((Number) checkNotNull(boxedArray[i])).intValue();
}
return array;
}
/**
* Returns a fixed-size list backed by the specified array, similar to {@link
* Arrays#asList(Object[])}. The list supports {@link List#set(int, Object)}, but any attempt to
* set a value to {@code null} will result in a {@link NullPointerException}.
*
* <p>The returned list maintains the values, but not the identities, of {@code Integer} objects
* written to or read from it. For example, whether {@code list.get(0) == list.get(0)} is true for
* the returned list is unspecified.
*
* <p>The returned list is serializable.
*
* <p><b>Note:</b> when possible, you should represent your data as an {@link ImmutableIntArray}
* instead, which has an {@link ImmutableIntArray#asList asList} view.
*
* @param backingArray the array to back the list
* @return a list view of the array
*/
public static List<Integer> asList(int... backingArray) {
if (backingArray.length == 0) {
return Collections.emptyList();
}
return new IntArrayAsList(backingArray);
}
private static final | LexicographicalComparator |
java | spring-projects__spring-boot | smoke-test/spring-boot-smoke-test-session-jdbc/src/test/java/smoketest/session/SampleSessionJdbcApplicationTests.java | {
"start": 2287,
"end": 5351
} | class ____ {
private static final HttpClientSettings DONT_FOLLOW_REDIRECTS = HttpClientSettings.defaults()
.withRedirects(HttpRedirects.DONT_FOLLOW);
@Autowired
private RestTemplateBuilder restTemplateBuilder;
@Autowired
private TestRestTemplate restTemplate;
@LocalServerPort
@SuppressWarnings("NullAway.Init")
private String port;
private static final URI ROOT_URI = URI.create("/");
@Test
void sessionExpiry() throws Exception {
String cookie = performLogin();
String sessionId1 = performRequest(ROOT_URI, cookie).getBody();
String sessionId2 = performRequest(ROOT_URI, cookie).getBody();
assertThat(sessionId1).isEqualTo(sessionId2);
Thread.sleep(2100);
String loginPage = performRequest(ROOT_URI, cookie).getBody();
assertThat(loginPage).containsIgnoringCase("login");
}
private @Nullable String performLogin() {
RestTemplate restTemplate = this.restTemplateBuilder.clientSettings(DONT_FOLLOW_REDIRECTS).build();
HttpHeaders headers = new HttpHeaders();
headers.setAccept(Collections.singletonList(MediaType.TEXT_HTML));
headers.setContentType(MediaType.APPLICATION_FORM_URLENCODED);
MultiValueMap<String, String> form = new LinkedMultiValueMap<>();
form.set("username", "user");
form.set("password", "password");
ResponseEntity<String> entity = restTemplate.exchange("http://localhost:" + this.port + "/login",
HttpMethod.POST, new HttpEntity<>(form, headers), String.class);
return entity.getHeaders().getFirst("Set-Cookie");
}
@Test
@SuppressWarnings("unchecked")
void sessionsEndpointShouldReturnUserSession() {
performLogin();
ResponseEntity<Map<String, Object>> response = getSessions();
assertThat(response).isNotNull();
assertThat(response.getStatusCode()).isEqualTo(HttpStatus.OK);
Map<String, Object> body = response.getBody();
assertThat(body).isNotNull();
List<Map<String, Object>> sessions = (List<Map<String, Object>>) body.get("sessions");
assertThat(sessions).hasSize(1);
}
private ResponseEntity<String> performRequest(URI uri, @Nullable String cookie) {
HttpHeaders headers = getHeaders(cookie);
RequestEntity<Object> request = new RequestEntity<>(headers, HttpMethod.GET, uri);
return this.restTemplate.exchange(request, String.class);
}
private HttpHeaders getHeaders(@Nullable String cookie) {
HttpHeaders headers = new HttpHeaders();
if (cookie != null) {
headers.set("Cookie", cookie);
}
else {
headers.set("Authorization", getBasicAuth());
}
return headers;
}
private String getBasicAuth() {
return "Basic " + Base64.getEncoder().encodeToString("user:password".getBytes());
}
private ResponseEntity<Map<String, Object>> getSessions() {
HttpHeaders headers = getHeaders(null);
RequestEntity<Object> request = new RequestEntity<>(headers, HttpMethod.GET,
URI.create("/actuator/sessions?username=user"));
ParameterizedTypeReference<Map<String, Object>> stringObjectMap = new ParameterizedTypeReference<>() {
};
return this.restTemplate.exchange(request, stringObjectMap);
}
}
| SampleSessionJdbcApplicationTests |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/DiscriminatorOptionsNotNullSingleTableTest.java | {
"start": 4132,
"end": 4456
} | class ____ extends Account {
private BigDecimal overdraftFee;
public BigDecimal getOverdraftFee() {
return overdraftFee;
}
public void setOverdraftFee(BigDecimal overdraftFee) {
this.overdraftFee = overdraftFee;
}
}
@Entity(name = "CreditAccount")
@DiscriminatorValue("Credit")
public static | DebitAccount |
java | quarkusio__quarkus | integration-tests/hibernate-search-orm-elasticsearch/src/main/java/io/quarkus/it/hibernate/search/orm/elasticsearch/propertyaccess/TransientMethodAccessEntity.java | {
"start": 625,
"end": 1729
} | class ____ {
private Long id;
private String property1;
private String property2;
public TransientMethodAccessEntity() {
}
public TransientMethodAccessEntity(Long id, String property1, String property2) {
this.id = id;
this.property1 = property1;
this.property2 = property2;
}
@Id
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
public String getProperty1() {
return property1;
}
public void setProperty1(String property1) {
this.property1 = property1;
}
public String getProperty2() {
return property2;
}
public void setProperty2(String property2) {
this.property2 = property2;
}
@Transient
@FullTextField
@IndexingDependency(derivedFrom = {
@ObjectPath(@PropertyValue(propertyName = "property1")),
@ObjectPath(@PropertyValue(propertyName = "property2"))
})
public String getProperty() {
return property1 + " " + property2;
}
}
| TransientMethodAccessEntity |
java | quarkusio__quarkus | extensions/redis-client/deployment/src/test/java/io/quarkus/redis/deployment/client/datasource/QuarkusObjectMapperTest.java | {
"start": 1359,
"end": 2689
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest unitTest = new QuarkusUnitTest()
.setArchiveProducer(
() -> ShrinkWrap.create(JavaArchive.class).addClass(CustomCodecTest.Jedi.class).addClass(
CustomCodecTest.Sith.class)
.addClass(CustomCodecTest.CustomJediCodec.class).addClass(CustomCodecTest.CustomSithCodec.class))
.overrideConfigKey("quarkus.redis.hosts", "${quarkus.redis.tr}");
@Inject
RedisDataSource ds;
@Test
public void test() {
String key = UUID.randomUUID().toString();
HashCommands<String, String, List<Person>> h = ds.hash(new TypeReference<>() {
});
h.hset(key, "test", List.of(new Person("foo", 100)));
String stringRetrieved = ds.hash(String.class).hget(key, "test");
assertThat(stringRetrieved).isEqualTo("[{\"nAmE\":\"foo\",\"aGe\":100}]");
List<Person> peopleRetrieved = h.hget(key, "test");
assertThat(peopleRetrieved).singleElement().satisfies(p -> {
assertThat(p.getName()).isEqualTo("foo");
assertThat(p.getAge()).isEqualTo(100);
});
}
// without a custom module, this could not be deserialized as there are 2 constructors
public static | QuarkusObjectMapperTest |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/index/mapper/blockloader/DateFieldBlockLoaderTests.java | {
"start": 915,
"end": 3298
} | class ____ extends BlockLoaderTestCase {
public DateFieldBlockLoaderTests(Params params) {
super(FieldType.DATE.toString(), params);
}
@Override
@SuppressWarnings("unchecked")
protected Object expected(Map<String, Object> fieldMapping, Object value, TestContext testContext) {
var format = (String) fieldMapping.get("format");
var nullValue = fieldMapping.get("null_value") != null ? format(fieldMapping.get("null_value"), format) : null;
if (value instanceof List<?> == false) {
return convert(value, nullValue, format);
}
if ((boolean) fieldMapping.getOrDefault("doc_values", false)) {
// Sorted
var resultList = ((List<Object>) value).stream()
.map(v -> convert(v, nullValue, format))
.filter(Objects::nonNull)
.sorted()
.toList();
return maybeFoldList(resultList);
}
// parsing from source, not sorted
var resultList = ((List<Object>) value).stream().map(v -> convert(v, nullValue, format)).filter(Objects::nonNull).toList();
return maybeFoldList(resultList);
}
private Long convert(Object value, Long nullValue, String format) {
if (value == null) {
return nullValue;
}
return format(value, format);
}
private Long format(Object value, String format) {
if (format == null) {
return switch (value) {
case Integer i -> i.longValue();
case Long l -> l;
case String s -> {
try {
yield Instant.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(s)).toEpochMilli();
} catch (Exception e) {
// malformed
yield null;
}
}
case null -> null;
default -> throw new IllegalStateException("Unexpected value: " + value);
};
}
try {
return Instant.from(
DateTimeFormatter.ofPattern(format, Locale.ROOT).withZone(ZoneId.from(ZoneOffset.UTC)).parse((String) value)
).toEpochMilli();
} catch (Exception e) {
// malformed
return null;
}
}
}
| DateFieldBlockLoaderTests |
java | quarkusio__quarkus | extensions/hibernate-envers/deployment/src/test/java/io/quarkus/hibernate/orm/envers/MyRevisionListener.java | {
"start": 96,
"end": 315
} | class ____ implements RevisionListener {
@Override
public void newRevision(Object revisionEntity) {
MyRevisionEntity.class.cast(revisionEntity).setListenerValue(this.toString());
}
}
| MyRevisionListener |
java | spring-cloud__spring-cloud-gateway | spring-cloud-gateway-server-webmvc/src/main/java/org/springframework/cloud/gateway/server/mvc/filter/RemoveHttp2StatusResponseHeadersFilter.java | {
"start": 997,
"end": 1444
} | class ____ implements HttpHeadersFilter.ResponseHttpHeadersFilter, Ordered {
@Override
public int getOrder() {
return 1000;
}
@Override
public HttpHeaders apply(HttpHeaders input, ServerResponse serverResponse) {
if (input.containsHeader(":status")) {
HttpHeaders filtered = new HttpHeaders();
filtered.addAll(input);
filtered.remove(":status");
return filtered;
}
return input;
}
}
| RemoveHttp2StatusResponseHeadersFilter |
java | grpc__grpc-java | core/src/main/java/io/grpc/internal/MigratingThreadDeframer.java | {
"start": 8092,
"end": 8325
} | class ____ implements Op {
@Override public void run(boolean isDeframerOnTransportThread) {
deframer.close();
}
}
if (!runWhereAppropriate(new CloseOp())) {
deframer.stopDelivery();
}
}
| CloseOp |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SetProfileEnabledAction.java | {
"start": 438,
"end": 761
} | class ____ extends ActionType<AcknowledgedResponse> {
public static final String NAME = "cluster:admin/xpack/security/profile/set_enabled";
public static final SetProfileEnabledAction INSTANCE = new SetProfileEnabledAction();
public SetProfileEnabledAction() {
super(NAME);
}
}
| SetProfileEnabledAction |
java | apache__rocketmq | tieredstore/src/main/java/org/apache/rocketmq/tieredstore/metadata/MetadataStore.java | {
"start": 1349,
"end": 2867
} | interface ____ {
/**
* Get the metadata information of specified Topic.
*
* @param topic The name of Topic.
* @return The metadata information of specified Topic, or null if it does not exist.
*/
TopicMetadata getTopic(String topic);
/**
* Add a new metadata information of Topic.
*
* @param topic The name of Topic.
* @param reserveTime The reserve time.
* @return The newly added metadata information of Topic.
*/
TopicMetadata addTopic(String topic, long reserveTime);
void updateTopic(TopicMetadata topicMetadata);
void iterateTopic(Consumer<TopicMetadata> callback);
void deleteTopic(String topic);
QueueMetadata getQueue(MessageQueue mq);
QueueMetadata addQueue(MessageQueue mq, long baseOffset);
void updateQueue(QueueMetadata queueMetadata);
void iterateQueue(String topic, Consumer<QueueMetadata> callback);
void deleteQueue(MessageQueue mq);
FileSegmentMetadata getFileSegment(String basePath, FileSegmentType fileType, long baseOffset);
void updateFileSegment(FileSegmentMetadata fileSegmentMetadata);
void iterateFileSegment(Consumer<FileSegmentMetadata> callback);
void iterateFileSegment(String basePath, FileSegmentType fileType, Consumer<FileSegmentMetadata> callback);
void deleteFileSegment(String basePath, FileSegmentType fileType);
void deleteFileSegment(String basePath, FileSegmentType fileType, long baseOffset);
void destroy();
}
| MetadataStore |
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/factories/TestValuesRuntimeFunctions.java | {
"start": 20795,
"end": 20911
} | class ____ use a global map to store upsert values. Just like other external
* databases.
*/
static | should |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/statistics/ITestS3AFileSystemStatistic.java | {
"start": 1319,
"end": 2774
} | class ____ extends AbstractS3ATestBase {
private static final int ONE_KB = 1024;
private static final int TWO_KB = 2 * ONE_KB;
/**
* Verify the fs statistic bytesRead after reading from 2 different
* InputStreams for the same filesystem instance.
*/
@Test
public void testBytesReadWithStream() throws IOException {
S3AFileSystem fs = getFileSystem();
Path filePath = path(getMethodName());
byte[] oneKbBuf = new byte[ONE_KB];
// Writing 1KB in a file.
try (FSDataOutputStream out = fs.create(filePath)) {
out.write(oneKbBuf);
// Verify if correct number of bytes were written.
IOStatisticAssertions.assertThatStatisticCounter(out.getIOStatistics(),
StreamStatisticNames.STREAM_WRITE_BYTES)
.describedAs("Bytes written by OutputStream "
+ "should match the actual bytes")
.isEqualTo(ONE_KB);
}
// Reading 1KB from first InputStream.
try (FSDataInputStream in = fs.open(filePath, ONE_KB)) {
in.readFully(0, oneKbBuf);
}
// Reading 1KB from second InputStream.
try (FSDataInputStream in2 = fs.open(filePath, ONE_KB)) {
in2.readFully(0, oneKbBuf);
}
FileSystem.Statistics fsStats = fs.getFsStatistics();
// Verifying that total bytes read by FS is equal to 2KB.
assertEquals(TWO_KB, fsStats.getBytesRead(),
"Mismatch in number of FS bytes read by InputStreams");
}
}
| ITestS3AFileSystemStatistic |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.